hexsha
string | size
int64 | ext
string | lang
string | max_stars_repo_path
string | max_stars_repo_name
string | max_stars_repo_head_hexsha
string | max_stars_repo_licenses
list | max_stars_count
int64 | max_stars_repo_stars_event_min_datetime
string | max_stars_repo_stars_event_max_datetime
string | max_issues_repo_path
string | max_issues_repo_name
string | max_issues_repo_head_hexsha
string | max_issues_repo_licenses
list | max_issues_count
int64 | max_issues_repo_issues_event_min_datetime
string | max_issues_repo_issues_event_max_datetime
string | max_forks_repo_path
string | max_forks_repo_name
string | max_forks_repo_head_hexsha
string | max_forks_repo_licenses
list | max_forks_count
int64 | max_forks_repo_forks_event_min_datetime
string | max_forks_repo_forks_event_max_datetime
string | content
string | avg_line_length
float64 | max_line_length
int64 | alphanum_fraction
float64 | qsc_code_num_words_quality_signal
int64 | qsc_code_num_chars_quality_signal
float64 | qsc_code_mean_word_length_quality_signal
float64 | qsc_code_frac_words_unique_quality_signal
float64 | qsc_code_frac_chars_top_2grams_quality_signal
float64 | qsc_code_frac_chars_top_3grams_quality_signal
float64 | qsc_code_frac_chars_top_4grams_quality_signal
float64 | qsc_code_frac_chars_dupe_5grams_quality_signal
float64 | qsc_code_frac_chars_dupe_6grams_quality_signal
float64 | qsc_code_frac_chars_dupe_7grams_quality_signal
float64 | qsc_code_frac_chars_dupe_8grams_quality_signal
float64 | qsc_code_frac_chars_dupe_9grams_quality_signal
float64 | qsc_code_frac_chars_dupe_10grams_quality_signal
float64 | qsc_code_frac_chars_replacement_symbols_quality_signal
float64 | qsc_code_frac_chars_digital_quality_signal
float64 | qsc_code_frac_chars_whitespace_quality_signal
float64 | qsc_code_size_file_byte_quality_signal
float64 | qsc_code_num_lines_quality_signal
float64 | qsc_code_num_chars_line_max_quality_signal
float64 | qsc_code_num_chars_line_mean_quality_signal
float64 | qsc_code_frac_chars_alphabet_quality_signal
float64 | qsc_code_frac_chars_comments_quality_signal
float64 | qsc_code_cate_xml_start_quality_signal
float64 | qsc_code_frac_lines_dupe_lines_quality_signal
float64 | qsc_code_cate_autogen_quality_signal
float64 | qsc_code_frac_lines_long_string_quality_signal
float64 | qsc_code_frac_chars_string_length_quality_signal
float64 | qsc_code_frac_chars_long_word_length_quality_signal
float64 | qsc_code_frac_lines_string_concat_quality_signal
float64 | qsc_code_cate_encoded_data_quality_signal
float64 | qsc_code_frac_chars_hex_words_quality_signal
float64 | qsc_code_frac_lines_prompt_comments_quality_signal
float64 | qsc_code_frac_lines_assert_quality_signal
float64 | qsc_codepython_cate_ast_quality_signal
float64 | qsc_codepython_frac_lines_func_ratio_quality_signal
float64 | qsc_codepython_cate_var_zero_quality_signal
bool | qsc_codepython_frac_lines_pass_quality_signal
float64 | qsc_codepython_frac_lines_import_quality_signal
float64 | qsc_codepython_frac_lines_simplefunc_quality_signal
float64 | qsc_codepython_score_lines_no_logic_quality_signal
float64 | qsc_codepython_frac_lines_print_quality_signal
float64 | qsc_code_num_words
int64 | qsc_code_num_chars
int64 | qsc_code_mean_word_length
int64 | qsc_code_frac_words_unique
null | qsc_code_frac_chars_top_2grams
int64 | qsc_code_frac_chars_top_3grams
int64 | qsc_code_frac_chars_top_4grams
int64 | qsc_code_frac_chars_dupe_5grams
int64 | qsc_code_frac_chars_dupe_6grams
int64 | qsc_code_frac_chars_dupe_7grams
int64 | qsc_code_frac_chars_dupe_8grams
int64 | qsc_code_frac_chars_dupe_9grams
int64 | qsc_code_frac_chars_dupe_10grams
int64 | qsc_code_frac_chars_replacement_symbols
int64 | qsc_code_frac_chars_digital
int64 | qsc_code_frac_chars_whitespace
int64 | qsc_code_size_file_byte
int64 | qsc_code_num_lines
int64 | qsc_code_num_chars_line_max
int64 | qsc_code_num_chars_line_mean
int64 | qsc_code_frac_chars_alphabet
int64 | qsc_code_frac_chars_comments
int64 | qsc_code_cate_xml_start
int64 | qsc_code_frac_lines_dupe_lines
int64 | qsc_code_cate_autogen
int64 | qsc_code_frac_lines_long_string
int64 | qsc_code_frac_chars_string_length
int64 | qsc_code_frac_chars_long_word_length
int64 | qsc_code_frac_lines_string_concat
null | qsc_code_cate_encoded_data
int64 | qsc_code_frac_chars_hex_words
int64 | qsc_code_frac_lines_prompt_comments
int64 | qsc_code_frac_lines_assert
int64 | qsc_codepython_cate_ast
int64 | qsc_codepython_frac_lines_func_ratio
int64 | qsc_codepython_cate_var_zero
int64 | qsc_codepython_frac_lines_pass
int64 | qsc_codepython_frac_lines_import
int64 | qsc_codepython_frac_lines_simplefunc
int64 | qsc_codepython_score_lines_no_logic
int64 | qsc_codepython_frac_lines_print
int64 | effective
string | hits
int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
06f7ba5494f36a557a0930eb72784b81e4b82c52
| 3,163
|
py
|
Python
|
KDAT/src/system/GetHardwareInfo.py
|
GavinGL/workspace
|
e8080656a235eee7419994b3be24396945ba2889
|
[
"MIT"
] | null | null | null |
KDAT/src/system/GetHardwareInfo.py
|
GavinGL/workspace
|
e8080656a235eee7419994b3be24396945ba2889
|
[
"MIT"
] | null | null | null |
KDAT/src/system/GetHardwareInfo.py
|
GavinGL/workspace
|
e8080656a235eee7419994b3be24396945ba2889
|
[
"MIT"
] | null | null | null |
# -*- coding: UTF-8 -*-
import os,ConfigParser
from modules.myConnect import myFtp
def SaveHardInfo(link,homedir):
    """Dump hardware information on the remote host into /ramdisk log files.

    Runs a ``cat`` for cpuinfo, meminfo, mtd and partitions over *link*
    (an SSH-like object exposing ``exec_command``).  After every command the
    'currentstep' counter in ./env.conf is incremented so callers can track
    progress.  Remote command failures are deliberately ignored (best effort,
    as in the original).  *homedir* is unused here but kept for interface
    compatibility with GetHardInfo/HardGet.
    """
    def _run_remote(execmd):
        # Best-effort execution of one remote command; output is not read.
        try:
            link.exec_command(execmd)
        except Exception:
            pass

    def _bump_step():
        # Increment myconfig/currentstep in env.conf.  The file handle is
        # closed explicitly; the original leaked it via an anonymous open().
        cp_link = ConfigParser.SafeConfigParser()
        conf_path = os.getcwd() + '/env.conf'
        cp_link.read(conf_path)
        cp_link.set('myconfig', 'currentstep', str(cp_link.getint('myconfig', 'currentstep') + 1))
        conf_file = open(conf_path, 'w')
        try:
            cp_link.write(conf_file)
        finally:
            conf_file.close()

    # One remote dump per hardware source, each followed by a progress bump.
    for execmd in (
        "cat /proc/cpuinfo > /ramdisk/system_cpuinfo.log",       # cpuinfo
        "cat /proc/meminfo > /ramdisk/system_meminfo.log",       # meminfo
        "cat /proc/mtd > /ramdisk/system_mtd.log",               # mtd info
        "cat /proc/partitions > /ramdisk/system_partitions.log", # partition info
    ):
        _run_remote(execmd)
        _bump_step()
def HardGet(homedir):
    """Fetch the hardware log files produced by SaveHardInfo via FTP.

    Downloads each system_*.log into *homedir* (delegated to myFtp), bumping
    the 'currentstep' counter in ./env.conf after every transfer.  FTP
    failures are deliberately ignored (best effort, as in the original).
    The redundant SafeConfigParser the original created before the first
    transfer was never used and has been removed.
    """
    def _bump_step():
        # Increment myconfig/currentstep in env.conf; close the handle
        # explicitly (the original leaked it via an anonymous open()).
        cp_link = ConfigParser.SafeConfigParser()
        conf_path = os.getcwd() + '/env.conf'
        cp_link.read(conf_path)
        cp_link.set('myconfig', 'currentstep', str(cp_link.getint('myconfig', 'currentstep') + 1))
        conf_file = open(conf_path, 'w')
        try:
            cp_link.write(conf_file)
        finally:
            conf_file.close()

    # Fetch (and remotely remove, per myFtp "get" semantics — TODO confirm)
    # each log file, advancing the progress counter after every attempt.
    for logname in (
        "system_cpuinfo.log",
        "system_meminfo.log",
        "system_mtd.log",
        "system_partitions.log",
    ):
        try:
            myFtp("get", "system", homedir, logname)
        except Exception:
            pass
        _bump_step()
def GetHardInfo(link,homedir):
    # Entry point: dump hardware info on the remote side over *link*, then
    # fetch the resulting log files into *homedir* via FTP.
    SaveHardInfo(link,homedir)
    HardGet(homedir)
| 34.010753
| 87
| 0.695226
| 420
| 3,163
| 5.107143
| 0.135714
| 0.117483
| 0.087179
| 0.118881
| 0.80979
| 0.80979
| 0.795804
| 0.795804
| 0.795804
| 0.795804
| 0
| 0.003211
| 0.113816
| 3,163
| 93
| 88
| 34.010753
| 0.762041
| 0.010117
| 0
| 0.805195
| 0
| 0
| 0.255477
| 0.043141
| 0
| 0
| 0
| 0
| 0
| 1
| 0.038961
| false
| 0.103896
| 0.025974
| 0
| 0.064935
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 8
|
06f7c40daced55b6db668774ad9eac5044756b26
| 24,522
|
py
|
Python
|
rpctests.py
|
jverhoeven/pyzmqrpc
|
7ac8f07ebf8b494016dfb829ddf62a78c8c6756c
|
[
"MIT"
] | 5
|
2015-02-05T12:21:07.000Z
|
2019-04-26T03:12:18.000Z
|
rpctests.py
|
jverhoeven/pyzmqrpc
|
7ac8f07ebf8b494016dfb829ddf62a78c8c6756c
|
[
"MIT"
] | null | null | null |
rpctests.py
|
jverhoeven/pyzmqrpc
|
7ac8f07ebf8b494016dfb829ddf62a78c8c6756c
|
[
"MIT"
] | 4
|
2015-12-25T04:20:29.000Z
|
2020-10-22T17:00:08.000Z
|
'''
Created on Mar 31, 2014
@author: Jan Verhoeven
@copyright: MIT license, see http://opensource.org/licenses/MIT
'''
from __future__ import print_function
import time
import logging
import unittest
from zmqrpc.ZmqProxy import ZmqProxyRep2PubThread, ZmqProxySub2ReqThread, ZmqProxyRep2ReqThread, ZmqProxySub2PubThread, ZmqBufferedProxyRep2ReqThread
from zmqrpc.ZmqReceiver import ZmqReceiverThread
from zmqrpc.ZmqSender import ZmqSender
from zmqrpc.ZmqRpcServer import ZmqRpcServerThread
from zmqrpc.ZmqRpcClient import ZmqRpcClient
# Package logger used by the zmqrpc library; DEBUG makes the socket/proxy
# tests verbose enough to diagnose timing-sensitive failures.
logger = logging.getLogger('zmqrpc')
logger.setLevel(logging.DEBUG)
# Module-global record of the last RPC invocation.  The invoked functions run
# inside server threads, so test methods observe their side effects here.
class TestState(object):
    """Holds the most recent param1 passed to invoke_test."""

    def __init__(self):
        # No invocation observed yet.
        self.last_invoked_param1 = None


test_state = TestState()
def invoke_test(param1, param2):
    """RPC target: record *param1* on the global test_state and return
    the two parameters joined as "param1:param2" so tests can verify both
    delivery (via test_state) and the response round-trip."""
    test_state.last_invoked_param1 = param1
    return "{0}:{1}".format(param1, param2)
def invoke_test_that_throws_exception(param1, param2):
    """RPC target that always fails; exercises server-side error propagation."""
    # Both parameters are intentionally unused.
    del param1, param2
    raise Exception("Something went wrong")
class TestZmqPackage(unittest.TestCase):
    """End-to-end tests for zmqrpc senders/receivers, RPC client/server and
    the proxy/buffered-proxy threads over REQ/REP and PUB/SUB sockets.

    Fixes applied: the deprecated ``assertEquals`` alias (removed in Python
    3.12) is replaced by ``assertEqual`` throughout, and the two bare
    ``except:`` clauses are narrowed to ``except Exception:`` so
    KeyboardInterrupt/SystemExit are no longer swallowed.
    """
    def test_01_req_rep_sockets(self):
        # Basic send/receive over REQ/REP sockets
        print("Test if sending works over REQ/REP socket, includes a username/password")
        sender = ZmqSender(zmq_req_endpoints=["tcp://localhost:47000"], username="username", password="password")
        receiver_thread = ZmqReceiverThread(zmq_rep_bind_address="tcp://*:47000", username="username", password="password")
        receiver_thread.start()
        sender.send("test", time_out_waiting_for_response_in_sec=3)
        self.assertEqual(receiver_thread.last_received_message(), 'test')
        print("Test if sending wrong password over REP/REQ connection results in error (actually timeout)")
        sender = ZmqSender(zmq_req_endpoints=["tcp://localhost:47000"], username="username", password="wrongpassword")
        try:
            sender.send("test", time_out_waiting_for_response_in_sec=3)
            print("Error. Did get answer from remote system which was not expected")
        except Exception:
            # Could not send message, which is ok in this case
            print("Success.")
        receiver_thread.stop()
        receiver_thread.join()
        sender.destroy()
        # Cleaning up sockets takes some time
        time.sleep(1)
    @unittest.skip("do not know why this does not work. Something with inproc address")
    def test_02_req_rep_sockets_over_inproc(self):
        # Basic send/receive over REQ/REP sockets
        print("Test if sending works over REQ/REP socket using inproc, includes a username/password")
        sender = ZmqSender(zmq_req_endpoints=["inproc://test"], username="username", password="password")
        receiver_thread = ZmqReceiverThread(zmq_rep_bind_address="inproc://test", username="username", password="password")
        receiver_thread.start()
        sender.send("test", time_out_waiting_for_response_in_sec=3)
        self.assertEqual(receiver_thread.last_received_message(), 'test')
        print("Test if sending wrong password over REP/REQ connection results in error (actually timeout)")
        sender = ZmqSender(zmq_req_endpoints=["inproc://test"], username="username", password="wrongpassword")
        try:
            sender.send("test", time_out_waiting_for_response_in_sec=3)
            print("Error. Did get answer from remote system which was not expected")
        except Exception:
            # Could not send message, which is ok in this case
            print("Success.")
        receiver_thread.stop()
        receiver_thread.join()
        sender.destroy()
        # Cleaning up sockets takes some time
        time.sleep(1)
    def test_03_pub_sub_without_passwords(self):
        # Basic send/receive over PUB/SUB sockets
        print("Test if sending works over PUB/SUB sockets without passwords")
        sender = ZmqSender(zmq_pub_endpoint="tcp://*:47001")
        receiver_thread = ZmqReceiverThread(zmq_sub_connect_addresses=["tcp://localhost:47001"])
        receiver_thread.start()
        # Take 0.5 second for sockets to connect to prevent 'slow joiner' problem
        time.sleep(0.5)
        sender.send("test")
        # Sleep for pub/sub not guaranteed to be done on completing send_pub_socket
        time.sleep(0.1)
        self.assertEqual(receiver_thread.last_received_message(), 'test')
        receiver_thread.stop()
        receiver_thread.join()
        sender.destroy()
        # Cleaning up sockets takes some time
        time.sleep(1)
    @unittest.skip("do not know why this does not work. Something with inproc address")
    def test_04_pub_sub_without_passwords_over_inproc(self):
        # Basic send/receive over PUB/SUB sockets
        print("Test if sending works over PUB/SUB sockets without passwords")
        sender = ZmqSender(zmq_pub_endpoint="inproc://my_test")
        receiver_thread = ZmqReceiverThread(zmq_sub_connect_addresses=["inproc://my_test"])
        receiver_thread.start()
        # Take 0.5 second for sockets to connect to prevent 'slow joiner' problem
        time.sleep(0.5)
        sender.send("test")
        # Sleep for pub/sub not guaranteed to be done on completing send_pub_socket
        time.sleep(0.1)
        self.assertEqual(receiver_thread.last_received_message(), 'test')
        receiver_thread.stop()
        receiver_thread.join()
        sender.destroy()
        # Cleaning up sockets takes some time
        time.sleep(1)
    def test_05_rpc1_req_rep(self):
        # RPC invoke method over REQ/REP sockets
        print("Test if invoking a method works over REQ/REP RPC socket, includes a username/password")
        client = ZmqRpcClient(zmq_req_endpoints=["tcp://localhost:55000"], username="username", password="password")
        server_thread = ZmqRpcServerThread(zmq_rep_bind_address="tcp://*:55000", rpc_functions={"invoke_test": invoke_test}, username="username", password="password")
        server_thread.start()
        response = client.invoke(function_name="invoke_test", function_parameters={"param1": "value1", "param2": "value2"}, time_out_waiting_for_response_in_sec=3)
        server_thread.stop()
        server_thread.join()
        client.destroy()
        # Cleaning up sockets takes some time
        time.sleep(1)
        self.assertEqual(response, "value1:value2")
    def test_06_rpc1_req_rep_invalid_function(self):
        # RPC invoke method over REQ/REP sockets
        print("Test if invoking a non existing method throws proper error over REQ/REP RPC socket, includes a username/password")
        client = ZmqRpcClient(zmq_req_endpoints=["tcp://localhost:55000"], username="username", password="password")
        server_thread = ZmqRpcServerThread(zmq_rep_bind_address="tcp://*:55000", rpc_functions={"invoke_test": invoke_test}, username="username", password="password")
        server_thread.start()
        try:
            client.invoke(function_name="invoke_test_does_not_exist", function_parameters={"param1": "value1", "param2": "value2"}, time_out_waiting_for_response_in_sec=3)
        except Exception as e:
            self.assertEqual(str(e), "Function 'invoke_test_does_not_exist' is not implemented on server. Check rpc_functions on server if it contains the function name")
        server_thread.stop()
        server_thread.join()
        client.destroy()
        # Cleaning up sockets takes some time
        time.sleep(1)
    def test_07_rpc1_req_rep_exception_raised(self):
        # RPC invoke method over REQ/REP sockets
        print("Test if invoking an existing method that throws an exception over REQ/REP RPC socket, includes a username/password")
        client = ZmqRpcClient(zmq_req_endpoints=["tcp://localhost:55000"], username="username", password="password")
        server_thread = ZmqRpcServerThread(zmq_rep_bind_address="tcp://*:55000", rpc_functions={"invoke_test_that_throws_exception": invoke_test_that_throws_exception}, username="username", password="password")
        server_thread.start()
        try:
            client.invoke(function_name="invoke_test_that_throws_exception", function_parameters={"param1": "value1", "param2": "value2"}, time_out_waiting_for_response_in_sec=3)
        except Exception as e:
            self.assertEqual(str(e), "Exception raised when calling function invoke_test_that_throws_exception. Exception: Something went wrong ")
        server_thread.stop()
        server_thread.join()
        client.destroy()
        # Cleaning up sockets takes some time
        time.sleep(1)
    def test_08_rpc1_pub_sub(self):
        # RPC invoke method over REQ/REP sockets
        print("Test if invoking a method works over PUB/SUB RPC socket")
        client = ZmqRpcClient(zmq_pub_endpoint="tcp://*:54000")
        server_thread = ZmqRpcServerThread(zmq_sub_connect_addresses=["tcp://localhost:54000"], rpc_functions={"invoke_test": invoke_test}, username="username", password="password")
        server_thread.start()
        # Wait a bit to avoid slow joiner...
        time.sleep(1)
        response = client.invoke(function_name="invoke_test", function_parameters={"param1": "value1sub", "param2": "value2pub"}, time_out_waiting_for_response_in_sec=3)
        # Wait a bit to make sure message is sent...
        time.sleep(1)
        server_thread.stop()
        server_thread.join()
        client.destroy()
        # Cleaning up sockets takes some time
        time.sleep(1)
        # Response should be empty with PUB/SUB
        self.assertEqual(response, None)
        self.assertEqual(test_state.last_invoked_param1, "value1sub")
    def test_09_pub_sub_timeout(self):
        # Basic send/receive over PUB/SUB sockets
        print("Test a timeout")
        sender = ZmqSender(zmq_pub_endpoint="tcp://*:47001")
        receiver_thread = ZmqReceiverThread(zmq_sub_connect_addresses=["tcp://localhost:47001"], recreate_sockets_on_timeout_of_sec=3)
        receiver_thread.start()
        # Slow joiner
        time.sleep(0.1)
        first_socket = receiver_thread.receiver.sub_sockets[0].zmq_socket
        sender.send("test")
        # Take 2 seconds to see if it works in case of within the 3 seconds window.
        time.sleep(2)
        self.assertEqual(receiver_thread.last_received_message(), 'test')
        # Now send another but with 2 seconds delay, which should be ok
        sender.send("test2")
        time.sleep(2)
        self.assertEqual(receiver_thread.last_received_message(), 'test2')
        self.assertEqual(receiver_thread.receiver.sub_sockets[0].zmq_socket, first_socket)
        # Now send another but with 4 seconds delay, which should restart the sockets, but message should arrive
        sender.send("test3")
        time.sleep(4)
        self.assertEqual(receiver_thread.last_received_message(), 'test3')
        second_socket = receiver_thread.receiver.sub_sockets[0].zmq_socket
        self.assertNotEqual(second_socket, first_socket)
        # Now send another but with 2 seconds delay, which should be ok
        sender.send("test4")
        time.sleep(2)
        self.assertEqual(receiver_thread.last_received_message(), 'test4')
        self.assertEqual(receiver_thread.receiver.sub_sockets[0].zmq_socket, second_socket)
        receiver_thread.stop()
        receiver_thread.join()
        sender.destroy()
        # Cleaning up sockets takes some time
        time.sleep(1)
    def test_10_pub_sub_timeout_per_socket(self):
        # Basic send/receive over PUB/SUB sockets
        print("Test a timeout per socket")
        sender = ZmqSender(zmq_pub_endpoint="tcp://*:47001")
        receiver_thread = ZmqReceiverThread(zmq_sub_connect_addresses=[("tcp://localhost:47001", 3)], recreate_sockets_on_timeout_of_sec=10)
        receiver_thread.start()
        # Slow joiner
        time.sleep(0.1)
        first_socket = receiver_thread.receiver.sub_sockets[0].zmq_socket
        sender.send("test")
        # Take 2 seconds to see if it works in case of within the 3 seconds window.
        time.sleep(2)
        self.assertEqual(receiver_thread.last_received_message(), 'test')
        # Now send another but with 2 seconds delay, which should be ok, followed by 4 heartbeats.
        # Socket should not be refreshed.
        sender.send("test2")
        time.sleep(2)
        sender.send_heartbeat()
        time.sleep(2)
        sender.send_heartbeat()
        time.sleep(2)
        sender.send_heartbeat()
        time.sleep(2)
        sender.send_heartbeat()
        self.assertEqual(receiver_thread.last_received_message(), 'test2')
        self.assertEqual(receiver_thread.receiver.sub_sockets[0].zmq_socket, first_socket)
        # Now send another but with 4 seconds delay, which should restart the sockets, but message should arrive
        sender.send("test3")
        time.sleep(4)
        self.assertEqual(receiver_thread.last_received_message(), 'test3')
        second_socket = receiver_thread.receiver.sub_sockets[0].zmq_socket
        self.assertNotEqual(second_socket, first_socket)
        # Now send another but with 2 seconds delay, which should be ok
        sender.send("test4")
        time.sleep(2)
        self.assertEqual(receiver_thread.last_received_message(), 'test4')
        self.assertEqual(receiver_thread.receiver.sub_sockets[0].zmq_socket, second_socket)
        receiver_thread.stop()
        receiver_thread.join()
        sender.destroy()
        # Cleaning up sockets takes some time
        time.sleep(1)
    def test_10a_pub_sub_timeout_per_socket_using_heartbeat_function(self):
        # Basic send/receive over PUB/SUB sockets
        print("Test a timeout per socket with RPC using heartbeat")
        client = ZmqRpcClient(zmq_pub_endpoint="tcp://*:47001")
        server_thread = ZmqRpcServerThread(zmq_sub_connect_addresses=[("tcp://localhost:47001", 3)], rpc_functions={"invoke_test": invoke_test}, recreate_sockets_on_timeout_of_sec=10)
        server_thread.start()
        # Slow joiner
        time.sleep(0.1)
        first_socket = server_thread.server.sub_sockets[0].zmq_socket
        client.invoke(function_name="invoke_test", function_parameters={"param1": "testxx-value1", "param2": "value2"}, time_out_waiting_for_response_in_sec=3)
        # Take 2 seconds to see if it works in case of within the 3 seconds window.
        time.sleep(2)
        self.assertEqual(test_state.last_invoked_param1, "testxx-value1")
        # Now send another but with 2 seconds delay, which should be ok, then followed by a couple of heartbeats which should keep the existing socket.
        client.invoke(function_name="invoke_test", function_parameters={"param1": "testxx-value2", "param2": "value2"}, time_out_waiting_for_response_in_sec=3)
        time.sleep(2)
        client.send_heartbeat()
        time.sleep(2)
        client.send_heartbeat()
        time.sleep(2)
        client.send_heartbeat()
        time.sleep(2)
        self.assertEqual(test_state.last_invoked_param1, "testxx-value2")
        self.assertEqual(server_thread.server.sub_sockets[0].zmq_socket, first_socket)
        # Now send another but with 4 seconds delay, which should restart the sockets, but message should arrive
        client.invoke(function_name="invoke_test", function_parameters={"param1": "testxx-value3", "param2": "value2"}, time_out_waiting_for_response_in_sec=3)
        time.sleep(4)
        self.assertEqual(test_state.last_invoked_param1, "testxx-value3")
        second_socket = server_thread.server.sub_sockets[0].zmq_socket
        self.assertNotEqual(second_socket, first_socket)
        # Now send another but with 2 seconds delay, which should be ok
        client.invoke(function_name="invoke_test", function_parameters={"param1": "testxx-value4", "param2": "value2"}, time_out_waiting_for_response_in_sec=3)
        time.sleep(2)
        self.assertEqual(test_state.last_invoked_param1, "testxx-value4")
        self.assertEqual(server_thread.server.sub_sockets[0].zmq_socket, second_socket)
        server_thread.stop()
        server_thread.join()
        client.destroy()
        # Cleaning up sockets takes some time
        time.sleep(1)
    def test_11_rpc1_req_rep_with_rep_req_proxy_without_password(self):
        # RPC invoke method over REQ/REP sockets with an extra rep/req proxy in between
        print("Test if invoking a method works over REQ/REP RPC socket, using an extra rep/req proxy")
        client = ZmqRpcClient(zmq_req_endpoints=["tcp://localhost:53000"])
        proxy_rep_req_thread = ZmqProxyRep2ReqThread(zmq_rep_bind_address='tcp://*:53000', zmq_req_connect_addresses=["tcp://localhost:53001"])
        proxy_rep_req_thread.start()
        server_thread = ZmqRpcServerThread(zmq_rep_bind_address="tcp://*:53001", rpc_functions={"invoke_test": invoke_test})
        server_thread.start()
        time.sleep(1)
        response = client.invoke(function_name="invoke_test", function_parameters={"param1": "value1", "param2": "value2"}, time_out_waiting_for_response_in_sec=3)
        server_thread.stop()
        server_thread.join()
        proxy_rep_req_thread.stop()
        proxy_rep_req_thread.join()
        client.destroy()
        # Cleaning up sockets takes some time
        time.sleep(1)
        self.assertEqual(response, "value1:value2")
    def test_12_rpc1_req_rep_with_rep_req_proxy(self):
        # RPC invoke method over REQ/REP sockets with an extra rep/req proxy in between
        print("Test if invoking a method works over REQ/REP RPC socket, includes a username/password and also an extra rep/req proxy")
        client = ZmqRpcClient(zmq_req_endpoints=["tcp://localhost:52000"], username="username", password="password")
        proxy_rep_req_thread = ZmqProxyRep2ReqThread(zmq_rep_bind_address='tcp://*:52000', zmq_req_connect_addresses=["tcp://localhost:52001"], username_rep="username", password_rep="password", username_req="username2", password_req="password2")
        proxy_rep_req_thread.start()
        server_thread = ZmqRpcServerThread(zmq_rep_bind_address="tcp://*:52001", rpc_functions={"invoke_test": invoke_test}, username="username2", password="password2")
        server_thread.start()
        response = client.invoke(function_name="invoke_test", function_parameters={"param1": "value1", "param2": "value2"}, time_out_waiting_for_response_in_sec=3)
        server_thread.stop()
        server_thread.join()
        proxy_rep_req_thread.stop()
        proxy_rep_req_thread.join()
        client.destroy()
        # Cleaning up sockets takes some time
        time.sleep(1)
        self.assertEqual(response, "value1:value2")
    def test_13_rpc1_pub_sub_with_pub_sub_proxy(self):
        # RPC invoke method over PUB/SUB sockets and a PUB/SUB proxy
        print("Test if invoking a method works over PUB/SUB RPC socket and a PUB/SUB proxy in between")
        server_thread = ZmqRpcServerThread(zmq_sub_connect_addresses=["tcp://localhost:4567"], rpc_functions={"invoke_test": invoke_test})
        server_thread.start()
        proxy_pub_sub_thread = ZmqProxySub2PubThread(zmq_pub_bind_address="tcp://*:4567", zmq_sub_connect_addresses=['tcp://localhost:4566'])
        proxy_pub_sub_thread.start()
        client = ZmqRpcClient(zmq_pub_endpoint="tcp://*:4566")
        # Wait a bit to avoid slow joiner...
        time.sleep(1)
        response = client.invoke(function_name="invoke_test", function_parameters={"param1": "value2sub", "param2": "value2pub"}, time_out_waiting_for_response_in_sec=3)
        # Wait a bit to make sure message is sent...
        time.sleep(1)
        server_thread.stop()
        server_thread.join()
        proxy_pub_sub_thread.stop()
        proxy_pub_sub_thread.join()
        client.destroy()
        # Cleaning up sockets takes some time
        time.sleep(1)
        # Response should be empty with PUB/SUB
        self.assertEqual(response, None)
        self.assertEqual(test_state.last_invoked_param1, "value2sub")
    def test_14_proxy(self):
        # With proxy elements
        print("Add a proxy setup to the end to end chain pub->proxy.req->proxy.rep->pub->sub")
        sender = ZmqSender(zmq_pub_endpoint="tcp://*:57000")
        proxy_sub_req_thread = ZmqProxySub2ReqThread(zmq_sub_connect_addresses=['tcp://localhost:57000'], zmq_req_connect_addresses=["tcp://localhost:57001"], username_req="username", password_req="password")
        proxy_sub_req_thread.start()
        proxy_rep_pub_thread = ZmqProxyRep2PubThread(zmq_rep_bind_address='tcp://*:57001', zmq_pub_bind_address='tcp://*:57002', username_rep="username", password_rep="password")
        proxy_rep_pub_thread.start()
        receiver_thread = ZmqReceiverThread(zmq_sub_connect_addresses=["tcp://localhost:57002"])
        receiver_thread.start()
        # Take 0.5 second for sockets to connect to prevent 'slow joiner' problem
        time.sleep(0.5)
        sender.send("test")
        # Sleep for pub/sub not guaranteed to be done on completing send_pub_socket
        time.sleep(1)
        print("last received message by proxy_sub_req_thread: {0}".format(proxy_sub_req_thread.last_received_message()))
        print("last received message by proxy_rep_pub_thread: {0}".format(proxy_rep_pub_thread.last_received_message()))
        print("last received message by receiver_thread: {0}".format(receiver_thread.last_received_message()))
        self.assertEqual(receiver_thread.last_received_message(), 'test')
        receiver_thread.stop()
        receiver_thread.join()
        proxy_sub_req_thread.stop()
        proxy_sub_req_thread.join()
        proxy_rep_pub_thread.stop()
        proxy_rep_pub_thread.join()
        sender.destroy()
        # Cleaning up sockets takes some time
        time.sleep(1)
    def test_15_rpc1_req_rep_with_rep_req_buffered_proxy(self):
        # RPC invoke method over REQ/REP sockets with an extra rep/req proxy in between
        print("Test if invoking a method works over Buffered REQ/REP RPC socket, includes a username/password")
        test_state.last_invoked_param1 = None
        client = ZmqRpcClient(zmq_req_endpoints=["tcp://localhost:51000"], username="username", password="password")
        buf_proxy_rep_req_thread = ZmqBufferedProxyRep2ReqThread(zmq_rep_bind_address='tcp://*:51000', zmq_req_connect_addresses=["tcp://localhost:51001"], buffered_pub_address="tcp://*:59878", buffered_sub_address="tcp://localhost:59878", username_rep="username", password_rep="password", username_req="username2", password_req="password2")
        buf_proxy_rep_req_thread.start()
        server_thread = ZmqRpcServerThread(zmq_rep_bind_address="tcp://*:51001", rpc_functions={"invoke_test": invoke_test}, username="username2", password="password2")
        server_thread.start()
        time.sleep(1)
        response = client.invoke(function_name="invoke_test", function_parameters={"param1": "value1viaproxy", "param2": "value2viaproxy"}, time_out_waiting_for_response_in_sec=30)
        time.sleep(1)
        self.assertEqual(response, None)
        self.assertEqual(test_state.last_invoked_param1, "value1viaproxy")
        # Now send a couple of messages while nothing is receiving to validate buffering is working fine
        server_thread.stop()
        server_thread.join()
        test_state.last_invoked_param1 = None
        response = client.invoke(function_name="invoke_test", function_parameters={"param1": "value1-2viaproxy", "param2": "value2viaproxy"}, time_out_waiting_for_response_in_sec=30)
        # Wait some time to be sure it has been processed and the system is retrying delivery.
        time.sleep(5)
        server_thread = ZmqRpcServerThread(zmq_rep_bind_address="tcp://*:51001", rpc_functions={"invoke_test": invoke_test}, username="username2", password="password2")
        server_thread.start()
        # Wait some time to be sure it has been processed and the system is retrying delivery. A retry cycle is max 1 sec.
        time.sleep(2)
        self.assertEqual(test_state.last_invoked_param1, "value1-2viaproxy")
        server_thread.stop()
        server_thread.join()
        buf_proxy_rep_req_thread.stop()
        buf_proxy_rep_req_thread.join()
        client.destroy()
        # Cleaning up sockets takes some time
        time.sleep(1)
if __name__ == '__main__':
    # Configure root logging and make the package logger verbose for test runs.
    logging.basicConfig(format='%(asctime)s %(levelname)s %(name)s %(message)s')
    # Fixed typo: the original used getLogger("zmprpc"), which configured a
    # logger name nothing in this file uses; the module logs under 'zmqrpc'.
    logger = logging.getLogger("zmqrpc")
    logger.setLevel(logging.DEBUG)
    unittest.main()
| 46.180791
| 341
| 0.701126
| 3,158
| 24,522
| 5.199177
| 0.098797
| 0.028504
| 0.014617
| 0.017602
| 0.838358
| 0.810707
| 0.769413
| 0.75437
| 0.747975
| 0.714721
| 0
| 0.025909
| 0.197292
| 24,522
| 531
| 342
| 46.180791
| 0.80822
| 0.149784
| 0
| 0.62426
| 0
| 0.011834
| 0.206797
| 0.031337
| 0
| 0
| 0
| 0
| 0.112426
| 1
| 0.056213
| false
| 0.106509
| 0.026627
| 0
| 0.091716
| 0.076923
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
66275bd8f886f81daf9876680e2b19a617bb4048
| 11,720
|
py
|
Python
|
fec_raw/models/line_item.py
|
datadesk/django-fec-raw-data
|
9d1f49e5ecc1552c55b635c63c1bf021871e4c0b
|
[
"MIT"
] | 3
|
2016-06-01T18:16:36.000Z
|
2021-07-20T14:51:40.000Z
|
fec_raw/models/line_item.py
|
datadesk/django-fec-raw-data
|
9d1f49e5ecc1552c55b635c63c1bf021871e4c0b
|
[
"MIT"
] | 9
|
2015-11-24T06:22:56.000Z
|
2021-06-10T17:45:57.000Z
|
fec_raw/models/line_item.py
|
datadesk/django-fec-raw-data
|
9d1f49e5ecc1552c55b635c63c1bf021871e4c0b
|
[
"MIT"
] | 1
|
2020-12-01T21:22:53.000Z
|
2020-12-01T21:22:53.000Z
|
from decimal import Decimal
from django.db import models
from fec_raw.fields import CharField, IntegerField, DecimalField
class RawContribution(models.Model):
    """
    One-to-one mapping of the fields in the raw data CSVs.

    Specific to Schedule A line items, plus the extra columns found on
    F3L Schedule A (lobbyist bundling) records.
    """
    filing_no = IntegerField(db_index=True)
    form_type = CharField(max_length=8)
    filer_committee_id_number = CharField(max_length=9)
    transaction_id = CharField(max_length=20, null=True, blank=True)
    back_reference_tran_id_number = CharField(max_length=20, null=True, blank=True)
    back_reference_sched_name = CharField(max_length=8, null=True, blank=True)
    entity_type = CharField(max_length=3, null=True, blank=True)
    contributor_organization_name = CharField(max_length=200, null=True, blank=True)
    contributor_last_name = CharField(max_length=50, null=True, blank=True)
    contributor_first_name = CharField(max_length=50, null=True, blank=True)
    contributor_middle_name = CharField(max_length=50, null=True, blank=True)
    contributor_prefix = CharField(max_length=10, null=True, blank=True)
    contributor_suffix = CharField(max_length=10, null=True, blank=True)
    contributor_street_1 = CharField(max_length=50, null=True, blank=True)
    contributor_street_2 = CharField(max_length=50, null=True, blank=True)
    contributor_city = CharField(max_length=30, null=True, blank=True)
    contributor_state = CharField(max_length=2, null=True, blank=True)
    contributor_zip_code = CharField(max_length=20, null=True, blank=True)
    election_code = CharField(max_length=5, null=True, blank=True)
    election_other_description = CharField(max_length=200, null=True, blank=True)
    contribution_date = models.DateField(auto_now=False, null=True)
    contribution_amount = DecimalField(max_digits=16, decimal_places=2, default=0, null=True)
    contribution_aggregate = DecimalField(max_digits=16, decimal_places=2, default=0, null=True)
    contribution_purpose_descrip = CharField(max_length=100, null=True, blank=True)
    contributor_employer = CharField(max_length=50, null=True, blank=True)
    contributor_occupation = CharField(max_length=50, null=True, blank=True)
    donor_committee_fec_id = CharField(max_length=9, null=True, blank=True)
    donor_committee_name = CharField(max_length=200, null=True, blank=True)
    donor_candidate_fec_id = CharField(max_length=9, null=True, blank=True)
    donor_candidate_last_name = CharField(max_length=50, null=True, blank=True)
    donor_candidate_first_name = CharField(max_length=50, null=True, blank=True)
    donor_candidate_middle_name = CharField(max_length=50, null=True, blank=True)
    # NOTE(review): 200 looks oversized next to the other *_prefix fields
    # (max_length=10) -- confirm against the FEC file spec before shrinking.
    donor_candidate_prefix = CharField(max_length=200, null=True, blank=True)
    donor_candidate_suffix = CharField(max_length=10, null=True, blank=True)
    donor_candidate_office = CharField(max_length=1, null=True, blank=True)
    donor_candidate_state = CharField(max_length=2, null=True, blank=True)
    donor_candidate_district = CharField(max_length=2, null=True, blank=True)
    conduit_name = CharField(max_length=200, null=True, blank=True)
    conduit_street1 = CharField(max_length=50, null=True, blank=True)
    conduit_street2 = CharField(max_length=50, null=True, blank=True)
    conduit_city = CharField(max_length=30, null=True, blank=True)
    conduit_state = CharField(max_length=2, null=True, blank=True)
    conduit_zip_code = CharField(max_length=20, null=True, blank=True)
    memo_code = CharField(max_length=1, null=True, blank=True)
    memo_text_description = CharField(max_length=200, null=True, blank=True)
    # For lobbyist contributions from F3L SA line items
    lobbyist_registrant_organization_name = CharField(max_length=200, null=True, blank=True)
    lobbyist_registrant_last_name = CharField(max_length=50, null=True, blank=True)
    lobbyist_registrant_first_name = CharField(max_length=50, null=True, blank=True)
    lobbyist_registrant_middle_name = CharField(max_length=50, null=True, blank=True)
    lobbyist_registrant_prefix = CharField(max_length=10, null=True, blank=True)
    lobbyist_registrant_suffix = CharField(max_length=10, null=True, blank=True)
    lobbyist_registrant_street_1 = CharField(max_length=50, null=True, blank=True)
    lobbyist_registrant_street_2 = CharField(max_length=50, null=True, blank=True)
    lobbyist_registrant_city = CharField(max_length=30, null=True, blank=True)
    lobbyist_registrant_state = CharField(max_length=2, null=True, blank=True)
    lobbyist_registrant_zip_code = CharField(max_length=20, null=True, blank=True)
    contribution_purpose_code = CharField(max_length=100, null=True, blank=True)
    bundled_amount_period = DecimalField(max_digits=16, decimal_places=2, default=0, null=True)
    bundled_amount_semi_annual = DecimalField(max_digits=16, decimal_places=2, default=0, null=True)
    lobbyist_registrant_employer = CharField(max_length=50, null=True, blank=True)
    lobbyist_registrant_occupation = CharField(max_length=50, null=True, blank=True)
    associated_text_record = CharField(max_length=200, null=True, blank=True)
    memo_text = CharField(max_length=100, null=True, blank=True)
    # Bug fix: ``reference_code`` was declared twice in this class body
    # (first with max_length=9, again here with max_length=50).  The later
    # class-body assignment silently wins in Python, so only this definition
    # was ever used; the dead earlier duplicate has been removed.
    reference_code = CharField(max_length=50, null=True, blank=True)

    def __unicode__(self):
        # NOTE(review): Python 2 style; Python 3 uses __str__ instead --
        # confirm target interpreter before renaming.
        return 'SA {} in filing {}'.format(
            self.transaction_id,
            self.filing_no)
class RawIndependentExpenditure(models.Model):
    """
    One-to-one mapping of the fields in the raw data CSVs.

    Specific to Schedule E line items.
    """
    # Consistency fix: use the app's IntegerField (as RawContribution does)
    # rather than stock models.IntegerField, so raw FEC values receive the
    # same cleaning in both models.
    filing_no = IntegerField(db_index=True)
    form_type = CharField(max_length=8)
    filer_committee_id_number = CharField(max_length=9)
    transaction_id_number = CharField(max_length=20)
    back_reference_tran_id_number = CharField(max_length=20, null=True, blank=True)
    back_reference_sched_name = CharField(max_length=8, null=True, blank=True)
    entity_type = CharField(max_length=3, null=True, blank=True)
    payee_organization_name = CharField(max_length=200, null=True, blank=True)
    payee_last_name = CharField(max_length=50, null=True, blank=True)
    payee_first_name = CharField(max_length=50, null=True, blank=True)
    payee_middle_name = CharField(max_length=50, null=True, blank=True)
    payee_prefix = CharField(max_length=10, null=True, blank=True)
    payee_suffix = CharField(max_length=10, null=True, blank=True)
    payee_street_1 = CharField(max_length=50, null=True, blank=True)
    payee_street_2 = CharField(max_length=50, null=True, blank=True)
    payee_city = CharField(max_length=30, null=True, blank=True)
    payee_state = CharField(max_length=2, null=True, blank=True)
    payee_zip_code = CharField(max_length=20, null=True, blank=True)
    election_code = CharField(max_length=5, null=True, blank=True)
    # NOTE(review): RawContribution allows 200 chars for the same column --
    # confirm 20 is intentional for Schedule E before widening.
    election_other_description = CharField(max_length=20, null=True, blank=True)
    dissemination_date = models.DateField(auto_now=False, null=True)
    expenditure_amount = DecimalField(max_digits=16, decimal_places=2, default=0)
    disbursement_date = models.DateField(auto_now=False, null=True)
    calendar_y_t_d_per_election_office = DecimalField(max_digits=16, decimal_places=2, default=0)
    expenditure_purpose_descrip = CharField(max_length=100, null=True, blank=True)
    category_code = CharField(max_length=3, null=True, blank=True)
    payee_cmtte_fec_id_number = CharField(max_length=9, null=True, blank=True)
    support_oppose_code = CharField(max_length=1, null=True, blank=True)
    candidate_id_number = CharField(max_length=9, null=True, blank=True)
    candidate_last_name = CharField(max_length=50, null=True, blank=True)
    candidate_first_name = CharField(max_length=50, null=True, blank=True)
    candidate_middle_name = CharField(max_length=50, null=True, blank=True)
    candidate_prefix = CharField(max_length=10, null=True, blank=True)
    candidate_suffix = CharField(max_length=10, null=True, blank=True)
    candidate_office = CharField(max_length=3, null=True, blank=True)
    candidate_district = CharField(max_length=2, null=True, blank=True)
    candidate_state = CharField(max_length=2, null=True, blank=True)
    completing_last_name = CharField(max_length=50, null=True, blank=True)
    completing_first_name = CharField(max_length=50, null=True, blank=True)
    completing_middle_name = CharField(max_length=50, null=True, blank=True)
    completing_prefix = CharField(max_length=10, null=True, blank=True)
    completing_suffix = CharField(max_length=10, null=True, blank=True)
    date_signed = models.DateField(auto_now=False, null=True)
    memo_code = CharField(max_length=1, null=True, blank=True)
    memo_text_description = CharField(max_length=100, null=True, blank=True)

    def __unicode__(self):
        # NOTE(review): Python 2 style; Python 3 uses __str__ instead --
        # confirm target interpreter before renaming.
        return 'IE {} in filing {}'.format(
            self.transaction_id_number,
            self.filing_no)
| 23.772819
| 64
| 0.581741
| 1,274
| 11,720
| 5.090267
| 0.107535
| 0.124595
| 0.272012
| 0.243793
| 0.91781
| 0.893909
| 0.864302
| 0.859522
| 0.797533
| 0.600925
| 0
| 0.027656
| 0.339761
| 11,720
| 492
| 65
| 23.821138
| 0.810416
| 0.019625
| 0
| 0.667364
| 0
| 0
| 0.003144
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.004184
| false
| 0
| 0.006276
| 0.004184
| 0.248954
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
b0738c0407f69b875728176b0af446aab260cb27
| 7,948
|
py
|
Python
|
idm/api/proto/chat_manager_service_pb2_grpc.py
|
RobbiNespu/hyperboria
|
7db858386f1a20e8d49bc16f53bfd7f1e4d03f7e
|
[
"Unlicense"
] | 54
|
2021-01-07T03:02:36.000Z
|
2022-03-28T17:19:29.000Z
|
idm/api/proto/chat_manager_service_pb2_grpc.py
|
the-superpirate/hyperboria
|
74776166158d07b199677f9738862e5f1fa54367
|
[
"Unlicense"
] | 10
|
2021-01-08T17:38:59.000Z
|
2022-02-28T14:34:45.000Z
|
idm/api/proto/chat_manager_service_pb2_grpc.py
|
the-superpirate/hyperboria
|
74776166158d07b199677f9738862e5f1fa54367
|
[
"Unlicense"
] | 16
|
2020-12-28T18:31:44.000Z
|
2022-02-22T15:00:53.000Z
|
# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
"""Client and server classes corresponding to protobuf-defined services."""
import grpc
from idm.api.proto import \
chat_manager_service_pb2 as \
idm_dot_api_dot_proto_dot_chat__manager__service__pb2
# Generated code (grpc protoc plugin) -- do not hand-edit the logic; comments
# below are review annotations only.
class ChatManagerStub(object):
    """Missing associated documentation comment in .proto file."""

    # Client-side stub: each attribute is a unary-unary multi-callable bound
    # to one RPC of the idm.api.proto.ChatManager service.
    def __init__(self, channel):
        """Constructor.

        Args:
            channel: A grpc.Channel.
        """
        self.create_chat = channel.unary_unary(
                '/idm.api.proto.ChatManager/create_chat',
                request_serializer=idm_dot_api_dot_proto_dot_chat__manager__service__pb2.CreateChatRequest.SerializeToString,
                response_deserializer=idm_dot_api_dot_proto_dot_chat__manager__service__pb2.Chat.FromString,
                )
        self.get_chat = channel.unary_unary(
                '/idm.api.proto.ChatManager/get_chat',
                request_serializer=idm_dot_api_dot_proto_dot_chat__manager__service__pb2.GetChatRequest.SerializeToString,
                response_deserializer=idm_dot_api_dot_proto_dot_chat__manager__service__pb2.Chat.FromString,
                )
        # list_chats is the only method returning the plural Chats message.
        self.list_chats = channel.unary_unary(
                '/idm.api.proto.ChatManager/list_chats',
                request_serializer=idm_dot_api_dot_proto_dot_chat__manager__service__pb2.ListChatsRequest.SerializeToString,
                response_deserializer=idm_dot_api_dot_proto_dot_chat__manager__service__pb2.Chats.FromString,
                )
        self.update_chat = channel.unary_unary(
                '/idm.api.proto.ChatManager/update_chat',
                request_serializer=idm_dot_api_dot_proto_dot_chat__manager__service__pb2.UpdateChatRequest.SerializeToString,
                response_deserializer=idm_dot_api_dot_proto_dot_chat__manager__service__pb2.Chat.FromString,
                )
# Generated code (grpc protoc plugin): server-side skeleton.  Subclass and
# override the handlers, then register via add_ChatManagerServicer_to_server.
class ChatManagerServicer(object):
    """Missing associated documentation comment in .proto file."""

    # Each handler aborts the RPC with UNIMPLEMENTED unless overridden.
    def create_chat(self, request, context):
        """Missing associated documentation comment in .proto file."""
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def get_chat(self, request, context):
        """Missing associated documentation comment in .proto file."""
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def list_chats(self, request, context):
        """Missing associated documentation comment in .proto file."""
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def update_chat(self, request, context):
        """Missing associated documentation comment in .proto file."""
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')
def add_ChatManagerServicer_to_server(servicer, server):
    # Registers the servicer's four unary-unary handlers on the server under
    # the fully-qualified service name 'idm.api.proto.ChatManager'.
    # Generated code (grpc protoc plugin) -- do not hand-edit.
    rpc_method_handlers = {
            'create_chat': grpc.unary_unary_rpc_method_handler(
                    servicer.create_chat,
                    request_deserializer=idm_dot_api_dot_proto_dot_chat__manager__service__pb2.CreateChatRequest.FromString,
                    response_serializer=idm_dot_api_dot_proto_dot_chat__manager__service__pb2.Chat.SerializeToString,
            ),
            'get_chat': grpc.unary_unary_rpc_method_handler(
                    servicer.get_chat,
                    request_deserializer=idm_dot_api_dot_proto_dot_chat__manager__service__pb2.GetChatRequest.FromString,
                    response_serializer=idm_dot_api_dot_proto_dot_chat__manager__service__pb2.Chat.SerializeToString,
            ),
            'list_chats': grpc.unary_unary_rpc_method_handler(
                    servicer.list_chats,
                    request_deserializer=idm_dot_api_dot_proto_dot_chat__manager__service__pb2.ListChatsRequest.FromString,
                    response_serializer=idm_dot_api_dot_proto_dot_chat__manager__service__pb2.Chats.SerializeToString,
            ),
            'update_chat': grpc.unary_unary_rpc_method_handler(
                    servicer.update_chat,
                    request_deserializer=idm_dot_api_dot_proto_dot_chat__manager__service__pb2.UpdateChatRequest.FromString,
                    response_serializer=idm_dot_api_dot_proto_dot_chat__manager__service__pb2.Chat.SerializeToString,
            ),
    }
    generic_handler = grpc.method_handlers_generic_handler(
            'idm.api.proto.ChatManager', rpc_method_handlers)
    server.add_generic_rpc_handlers((generic_handler,))
# This class is part of an EXPERIMENTAL API.
class ChatManager(object):
    """Missing associated documentation comment in .proto file."""

    # Generated convenience wrappers over grpc.experimental.unary_unary: each
    # static method performs one RPC without a long-lived stub/channel.
    # The grpc "experimental" API these call may change between releases.
    @staticmethod
    def create_chat(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        return grpc.experimental.unary_unary(request, target, '/idm.api.proto.ChatManager/create_chat',
            idm_dot_api_dot_proto_dot_chat__manager__service__pb2.CreateChatRequest.SerializeToString,
            idm_dot_api_dot_proto_dot_chat__manager__service__pb2.Chat.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def get_chat(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        return grpc.experimental.unary_unary(request, target, '/idm.api.proto.ChatManager/get_chat',
            idm_dot_api_dot_proto_dot_chat__manager__service__pb2.GetChatRequest.SerializeToString,
            idm_dot_api_dot_proto_dot_chat__manager__service__pb2.Chat.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def list_chats(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        return grpc.experimental.unary_unary(request, target, '/idm.api.proto.ChatManager/list_chats',
            idm_dot_api_dot_proto_dot_chat__manager__service__pb2.ListChatsRequest.SerializeToString,
            idm_dot_api_dot_proto_dot_chat__manager__service__pb2.Chats.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def update_chat(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        return grpc.experimental.unary_unary(request, target, '/idm.api.proto.ChatManager/update_chat',
            idm_dot_api_dot_proto_dot_chat__manager__service__pb2.UpdateChatRequest.SerializeToString,
            idm_dot_api_dot_proto_dot_chat__manager__service__pb2.Chat.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
| 47.592814
| 125
| 0.69313
| 853
| 7,948
| 5.954279
| 0.119578
| 0.05631
| 0.092144
| 0.107501
| 0.87025
| 0.87025
| 0.860799
| 0.845639
| 0.7848
| 0.762355
| 0
| 0.004287
| 0.236915
| 7,948
| 166
| 126
| 47.879518
| 0.833141
| 0.079139
| 0
| 0.544776
| 1
| 0
| 0.075204
| 0.044294
| 0
| 0
| 0
| 0
| 0
| 1
| 0.074627
| false
| 0
| 0.014925
| 0.029851
| 0.141791
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b037e5f28eb6ca83adf738792fcc3594a297e156
| 56
|
py
|
Python
|
language-python-test/test/features/arithmetic/mult.py
|
wbadart/language-python
|
6c048c215ff7fe4a5d5cc36ba3c17a666af74821
|
[
"BSD-3-Clause"
] | null | null | null |
language-python-test/test/features/arithmetic/mult.py
|
wbadart/language-python
|
6c048c215ff7fe4a5d5cc36ba3c17a666af74821
|
[
"BSD-3-Clause"
] | null | null | null |
language-python-test/test/features/arithmetic/mult.py
|
wbadart/language-python
|
6c048c215ff7fe4a5d5cc36ba3c17a666af74821
|
[
"BSD-3-Clause"
] | null | null | null |
18 * 1234
18 * 1234 * 2
0 * 1
1 * 0
0.0 * 1.0
1.0 * 0.0
| 8
| 13
| 0.446429
| 17
| 56
| 1.470588
| 0.294118
| 0.32
| 0.24
| 0.32
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.694444
| 0.357143
| 56
| 6
| 14
| 9.333333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
c6ed445a899ed138ed08a3fcd3d8a4b793ce87e7
| 25,365
|
py
|
Python
|
nfv/nfv-vim/nfv_vim/rpc/_rpc_message_instance.py
|
SidneyAn/nfv
|
5f0262a5b6ea4be59f977b9c587c483cbe0e373d
|
[
"Apache-2.0"
] | 2
|
2020-02-07T19:01:36.000Z
|
2022-02-23T01:41:46.000Z
|
nfv/nfv-vim/nfv_vim/rpc/_rpc_message_instance.py
|
SidneyAn/nfv
|
5f0262a5b6ea4be59f977b9c587c483cbe0e373d
|
[
"Apache-2.0"
] | 1
|
2021-01-14T12:02:25.000Z
|
2021-01-14T12:02:25.000Z
|
nfv/nfv-vim/nfv_vim/rpc/_rpc_message_instance.py
|
SidneyAn/nfv
|
5f0262a5b6ea4be59f977b9c587c483cbe0e373d
|
[
"Apache-2.0"
] | 2
|
2021-01-13T08:39:21.000Z
|
2022-02-09T00:21:55.000Z
|
#
# Copyright (c) 2015-2016 Wind River Systems, Inc.
#
# SPDX-License-Identifier: Apache-2.0
#
from nfv_common import debug
from nfv_vim.rpc._rpc_defs import RPC_MSG_RESULT
from nfv_vim.rpc._rpc_defs import RPC_MSG_TYPE
from nfv_vim.rpc._rpc_defs import RPC_MSG_VERSION
from nfv_vim.rpc._rpc_message import RPCMessage
# Module-level debug logger for instance RPC messages.
DLOG = debug.debug_get_logger('nfv_vim.rpc.instance')
class APIRequestCreateInstance(RPCMessage):
    """RPC API request message asking for an instance to be created."""

    # Payload fields carried by this message.
    name = None
    instance_type_uuid = None
    image_uuid = None
    vcpus = None
    memory_mb = None
    disk_gb = None
    ephemeral_gb = None
    swap_gb = None
    network_uuid = None
    auto_recovery = None
    live_migration_timeout = None
    live_migration_max_downtime = None

    def __init__(self, msg_version=RPC_MSG_VERSION.VERSION_1_0,
                 msg_type=RPC_MSG_TYPE.CREATE_INSTANCE_REQUEST,
                 msg_result=RPC_MSG_RESULT.SUCCESS):
        super(APIRequestCreateInstance, self).__init__(
            msg_version, msg_type, msg_result)

    def serialize_payload(self, msg):
        # Straight attribute -> key copy; the extension fields use their
        # prefixed wire names.
        msg['name'] = self.name
        msg['instance_type_uuid'] = self.instance_type_uuid
        msg['image_uuid'] = self.image_uuid
        msg['vcpus'] = self.vcpus
        msg['memory_mb'] = self.memory_mb
        msg['disk_gb'] = self.disk_gb
        msg['ephemeral_gb'] = self.ephemeral_gb
        msg['swap_gb'] = self.swap_gb
        msg['network_uuid'] = self.network_uuid
        msg['sw:wrs:auto_recovery'] = self.auto_recovery
        msg['hw:wrs:live_migration_timeout'] = self.live_migration_timeout
        msg['hw:wrs:live_migration_max_downtime'] = (
            self.live_migration_max_downtime)

    def deserialize_payload(self, msg):
        # Absent keys deserialize to None.
        self.name = msg.get('name')
        self.instance_type_uuid = msg.get('instance_type_uuid')
        self.image_uuid = msg.get('image_uuid')
        self.vcpus = msg.get('vcpus')
        self.memory_mb = msg.get('memory_mb')
        self.disk_gb = msg.get('disk_gb')
        self.ephemeral_gb = msg.get('ephemeral_gb')
        self.swap_gb = msg.get('swap_gb')
        self.network_uuid = msg.get('network_uuid')
        self.auto_recovery = msg.get('sw:wrs:auto_recovery')
        self.live_migration_timeout = msg.get('hw:wrs:live_migration_timeout')
        self.live_migration_max_downtime = msg.get(
            'hw:wrs:live_migration_max_downtime')

    def __str__(self):
        return "create-instance request: %s" % self.name
class APIResponseCreateInstance(RPCMessage):
    """RPC API response message describing a newly created instance."""

    # Payload fields carried by this message.
    uuid = None
    name = None
    admin_state = None
    oper_state = None
    avail_status = None
    action = None
    host_uuid = None
    host_name = None
    instance_type_original_name = None
    image_uuid = None
    vcpus = None
    memory_mb = None
    disk_gb = None
    ephemeral_gb = None
    swap_gb = None
    network_uuid = None
    auto_recovery = None
    live_migration_timeout = None
    live_migration_max_downtime = None

    def __init__(self, msg_version=RPC_MSG_VERSION.VERSION_1_0,
                 msg_type=RPC_MSG_TYPE.CREATE_INSTANCE_RESPONSE,
                 msg_result=RPC_MSG_RESULT.SUCCESS):
        super(APIResponseCreateInstance, self).__init__(
            msg_version, msg_type, msg_result)

    def serialize_payload(self, msg):
        # Straight attribute -> key copy; the extension fields use their
        # prefixed wire names.
        msg['uuid'] = self.uuid
        msg['name'] = self.name
        msg['admin_state'] = self.admin_state
        msg['oper_state'] = self.oper_state
        msg['avail_status'] = self.avail_status
        msg['action'] = self.action
        msg['host_uuid'] = self.host_uuid
        msg['host_name'] = self.host_name
        msg['instance_type_original_name'] = self.instance_type_original_name
        msg['image_uuid'] = self.image_uuid
        msg['vcpus'] = self.vcpus
        msg['memory_mb'] = self.memory_mb
        msg['disk_gb'] = self.disk_gb
        msg['ephemeral_gb'] = self.ephemeral_gb
        msg['swap_gb'] = self.swap_gb
        msg['network_uuid'] = self.network_uuid
        msg['sw:wrs:auto_recovery'] = self.auto_recovery
        msg['hw:wrs:live_migration_timeout'] = self.live_migration_timeout
        msg['hw:wrs:live_migration_max_downtime'] = (
            self.live_migration_max_downtime)

    def deserialize_payload(self, msg):
        # Absent keys deserialize to None.
        self.uuid = msg.get('uuid')
        self.name = msg.get('name')
        self.admin_state = msg.get('admin_state')
        self.oper_state = msg.get('oper_state')
        self.avail_status = msg.get('avail_status')
        self.action = msg.get('action')
        self.host_uuid = msg.get('host_uuid')
        self.host_name = msg.get('host_name')
        self.instance_type_original_name = msg.get(
            'instance_type_original_name')
        self.image_uuid = msg.get('image_uuid')
        self.vcpus = msg.get('vcpus')
        self.memory_mb = msg.get('memory_mb')
        self.disk_gb = msg.get('disk_gb')
        self.ephemeral_gb = msg.get('ephemeral_gb')
        self.swap_gb = msg.get('swap_gb')
        self.network_uuid = msg.get('network_uuid')
        self.auto_recovery = msg.get('sw:wrs:auto_recovery')
        self.live_migration_timeout = msg.get('hw:wrs:live_migration_timeout')
        self.live_migration_max_downtime = msg.get(
            'hw:wrs:live_migration_max_downtime')

    def __str__(self):
        return "create-instance response: %s" % self.uuid
class APIRequestStartInstance(RPCMessage):
    """RPC API request message asking for an instance to be started."""

    # UUID of the target instance.
    uuid = None

    def __init__(self, msg_version=RPC_MSG_VERSION.VERSION_1_0,
                 msg_type=RPC_MSG_TYPE.START_INSTANCE_REQUEST,
                 msg_result=RPC_MSG_RESULT.SUCCESS):
        super(APIRequestStartInstance, self).__init__(
            msg_version, msg_type, msg_result)

    def serialize_payload(self, msg):
        msg['uuid'] = self.uuid

    def deserialize_payload(self, msg):
        # Absent key deserializes to None.
        self.uuid = msg.get('uuid')

    def __str__(self):
        return "start-instance request: %s" % self.uuid
class APIResponseStartInstance(RPCMessage):
    """RPC API response message for a start-instance request."""

    # UUID of the target instance.
    uuid = None

    def __init__(self, msg_version=RPC_MSG_VERSION.VERSION_1_0,
                 msg_type=RPC_MSG_TYPE.START_INSTANCE_RESPONSE,
                 msg_result=RPC_MSG_RESULT.SUCCESS):
        super(APIResponseStartInstance, self).__init__(
            msg_version, msg_type, msg_result)

    def serialize_payload(self, msg):
        msg['uuid'] = self.uuid

    def deserialize_payload(self, msg):
        # Absent key deserializes to None.
        self.uuid = msg.get('uuid')

    def __str__(self):
        return "start-instance response: %s" % self.uuid
class APIRequestStopInstance(RPCMessage):
    """RPC API request message asking for an instance to be stopped."""

    # UUID of the target instance.
    uuid = None

    def __init__(self, msg_version=RPC_MSG_VERSION.VERSION_1_0,
                 msg_type=RPC_MSG_TYPE.STOP_INSTANCE_REQUEST,
                 msg_result=RPC_MSG_RESULT.SUCCESS):
        super(APIRequestStopInstance, self).__init__(
            msg_version, msg_type, msg_result)

    def serialize_payload(self, msg):
        msg['uuid'] = self.uuid

    def deserialize_payload(self, msg):
        # Absent key deserializes to None.
        self.uuid = msg.get('uuid')

    def __str__(self):
        return "stop-instance request: %s" % self.uuid
class APIResponseStopInstance(RPCMessage):
    """RPC API response message for a stop-instance request."""

    # UUID of the target instance.
    uuid = None

    def __init__(self, msg_version=RPC_MSG_VERSION.VERSION_1_0,
                 msg_type=RPC_MSG_TYPE.STOP_INSTANCE_RESPONSE,
                 msg_result=RPC_MSG_RESULT.SUCCESS):
        super(APIResponseStopInstance, self).__init__(
            msg_version, msg_type, msg_result)

    def serialize_payload(self, msg):
        msg['uuid'] = self.uuid

    def deserialize_payload(self, msg):
        # Absent key deserializes to None.
        self.uuid = msg.get('uuid')

    def __str__(self):
        return "stop-instance response: %s" % self.uuid
class APIRequestPauseInstance(RPCMessage):
    """RPC API request message asking for an instance to be paused."""

    # UUID of the target instance.
    uuid = None

    def __init__(self, msg_version=RPC_MSG_VERSION.VERSION_1_0,
                 msg_type=RPC_MSG_TYPE.PAUSE_INSTANCE_REQUEST,
                 msg_result=RPC_MSG_RESULT.SUCCESS):
        super(APIRequestPauseInstance, self).__init__(
            msg_version, msg_type, msg_result)

    def serialize_payload(self, msg):
        msg['uuid'] = self.uuid

    def deserialize_payload(self, msg):
        # Absent key deserializes to None.
        self.uuid = msg.get('uuid')

    def __str__(self):
        return "pause-instance request: %s" % self.uuid
class APIResponsePauseInstance(RPCMessage):
    """RPC API response message for a pause-instance request."""

    # UUID of the target instance.
    uuid = None

    def __init__(self, msg_version=RPC_MSG_VERSION.VERSION_1_0,
                 msg_type=RPC_MSG_TYPE.PAUSE_INSTANCE_RESPONSE,
                 msg_result=RPC_MSG_RESULT.SUCCESS):
        super(APIResponsePauseInstance, self).__init__(
            msg_version, msg_type, msg_result)

    def serialize_payload(self, msg):
        msg['uuid'] = self.uuid

    def deserialize_payload(self, msg):
        # Absent key deserializes to None.
        self.uuid = msg.get('uuid')

    def __str__(self):
        return "pause-instance response: %s" % self.uuid
class APIRequestUnpauseInstance(RPCMessage):
    """RPC API request message asking for an instance to be unpaused."""

    # UUID of the target instance.
    uuid = None

    def __init__(self, msg_version=RPC_MSG_VERSION.VERSION_1_0,
                 msg_type=RPC_MSG_TYPE.UNPAUSE_INSTANCE_REQUEST,
                 msg_result=RPC_MSG_RESULT.SUCCESS):
        super(APIRequestUnpauseInstance, self).__init__(
            msg_version, msg_type, msg_result)

    def serialize_payload(self, msg):
        msg['uuid'] = self.uuid

    def deserialize_payload(self, msg):
        # Absent key deserializes to None.
        self.uuid = msg.get('uuid')

    def __str__(self):
        return "unpause-instance request: %s" % self.uuid
class APIResponseUnpauseInstance(RPCMessage):
    """RPC API response message for an unpause-instance request."""

    # UUID of the target instance.
    uuid = None

    def __init__(self, msg_version=RPC_MSG_VERSION.VERSION_1_0,
                 msg_type=RPC_MSG_TYPE.UNPAUSE_INSTANCE_RESPONSE,
                 msg_result=RPC_MSG_RESULT.SUCCESS):
        super(APIResponseUnpauseInstance, self).__init__(
            msg_version, msg_type, msg_result)

    def serialize_payload(self, msg):
        msg['uuid'] = self.uuid

    def deserialize_payload(self, msg):
        # Absent key deserializes to None.
        self.uuid = msg.get('uuid')

    def __str__(self):
        return "unpause-instance response: %s" % self.uuid
class APIRequestSuspendInstance(RPCMessage):
    """RPC API request message asking for an instance to be suspended."""

    # UUID of the target instance.
    uuid = None

    def __init__(self, msg_version=RPC_MSG_VERSION.VERSION_1_0,
                 msg_type=RPC_MSG_TYPE.SUSPEND_INSTANCE_REQUEST,
                 msg_result=RPC_MSG_RESULT.SUCCESS):
        super(APIRequestSuspendInstance, self).__init__(
            msg_version, msg_type, msg_result)

    def serialize_payload(self, msg):
        msg['uuid'] = self.uuid

    def deserialize_payload(self, msg):
        # Absent key deserializes to None.
        self.uuid = msg.get('uuid')

    def __str__(self):
        return "suspend-instance request: %s" % self.uuid
class APIResponseSuspendInstance(RPCMessage):
    """RPC API response message for a suspend-instance request."""

    # UUID of the target instance.
    uuid = None

    def __init__(self, msg_version=RPC_MSG_VERSION.VERSION_1_0,
                 msg_type=RPC_MSG_TYPE.SUSPEND_INSTANCE_RESPONSE,
                 msg_result=RPC_MSG_RESULT.SUCCESS):
        super(APIResponseSuspendInstance, self).__init__(
            msg_version, msg_type, msg_result)

    def serialize_payload(self, msg):
        msg['uuid'] = self.uuid

    def deserialize_payload(self, msg):
        # Absent key deserializes to None.
        self.uuid = msg.get('uuid')

    def __str__(self):
        return "suspend-instance response: %s" % self.uuid
class APIRequestResumeInstance(RPCMessage):
    """RPC API request message asking for an instance to be resumed."""

    # UUID of the target instance.
    uuid = None

    def __init__(self, msg_version=RPC_MSG_VERSION.VERSION_1_0,
                 msg_type=RPC_MSG_TYPE.RESUME_INSTANCE_REQUEST,
                 msg_result=RPC_MSG_RESULT.SUCCESS):
        super(APIRequestResumeInstance, self).__init__(
            msg_version, msg_type, msg_result)

    def serialize_payload(self, msg):
        msg['uuid'] = self.uuid

    def deserialize_payload(self, msg):
        # Absent key deserializes to None.
        self.uuid = msg.get('uuid')

    def __str__(self):
        return "resume-instance request: %s" % self.uuid
class APIResponseResumeInstance(RPCMessage):
    """RPC API response message for a resume-instance request."""

    # UUID of the target instance.
    uuid = None

    def __init__(self, msg_version=RPC_MSG_VERSION.VERSION_1_0,
                 msg_type=RPC_MSG_TYPE.RESUME_INSTANCE_RESPONSE,
                 msg_result=RPC_MSG_RESULT.SUCCESS):
        super(APIResponseResumeInstance, self).__init__(
            msg_version, msg_type, msg_result)

    def serialize_payload(self, msg):
        msg['uuid'] = self.uuid

    def deserialize_payload(self, msg):
        # Absent key deserializes to None.
        self.uuid = msg.get('uuid')

    def __str__(self):
        return "resume-instance response: %s" % self.uuid
class APIRequestRebootInstance(RPCMessage):
    """RPC API request message asking for an instance to be rebooted."""

    # UUID of the target instance.
    uuid = None

    def __init__(self, msg_version=RPC_MSG_VERSION.VERSION_1_0,
                 msg_type=RPC_MSG_TYPE.REBOOT_INSTANCE_REQUEST,
                 msg_result=RPC_MSG_RESULT.SUCCESS):
        super(APIRequestRebootInstance, self).__init__(
            msg_version, msg_type, msg_result)

    def serialize_payload(self, msg):
        msg['uuid'] = self.uuid

    def deserialize_payload(self, msg):
        # Absent key deserializes to None.
        self.uuid = msg.get('uuid')

    def __str__(self):
        return "reboot-instance request: %s" % self.uuid
class APIResponseRebootInstance(RPCMessage):
    """RPC API response message for a reboot-instance request."""

    # UUID of the target instance.
    uuid = None

    def __init__(self, msg_version=RPC_MSG_VERSION.VERSION_1_0,
                 msg_type=RPC_MSG_TYPE.REBOOT_INSTANCE_RESPONSE,
                 msg_result=RPC_MSG_RESULT.SUCCESS):
        super(APIResponseRebootInstance, self).__init__(
            msg_version, msg_type, msg_result)

    def serialize_payload(self, msg):
        msg['uuid'] = self.uuid

    def deserialize_payload(self, msg):
        # Absent key deserializes to None.
        self.uuid = msg.get('uuid')

    def __str__(self):
        return "reboot-instance response: %s" % self.uuid
class APIRequestLiveMigrateInstance(RPCMessage):
    """RPC API request message asking for an instance live-migration."""

    # UUID of the target instance.
    uuid = None

    def __init__(self, msg_version=RPC_MSG_VERSION.VERSION_1_0,
                 msg_type=RPC_MSG_TYPE.LIVE_MIGRATE_INSTANCE_REQUEST,
                 msg_result=RPC_MSG_RESULT.SUCCESS):
        super(APIRequestLiveMigrateInstance, self).__init__(
            msg_version, msg_type, msg_result)

    def serialize_payload(self, msg):
        msg['uuid'] = self.uuid

    def deserialize_payload(self, msg):
        # Absent key deserializes to None.
        self.uuid = msg.get('uuid')

    def __str__(self):
        return "live-migrate-instance request: %s" % self.uuid
class APIResponseLiveMigrateInstance(RPCMessage):
    """RPC API response message for a live-migrate-instance request."""

    # UUID of the target instance.
    uuid = None

    def __init__(self, msg_version=RPC_MSG_VERSION.VERSION_1_0,
                 msg_type=RPC_MSG_TYPE.LIVE_MIGRATE_INSTANCE_RESPONSE,
                 msg_result=RPC_MSG_RESULT.SUCCESS):
        super(APIResponseLiveMigrateInstance, self).__init__(
            msg_version, msg_type, msg_result)

    def serialize_payload(self, msg):
        msg['uuid'] = self.uuid

    def deserialize_payload(self, msg):
        # Absent key deserializes to None.
        self.uuid = msg.get('uuid')

    def __str__(self):
        return "live-migrate-instance response: %s" % self.uuid
class APIRequestColdMigrateInstance(RPCMessage):
    """RPC API request message asking for an instance cold-migration."""

    # UUID of the target instance.
    uuid = None

    def __init__(self, msg_version=RPC_MSG_VERSION.VERSION_1_0,
                 msg_type=RPC_MSG_TYPE.COLD_MIGRATE_INSTANCE_REQUEST,
                 msg_result=RPC_MSG_RESULT.SUCCESS):
        super(APIRequestColdMigrateInstance, self).__init__(
            msg_version, msg_type, msg_result)

    def serialize_payload(self, msg):
        msg['uuid'] = self.uuid

    def deserialize_payload(self, msg):
        # Absent key deserializes to None.
        self.uuid = msg.get('uuid')

    def __str__(self):
        return "cold-migrate-instance request: %s" % self.uuid
class APIResponseColdMigrateInstance(RPCMessage):
    """
    RPC API Response Message - Cold Migrate Instance
    """
    # UUID of the instance the cold-migrate response refers to.
    uuid = None

    def __init__(self, msg_version=RPC_MSG_VERSION.VERSION_1_0,
                 msg_type=RPC_MSG_TYPE.COLD_MIGRATE_INSTANCE_RESPONSE,
                 msg_result=RPC_MSG_RESULT.SUCCESS):
        super(APIResponseColdMigrateInstance, self).__init__(
            msg_version, msg_type, msg_result)

    def serialize_payload(self, msg):
        """Write the payload fields into the *msg* dict."""
        msg.update(uuid=self.uuid)

    def deserialize_payload(self, msg):
        """Read the payload fields from the *msg* dict."""
        self.uuid = msg.get('uuid')

    def __str__(self):
        return "cold-migrate-instance response: %s" % (self.uuid,)
class APIRequestEvacuateInstance(RPCMessage):
    """
    RPC API Request Message - Evacuate Instance
    """
    # UUID of the instance to evacuate.
    uuid = None

    def __init__(self, msg_version=RPC_MSG_VERSION.VERSION_1_0,
                 msg_type=RPC_MSG_TYPE.EVACUATE_INSTANCE_REQUEST,
                 msg_result=RPC_MSG_RESULT.SUCCESS):
        super(APIRequestEvacuateInstance, self).__init__(
            msg_version, msg_type, msg_result)

    def serialize_payload(self, msg):
        """Write the payload fields into the *msg* dict."""
        msg.update(uuid=self.uuid)

    def deserialize_payload(self, msg):
        """Read the payload fields from the *msg* dict."""
        self.uuid = msg.get('uuid')

    def __str__(self):
        return "evacuate-instance request: %s" % (self.uuid,)
class APIResponseEvacuateInstance(RPCMessage):
    """
    RPC API Response Message - Evacuate Instance
    """
    # UUID of the instance the evacuate response refers to.
    uuid = None

    def __init__(self, msg_version=RPC_MSG_VERSION.VERSION_1_0,
                 msg_type=RPC_MSG_TYPE.EVACUATE_INSTANCE_RESPONSE,
                 msg_result=RPC_MSG_RESULT.SUCCESS):
        super(APIResponseEvacuateInstance, self).__init__(
            msg_version, msg_type, msg_result)

    def serialize_payload(self, msg):
        """Write the payload fields into the *msg* dict."""
        msg.update(uuid=self.uuid)

    def deserialize_payload(self, msg):
        """Read the payload fields from the *msg* dict."""
        self.uuid = msg.get('uuid')

    def __str__(self):
        return "evacuate-instance response: %s" % (self.uuid,)
class APIRequestDeleteInstance(RPCMessage):
    """
    RPC API Request Message - Delete Instance
    """
    # UUID of the instance to delete.
    uuid = None

    def __init__(self, msg_version=RPC_MSG_VERSION.VERSION_1_0,
                 msg_type=RPC_MSG_TYPE.DELETE_INSTANCE_REQUEST,
                 msg_result=RPC_MSG_RESULT.SUCCESS):
        super(APIRequestDeleteInstance, self).__init__(
            msg_version, msg_type, msg_result)

    def serialize_payload(self, msg):
        """Write the payload fields into the *msg* dict."""
        msg.update(uuid=self.uuid)

    def deserialize_payload(self, msg):
        """Read the payload fields from the *msg* dict."""
        self.uuid = msg.get('uuid')

    def __str__(self):
        return "delete-instance request: %s" % (self.uuid,)
class APIResponseDeleteInstance(RPCMessage):
    """
    RPC API Response Message - Delete Instance
    """
    # UUID of the instance the delete response refers to.
    uuid = None

    def __init__(self, msg_version=RPC_MSG_VERSION.VERSION_1_0,
                 msg_type=RPC_MSG_TYPE.DELETE_INSTANCE_RESPONSE,
                 msg_result=RPC_MSG_RESULT.SUCCESS):
        super(APIResponseDeleteInstance, self).__init__(
            msg_version, msg_type, msg_result)

    def serialize_payload(self, msg):
        """Write the payload fields into the *msg* dict."""
        msg.update(uuid=self.uuid)

    def deserialize_payload(self, msg):
        """Read the payload fields from the *msg* dict."""
        self.uuid = msg.get('uuid')

    def __str__(self):
        return "delete-instance response: %s" % (self.uuid,)
class APIRequestGetInstance(RPCMessage):
    """
    RPC API Request Message - Get Instance
    """
    # When True, request every instance; otherwise filter by UUID.
    get_all = False
    filter_by_uuid = None

    def __init__(self, msg_version=RPC_MSG_VERSION.VERSION_1_0,
                 msg_type=RPC_MSG_TYPE.GET_INSTANCE_REQUEST,
                 msg_result=RPC_MSG_RESULT.SUCCESS):
        super(APIRequestGetInstance, self).__init__(
            msg_version, msg_type, msg_result)

    def serialize_payload(self, msg):
        """Write the payload fields into the *msg* dict."""
        msg.update(get_all=self.get_all,
                   filter_by_uuid=self.filter_by_uuid)

    def deserialize_payload(self, msg):
        """Read the payload fields from the *msg* dict."""
        # NOTE(review): the fallback here is True although the class default
        # is False, so a message missing 'get_all' deserializes as a get-all
        # request.  Preserved as-is -- confirm the asymmetry is intentional.
        self.get_all = msg.get('get_all', True)
        self.filter_by_uuid = msg.get('filter_by_uuid')

    def __str__(self):
        if not self.get_all:
            return "get-instance request: %s" % (self.filter_by_uuid,)
        return "get-instance request: get-all"
class APIResponseGetInstance(RPCMessage):
    """
    RPC API Response Message - Get Instance
    """
    # Identity and placement of the instance.
    uuid = None
    name = None
    admin_state = None
    oper_state = None
    avail_status = None
    action = None
    host_uuid = None
    host_name = None
    # Flavor / image details.
    instance_type_original_name = None
    image_uuid = None
    vcpus = None
    memory_mb = None
    disk_gb = None
    ephemeral_gb = None
    swap_gb = None
    # Wind River extension attributes (carried under sw:wrs:/hw:wrs: keys).
    auto_recovery = None
    live_migration_timeout = None
    live_migration_max_downtime = None

    def __init__(self, msg_version=RPC_MSG_VERSION.VERSION_1_0,
                 msg_type=RPC_MSG_TYPE.GET_INSTANCE_RESPONSE,
                 msg_result=RPC_MSG_RESULT.SUCCESS):
        super(APIResponseGetInstance, self).__init__(
            msg_version, msg_type, msg_result)

    def serialize_payload(self, msg):
        """Write all payload fields into the *msg* dict."""
        msg.update({
            'uuid': self.uuid,
            'name': self.name,
            'admin_state': self.admin_state,
            'oper_state': self.oper_state,
            'avail_status': self.avail_status,
            'action': self.action,
            'host_uuid': self.host_uuid,
            'host_name': self.host_name,
            'instance_type_original_name': self.instance_type_original_name,
            'image_uuid': self.image_uuid,
            'vcpus': self.vcpus,
            'memory_mb': self.memory_mb,
            'disk_gb': self.disk_gb,
            'ephemeral_gb': self.ephemeral_gb,
            'swap_gb': self.swap_gb,
            'sw:wrs:auto_recovery': self.auto_recovery,
            'hw:wrs:live_migration_timeout': self.live_migration_timeout,
            'hw:wrs:live_migration_max_downtime':
                self.live_migration_max_downtime,
        })

    def deserialize_payload(self, msg):
        """Read all payload fields from the *msg* dict."""
        self.uuid = msg.get('uuid')
        self.name = msg.get('name')
        self.admin_state = msg.get('admin_state')
        self.oper_state = msg.get('oper_state')
        self.avail_status = msg.get('avail_status')
        self.action = msg.get('action')
        self.host_uuid = msg.get('host_uuid')
        self.host_name = msg.get('host_name')
        self.instance_type_original_name \
            = msg.get('instance_type_original_name')
        self.image_uuid = msg.get('image_uuid')
        self.vcpus = msg.get('vcpus')
        self.memory_mb = msg.get('memory_mb')
        self.disk_gb = msg.get('disk_gb')
        self.ephemeral_gb = msg.get('ephemeral_gb')
        self.swap_gb = msg.get('swap_gb')
        self.auto_recovery = msg.get('sw:wrs:auto_recovery')
        self.live_migration_timeout \
            = msg.get('hw:wrs:live_migration_timeout')
        self.live_migration_max_downtime \
            = msg.get('hw:wrs:live_migration_max_downtime')

    def __str__(self):
        return "get-instance response: %s" % (self.uuid,)
| 33.955823
| 79
| 0.612695
| 2,947
| 25,365
| 4.90397
| 0.041737
| 0.033629
| 0.050374
| 0.026986
| 0.876557
| 0.823415
| 0.795115
| 0.785151
| 0.785151
| 0.716856
| 0
| 0.00345
| 0.291583
| 25,365
| 746
| 80
| 34.00134
| 0.800824
| 0.047625
| 0
| 0.758416
| 0
| 0
| 0.095026
| 0.023988
| 0
| 0
| 0
| 0
| 0
| 1
| 0.205941
| false
| 0
| 0.009901
| 0.049505
| 0.465347
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
05b208d1b3fd5552a745480ea0fa94f05dc64eb8
| 22,890
|
py
|
Python
|
angr/procedures/definitions/win32_winusb.py
|
r4b3rt/angr
|
c133cfd4f83ffea2a1d9e064241e9459eaabc55f
|
[
"BSD-2-Clause"
] | null | null | null |
angr/procedures/definitions/win32_winusb.py
|
r4b3rt/angr
|
c133cfd4f83ffea2a1d9e064241e9459eaabc55f
|
[
"BSD-2-Clause"
] | null | null | null |
angr/procedures/definitions/win32_winusb.py
|
r4b3rt/angr
|
c133cfd4f83ffea2a1d9e064241e9459eaabc55f
|
[
"BSD-2-Clause"
] | null | null | null |
# pylint:disable=line-too-long
import logging
from ...sim_type import SimTypeFunction, SimTypeShort, SimTypeInt, SimTypeLong, SimTypeLongLong, SimTypeDouble, SimTypeFloat, SimTypePointer, SimTypeChar, SimStruct, SimTypeFixedSizeArray, SimTypeBottom, SimUnion, SimTypeBool
from ...calling_conventions import SimCCStdcall, SimCCMicrosoftAMD64
from .. import SIM_PROCEDURES as P
from . import SimLibrary
# Module-level logger for this (generated) library-definitions module.
_l = logging.getLogger(name=__name__)
# Simulated library describing winusb.dll exports for angr.
lib = SimLibrary()
# WinUSB exports are stdcall on x86; AMD64 uses the Microsoft x64 convention.
lib.set_default_cc('X86', SimCCStdcall)
lib.set_default_cc('AMD64', SimCCMicrosoftAMD64)
lib.set_library_names("winusb.dll")
# Machine-generated prototype table: maps each winusb.dll export to its
# SimTypeFunction signature.  Per-entry comments below summarize the API
# (see Microsoft's winusb.h documentation); do not hand-edit the types.
prototypes = \
    {
        # Open a WinUSB interface handle for an opened device handle.
        'WinUsb_Initialize': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimTypePointer(SimTypeBottom(label="Void"), offset=0), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["DeviceHandle", "InterfaceHandle"]),
        # Release a WinUSB interface handle.
        'WinUsb_Free': SimTypeFunction([SimTypePointer(SimTypeBottom(label="Void"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["InterfaceHandle"]),
        # Obtain a handle for an associated interface by index.
        'WinUsb_GetAssociatedInterface': SimTypeFunction([SimTypePointer(SimTypeBottom(label="Void"), offset=0), SimTypeChar(label="Byte"), SimTypePointer(SimTypePointer(SimTypeBottom(label="Void"), offset=0), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["InterfaceHandle", "AssociatedInterfaceIndex", "AssociatedInterfaceHandle"]),
        # Retrieve a USB descriptor into a caller buffer.
        'WinUsb_GetDescriptor': SimTypeFunction([SimTypePointer(SimTypeBottom(label="Void"), offset=0), SimTypeChar(label="Byte"), SimTypeChar(label="Byte"), SimTypeShort(signed=False, label="UInt16"), SimTypePointer(SimTypeChar(label="Byte"), offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["InterfaceHandle", "DescriptorType", "Index", "LanguageID", "Buffer", "BufferLength", "LengthTransferred"]),
        # Get the interface descriptor for an alternate setting.
        'WinUsb_QueryInterfaceSettings': SimTypeFunction([SimTypePointer(SimTypeBottom(label="Void"), offset=0), SimTypeChar(label="Byte"), SimTypePointer(SimStruct({"bLength": SimTypeChar(label="Byte"), "bDescriptorType": SimTypeChar(label="Byte"), "bInterfaceNumber": SimTypeChar(label="Byte"), "bAlternateSetting": SimTypeChar(label="Byte"), "bNumEndpoints": SimTypeChar(label="Byte"), "bInterfaceClass": SimTypeChar(label="Byte"), "bInterfaceSubClass": SimTypeChar(label="Byte"), "bInterfaceProtocol": SimTypeChar(label="Byte"), "iInterface": SimTypeChar(label="Byte")}, name="USB_INTERFACE_DESCRIPTOR", pack=False, align=None), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["InterfaceHandle", "AlternateInterfaceNumber", "UsbAltInterfaceDescriptor"]),
        # Query information about the physical device.
        'WinUsb_QueryDeviceInformation': SimTypeFunction([SimTypePointer(SimTypeBottom(label="Void"), offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0), SimTypePointer(SimTypeBottom(label="Void"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["InterfaceHandle", "InformationType", "BufferLength", "Buffer"]),
        # Select the interface's active alternate setting.
        'WinUsb_SetCurrentAlternateSetting': SimTypeFunction([SimTypePointer(SimTypeBottom(label="Void"), offset=0), SimTypeChar(label="Byte")], SimTypeInt(signed=True, label="Int32"), arg_names=["InterfaceHandle", "SettingNumber"]),
        # Read back the active alternate setting.
        'WinUsb_GetCurrentAlternateSetting': SimTypeFunction([SimTypePointer(SimTypeBottom(label="Void"), offset=0), SimTypePointer(SimTypeChar(label="Byte"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["InterfaceHandle", "SettingNumber"]),
        # Get pipe information for an alternate setting / pipe index.
        'WinUsb_QueryPipe': SimTypeFunction([SimTypePointer(SimTypeBottom(label="Void"), offset=0), SimTypeChar(label="Byte"), SimTypeChar(label="Byte"), SimTypePointer(SimStruct({"PipeType": SimTypeInt(signed=False, label="USBD_PIPE_TYPE"), "PipeId": SimTypeChar(label="Byte"), "MaximumPacketSize": SimTypeShort(signed=False, label="UInt16"), "Interval": SimTypeChar(label="Byte")}, name="WINUSB_PIPE_INFORMATION", pack=False, align=None), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["InterfaceHandle", "AlternateInterfaceNumber", "PipeIndex", "PipeInformation"]),
        # Extended pipe query (adds MaximumBytesPerInterval).
        'WinUsb_QueryPipeEx': SimTypeFunction([SimTypePointer(SimTypeBottom(label="Void"), offset=0), SimTypeChar(label="Byte"), SimTypeChar(label="Byte"), SimTypePointer(SimStruct({"PipeType": SimTypeInt(signed=False, label="USBD_PIPE_TYPE"), "PipeId": SimTypeChar(label="Byte"), "MaximumPacketSize": SimTypeShort(signed=False, label="UInt16"), "Interval": SimTypeChar(label="Byte"), "MaximumBytesPerInterval": SimTypeInt(signed=False, label="UInt32")}, name="WINUSB_PIPE_INFORMATION_EX", pack=False, align=None), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["InterfaceHandle", "AlternateSettingNumber", "PipeIndex", "PipeInformationEx"]),
        # Set a policy value on a specific pipe.
        'WinUsb_SetPipePolicy': SimTypeFunction([SimTypePointer(SimTypeBottom(label="Void"), offset=0), SimTypeChar(label="Byte"), SimTypeInt(signed=False, label="UInt32"), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeBottom(label="Void"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["InterfaceHandle", "PipeID", "PolicyType", "ValueLength", "Value"]),
        # Read a policy value from a specific pipe.
        'WinUsb_GetPipePolicy': SimTypeFunction([SimTypePointer(SimTypeBottom(label="Void"), offset=0), SimTypeChar(label="Byte"), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0), SimTypePointer(SimTypeBottom(label="Void"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["InterfaceHandle", "PipeID", "PolicyType", "ValueLength", "Value"]),
        # Read data from a pipe (optionally overlapped/asynchronous).
        'WinUsb_ReadPipe': SimTypeFunction([SimTypePointer(SimTypeBottom(label="Void"), offset=0), SimTypeChar(label="Byte"), SimTypePointer(SimTypeChar(label="Byte"), offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0), SimTypePointer(SimStruct({"Internal": SimTypePointer(SimTypeInt(signed=False, label="UInt"), label="UIntPtr", offset=0), "InternalHigh": SimTypePointer(SimTypeInt(signed=False, label="UInt"), label="UIntPtr", offset=0), "Anonymous": SimUnion({"Anonymous": SimStruct({"Offset": SimTypeInt(signed=False, label="UInt32"), "OffsetHigh": SimTypeInt(signed=False, label="UInt32")}, name="_Anonymous_e__Struct", pack=False, align=None), "Pointer": SimTypePointer(SimTypeBottom(label="Void"), offset=0)}, name="<anon>", label="None"), "hEvent": SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0)}, name="OVERLAPPED", pack=False, align=None), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["InterfaceHandle", "PipeID", "Buffer", "BufferLength", "LengthTransferred", "Overlapped"]),
        # Write data to a pipe (optionally overlapped/asynchronous).
        'WinUsb_WritePipe': SimTypeFunction([SimTypePointer(SimTypeBottom(label="Void"), offset=0), SimTypeChar(label="Byte"), SimTypePointer(SimTypeChar(label="Byte"), offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0), SimTypePointer(SimStruct({"Internal": SimTypePointer(SimTypeInt(signed=False, label="UInt"), label="UIntPtr", offset=0), "InternalHigh": SimTypePointer(SimTypeInt(signed=False, label="UInt"), label="UIntPtr", offset=0), "Anonymous": SimUnion({"Anonymous": SimStruct({"Offset": SimTypeInt(signed=False, label="UInt32"), "OffsetHigh": SimTypeInt(signed=False, label="UInt32")}, name="_Anonymous_e__Struct", pack=False, align=None), "Pointer": SimTypePointer(SimTypeBottom(label="Void"), offset=0)}, name="<anon>", label="None"), "hEvent": SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0)}, name="OVERLAPPED", pack=False, align=None), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["InterfaceHandle", "PipeID", "Buffer", "BufferLength", "LengthTransferred", "Overlapped"]),
        # Issue a control transfer on the default control endpoint.
        'WinUsb_ControlTransfer': SimTypeFunction([SimTypePointer(SimTypeBottom(label="Void"), offset=0), SimStruct({"RequestType": SimTypeChar(label="Byte"), "Request": SimTypeChar(label="Byte"), "Value": SimTypeShort(signed=False, label="UInt16"), "Index": SimTypeShort(signed=False, label="UInt16"), "Length": SimTypeShort(signed=False, label="UInt16")}, name="WINUSB_SETUP_PACKET", pack=False, align=None), SimTypePointer(SimTypeChar(label="Byte"), offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0), SimTypePointer(SimStruct({"Internal": SimTypePointer(SimTypeInt(signed=False, label="UInt"), label="UIntPtr", offset=0), "InternalHigh": SimTypePointer(SimTypeInt(signed=False, label="UInt"), label="UIntPtr", offset=0), "Anonymous": SimUnion({"Anonymous": SimStruct({"Offset": SimTypeInt(signed=False, label="UInt32"), "OffsetHigh": SimTypeInt(signed=False, label="UInt32")}, name="_Anonymous_e__Struct", pack=False, align=None), "Pointer": SimTypePointer(SimTypeBottom(label="Void"), offset=0)}, name="<anon>", label="None"), "hEvent": SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0)}, name="OVERLAPPED", pack=False, align=None), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["InterfaceHandle", "SetupPacket", "Buffer", "BufferLength", "LengthTransferred", "Overlapped"]),
        # Reset a pipe's data toggle and clear a stall.
        'WinUsb_ResetPipe': SimTypeFunction([SimTypePointer(SimTypeBottom(label="Void"), offset=0), SimTypeChar(label="Byte")], SimTypeInt(signed=True, label="Int32"), arg_names=["InterfaceHandle", "PipeID"]),
        # Abort all pending transfers on a pipe.
        'WinUsb_AbortPipe': SimTypeFunction([SimTypePointer(SimTypeBottom(label="Void"), offset=0), SimTypeChar(label="Byte")], SimTypeInt(signed=True, label="Int32"), arg_names=["InterfaceHandle", "PipeID"]),
        # Discard any cached data in a pipe.
        'WinUsb_FlushPipe': SimTypeFunction([SimTypePointer(SimTypeBottom(label="Void"), offset=0), SimTypeChar(label="Byte")], SimTypeInt(signed=True, label="Int32"), arg_names=["InterfaceHandle", "PipeID"]),
        # Set a power-management policy value for the interface.
        'WinUsb_SetPowerPolicy': SimTypeFunction([SimTypePointer(SimTypeBottom(label="Void"), offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeBottom(label="Void"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["InterfaceHandle", "PolicyType", "ValueLength", "Value"]),
        # Read a power-management policy value for the interface.
        'WinUsb_GetPowerPolicy': SimTypeFunction([SimTypePointer(SimTypeBottom(label="Void"), offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0), SimTypePointer(SimTypeBottom(label="Void"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["InterfaceHandle", "PolicyType", "ValueLength", "Value"]),
        # Retrieve the result of an overlapped (async) operation.
        'WinUsb_GetOverlappedResult': SimTypeFunction([SimTypePointer(SimTypeBottom(label="Void"), offset=0), SimTypePointer(SimStruct({"Internal": SimTypePointer(SimTypeInt(signed=False, label="UInt"), label="UIntPtr", offset=0), "InternalHigh": SimTypePointer(SimTypeInt(signed=False, label="UInt"), label="UIntPtr", offset=0), "Anonymous": SimUnion({"Anonymous": SimStruct({"Offset": SimTypeInt(signed=False, label="UInt32"), "OffsetHigh": SimTypeInt(signed=False, label="UInt32")}, name="_Anonymous_e__Struct", pack=False, align=None), "Pointer": SimTypePointer(SimTypeBottom(label="Void"), offset=0)}, name="<anon>", label="None"), "hEvent": SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0)}, name="OVERLAPPED", pack=False, align=None), offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0), SimTypeInt(signed=True, label="Int32")], SimTypeInt(signed=True, label="Int32"), arg_names=["InterfaceHandle", "lpOverlapped", "lpNumberOfBytesTransferred", "bWait"]),
        # Search a configuration descriptor for a matching interface descriptor.
        'WinUsb_ParseConfigurationDescriptor': SimTypeFunction([SimTypePointer(SimStruct({"bLength": SimTypeChar(label="Byte"), "bDescriptorType": SimTypeChar(label="Byte"), "wTotalLength": SimTypeShort(signed=False, label="UInt16"), "bNumInterfaces": SimTypeChar(label="Byte"), "bConfigurationValue": SimTypeChar(label="Byte"), "iConfiguration": SimTypeChar(label="Byte"), "bmAttributes": SimTypeChar(label="Byte"), "MaxPower": SimTypeChar(label="Byte")}, name="USB_CONFIGURATION_DESCRIPTOR", pack=False, align=None), offset=0), SimTypePointer(SimTypeBottom(label="Void"), offset=0), SimTypeInt(signed=True, label="Int32"), SimTypeInt(signed=True, label="Int32"), SimTypeInt(signed=True, label="Int32"), SimTypeInt(signed=True, label="Int32"), SimTypeInt(signed=True, label="Int32")], SimTypePointer(SimStruct({"bLength": SimTypeChar(label="Byte"), "bDescriptorType": SimTypeChar(label="Byte"), "bInterfaceNumber": SimTypeChar(label="Byte"), "bAlternateSetting": SimTypeChar(label="Byte"), "bNumEndpoints": SimTypeChar(label="Byte"), "bInterfaceClass": SimTypeChar(label="Byte"), "bInterfaceSubClass": SimTypeChar(label="Byte"), "bInterfaceProtocol": SimTypeChar(label="Byte"), "iInterface": SimTypeChar(label="Byte")}, name="USB_INTERFACE_DESCRIPTOR", pack=False, align=None), offset=0), arg_names=["ConfigurationDescriptor", "StartPosition", "InterfaceNumber", "AlternateSetting", "InterfaceClass", "InterfaceSubClass", "InterfaceProtocol"]),
        # Walk a descriptor buffer for the next descriptor of a given type.
        'WinUsb_ParseDescriptors': SimTypeFunction([SimTypePointer(SimTypeBottom(label="Void"), offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeBottom(label="Void"), offset=0), SimTypeInt(signed=True, label="Int32")], SimTypePointer(SimStruct({"bLength": SimTypeChar(label="Byte"), "bDescriptorType": SimTypeChar(label="Byte")}, name="USB_COMMON_DESCRIPTOR", pack=False, align=None), offset=0), arg_names=["DescriptorBuffer", "TotalLength", "StartPosition", "DescriptorType"]),
        # Get the current USB frame number (with timestamp).
        'WinUsb_GetCurrentFrameNumber': SimTypeFunction([SimTypePointer(SimTypeBottom(label="Void"), offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0), SimTypePointer(SimUnion({"Anonymous": SimStruct({"LowPart": SimTypeInt(signed=False, label="UInt32"), "HighPart": SimTypeInt(signed=True, label="Int32")}, name="_Anonymous_e__Struct", pack=False, align=None), "u": SimStruct({"LowPart": SimTypeInt(signed=False, label="UInt32"), "HighPart": SimTypeInt(signed=True, label="Int32")}, name="_u_e__Struct", pack=False, align=None), "QuadPart": SimTypeLongLong(signed=True, label="Int64")}, name="<anon>", label="None"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["InterfaceHandle", "CurrentFrameNumber", "TimeStamp"]),
        # Adjust a frame number for a given timestamp.
        'WinUsb_GetAdjustedFrameNumber': SimTypeFunction([SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0), SimUnion({"Anonymous": SimStruct({"LowPart": SimTypeInt(signed=False, label="UInt32"), "HighPart": SimTypeInt(signed=True, label="Int32")}, name="_Anonymous_e__Struct", pack=False, align=None), "u": SimStruct({"LowPart": SimTypeInt(signed=False, label="UInt32"), "HighPart": SimTypeInt(signed=True, label="Int32")}, name="_u_e__Struct", pack=False, align=None), "QuadPart": SimTypeLongLong(signed=True, label="Int64")}, name="<anon>", label="None")], SimTypeInt(signed=True, label="Int32"), arg_names=["CurrentFrameNumber", "TimeStamp"]),
        # Register a buffer for isochronous transfers on a pipe.
        'WinUsb_RegisterIsochBuffer': SimTypeFunction([SimTypePointer(SimTypeBottom(label="Void"), offset=0), SimTypeChar(label="Byte"), SimTypePointer(SimTypeChar(label="Byte"), offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypePointer(SimTypeBottom(label="Void"), offset=0), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["InterfaceHandle", "PipeID", "Buffer", "BufferLength", "IsochBufferHandle"]),
        # Release a registered isochronous buffer handle.
        'WinUsb_UnregisterIsochBuffer': SimTypeFunction([SimTypePointer(SimTypeBottom(label="Void"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["IsochBufferHandle"]),
        # Write isochronous data starting at a specific frame number.
        'WinUsb_WriteIsochPipe': SimTypeFunction([SimTypePointer(SimTypeBottom(label="Void"), offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0), SimTypePointer(SimStruct({"Internal": SimTypePointer(SimTypeInt(signed=False, label="UInt"), label="UIntPtr", offset=0), "InternalHigh": SimTypePointer(SimTypeInt(signed=False, label="UInt"), label="UIntPtr", offset=0), "Anonymous": SimUnion({"Anonymous": SimStruct({"Offset": SimTypeInt(signed=False, label="UInt32"), "OffsetHigh": SimTypeInt(signed=False, label="UInt32")}, name="_Anonymous_e__Struct", pack=False, align=None), "Pointer": SimTypePointer(SimTypeBottom(label="Void"), offset=0)}, name="<anon>", label="None"), "hEvent": SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0)}, name="OVERLAPPED", pack=False, align=None), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["BufferHandle", "Offset", "Length", "FrameNumber", "Overlapped"]),
        # Read isochronous data starting at a specific frame number.
        'WinUsb_ReadIsochPipe': SimTypeFunction([SimTypePointer(SimTypeBottom(label="Void"), offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimStruct({"Offset": SimTypeInt(signed=False, label="UInt32"), "Length": SimTypeInt(signed=False, label="UInt32"), "Status": SimTypeInt(signed=True, label="Int32")}, name="USBD_ISO_PACKET_DESCRIPTOR", pack=False, align=None), label="LPArray", offset=0), SimTypePointer(SimStruct({"Internal": SimTypePointer(SimTypeInt(signed=False, label="UInt"), label="UIntPtr", offset=0), "InternalHigh": SimTypePointer(SimTypeInt(signed=False, label="UInt"), label="UIntPtr", offset=0), "Anonymous": SimUnion({"Anonymous": SimStruct({"Offset": SimTypeInt(signed=False, label="UInt32"), "OffsetHigh": SimTypeInt(signed=False, label="UInt32")}, name="_Anonymous_e__Struct", pack=False, align=None), "Pointer": SimTypePointer(SimTypeBottom(label="Void"), offset=0)}, name="<anon>", label="None"), "hEvent": SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0)}, name="OVERLAPPED", pack=False, align=None), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["BufferHandle", "Offset", "Length", "FrameNumber", "NumberOfPackets", "IsoPacketDescriptors", "Overlapped"]),
        # Write isochronous data as soon as possible (optionally continuing a stream).
        'WinUsb_WriteIsochPipeAsap': SimTypeFunction([SimTypePointer(SimTypeBottom(label="Void"), offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypeInt(signed=False, label="UInt32"), SimTypeInt(signed=True, label="Int32"), SimTypePointer(SimStruct({"Internal": SimTypePointer(SimTypeInt(signed=False, label="UInt"), label="UIntPtr", offset=0), "InternalHigh": SimTypePointer(SimTypeInt(signed=False, label="UInt"), label="UIntPtr", offset=0), "Anonymous": SimUnion({"Anonymous": SimStruct({"Offset": SimTypeInt(signed=False, label="UInt32"), "OffsetHigh": SimTypeInt(signed=False, label="UInt32")}, name="_Anonymous_e__Struct", pack=False, align=None), "Pointer": SimTypePointer(SimTypeBottom(label="Void"), offset=0)}, name="<anon>", label="None"), "hEvent": SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0)}, name="OVERLAPPED", pack=False, align=None), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["BufferHandle", "Offset", "Length", "ContinueStream", "Overlapped"]),
        # Read isochronous data as soon as possible (optionally continuing a stream).
        'WinUsb_ReadIsochPipeAsap': SimTypeFunction([SimTypePointer(SimTypeBottom(label="Void"), offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypeInt(signed=False, label="UInt32"), SimTypeInt(signed=True, label="Int32"), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimStruct({"Offset": SimTypeInt(signed=False, label="UInt32"), "Length": SimTypeInt(signed=False, label="UInt32"), "Status": SimTypeInt(signed=True, label="Int32")}, name="USBD_ISO_PACKET_DESCRIPTOR", pack=False, align=None), label="LPArray", offset=0), SimTypePointer(SimStruct({"Internal": SimTypePointer(SimTypeInt(signed=False, label="UInt"), label="UIntPtr", offset=0), "InternalHigh": SimTypePointer(SimTypeInt(signed=False, label="UInt"), label="UIntPtr", offset=0), "Anonymous": SimUnion({"Anonymous": SimStruct({"Offset": SimTypeInt(signed=False, label="UInt32"), "OffsetHigh": SimTypeInt(signed=False, label="UInt32")}, name="_Anonymous_e__Struct", pack=False, align=None), "Pointer": SimTypePointer(SimTypeBottom(label="Void"), offset=0)}, name="<anon>", label="None"), "hEvent": SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0)}, name="OVERLAPPED", pack=False, align=None), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["BufferHandle", "Offset", "Length", "ContinueStream", "NumberOfPackets", "IsoPacketDescriptors", "Overlapped"]),
        # Begin tracking USB frames for time synchronization.
        'WinUsb_StartTrackingForTimeSync': SimTypeFunction([SimTypePointer(SimTypeBottom(label="Void"), offset=0), SimTypePointer(SimStruct({"TimeTrackingHandle": SimTypeBottom(label="HANDLE"), "IsStartupDelayTolerable": SimTypeBottom(label="BOOLEAN")}, name="USB_START_TRACKING_FOR_TIME_SYNC_INFORMATION", pack=False, align=None), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["InterfaceHandle", "StartTrackingInfo"]),
        # Get the current frame number together with QPC timing data.
        'WinUsb_GetCurrentFrameNumberAndQpc': SimTypeFunction([SimTypePointer(SimTypeBottom(label="Void"), offset=0), SimTypePointer(SimStruct({"TimeTrackingHandle": SimTypeBottom(label="HANDLE"), "InputFrameNumber": SimTypeInt(signed=False, label="UInt32"), "InputMicroFrameNumber": SimTypeInt(signed=False, label="UInt32"), "QueryPerformanceCounterAtInputFrameOrMicroFrame": SimTypeBottom(label="LARGE_INTEGER"), "QueryPerformanceCounterFrequency": SimTypeBottom(label="LARGE_INTEGER"), "PredictedAccuracyInMicroSeconds": SimTypeInt(signed=False, label="UInt32"), "CurrentGenerationID": SimTypeInt(signed=False, label="UInt32"), "CurrentQueryPerformanceCounter": SimTypeBottom(label="LARGE_INTEGER"), "CurrentHardwareFrameNumber": SimTypeInt(signed=False, label="UInt32"), "CurrentHardwareMicroFrameNumber": SimTypeInt(signed=False, label="UInt32"), "CurrentUSBFrameNumber": SimTypeInt(signed=False, label="UInt32")}, name="USB_FRAME_NUMBER_AND_QPC_FOR_TIME_SYNC_INFORMATION", pack=False, align=None), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["InterfaceHandle", "FrameQpcInfo"]),
        # Stop tracking USB frames for time synchronization.
        'WinUsb_StopTrackingForTimeSync': SimTypeFunction([SimTypePointer(SimTypeBottom(label="Void"), offset=0), SimTypePointer(SimStruct({"TimeTrackingHandle": SimTypeBottom(label="HANDLE")}, name="USB_STOP_TRACKING_FOR_TIME_SYNC_INFORMATION", pack=False, align=None), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["InterfaceHandle", "StopTrackingInfo"]),
    }

lib.set_prototypes(prototypes)
| 254.333333
| 1,445
| 0.742508
| 2,341
| 22,890
| 7.183682
| 0.094831
| 0.13415
| 0.08753
| 0.131415
| 0.833978
| 0.807397
| 0.803116
| 0.795742
| 0.78272
| 0.775822
| 0
| 0.017402
| 0.073613
| 22,890
| 89
| 1,446
| 257.191011
| 0.775666
| 0.001223
| 0
| 0
| 0
| 0
| 0.272771
| 0.060986
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.104167
| 0
| 0.104167
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
05ef5eed04a9f84149a34cecde02da13c68c1fb8
| 47
|
py
|
Python
|
websiteconfig.py
|
MrNegativeTW/JustSendIt
|
e2eebe8cecf5a49b32980c046d1167ea0588ff29
|
[
"MIT"
] | null | null | null |
websiteconfig.py
|
MrNegativeTW/JustSendIt
|
e2eebe8cecf5a49b32980c046d1167ea0588ff29
|
[
"MIT"
] | 1
|
2021-04-30T21:16:36.000Z
|
2021-04-30T21:16:36.000Z
|
websiteconfig.py
|
MrNegativeTW/JustSendIt
|
e2eebe8cecf5a49b32980c046d1167ea0588ff29
|
[
"MIT"
] | null | null | null |
"""Website configuration.

SECURITY NOTE(review): the original secret key was committed to version
control in plain text, so it must be treated as compromised -- rotate it.
The key can now be supplied via the SECRET_KEY environment variable; the
historical hard-coded value remains only as a backward-compatible fallback
for existing deployments.
"""
import os

# Prefer the environment; fall back to the historical (leaked) value.
SECRET_KEY = os.environ.get('SECRET_KEY', 'a83c51c8b7fc804eb395d7c1d753fa28')
| 47
| 47
| 0.893617
| 3
| 47
| 13.666667
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.422222
| 0.042553
| 47
| 1
| 47
| 47
| 0.488889
| 0
| 0
| 0
| 0
| 0
| 0.666667
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
af3bf91d5e97d78e704ee7a09a4739ac2ae5bb1a
| 389
|
py
|
Python
|
extensions/.stubs/clrclasses/System/Security/Claims/__init__.py
|
vicwjb/Pycad
|
7391cd694b7a91ad9f9964ec95833c1081bc1f84
|
[
"MIT"
] | 1
|
2020-03-25T03:27:24.000Z
|
2020-03-25T03:27:24.000Z
|
extensions/.stubs/clrclasses/System/Security/Claims/__init__.py
|
vicwjb/Pycad
|
7391cd694b7a91ad9f9964ec95833c1081bc1f84
|
[
"MIT"
] | null | null | null |
extensions/.stubs/clrclasses/System/Security/Claims/__init__.py
|
vicwjb/Pycad
|
7391cd694b7a91ad9f9964ec95833c1081bc1f84
|
[
"MIT"
] | null | null | null |
from __clrclasses__.System.Security.Claims import Claim
from __clrclasses__.System.Security.Claims import ClaimsIdentity
from __clrclasses__.System.Security.Claims import ClaimsPrincipal
from __clrclasses__.System.Security.Claims import ClaimTypes
from __clrclasses__.System.Security.Claims import ClaimValueTypes
from __clrclasses__.System.Security.Claims import DynamicRoleClaimProvider
| 55.571429
| 74
| 0.892031
| 42
| 389
| 7.690476
| 0.285714
| 0.260062
| 0.371517
| 0.520124
| 0.743034
| 0.743034
| 0
| 0
| 0
| 0
| 0
| 0
| 0.061697
| 389
| 6
| 75
| 64.833333
| 0.884932
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
af6452e5b6b8d2775733c9b23a17653383a20dc0
| 18,959
|
py
|
Python
|
yandex/cloud/serverless/functions/v1/function_service_pb2_grpc.py
|
kbespalov/python-sdk
|
e86563ee850e46a35b4c84053ecd4affdf66a963
|
[
"MIT"
] | null | null | null |
yandex/cloud/serverless/functions/v1/function_service_pb2_grpc.py
|
kbespalov/python-sdk
|
e86563ee850e46a35b4c84053ecd4affdf66a963
|
[
"MIT"
] | null | null | null |
yandex/cloud/serverless/functions/v1/function_service_pb2_grpc.py
|
kbespalov/python-sdk
|
e86563ee850e46a35b4c84053ecd4affdf66a963
|
[
"MIT"
] | null | null | null |
# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
import grpc
from yandex.cloud.access import access_pb2 as yandex_dot_cloud_dot_access_dot_access__pb2
from yandex.cloud.operation import operation_pb2 as yandex_dot_cloud_dot_operation_dot_operation__pb2
from yandex.cloud.serverless.functions.v1 import function_pb2 as yandex_dot_cloud_dot_serverless_dot_functions_dot_v1_dot_function__pb2
from yandex.cloud.serverless.functions.v1 import function_service_pb2 as yandex_dot_cloud_dot_serverless_dot_functions_dot_v1_dot_function__service__pb2
class FunctionServiceStub(object):
    """A set of methods for managing serverless functions.
    """

    def __init__(self, channel):
        """Constructor.

        Args:
            channel: A grpc.Channel.
        """
        # Short aliases for the generated protobuf modules.
        service_pb2 = yandex_dot_cloud_dot_serverless_dot_functions_dot_v1_dot_function__service__pb2
        function_pb2 = yandex_dot_cloud_dot_serverless_dot_functions_dot_v1_dot_function__pb2
        operation_pb2 = yandex_dot_cloud_dot_operation_dot_operation__pb2
        access_pb2 = yandex_dot_cloud_dot_access_dot_access__pb2
        service_path = '/yandex.cloud.serverless.functions.v1.FunctionService/'
        # One row per RPC: (method name, request message, response message).
        rpc_specs = [
            ('Get', service_pb2.GetFunctionRequest, function_pb2.Function),
            ('List', service_pb2.ListFunctionsRequest, service_pb2.ListFunctionsResponse),
            ('Create', service_pb2.CreateFunctionRequest, operation_pb2.Operation),
            ('Update', service_pb2.UpdateFunctionRequest, operation_pb2.Operation),
            ('Delete', service_pb2.DeleteFunctionRequest, operation_pb2.Operation),
            ('GetVersion', service_pb2.GetFunctionVersionRequest, function_pb2.Version),
            ('GetVersionByTag', service_pb2.GetFunctionVersionByTagRequest, function_pb2.Version),
            ('ListVersions', service_pb2.ListFunctionsVersionsRequest, service_pb2.ListFunctionsVersionsResponse),
            ('SetTag', service_pb2.SetFunctionTagRequest, operation_pb2.Operation),
            ('RemoveTag', service_pb2.RemoveFunctionTagRequest, operation_pb2.Operation),
            ('ListTagHistory', service_pb2.ListFunctionTagHistoryRequest, service_pb2.ListFunctionTagHistoryResponse),
            ('CreateVersion', service_pb2.CreateFunctionVersionRequest, operation_pb2.Operation),
            ('ListRuntimes', service_pb2.ListRuntimesRequest, service_pb2.ListRuntimesResponse),
            ('ListOperations', service_pb2.ListFunctionOperationsRequest, service_pb2.ListFunctionOperationsResponse),
            ('ListAccessBindings', access_pb2.ListAccessBindingsRequest, access_pb2.ListAccessBindingsResponse),
            ('SetAccessBindings', access_pb2.SetAccessBindingsRequest, operation_pb2.Operation),
            ('UpdateAccessBindings', access_pb2.UpdateAccessBindingsRequest, operation_pb2.Operation),
        ]
        # Create one unary-unary callable attribute per RPC (self.Get, self.List, ...),
        # exactly as the expanded generated code would.
        for name, request_cls, response_cls in rpc_specs:
            setattr(self, name, channel.unary_unary(
                service_path + name,
                request_serializer=request_cls.SerializeToString,
                response_deserializer=response_cls.FromString,
            ))
class FunctionServiceServicer(object):
    """A set of methods for managing serverless functions.
    """

    @staticmethod
    def _unimplemented(context):
        # Shared stub body: mark the RPC as unimplemented on the context and raise.
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def Get(self, request, context):
        """Returns the specified function.
        To get the list of all available functions, make a [List] request.
        """
        self._unimplemented(context)

    def List(self, request, context):
        """Retrieves the list of functions in the specified folder.
        """
        self._unimplemented(context)

    def Create(self, request, context):
        """Creates a function in the specified folder.
        """
        self._unimplemented(context)

    def Update(self, request, context):
        """Updates the specified function.
        """
        self._unimplemented(context)

    def Delete(self, request, context):
        """Deletes the specified function.
        """
        self._unimplemented(context)

    def GetVersion(self, request, context):
        """Returns the specified version of a function.
        To get the list of available version, make a [ListVersions] request.
        """
        self._unimplemented(context)

    def GetVersionByTag(self, request, context):
        """Returns all versions with the specified tag.
        To get the list of all available versions, make a [ListVersions] request.
        """
        self._unimplemented(context)

    def ListVersions(self, request, context):
        """Retrieves the list of versions for the specified function, or of all function versions
        in the specified folder.
        """
        self._unimplemented(context)

    def SetTag(self, request, context):
        """Set a tag for the specified version of a function.
        """
        self._unimplemented(context)

    def RemoveTag(self, request, context):
        """Remove a tag from the specified version of a function.
        """
        self._unimplemented(context)

    def ListTagHistory(self, request, context):
        """Returns the log of tags assigned to versions of the specified function.
        """
        self._unimplemented(context)

    def CreateVersion(self, request, context):
        """Creates a version for the specified function.
        """
        self._unimplemented(context)

    def ListRuntimes(self, request, context):
        """Lists available runtime environments for the specified function.
        """
        self._unimplemented(context)

    def ListOperations(self, request, context):
        """Lists operations for the specified function.
        """
        self._unimplemented(context)

    def ListAccessBindings(self, request, context):
        """Lists existing access bindings for the specified function.
        """
        self._unimplemented(context)

    def SetAccessBindings(self, request, context):
        """Sets access bindings for the function.
        """
        self._unimplemented(context)

    def UpdateAccessBindings(self, request, context):
        """Updates access bindings for the specified function.
        """
        self._unimplemented(context)
def add_FunctionServiceServicer_to_server(servicer, server):
    """Registers every FunctionService RPC handler from *servicer* on *server*."""
    # Short aliases for the generated protobuf modules.
    service_pb2 = yandex_dot_cloud_dot_serverless_dot_functions_dot_v1_dot_function__service__pb2
    function_pb2 = yandex_dot_cloud_dot_serverless_dot_functions_dot_v1_dot_function__pb2
    operation_pb2 = yandex_dot_cloud_dot_operation_dot_operation__pb2
    access_pb2 = yandex_dot_cloud_dot_access_dot_access__pb2
    # One row per RPC: (method name, request message, response message) —
    # mirrors the table used by the client stub.
    rpc_specs = [
        ('Get', service_pb2.GetFunctionRequest, function_pb2.Function),
        ('List', service_pb2.ListFunctionsRequest, service_pb2.ListFunctionsResponse),
        ('Create', service_pb2.CreateFunctionRequest, operation_pb2.Operation),
        ('Update', service_pb2.UpdateFunctionRequest, operation_pb2.Operation),
        ('Delete', service_pb2.DeleteFunctionRequest, operation_pb2.Operation),
        ('GetVersion', service_pb2.GetFunctionVersionRequest, function_pb2.Version),
        ('GetVersionByTag', service_pb2.GetFunctionVersionByTagRequest, function_pb2.Version),
        ('ListVersions', service_pb2.ListFunctionsVersionsRequest, service_pb2.ListFunctionsVersionsResponse),
        ('SetTag', service_pb2.SetFunctionTagRequest, operation_pb2.Operation),
        ('RemoveTag', service_pb2.RemoveFunctionTagRequest, operation_pb2.Operation),
        ('ListTagHistory', service_pb2.ListFunctionTagHistoryRequest, service_pb2.ListFunctionTagHistoryResponse),
        ('CreateVersion', service_pb2.CreateFunctionVersionRequest, operation_pb2.Operation),
        ('ListRuntimes', service_pb2.ListRuntimesRequest, service_pb2.ListRuntimesResponse),
        ('ListOperations', service_pb2.ListFunctionOperationsRequest, service_pb2.ListFunctionOperationsResponse),
        ('ListAccessBindings', access_pb2.ListAccessBindingsRequest, access_pb2.ListAccessBindingsResponse),
        ('SetAccessBindings', access_pb2.SetAccessBindingsRequest, operation_pb2.Operation),
        ('UpdateAccessBindings', access_pb2.UpdateAccessBindingsRequest, operation_pb2.Operation),
    ]
    # Server side deserializes requests and serializes responses (the inverse
    # of the client stub's serializer/deserializer pairing).
    rpc_method_handlers = {
        name: grpc.unary_unary_rpc_method_handler(
            getattr(servicer, name),
            request_deserializer=request_cls.FromString,
            response_serializer=response_cls.SerializeToString,
        )
        for name, request_cls, response_cls in rpc_specs
    }
    generic_handler = grpc.method_handlers_generic_handler(
        'yandex.cloud.serverless.functions.v1.FunctionService', rpc_method_handlers)
    server.add_generic_rpc_handlers((generic_handler,))
| 57.62614
| 159
| 0.803576
| 2,086
| 18,959
| 6.800575
| 0.063758
| 0.045679
| 0.071056
| 0.086282
| 0.855068
| 0.849429
| 0.837375
| 0.780065
| 0.774919
| 0.670097
| 0
| 0.008617
| 0.130809
| 18,959
| 328
| 160
| 57.801829
| 0.852236
| 0.073105
| 0
| 0.349206
| 1
| 0
| 0.121793
| 0.066292
| 0
| 0
| 0
| 0
| 0
| 1
| 0.075397
| false
| 0
| 0.019841
| 0
| 0.103175
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
af93855b55588aeddf202a37df5505bb86203708
| 42,417
|
py
|
Python
|
tests/test_core/test_text/test_text_box_layout.py
|
lionel42/pygame_gui
|
27b51f5b811b4569bc463566bc9f2d82ada119f6
|
[
"MIT"
] | null | null | null |
tests/test_core/test_text/test_text_box_layout.py
|
lionel42/pygame_gui
|
27b51f5b811b4569bc463566bc9f2d82ada119f6
|
[
"MIT"
] | null | null | null |
tests/test_core/test_text/test_text_box_layout.py
|
lionel42/pygame_gui
|
27b51f5b811b4569bc463566bc9f2d82ada119f6
|
[
"MIT"
] | null | null | null |
from collections import deque
import pygame
import pygame.freetype
import pytest
from pygame_gui.ui_manager import UIManager
from pygame_gui.core.text.text_box_layout import TextBoxLayout
from pygame_gui.core.text import SimpleTestLayoutRect, TextLineChunkFTFont, HyperlinkTextChunk
class TestTextBoxLayout:
def test_creation(self, _init_pygame, default_ui_manager: UIManager):
    """Constructing a layout from an empty input queue should not raise."""
    TextBoxLayout(input_data_queue=deque([]),
                  layout_rect=pygame.Rect(0, 0, 200, 300),
                  view_rect=pygame.Rect(0, 0, 200, 150),
                  line_spacing=1.0)
def test_creation_with_data(self, _init_pygame, default_ui_manager: UIManager):
    """A layout fed queued rects should produce at least one layout row."""
    # Same eight rects as before: the four sizes, repeated twice, in order.
    dims = [(50, 20), (30, 20), (90, 20), (175, 20)] * 2
    input_data = deque(SimpleTestLayoutRect(dimensions=d) for d in dims)
    layout = TextBoxLayout(input_data_queue=input_data,
                           layout_rect=pygame.Rect(0, 0, 200, 300),
                           view_rect=pygame.Rect(0, 0, 200, 150),
                           line_spacing=1.0)
    assert len(layout.layout_rows) > 0
def test_reprocess_layout_queue(self, _init_pygame, default_ui_manager: UIManager):
    """Reprocessing with a narrower rect should wrap into more rows."""
    dims = [(50, 20), (30, 20), (90, 20), (175, 20)] * 2
    input_data = deque(SimpleTestLayoutRect(dimensions=d) for d in dims)
    layout = TextBoxLayout(input_data_queue=input_data,
                           layout_rect=pygame.Rect(0, 0, 200, 300),
                           view_rect=pygame.Rect(0, 0, 200, 150),
                           line_spacing=1.0)
    assert len(layout.layout_rows) == 4
    # Halving the layout width forces the same rects onto more rows.
    layout.reprocess_layout_queue(pygame.Rect(0, 0, 100, 300))
    assert len(layout.layout_rows) == 9
def test_finalise_to_surf(self, _init_pygame, default_ui_manager: UIManager):
    """Finalising onto an existing surface should draw visible pixels."""
    dims = [(50, 20), (30, 20), (90, 20), (175, 20)] * 2
    input_data = deque(SimpleTestLayoutRect(dimensions=d) for d in dims)
    layout = TextBoxLayout(input_data_queue=input_data,
                           layout_rect=pygame.Rect(0, 0, 200, 300),
                           view_rect=pygame.Rect(0, 0, 200, 150),
                           line_spacing=1.0)
    layout_surface = pygame.Surface((200, 300), depth=32, flags=pygame.SRCALPHA)
    layout_surface.fill((0, 0, 0, 0))
    layout.finalise_to_surf(layout_surface)
    # A pixel inside the first rect should no longer be fully transparent.
    assert layout_surface.get_at((10, 10)) != pygame.Color(0, 0, 0, 0)
def test_finalise_to_new(self, _init_pygame, default_ui_manager: UIManager):
    """finalise_to_new should return a freshly drawn, non-empty surface."""
    dims = [(50, 20), (30, 20), (90, 20), (175, 20)] * 2
    input_data = deque(SimpleTestLayoutRect(dimensions=d) for d in dims)
    layout = TextBoxLayout(input_data_queue=input_data,
                           layout_rect=pygame.Rect(0, 0, 200, 300),
                           view_rect=pygame.Rect(0, 0, 200, 150),
                           line_spacing=1.0)
    layout_surface = layout.finalise_to_new()
    assert layout_surface.get_at((10, 10)) != pygame.Color(0, 0, 0, 0)
def test_update_text_with_new_text_end_pos(self, _init_pygame, default_ui_manager: UIManager):
    """Advancing the text end position should progressively reveal the chunk."""
    chunk = TextLineChunkFTFont(text='hello',
                                font=pygame.freetype.Font(None, 20),
                                underlined=False,
                                colour=pygame.Color('#FFFFFF'),
                                using_default_text_colour=False,
                                bg_colour=pygame.Color('#FF0000'))
    layout = TextBoxLayout(input_data_queue=deque([chunk]),
                           layout_rect=pygame.Rect(0, 0, 200, 300),
                           view_rect=pygame.Rect(0, 0, 200, 150),
                           line_spacing=1.0)
    layout_surface = layout.finalise_to_new()
    layout.update_text_with_new_text_end_pos(0)  # this does nothing unless we pass in text
    assert layout_surface.get_at((10, 10)) == pygame.Color(0, 0, 0, 0)
    # Revealing the first three letters paints the red chunk background.
    layout.update_text_with_new_text_end_pos(3)
    assert layout_surface.get_at((10, 10)) == pygame.Color(255, 0, 0, 255)
def test_clear_final_surface(self, _init_pygame, default_ui_manager: UIManager):
    """Clearing the final surface should wipe previously drawn pixels."""
    dims = [(50, 20), (30, 20), (90, 20), (175, 20)] * 2
    input_data = deque(SimpleTestLayoutRect(dimensions=d) for d in dims)
    layout = TextBoxLayout(input_data_queue=input_data,
                           layout_rect=pygame.Rect(0, 0, 200, 300),
                           view_rect=pygame.Rect(0, 0, 200, 150),
                           line_spacing=1.0)
    layout_surface = layout.finalise_to_new()
    assert layout_surface.get_at((10, 10)) != pygame.Color(0, 0, 0, 0)
    layout.clear_final_surface()
    assert layout_surface.get_at((10, 10)) == pygame.Color(0, 0, 0, 0)
def test_set_alpha(self, _init_pygame, default_ui_manager: UIManager):
    """set_alpha should scale the finalised surface's per-pixel alpha."""
    dims = [(50, 20), (30, 20), (90, 20), (175, 20)] * 2
    input_data = deque(SimpleTestLayoutRect(dimensions=d) for d in dims)
    layout = TextBoxLayout(input_data_queue=input_data,
                           layout_rect=pygame.Rect(0, 0, 200, 300),
                           view_rect=pygame.Rect(0, 0, 200, 150),
                           line_spacing=1.0)
    layout_surface = layout.finalise_to_new()
    assert layout_surface.get_at((4, 4)) != pygame.Color(0, 0, 0, 0)
    layout.set_alpha(128)
    # set_alpha replaces the finalised surface, so re-fetch it before sampling.
    layout_surface = layout.finalised_surface
    # 255 scaled by 128/255 rounds down to 127.
    assert layout_surface.get_at((4, 4)).a == 127
def test_add_chunks_to_hover_group(self, _init_pygame, default_ui_manager: UIManager):
    """Only the hyperlink chunk should be collected into the hover group."""
    plain_chunk = TextLineChunkFTFont(text='hello',
                                      font=pygame.freetype.Font(None, 20),
                                      underlined=False,
                                      colour=pygame.Color('#FFFFFF'),
                                      using_default_text_colour=False,
                                      bg_colour=pygame.Color('#FF0000'))
    link_chunk = HyperlinkTextChunk(href='test',
                                    text='a link',
                                    font=pygame.freetype.Font(None, 20),
                                    underlined=False,
                                    colour=pygame.Color('#FFFFFF'),
                                    bg_colour=pygame.Color('#FF0000'),
                                    hover_colour=pygame.Color('#0000FF'),
                                    active_colour=pygame.Color('#FFFF00'),
                                    hover_underline=False)
    layout = TextBoxLayout(input_data_queue=deque([plain_chunk, link_chunk]),
                           layout_rect=pygame.Rect(0, 0, 200, 300),
                           view_rect=pygame.Rect(0, 0, 200, 150),
                           line_spacing=1.0)
    links_found = []
    layout.add_chunks_to_hover_group(links_found)
    assert len(links_found) == 1
def test_insert_layout_rects(self, _init_pygame, default_ui_manager: UIManager):
    """Inserting a chunk mid-row should merge its text into the neighbour."""
    the_font = pygame.freetype.Font(None, 20)

    def make_chunk(text):
        # All chunks share the same styling; only the text differs.
        return TextLineChunkFTFont(text=text,
                                   font=the_font,
                                   underlined=False,
                                   colour=pygame.Color('#FFFFFF'),
                                   using_default_text_colour=False,
                                   bg_colour=pygame.Color('#FF0000'))

    input_data = deque(make_chunk(t) for t in ('hello ', 'this is a', ' test'))
    layout = TextBoxLayout(input_data_queue=input_data,
                           layout_rect=pygame.Rect(0, 0, 500, 300),
                           view_rect=pygame.Rect(0, 0, 500, 150),
                           line_spacing=1.0)
    insert_data = deque([make_chunk('n insertion')])
    # Splice the new chunk into row 0, item 1, at character offset 9.
    layout.insert_layout_rects(layout_rects=insert_data,
                               row_index=0,
                               item_index=1,
                               chunk_index=9)
    row = layout.layout_rows[0]
    chunk = row.items[1]
    assert chunk.text == 'this is an insertion'
def test_horiz_centre_all_rows(self, _init_pygame, default_ui_manager: UIManager):
    """horiz_center_all_rows should centre each row inside the 500px layout."""
    the_font = pygame.freetype.Font(None, 20)

    def make_chunk(text):
        # All chunks share the same styling; only the text differs.
        return TextLineChunkFTFont(text=text,
                                   font=the_font,
                                   underlined=False,
                                   colour=pygame.Color('#FFFFFF'),
                                   using_default_text_colour=False,
                                   bg_colour=pygame.Color('#FF0000'))

    input_data = deque(make_chunk(t) for t in ('hello ', 'this is a', ' test'))
    layout = TextBoxLayout(input_data_queue=input_data,
                           layout_rect=pygame.Rect(0, 0, 500, 300),
                           view_rect=pygame.Rect(0, 0, 500, 150),
                           line_spacing=1.0)
    row = layout.layout_rows[0]
    assert row.x == 0
    layout.horiz_center_all_rows()
    assert row.x != 0
    assert row.centerx == 250
def test_align_left_all_rows(self, _init_pygame, default_ui_manager: UIManager):
    """align_left_all_rows should undo centring and apply the x padding."""
    the_font = pygame.freetype.Font(None, 20)

    def make_chunk(text):
        # All chunks share the same styling; only the text differs.
        return TextLineChunkFTFont(text=text,
                                   font=the_font,
                                   underlined=False,
                                   colour=pygame.Color('#FFFFFF'),
                                   using_default_text_colour=False,
                                   bg_colour=pygame.Color('#FF0000'))

    input_data = deque(make_chunk(t) for t in ('hello ', 'this is a', ' test'))
    layout = TextBoxLayout(input_data_queue=input_data,
                           layout_rect=pygame.Rect(0, 0, 500, 300),
                           view_rect=pygame.Rect(0, 0, 500, 150),
                           line_spacing=1.0)
    row = layout.layout_rows[0]
    assert row.x == 0
    # Centre first so we can observe the left-align putting it back.
    layout.horiz_center_all_rows()
    assert row.x != 0
    assert row.centerx == 250
    layout.align_left_all_rows(x_padding=5)
    assert row.x == 5
    assert row.centerx != 250
def test_align_right_all_rows(self, _init_pygame, default_ui_manager: UIManager):
    """align_right_all_rows should butt rows against the right edge minus padding."""
    the_font = pygame.freetype.Font(None, 20)

    def make_chunk(text):
        # All chunks share the same styling; only the text differs.
        return TextLineChunkFTFont(text=text,
                                   font=the_font,
                                   underlined=False,
                                   colour=pygame.Color('#FFFFFF'),
                                   using_default_text_colour=False,
                                   bg_colour=pygame.Color('#FF0000'))

    input_data = deque(make_chunk(t) for t in ('hello ', 'this is a', ' test'))
    layout = TextBoxLayout(input_data_queue=input_data,
                           layout_rect=pygame.Rect(0, 0, 500, 300),
                           view_rect=pygame.Rect(0, 0, 500, 150),
                           line_spacing=1.0)
    row = layout.layout_rows[0]
    assert row.x == 0
    layout.align_right_all_rows(x_padding=5)
    # 500 wide layout minus 5px padding.
    assert row.right == 495
def test_vert_center_all_rows(self, _init_pygame, default_ui_manager: UIManager):
    """vert_center_all_rows should centre rows vertically in the 300px layout."""
    the_font = pygame.freetype.Font(None, 20)

    def make_chunk(text):
        # All chunks share the same styling; only the text differs.
        return TextLineChunkFTFont(text=text,
                                   font=the_font,
                                   underlined=False,
                                   colour=pygame.Color('#FFFFFF'),
                                   using_default_text_colour=False,
                                   bg_colour=pygame.Color('#FF0000'))

    input_data = deque(make_chunk(t) for t in ('hello ', 'this is a', ' test'))
    layout = TextBoxLayout(input_data_queue=input_data,
                           layout_rect=pygame.Rect(0, 0, 500, 300),
                           view_rect=pygame.Rect(0, 0, 500, 150),
                           line_spacing=1.0)
    row = layout.layout_rows[0]
    assert row.y == 0
    layout.vert_center_all_rows()
    assert row.centery == 150
def test_vert_align_top_all_rows(self, _init_pygame, default_ui_manager: UIManager):
    """vert_align_top_all_rows should undo centring and apply the y padding."""
    the_font = pygame.freetype.Font(None, 20)

    def make_chunk(text):
        # All chunks share the same styling; only the text differs.
        return TextLineChunkFTFont(text=text,
                                   font=the_font,
                                   underlined=False,
                                   colour=pygame.Color('#FFFFFF'),
                                   using_default_text_colour=False,
                                   bg_colour=pygame.Color('#FF0000'))

    input_data = deque(make_chunk(t) for t in ('hello ', 'this is a', ' test'))
    layout = TextBoxLayout(input_data_queue=input_data,
                           layout_rect=pygame.Rect(0, 0, 500, 300),
                           view_rect=pygame.Rect(0, 0, 500, 150),
                           line_spacing=1.0)
    row = layout.layout_rows[0]
    assert row.y == 0
    # Centre first so we can observe the top-align putting it back.
    layout.vert_center_all_rows()
    assert row.centery == 150
    layout.vert_align_top_all_rows(y_padding=10)
    assert row.y == 10
def test_vert_align_bottom_all_rows(self, _init_pygame, default_ui_manager: UIManager):
    """vert_align_bottom_all_rows should butt rows against the bottom minus padding."""
    the_font = pygame.freetype.Font(None, 20)

    def make_chunk(text):
        # All chunks share the same styling; only the text differs.
        return TextLineChunkFTFont(text=text,
                                   font=the_font,
                                   underlined=False,
                                   colour=pygame.Color('#FFFFFF'),
                                   using_default_text_colour=False,
                                   bg_colour=pygame.Color('#FF0000'))

    input_data = deque(make_chunk(t) for t in ('hello ', 'this is a', ' test'))
    layout = TextBoxLayout(input_data_queue=input_data,
                           layout_rect=pygame.Rect(0, 0, 500, 300),
                           view_rect=pygame.Rect(0, 0, 500, 150),
                           line_spacing=1.0)
    row = layout.layout_rows[0]
    assert row.y == 0
    layout.vert_align_bottom_all_rows(y_padding=8)
    # 300 tall layout minus 8px padding.
    assert row.bottom == 292
def test_set_cursor_position(self, _init_pygame, default_ui_manager: UIManager):
    """Setting the cursor by character index should land on the expected row and pixel offset."""
    test_font = pygame.freetype.Font(None, 20)
    chunks = deque(TextLineChunkFTFont(text=fragment,
                                       font=test_font,
                                       underlined=False,
                                       colour=pygame.Color('#FFFFFF'),
                                       using_default_text_colour=False,
                                       bg_colour=pygame.Color('#FF0000'))
                   for fragment in ('hello ', 'this is a', ' test'))
    layout = TextBoxLayout(input_data_queue=chunks,
                           layout_rect=pygame.Rect(0, 0, 100, 300),
                           view_rect=pygame.Rect(0, 0, 100, 150),
                           line_spacing=1.0)
    layout.set_cursor_position(13)
    # The narrow 100px layout wraps the text, so index 13 falls on a later row.
    assert layout.cursor_text_row is not None
    assert layout.cursor_text_row.cursor_index == 2
    assert layout.cursor_text_row.cursor_draw_width == 17
def test_set_cursor_from_click_pos(self, _init_pygame, default_ui_manager: UIManager):
    """Clicking at a pixel position should resolve to the same cursor state as the index API."""
    test_font = pygame.freetype.Font(None, 20)
    chunks = deque(TextLineChunkFTFont(text=fragment,
                                       font=test_font,
                                       underlined=False,
                                       colour=pygame.Color('#FFFFFF'),
                                       using_default_text_colour=False,
                                       bg_colour=pygame.Color('#FF0000'))
                   for fragment in ('hello ', 'this is a', ' test'))
    layout = TextBoxLayout(input_data_queue=chunks,
                           layout_rect=pygame.Rect(0, 0, 100, 300),
                           view_rect=pygame.Rect(0, 0, 100, 150),
                           line_spacing=1.0)
    layout.set_cursor_from_click_pos((17, 24))
    assert layout.cursor_text_row is not None
    assert layout.cursor_text_row.cursor_index == 2
    assert layout.cursor_text_row.cursor_draw_width == 17
def test_toggle_cursor(self, _init_pygame, default_ui_manager: UIManager):
    """toggle_cursor should flip the edit cursor's active flag on each call."""
    test_font = pygame.freetype.Font(None, 20)
    chunks = deque(TextLineChunkFTFont(text=fragment,
                                       font=test_font,
                                       underlined=False,
                                       colour=pygame.Color('#FFFFFF'),
                                       using_default_text_colour=False,
                                       bg_colour=pygame.Color('#FF0000'))
                   for fragment in ('hello ', 'this is a', ' test'))
    layout = TextBoxLayout(input_data_queue=chunks,
                           layout_rect=pygame.Rect(0, 0, 100, 300),
                           view_rect=pygame.Rect(0, 0, 100, 150),
                           line_spacing=1.0)
    layout.set_cursor_from_click_pos((17, 24))
    assert layout.cursor_text_row is not None
    # Starts inactive; two toggles round-trip back to inactive.
    assert not layout.cursor_text_row.edit_cursor_active
    layout.toggle_cursor()
    assert layout.cursor_text_row.edit_cursor_active
    layout.toggle_cursor()
    assert not layout.cursor_text_row.edit_cursor_active
def test_set_text_selection(self, _init_pygame, default_ui_manager: UIManager):
    """Selecting a character range should capture the expected rows, chunks and text."""
    test_font = pygame.freetype.Font(None, 20)
    chunks = deque(TextLineChunkFTFont(text=fragment,
                                       font=test_font,
                                       underlined=False,
                                       colour=pygame.Color('#FFFFFF'),
                                       using_default_text_colour=False,
                                       bg_colour=pygame.Color('#FF0000'))
                   for fragment in ('hello ', 'this is a', ' test'))
    layout = TextBoxLayout(input_data_queue=chunks,
                           layout_rect=pygame.Rect(0, 0, 100, 300),
                           view_rect=pygame.Rect(0, 0, 100, 150),
                           line_spacing=1.0)
    layout.set_text_selection(5, 17)
    assert len(layout.selected_rows) == 2
    assert len(layout.selected_chunks) == 4
    selected_text = ''.join(chunk.text for chunk in layout.selected_chunks)
    assert selected_text == ' this is a t'
def test_set_default_text_colour(self, _init_pygame, default_ui_manager: UIManager):
    """Only the chunk flagged with using_default_text_colour should pick up the new default."""
    test_font = pygame.freetype.Font(None, 20)
    # Middle chunk opts into the layout's default colour; the others keep their own.
    chunks = deque(TextLineChunkFTFont(text=fragment,
                                       font=test_font,
                                       underlined=False,
                                       colour=pygame.Color('#FFFFFF'),
                                       using_default_text_colour=uses_default,
                                       bg_colour=pygame.Color('#FF0000'))
                   for fragment, uses_default in (('hello ', False),
                                                  ('this is a', True),
                                                  (' test', False)))
    layout = TextBoxLayout(input_data_queue=chunks,
                           layout_rect=pygame.Rect(0, 0, 500, 300),
                           view_rect=pygame.Rect(0, 0, 500, 150),
                           line_spacing=1.0)
    layout.set_default_text_colour(pygame.Color('#00FF00'))
    default_chunk_colour = layout.layout_rows[0].items[1].colour
    assert default_chunk_colour == pygame.Color('#00FF00')
def test_insert_text(self, _init_pygame, default_ui_manager: UIManager):
    """Inserting at character index 15 should splice text into the middle chunk."""
    test_font = pygame.freetype.Font(None, 20)
    chunks = deque(TextLineChunkFTFont(text=fragment,
                                       font=test_font,
                                       underlined=False,
                                       colour=pygame.Color('#FFFFFF'),
                                       using_default_text_colour=uses_default,
                                       bg_colour=pygame.Color('#FF0000'))
                   for fragment, uses_default in (('hello ', False),
                                                  ('this is a', True),
                                                  (' test', False)))
    layout = TextBoxLayout(input_data_queue=chunks,
                           layout_rect=pygame.Rect(0, 0, 500, 300),
                           view_rect=pygame.Rect(0, 0, 500, 150),
                           line_spacing=1.0)
    layout.insert_text('nother insertion', 15)
    # 'this is a' + 'nother insertion' spliced at global index 15.
    middle_chunk = layout.layout_rows[0].items[1]
    assert middle_chunk.text == 'this is another insertion'
def test_delete_selected_text(self, _init_pygame, default_ui_manager: UIManager):
    """Deleting a selection should clear selection state and remove exactly the selected text."""
    test_font = pygame.freetype.Font(None, 20)
    chunks = deque(TextLineChunkFTFont(text=fragment,
                                       font=test_font,
                                       underlined=False,
                                       colour=pygame.Color('#FFFFFF'),
                                       using_default_text_colour=uses_default,
                                       bg_colour=pygame.Color('#FF0000'))
                   for fragment, uses_default in (('hello ', False),
                                                  ('this is a', True),
                                                  (' test', False)))
    layout = TextBoxLayout(input_data_queue=chunks,
                           layout_rect=pygame.Rect(0, 0, 100, 300),
                           view_rect=pygame.Rect(0, 0, 100, 150),
                           line_spacing=1.0)
    layout.set_text_selection(5, 17)
    assert len(layout.selected_rows) == 2
    assert len(layout.selected_chunks) == 4
    selected_text = ''.join(chunk.text for chunk in layout.selected_chunks)
    assert selected_text == ' this is a t'
    layout.delete_selected_text()
    # Selection bookkeeping must be emptied by the deletion.
    assert len(layout.selected_rows) == 0
    assert len(layout.selected_chunks) == 0
    selected_text = ''.join(chunk.text for chunk in layout.selected_chunks)
    assert selected_text == ''
    remaining_text = ''.join(chunk.text
                             for row in layout.layout_rows
                             for chunk in row.items)
    assert remaining_text == 'helloest'
def test_delete_at_cursor(self, _init_pygame, default_ui_manager: UIManager):
    """Forward-delete twice at index 14 should remove the 'a ' spanning two chunks."""
    test_font = pygame.freetype.Font(None, 20)
    chunks = deque(TextLineChunkFTFont(text=fragment,
                                       font=test_font,
                                       underlined=False,
                                       colour=pygame.Color('#FFFFFF'),
                                       using_default_text_colour=uses_default,
                                       bg_colour=pygame.Color('#FF0000'))
                   for fragment, uses_default in (('hello ', False),
                                                  ('this is a', True),
                                                  (' test', False)))
    layout = TextBoxLayout(input_data_queue=chunks,
                           layout_rect=pygame.Rect(0, 0, 100, 300),
                           view_rect=pygame.Rect(0, 0, 100, 150),
                           line_spacing=1.0)
    layout.set_cursor_position(14)
    layout.delete_at_cursor()
    layout.delete_at_cursor()
    remaining_text = ''.join(chunk.text
                             for row in layout.layout_rows
                             for chunk in row.items)
    assert remaining_text == 'hello this is test'
def test_backspace_at_cursor(self, _init_pygame, default_ui_manager: UIManager):
    """Backspacing twice from index 16 should remove the same 'a ' as forward-delete from 14."""
    test_font = pygame.freetype.Font(None, 20)
    chunks = deque(TextLineChunkFTFont(text=fragment,
                                       font=test_font,
                                       underlined=False,
                                       colour=pygame.Color('#FFFFFF'),
                                       using_default_text_colour=uses_default,
                                       bg_colour=pygame.Color('#FF0000'))
                   for fragment, uses_default in (('hello ', False),
                                                  ('this is a', True),
                                                  (' test', False)))
    layout = TextBoxLayout(input_data_queue=chunks,
                           layout_rect=pygame.Rect(0, 0, 100, 300),
                           view_rect=pygame.Rect(0, 0, 100, 150),
                           line_spacing=1.0)
    layout.set_cursor_position(16)
    layout.backspace_at_cursor()
    layout.backspace_at_cursor()
    remaining_text = ''.join(chunk.text
                             for row in layout.layout_rows
                             for chunk in row.items)
    assert remaining_text == 'hello this is test'
# Allow running this test module directly, equivalent to invoking pytest on it.
if __name__ == '__main__':
    pytest.console_main()
| 54.873221
| 98
| 0.428036
| 3,383
| 42,417
| 5.112918
| 0.051138
| 0.073134
| 0.106146
| 0.08117
| 0.915014
| 0.898711
| 0.897265
| 0.887495
| 0.883679
| 0.879054
| 0
| 0.050817
| 0.493387
| 42,417
| 772
| 99
| 54.944301
| 0.754107
| 0.000943
| 0
| 0.863843
| 0
| 0
| 0.029357
| 0
| 0
| 0
| 0
| 0
| 0.081694
| 1
| 0.037821
| false
| 0
| 0.01059
| 0
| 0.049924
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
afc0a89ec0ad04b412f8a293d0630e43a65a8652
| 4,569
|
py
|
Python
|
experiments/super_mario/process_result.py
|
michalnand/reinforcement_learning_im
|
e29caa2a0b7bca3f9ff45ed949a3d3df3a40c4c1
|
[
"MIT"
] | null | null | null |
experiments/super_mario/process_result.py
|
michalnand/reinforcement_learning_im
|
e29caa2a0b7bca3f9ff45ed949a3d3df3a40c4c1
|
[
"MIT"
] | null | null | null |
experiments/super_mario/process_result.py
|
michalnand/reinforcement_learning_im
|
e29caa2a0b7bca3f9ff45ed949a3d3df3a40c4c1
|
[
"MIT"
] | null | null | null |
import sys
sys.path.insert(0, '../../')
from libs_common.RLStatsCompute import *
import matplotlib.pyplot as plt

# Directory where aggregated stats logs and output figures are written.
result_path = "./results/"

# Aggregate the raw result log of each experiment variant.
rl_stats_compute_dqn = RLStatsCompute(
    ["./models/dqn_baseline/result/result.log"],
    result_path + "dqn_baseline.log")
rl_stats_compute_curiosity = RLStatsCompute(
    ["./models/dqn_curiosity/result/result.log"],
    result_path + "dqn_curiosity.log")
rl_stats_compute_curiosity_em = RLStatsCompute(
    ["./models/dqn_curiosity_em/result/result.log"],
    result_path + "dqn_curiosity_em.log")

# (stats, legend label, plot colour) for every curve drawn below, in draw order.
_curves = ((rl_stats_compute_dqn, "dqn baseline", 'deepskyblue'),
           (rl_stats_compute_curiosity, "dqn curiosity", 'limegreen'),
           (rl_stats_compute_curiosity_em, "dqn curiosity em", 'red'))


def _plot_scores(x_attr, x_label, file_name):
    """Plot the mean score with confidence band for each variant against *x_attr*."""
    plt.cla()
    plt.ylabel("score")
    plt.xlabel(x_label)
    plt.grid(color='black', linestyle='-', linewidth=0.1)
    for stats, label, colour in _curves:
        x_values = getattr(stats, x_attr)
        plt.plot(x_values, stats.episode_mean, label=label, color=colour)
        plt.fill_between(x_values, stats.episode_lower, stats.episode_upper,
                         color=colour, alpha=0.2)
    plt.legend(loc='lower right', borderaxespad=0.)
    plt.savefig(result_path + file_name, dpi=300)


_plot_scores("games_mean", "episode", "score_per_episode.png")
_plot_scores("iterations", "iteration", "score_per_iteration.png")

'''
plt.cla()
plt.ylabel("value")
plt.xlabel("iteration")
plt.grid(color='black', linestyle='-', linewidth=0.1)
plt.plot(rl_stats_compute_curiosity.iterations, rl_stats_compute_curiosity.curiosity_mean, label="curiosity", color='deepskyblue')
plt.fill_between(rl_stats_compute_curiosity.iterations, rl_stats_compute_curiosity.curiosity_lower, rl_stats_compute_curiosity.curiosity_upper, color='deepskyblue', alpha=0.2)
#plt.plot(rl_stats_compute_curiosity_em.iterations, rl_stats_compute_curiosity_em.curiosity_mean, label="curiosity entropy", color='red')
#plt.fill_between(rl_stats_compute_curiosity_em.iterations, rl_stats_compute_curiosity_em.curiosity_lower, rl_stats_compute_curiosity_em.curiosity_upper, color='red', alpha=0.2)
plt.legend(loc='lower right', borderaxespad=0.)
plt.savefig(result_path + "internal_motivation.png", dpi = 300)
'''
'''
plt.cla()
plt.ylabel("value")
plt.xlabel("iteration")
plt.grid(color='black', linestyle='-', linewidth=0.1)
plt.plot(rl_stats_compute_curiosity.iterations, rl_stats_compute_curiosity.forward_loss_mean, label="curiosity", color='deepskyblue')
plt.fill_between(rl_stats_compute_curiosity.iterations, rl_stats_compute_curiosity.forward_loss_lower, rl_stats_compute_curiosity.forward_loss_upper, color='deepskyblue', alpha=0.2)
plt.plot(rl_stats_compute_curiosity_em.iterations, rl_stats_compute_curiosity_em.forward_loss_mean, label="curiosity entropy", color='deepskyblue')
plt.fill_between(rl_stats_compute_curiosity_em.iterations, rl_stats_compute_curiosity_em.forward_loss_lower, rl_stats_compute_curiosity_em.forward_loss_upper, color='deepskyblue', alpha=0.2)
plt.legend(loc='lower right', borderaxespad=0.)
plt.savefig(result_path + "forward_model_loss.png", dpi = 300)
'''
| 49.129032
| 190
| 0.821843
| 675
| 4,569
| 5.183704
| 0.100741
| 0.10603
| 0.212061
| 0.276079
| 0.944841
| 0.911117
| 0.860246
| 0.784224
| 0.784224
| 0.767362
| 0
| 0.0104
| 0.052966
| 4,569
| 93
| 191
| 49.129032
| 0.798244
| 0
| 0
| 0.289474
| 0
| 0
| 0.165549
| 0.058595
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.078947
| 0
| 0.078947
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
bb6818558d8fb79128454fe459387271a59f64f9
| 130
|
py
|
Python
|
adr/helper_functions/__init__.py
|
CeuAzul/ADR
|
09885062cfa79d19fa6370155d91466bda2a27d7
|
[
"MIT"
] | 16
|
2019-01-31T16:40:55.000Z
|
2021-09-12T15:12:35.000Z
|
adr/helper_functions/__init__.py
|
CeuAzul/ADR
|
09885062cfa79d19fa6370155d91466bda2a27d7
|
[
"MIT"
] | 157
|
2019-01-31T16:44:27.000Z
|
2020-10-05T12:48:50.000Z
|
adr/helper_functions/__init__.py
|
CeuAzul/ADR
|
09885062cfa79d19fa6370155d91466bda2a27d7
|
[
"MIT"
] | 3
|
2020-12-12T20:27:00.000Z
|
2021-03-13T18:44:52.000Z
|
from .algebric import rotate, translate, transform, component_vector_in_absolute_frame, component_vector_coords_in_absolute_frame
| 65
| 129
| 0.9
| 17
| 130
| 6.352941
| 0.705882
| 0.277778
| 0.277778
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.061538
| 130
| 1
| 130
| 130
| 0.885246
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
bb828ebf5870f30e726ef99c0a99afdf37059dfd
| 240
|
py
|
Python
|
winter_ddd/__init__.py
|
DmitryKhursevich/winter
|
9f3bf462f963059bab1f1bbb309ca57f8a43b46f
|
[
"MIT"
] | null | null | null |
winter_ddd/__init__.py
|
DmitryKhursevich/winter
|
9f3bf462f963059bab1f1bbb309ca57f8a43b46f
|
[
"MIT"
] | null | null | null |
winter_ddd/__init__.py
|
DmitryKhursevich/winter
|
9f3bf462f963059bab1f1bbb309ca57f8a43b46f
|
[
"MIT"
] | null | null | null |
from .aggregate_root import AggregateRoot
from .domain_event import DomainEvent
from .domain_event_handler import domain_event_handler
from .domain_event_handler import global_domain_event_dispatcher
from .domain_events import DomainEvents
| 40
| 64
| 0.895833
| 32
| 240
| 6.34375
| 0.40625
| 0.270936
| 0.221675
| 0.216749
| 0.275862
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.083333
| 240
| 5
| 65
| 48
| 0.922727
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
bb9d3b5d9324936f1c98135017083dafb3798658
| 14,314
|
py
|
Python
|
paper_uploads/cloudinary/migrations/0001_initial.py
|
dldevinc/paper-uploads
|
9414b6e6fbaa52eadacd9852ce3c4d84c6c2c939
|
[
"BSD-3-Clause"
] | 3
|
2020-06-05T10:43:05.000Z
|
2022-02-22T16:46:16.000Z
|
paper_uploads/cloudinary/migrations/0001_initial.py
|
dldevinc/paper-uploads
|
9414b6e6fbaa52eadacd9852ce3c4d84c6c2c939
|
[
"BSD-3-Clause"
] | 2
|
2021-04-03T12:25:20.000Z
|
2022-02-02T06:10:46.000Z
|
paper_uploads/cloudinary/migrations/0001_initial.py
|
dldevinc/paper-uploads
|
9414b6e6fbaa52eadacd9852ce3c4d84c6c2c939
|
[
"BSD-3-Clause"
] | null | null | null |
# Generated by Django 3.0.9 on 2020-09-15 07:08
import cloudinary.models
from django.db import migrations, models
import django.db.models.deletion
import django.db.models.manager
import django.utils.timezone
import paper_uploads.cloudinary.models.mixins
import paper_uploads.models.mixins
class Migration(migrations.Migration):
    """Initial schema for the paper_uploads Cloudinary storage backend.

    Auto-generated by Django 3.0.9 (see header comment); edit with care.
    Creates stand-alone resource models (File/Image/Media), their
    collection-item counterparts (subclasses of
    paper_uploads.CollectionItemBase via a parent-link pointer), and two
    proxy collection models over paper_uploads.Collection.
    """

    initial = True

    dependencies = [
        ('paper_uploads', '0001_initial'),
    ]

    operations = [
        # Stand-alone uploaded file stored on Cloudinary.
        migrations.CreateModel(
            name='CloudinaryFile',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('owner_app_label', models.CharField(editable=False, max_length=100)),
                ('owner_model_name', models.CharField(editable=False, max_length=100)),
                ('owner_fieldname', models.CharField(editable=False, max_length=255)),
                ('created_at', models.DateTimeField(default=django.utils.timezone.now, editable=False, verbose_name='created at')),
                ('modified_at', models.DateTimeField(auto_now=True, verbose_name='changed at')),
                ('basename', models.CharField(editable=False, help_text='Human-readable resource name', max_length=255, verbose_name='basename')),
                ('extension', models.CharField(editable=False, help_text='Lowercase, without leading dot', max_length=32, verbose_name='extension')),
                ('size', models.PositiveIntegerField(default=0, editable=False, verbose_name='size')),
                ('checksum', models.CharField(editable=False, max_length=64, verbose_name='checksum')),
                ('uploaded_at', models.DateTimeField(default=django.utils.timezone.now, editable=False, verbose_name='uploaded at')),
                ('file', cloudinary.models.CloudinaryField(max_length=255, verbose_name='file')),
                ('display_name', models.CharField(blank=True, max_length=255, verbose_name='display name')),
            ],
            options={
                'verbose_name': 'file',
                'verbose_name_plural': 'files',
                'abstract': False,
                'default_permissions': (),
            },
            bases=(paper_uploads.cloudinary.models.mixins.ReadonlyCloudinaryFileProxyMixin, paper_uploads.models.mixins.FileProxyMixin, models.Model),
        ),
        # Same file fields, but as a collection item (multi-table child of CollectionItemBase).
        migrations.CreateModel(
            name='CloudinaryFileItem',
            fields=[
                ('collectionitembase_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='paper_uploads.CollectionItemBase')),
                ('owner_app_label', models.CharField(editable=False, max_length=100)),
                ('owner_model_name', models.CharField(editable=False, max_length=100)),
                ('owner_fieldname', models.CharField(editable=False, max_length=255)),
                ('created_at', models.DateTimeField(default=django.utils.timezone.now, editable=False, verbose_name='created at')),
                ('modified_at', models.DateTimeField(auto_now=True, verbose_name='changed at')),
                ('basename', models.CharField(editable=False, help_text='Human-readable resource name', max_length=255, verbose_name='basename')),
                ('extension', models.CharField(editable=False, help_text='Lowercase, without leading dot', max_length=32, verbose_name='extension')),
                ('size', models.PositiveIntegerField(default=0, editable=False, verbose_name='size')),
                ('checksum', models.CharField(editable=False, max_length=64, verbose_name='checksum')),
                ('uploaded_at', models.DateTimeField(default=django.utils.timezone.now, editable=False, verbose_name='uploaded at')),
                ('file', cloudinary.models.CloudinaryField(max_length=255, verbose_name='file')),
                ('display_name', models.CharField(blank=True, max_length=255, verbose_name='display name')),
            ],
            options={
                'verbose_name': 'File item',
                'verbose_name_plural': 'File items',
                'abstract': False,
            },
            bases=('paper_uploads.collectionitembase', paper_uploads.cloudinary.models.mixins.ReadonlyCloudinaryFileProxyMixin, paper_uploads.models.mixins.FileProxyMixin, models.Model),
        ),
        # Stand-alone image: adds title/description, pixel dimensions and crop region.
        migrations.CreateModel(
            name='CloudinaryImage',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('owner_app_label', models.CharField(editable=False, max_length=100)),
                ('owner_model_name', models.CharField(editable=False, max_length=100)),
                ('owner_fieldname', models.CharField(editable=False, max_length=255)),
                ('created_at', models.DateTimeField(default=django.utils.timezone.now, editable=False, verbose_name='created at')),
                ('modified_at', models.DateTimeField(auto_now=True, verbose_name='changed at')),
                ('basename', models.CharField(editable=False, help_text='Human-readable resource name', max_length=255, verbose_name='basename')),
                ('extension', models.CharField(editable=False, help_text='Lowercase, without leading dot', max_length=32, verbose_name='extension')),
                ('size', models.PositiveIntegerField(default=0, editable=False, verbose_name='size')),
                ('checksum', models.CharField(editable=False, max_length=64, verbose_name='checksum')),
                ('uploaded_at', models.DateTimeField(default=django.utils.timezone.now, editable=False, verbose_name='uploaded at')),
                ('title', models.CharField(blank=True, help_text='The title is being used as a tooltip when the user hovers the mouse over the image', max_length=255, verbose_name='title')),
                ('description', models.TextField(blank=True, help_text='This text will be used by screen readers, search engines, or when the image cannot be loaded', verbose_name='description')),
                ('width', models.PositiveSmallIntegerField(default=0, editable=False, verbose_name='width')),
                ('height', models.PositiveSmallIntegerField(default=0, editable=False, verbose_name='height')),
                ('cropregion', models.CharField(blank=True, editable=False, max_length=24, verbose_name='crop region')),
                ('file', cloudinary.models.CloudinaryField(max_length=255, verbose_name='file')),
            ],
            options={
                'verbose_name': 'image',
                'verbose_name_plural': 'images',
                'abstract': False,
                'default_permissions': (),
            },
            bases=(paper_uploads.cloudinary.models.mixins.ReadonlyCloudinaryFileProxyMixin, paper_uploads.models.mixins.FileProxyMixin, models.Model),
        ),
        # Image variant as a collection item.
        migrations.CreateModel(
            name='CloudinaryImageItem',
            fields=[
                ('collectionitembase_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='paper_uploads.CollectionItemBase')),
                ('owner_app_label', models.CharField(editable=False, max_length=100)),
                ('owner_model_name', models.CharField(editable=False, max_length=100)),
                ('owner_fieldname', models.CharField(editable=False, max_length=255)),
                ('created_at', models.DateTimeField(default=django.utils.timezone.now, editable=False, verbose_name='created at')),
                ('modified_at', models.DateTimeField(auto_now=True, verbose_name='changed at')),
                ('basename', models.CharField(editable=False, help_text='Human-readable resource name', max_length=255, verbose_name='basename')),
                ('extension', models.CharField(editable=False, help_text='Lowercase, without leading dot', max_length=32, verbose_name='extension')),
                ('size', models.PositiveIntegerField(default=0, editable=False, verbose_name='size')),
                ('checksum', models.CharField(editable=False, max_length=64, verbose_name='checksum')),
                ('uploaded_at', models.DateTimeField(default=django.utils.timezone.now, editable=False, verbose_name='uploaded at')),
                ('title', models.CharField(blank=True, help_text='The title is being used as a tooltip when the user hovers the mouse over the image', max_length=255, verbose_name='title')),
                ('description', models.TextField(blank=True, help_text='This text will be used by screen readers, search engines, or when the image cannot be loaded', verbose_name='description')),
                ('width', models.PositiveSmallIntegerField(default=0, editable=False, verbose_name='width')),
                ('height', models.PositiveSmallIntegerField(default=0, editable=False, verbose_name='height')),
                ('cropregion', models.CharField(blank=True, editable=False, max_length=24, verbose_name='crop region')),
                ('file', cloudinary.models.CloudinaryField(max_length=255, verbose_name='file')),
            ],
            options={
                'verbose_name': 'Image item',
                'verbose_name_plural': 'Image items',
                'abstract': False,
            },
            bases=('paper_uploads.collectionitembase', paper_uploads.cloudinary.models.mixins.ReadonlyCloudinaryFileProxyMixin, paper_uploads.models.mixins.FileProxyMixin, models.Model),
        ),
        # Stand-alone media (audio/video) resource; same shape as CloudinaryFile.
        migrations.CreateModel(
            name='CloudinaryMedia',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('owner_app_label', models.CharField(editable=False, max_length=100)),
                ('owner_model_name', models.CharField(editable=False, max_length=100)),
                ('owner_fieldname', models.CharField(editable=False, max_length=255)),
                ('created_at', models.DateTimeField(default=django.utils.timezone.now, editable=False, verbose_name='created at')),
                ('modified_at', models.DateTimeField(auto_now=True, verbose_name='changed at')),
                ('basename', models.CharField(editable=False, help_text='Human-readable resource name', max_length=255, verbose_name='basename')),
                ('extension', models.CharField(editable=False, help_text='Lowercase, without leading dot', max_length=32, verbose_name='extension')),
                ('size', models.PositiveIntegerField(default=0, editable=False, verbose_name='size')),
                ('checksum', models.CharField(editable=False, max_length=64, verbose_name='checksum')),
                ('uploaded_at', models.DateTimeField(default=django.utils.timezone.now, editable=False, verbose_name='uploaded at')),
                ('file', cloudinary.models.CloudinaryField(max_length=255, verbose_name='file')),
                ('display_name', models.CharField(blank=True, max_length=255, verbose_name='display name')),
            ],
            options={
                'verbose_name': 'media',
                'verbose_name_plural': 'media',
                'abstract': False,
                'default_permissions': (),
            },
            bases=(paper_uploads.cloudinary.models.mixins.ReadonlyCloudinaryFileProxyMixin, paper_uploads.models.mixins.FileProxyMixin, models.Model),
        ),
        # Media variant as a collection item.
        migrations.CreateModel(
            name='CloudinaryMediaItem',
            fields=[
                ('collectionitembase_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='paper_uploads.CollectionItemBase')),
                ('owner_app_label', models.CharField(editable=False, max_length=100)),
                ('owner_model_name', models.CharField(editable=False, max_length=100)),
                ('owner_fieldname', models.CharField(editable=False, max_length=255)),
                ('created_at', models.DateTimeField(default=django.utils.timezone.now, editable=False, verbose_name='created at')),
                ('modified_at', models.DateTimeField(auto_now=True, verbose_name='changed at')),
                ('basename', models.CharField(editable=False, help_text='Human-readable resource name', max_length=255, verbose_name='basename')),
                ('extension', models.CharField(editable=False, help_text='Lowercase, without leading dot', max_length=32, verbose_name='extension')),
                ('size', models.PositiveIntegerField(default=0, editable=False, verbose_name='size')),
                ('checksum', models.CharField(editable=False, max_length=64, verbose_name='checksum')),
                ('uploaded_at', models.DateTimeField(default=django.utils.timezone.now, editable=False, verbose_name='uploaded at')),
                ('file', cloudinary.models.CloudinaryField(max_length=255, verbose_name='file')),
                ('display_name', models.CharField(blank=True, max_length=255, verbose_name='display name')),
            ],
            options={
                'verbose_name': 'Media item',
                'verbose_name_plural': 'Media items',
                'abstract': False,
            },
            bases=('paper_uploads.collectionitembase', paper_uploads.cloudinary.models.mixins.ReadonlyCloudinaryFileProxyMixin, paper_uploads.models.mixins.FileProxyMixin, models.Model),
        ),
        # Proxy over paper_uploads.Collection — no new table, just a distinct Python class.
        migrations.CreateModel(
            name='CloudinaryCollection',
            fields=[
            ],
            options={
                'proxy': True,
                'indexes': [],
            },
            bases=('paper_uploads.collection',),
            managers=[
                ('default_mgr', django.db.models.manager.Manager()),
            ],
        ),
        # Image-only proxy collection, same mechanism as above.
        migrations.CreateModel(
            name='CloudinaryImageCollection',
            fields=[
            ],
            options={
                'proxy': True,
                'indexes': [],
            },
            bases=('paper_uploads.collection',),
            managers=[
                ('default_mgr', django.db.models.manager.Manager()),
            ],
        ),
    ]
| 70.512315
| 221
| 0.641959
| 1,459
| 14,314
| 6.122001
| 0.101439
| 0.094828
| 0.0927
| 0.112853
| 0.929019
| 0.925213
| 0.925213
| 0.925213
| 0.925213
| 0.925213
| 0
| 0.014896
| 0.226142
| 14,314
| 202
| 222
| 70.861386
| 0.79146
| 0.003144
| 0
| 0.784615
| 1
| 0
| 0.203827
| 0.0232
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.035897
| 0
| 0.05641
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
3c01e2318d4f312483b4c1ca874beb6b24cc4a40
| 88,289
|
py
|
Python
|
tests/token/test_IbetStraightBond.py
|
BoostryJP/ibet-SmartContract
|
dc3f73a708ef145e7200ce58fce4e8171e21d3c2
|
[
"Apache-2.0"
] | 10
|
2021-06-12T08:43:50.000Z
|
2022-02-17T14:24:48.000Z
|
tests/token/test_IbetStraightBond.py
|
BoostryJP/ibet-SmartContract
|
dc3f73a708ef145e7200ce58fce4e8171e21d3c2
|
[
"Apache-2.0"
] | 44
|
2021-04-11T06:43:10.000Z
|
2022-03-30T12:42:32.000Z
|
tests/token/test_IbetStraightBond.py
|
BoostryJP/ibet-SmartContract
|
dc3f73a708ef145e7200ce58fce4e8171e21d3c2
|
[
"Apache-2.0"
] | 1
|
2022-03-09T07:27:57.000Z
|
2022-03-09T07:27:57.000Z
|
"""
Copyright BOOSTRY Co., Ltd.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
SPDX-License-Identifier: Apache-2.0
"""
import brownie
import pytest
import brownie_utils
def init_args():
    """Return the default constructor arguments for an IbetStraightBond deploy.

    Numeric fields are set to the uint256 maximum to exercise boundary values.
    The order of the returned list matches the contract constructor signature.
    """
    max_uint256 = 2 ** 256 - 1
    return [
        'test_bond',    # name
        'BND',          # symbol
        max_uint256,    # total supply
        max_uint256,    # face value
        '20191231',     # redemption date
        max_uint256,    # redemption value
        '20191231',     # return date
        'some_return',  # return amount
        'some_purpose'  # purpose
    ]
def issue_transferable_bond_token(issuer, exchange_address, personal_info_address):
    """Deploy a transferable IbetStraightBond and return ``(token, deploy_args)``.

    The token is deployed with small, human-readable amounts (supply 10000),
    wired to the given exchange and personal-info contracts, and flipped to
    transferable before being returned.
    """
    from brownie import IbetStraightBond

    deploy_args = [
        'test_bond',    # name
        'BND',          # symbol
        10000,          # total supply
        10000,          # face value
        '20191231',     # redemption date
        100,            # redemption value
        '20191231',     # return date
        'some_return',  # return amount
        'some_purpose'  # purpose
    ]

    token = brownie_utils.force_deploy(issuer, IbetStraightBond, *deploy_args)
    token.setTradableExchange.transact(exchange_address, {'from': issuer})
    token.setPersonalInfoAddress.transact(personal_info_address, {'from': issuer})
    token.setTransferable.transact(True, {'from': issuer})
    return token, deploy_args
# TEST_deploy
class TestDeploy:
    """Tests for the IbetStraightBond constructor (initial state)."""

    # Normal_1
    def test_normal_1(self, users, IbetStraightBond):
        issuer = users['issuer']
        deploy_args = init_args()
        bond_contract = brownie_utils.force_deploy(
            issuer,
            IbetStraightBond,
            *deploy_args
        )

        # assertion: every constructor argument is reflected in a getter
        owner_address = bond_contract.owner()
        name = bond_contract.name()
        symbol = bond_contract.symbol()
        total_supply = bond_contract.totalSupply()
        face_value = bond_contract.faceValue()
        redemption_date = bond_contract.redemptionDate()
        redemption_value = bond_contract.redemptionValue()
        return_date = bond_contract.returnDate()
        return_amount = bond_contract.returnAmount()
        purpose = bond_contract.purpose()
        transferable = bond_contract.transferable()
        balance = bond_contract.balanceOf(issuer)
        is_redeemed = bond_contract.isRedeemed()
        status = bond_contract.status()
        assert owner_address == issuer
        assert name == deploy_args[0]
        assert symbol == deploy_args[1]
        assert total_supply == deploy_args[2]
        assert face_value == deploy_args[3]
        assert redemption_date == deploy_args[4]
        assert redemption_value == deploy_args[5]
        assert return_date == deploy_args[6]
        assert return_amount == deploy_args[7]
        assert purpose == deploy_args[8]
        # defaults: not transferable, issuer holds entire supply,
        # not redeemed, status enabled
        assert transferable == False
        assert balance == total_supply
        assert is_redeemed == False
        assert status == True

        # backward compatible calls
        assert bond_contract.initialOfferingStatus() == False
        assert bond_contract.getImageURL(0) == ""
# TEST_transfer
class TestTransfer:
    """Tests for `transfer` (direct token transfer by a holder)."""

    #######################################
    # Normal
    #######################################

    # Normal_1
    # Transfer to EOA
    def test_normal_1(self, users, personal_info):
        issuer = users["issuer"]
        from_address = issuer
        to_address = users["trader"]
        transfer_amount = 100

        # issue token
        bond_token, deploy_args = issue_transferable_bond_token(
            issuer=issuer,
            exchange_address=brownie.ZERO_ADDRESS,
            personal_info_address=personal_info.address
        )

        # register personal info of to_address
        # (the recipient links their encrypted info to the issuer)
        personal_info.register.transact(
            from_address.address,
            "encrypted_message",
            {'from': to_address}
        )

        # transfer
        tx = bond_token.transfer.transact(
            to_address.address,
            transfer_amount,
            {"from": issuer}
        )

        # assertion: balances move and a Transfer event is emitted
        # NOTE: deploy_args[3] (face value) equals the total supply here
        assert bond_token.balanceOf(issuer) == deploy_args[3] - transfer_amount
        assert bond_token.balanceOf(to_address) == transfer_amount
        assert tx.events["Transfer"]["from"] == from_address
        assert tx.events["Transfer"]["to"] == to_address
        assert tx.events["Transfer"]["value"] == transfer_amount

    # Normal_2
    # Transfer to contract address
    def test_normal_2(self, users, exchange, personal_info):
        issuer = users["issuer"]
        from_address = issuer
        transfer_amount = 100

        # issue token
        bond_token, deploy_args = issue_transferable_bond_token(
            issuer=issuer,
            exchange_address=exchange.address,
            personal_info_address=personal_info.address
        )

        # transfer to the tradable exchange contract itself
        to_address = exchange.address
        tx = bond_token.transfer.transact(
            to_address,
            transfer_amount,
            {"from": from_address}
        )

        # assertion
        assert bond_token.balanceOf(from_address) == deploy_args[3] - transfer_amount
        assert bond_token.balanceOf(to_address) == transfer_amount
        assert tx.events["Transfer"]["from"] == from_address
        assert tx.events["Transfer"]["to"] == to_address
        assert tx.events["Transfer"]["value"] == transfer_amount

    #######################################
    # Error
    #######################################

    # Error_1
    # Insufficient balance
    def test_error_1(self, users, personal_info):
        issuer = users["issuer"]
        from_address = issuer
        to_address = users["trader"]

        # issue token
        bond_token, deploy_args = issue_transferable_bond_token(
            issuer=issuer,
            exchange_address=brownie.ZERO_ADDRESS,
            personal_info_address=personal_info.address
        )

        # register personal info of to_address
        personal_info.register.transact(
            from_address.address,
            "encrypted_message",
            {'from': to_address}
        )

        # transfer more than the whole supply -> revert
        transfer_amount = deploy_args[3] + 1
        with brownie.reverts(revert_msg="Sufficient balance is required."):
            bond_token.transfer.transact(
                to_address.address,
                transfer_amount,
                {"from": issuer}
            )

        # assertion: balances unchanged
        assert bond_token.balanceOf(issuer) == deploy_args[3]
        assert bond_token.balanceOf(to_address) == 0

    # Error_2
    # Cannot access private function
    def test_error_2(self, users):
        issuer = users["issuer"]
        from_address = issuer
        to_address = users["trader"]
        transfer_amount = 100

        # issue token
        bond_token, deploy_args = issue_transferable_bond_token(
            issuer=issuer,
            exchange_address=brownie.ZERO_ADDRESS,
            personal_info_address=brownie.ZERO_ADDRESS
        )

        # internal helpers must not be exposed on the contract ABI
        with pytest.raises(AttributeError):
            bond_token.isContract(to_address)
        with pytest.raises(AttributeError):
            bond_token.transferToAddress.transact(
                to_address,
                transfer_amount,
                "test_data",
                {"from": from_address}
            )
        with pytest.raises(AttributeError):
            bond_token.transferToContract.transact(
                to_address,
                transfer_amount,
                "test_data",
                {"from": from_address}
            )

    # Error_3
    # Not transferable token
    def test_error_3(self, users, IbetStraightBond):
        issuer = users["issuer"]
        to_address = users["trader"]
        transfer_amount = 100

        # issue token
        deploy_args = init_args()
        bond_token = brownie_utils.force_deploy(issuer, IbetStraightBond, *deploy_args)

        # set to not transferable
        bond_token.setTransferable(
            False,
            {"from": issuer}
        )

        # transfer -> revert
        with brownie.reverts(revert_msg="Must be transferable."):
            bond_token.transfer.transact(
                to_address,
                transfer_amount,
                {"from": issuer}
            )

        # assertion: balances unchanged
        from_balance = bond_token.balanceOf(issuer)
        to_balance = bond_token.balanceOf(to_address)
        assert from_balance == deploy_args[3]
        assert to_balance == 0

    # Error_4
    # Transfer to non-tradable exchange
    def test_error_4(self, users, exchange):
        issuer = users['issuer']
        transfer_amount = 100

        # issue token (tradable exchange deliberately unset)
        bond_token, deploy_args = issue_transferable_bond_token(
            issuer=issuer,
            exchange_address=brownie.ZERO_ADDRESS,
            personal_info_address=brownie.ZERO_ADDRESS
        )

        # transfer to a contract that is not the tradable exchange -> revert
        with brownie.reverts(revert_msg="Transfers to contract addresses are only possible to tradableExchange."):
            bond_token.transfer.transact(
                exchange,
                transfer_amount,
                {"from": issuer}
            )

        assert bond_token.balanceOf(issuer) == deploy_args[3]
        assert bond_token.balanceOf(exchange) == 0

    # Error_5
    # Transfer to an address with personal information not registered
    def test_error_5(self, users, personal_info):
        issuer = users["issuer"]
        to_address = users["trader"]
        transfer_amount = 100

        # issue token (recipient never registers personal info)
        bond_token, deploy_args = issue_transferable_bond_token(
            issuer=issuer,
            exchange_address=brownie.ZERO_ADDRESS,
            personal_info_address=personal_info.address
        )

        # transfer -> revert
        with brownie.reverts(revert_msg="The transfer is only possible if personal information is registered."):
            bond_token.transfer.transact(
                to_address.address,
                transfer_amount,
                {"from": issuer}
            )

        # assertion: balances unchanged
        assert bond_token.balanceOf(issuer) == deploy_args[3]
        assert bond_token.balanceOf(to_address) == 0
# TEST_bulkTransfer
class TestBulkTransfer:
    """Tests for `bulkTransfer` (multiple transfers in one transaction)."""

    #######################################
    # Normal
    #######################################

    # Normal_1
    # Bulk transfer to account address (1 data)
    def test_normal_1(self, users, personal_info):
        issuer = users["issuer"]
        from_address = issuer
        to_address = users["trader"]

        # issue token
        bond_token, deploy_args = issue_transferable_bond_token(
            issuer=issuer,
            exchange_address=brownie.ZERO_ADDRESS,
            personal_info_address=personal_info.address
        )

        # register personal info (to_address)
        personal_info.register.transact(
            from_address.address,
            "encrypted_message",
            {"from": to_address}
        )

        # bulk transfer with a single entry
        to_address_list = [to_address]
        amount_list = [1]
        bond_token.bulkTransfer.transact(
            to_address_list,
            amount_list,
            {"from": from_address}
        )

        # assertion
        from_balance = bond_token.balanceOf(from_address)
        to_balance = bond_token.balanceOf(to_address)
        assert from_balance == deploy_args[3] - 1
        assert to_balance == 1

    # Normal_2
    # Bulk transfer to account address (multiple data)
    def test_normal_2(self, users, personal_info):
        issuer = users["issuer"]
        from_address = issuer
        to_address = users["trader"]

        # issue token
        bond_token, deploy_args = issue_transferable_bond_token(
            issuer=issuer,
            exchange_address=brownie.ZERO_ADDRESS,
            personal_info_address=personal_info.address
        )

        # register personal info (to_address)
        personal_info.register.transact(
            from_address.address,
            "encrypted_message",
            {"from": to_address}
        )

        # bulk transfer: 100 entries of 1 token to the same recipient
        to_address_list = []
        amount_list = []
        for i in range(100):
            to_address_list.append(to_address)
            amount_list.append(1)
        bond_token.bulkTransfer.transact(
            to_address_list,
            amount_list,
            {"from": from_address}
        )

        # assertion: amounts accumulate across entries
        from_balance = bond_token.balanceOf(from_address)
        to_balance = bond_token.balanceOf(to_address)
        assert from_balance == deploy_args[3] - 100
        assert to_balance == 100

    # Normal_3
    # Bulk transfer to contract address
    def test_normal_3(self, users, exchange, personal_info):
        issuer = users["issuer"]
        from_address = issuer

        # issue token
        bond_token, deploy_args = issue_transferable_bond_token(
            issuer=issuer,
            exchange_address=exchange.address,
            personal_info_address=personal_info.address
        )

        # bulk transfer to the tradable exchange contract
        to_address_list = [exchange.address]
        amount_list = [1]
        bond_token.bulkTransfer.transact(
            to_address_list,
            amount_list,
            {"from": from_address}
        )

        # assertion
        from_balance = bond_token.balanceOf(from_address)
        to_balance = bond_token.balanceOf(exchange.address)
        assert from_balance == deploy_args[3] - 1
        assert to_balance == 1

    #######################################
    # Error
    #######################################

    # Error_1
    # Insufficient balance
    def test_error_1(self, users, personal_info):
        issuer = users['issuer']
        from_address = issuer
        to_address = users['trader']

        # issue token
        bond_token, deploy_args = issue_transferable_bond_token(
            issuer=issuer,
            exchange_address=brownie.ZERO_ADDRESS,
            personal_info_address=personal_info.address
        )

        # register personal info (to_address)
        personal_info.register.transact(
            from_address,
            "encrypted_message",
            {"from": to_address}
        )

        # bulk transfer whose second entry exceeds the remaining balance
        # -> whole transaction reverts
        with brownie.reverts():
            bond_token.bulkTransfer.transact(
                [to_address, to_address],
                [deploy_args[3], 1],
                {'from': issuer}
            )

        # assertion: nothing was transferred
        assert bond_token.balanceOf(issuer) == deploy_args[3]
        assert bond_token.balanceOf(to_address) == 0

    # Error_2
    # Not transferable token
    def test_error_2(self, users, personal_info):
        issuer = users["issuer"]
        from_address = issuer
        to_address = users["trader"]

        # issue token, then disable transfers
        bond_token, deploy_args = issue_transferable_bond_token(
            issuer=issuer,
            exchange_address=brownie.ZERO_ADDRESS,
            personal_info_address=personal_info.address
        )
        bond_token.setTransferable.transact(
            False,
            {"from": issuer}
        )

        # register personal info (to_address)
        personal_info.register.transact(
            from_address,
            "encrypted_message",
            {"from": to_address}
        )

        # bulk transfer -> revert
        with brownie.reverts():
            bond_token.bulkTransfer.transact(
                [to_address],
                [1],
                {"from": issuer}
            )

        # assertion: balances unchanged
        from_balance = bond_token.balanceOf(issuer)
        to_balance = bond_token.balanceOf(to_address)
        assert from_balance == deploy_args[3]
        assert to_balance == 0

    # Error_3
    # Transfer to an address with no personal information registered
    def test_error_3(self, users, personal_info):
        issuer = users["issuer"]
        to_address = users["trader"]

        # issue token (recipient never registers personal info)
        bond_token, deploy_args = issue_transferable_bond_token(
            issuer=issuer,
            exchange_address=brownie.ZERO_ADDRESS,
            personal_info_address=personal_info.address
        )

        # bulk transfer -> revert
        with brownie.reverts():
            bond_token.bulkTransfer.transact(
                [to_address],
                [1],
                {'from': issuer}
            )

        # assertion: balances unchanged
        from_balance = bond_token.balanceOf(issuer)
        to_balance = bond_token.balanceOf(to_address)
        assert from_balance == deploy_args[3]
        assert to_balance == 0
# TEST_transferFrom
class TestTransferFrom:
    """Tests for `transferFrom` (forced transfer by the token owner)."""

    #######################################
    # Normal
    #######################################

    # Normal_1
    def test_normal_1(self, users, personal_info):
        issuer = users['issuer']
        from_address = issuer
        to_address = users['user1']
        value = 100

        # issue token
        bond_token, deploy_args = issue_transferable_bond_token(
            issuer=issuer,
            exchange_address=brownie.ZERO_ADDRESS,
            personal_info_address=personal_info.address
        )

        # forced transfer by the issuer (no personal-info check applies)
        bond_token.transferFrom.transact(
            from_address,
            to_address,
            value,
            {'from': issuer}
        )

        # assertion
        assert bond_token.balanceOf(issuer) == deploy_args[3] - value
        assert bond_token.balanceOf(to_address) == value

    #######################################
    # Error
    #######################################

    # Error_1
    # Insufficient balance
    def test_error_1(self, users, personal_info):
        issuer = users['issuer']
        from_address = issuer
        to_address = users['user1']

        # issue token
        bond_token, deploy_args = issue_transferable_bond_token(
            issuer=issuer,
            exchange_address=brownie.ZERO_ADDRESS,
            personal_info_address=personal_info.address
        )

        # forced transfer exceeding the balance -> revert
        with brownie.reverts():
            bond_token.transferFrom.transact(
                from_address,
                to_address,
                deploy_args[3] + 1,
                {'from': issuer}
            )

        # assertion: balances unchanged
        assert bond_token.balanceOf(issuer) == deploy_args[3]
        assert bond_token.balanceOf(to_address) == 0

    # Error_2
    # Not authorized
    def test_error_2(self, users, personal_info):
        issuer = users['issuer']
        from_address = issuer
        to_address = users['user1']

        # issue token
        bond_token, deploy_args = issue_transferable_bond_token(
            issuer=issuer,
            exchange_address=brownie.ZERO_ADDRESS,
            personal_info_address=personal_info.address
        )

        # forced transfer sent from a non-owner address -> revert
        with brownie.reverts():
            bond_token.transferFrom.transact(
                from_address,
                to_address,
                deploy_args[3] + 1,
                {'from': to_address}
            )

        # assertion: balances unchanged
        assert bond_token.balanceOf(issuer) == deploy_args[3]
        assert bond_token.balanceOf(to_address) == 0
# TEST_balanceOf
class TestBalanceOf:
    """Tests for `balanceOf`."""

    #######################################
    # Normal
    #######################################

    # Normal_1
    def test_normal_1(self, users, IbetStraightBond):
        issuer = users['issuer']

        # issue token
        deploy_args = init_args()
        bond_token = brownie_utils.force_deploy(issuer, IbetStraightBond, *deploy_args)

        # assertion: the issuer initially holds the entire supply
        # (deploy_args[3] == face value == total supply in init_args)
        assert bond_token.balanceOf(issuer) == deploy_args[3]
# TEST_setTradableExchange
class TestSetTradableExchange:
    """Tests for `setTradableExchange` (owner-only exchange address update)."""

    #######################################
    # Normal
    #######################################

    # Normal_1
    def test_normal_1(self, users, IbetStraightBond):
        issuer = users['issuer']

        # issue token
        deploy_args = init_args()
        bond_token = brownie_utils.force_deploy(issuer, IbetStraightBond, *deploy_args)

        # change exchange contract
        bond_token.setTradableExchange.transact(
            brownie.ETH_ADDRESS,
            {'from': issuer}
        )

        # assertion
        assert bond_token.tradableExchange() == brownie.ETH_ADDRESS

    #######################################
    # Error
    #######################################

    # Error_1
    # Not authorized
    def test_error_1(self, users, IbetStraightBond):
        issuer = users['issuer']

        # issue token
        deploy_args = init_args()
        bond_token = brownie_utils.force_deploy(issuer, IbetStraightBond, *deploy_args)

        # change exchange contract from a non-owner address -> revert
        with brownie.reverts():
            bond_token.setTradableExchange.transact(
                brownie.ETH_ADDRESS,
                {'from': users['user1']}
            )

        # assertion: still at the default (zero address)
        assert bond_token.tradableExchange() == brownie.ZERO_ADDRESS
# TEST_setPersonalInfoAddress
class TestSetPersonalInfoAddress:
    """Tests for `setPersonalInfoAddress` (owner-only contract address update)."""

    #######################################
    # Normal
    #######################################

    # Normal_1
    def test_normal_1(self, users, IbetStraightBond):
        issuer = users['issuer']

        # issue token
        deploy_args = init_args()
        bond_token = brownie_utils.force_deploy(issuer, IbetStraightBond, *deploy_args)

        # update contract
        bond_token.setPersonalInfoAddress.transact(
            brownie.ETH_ADDRESS,
            {'from': issuer}
        )

        # assertion
        assert bond_token.personalInfoAddress() == brownie.ETH_ADDRESS

    #######################################
    # Error
    #######################################

    # Error_1
    # Not authorized
    def test_error_1(self, users, IbetStraightBond):
        issuer = users['issuer']

        # issue token
        deploy_args = init_args()
        bond_token = brownie_utils.force_deploy(issuer, IbetStraightBond, *deploy_args)

        # update from a non-owner address -> revert
        with brownie.reverts():
            bond_token.setPersonalInfoAddress.transact(
                brownie.ETH_ADDRESS,
                {'from': users['user1']}
            )

        # assertion: still at the default (zero address)
        assert bond_token.personalInfoAddress() == brownie.ZERO_ADDRESS
# TEST_setContactInformation
class TestSetContactInformation:
    """Tests for `setContactInformation` (owner-only metadata update)."""

    #######################################
    # Normal
    #######################################

    # Normal_1: issue (deploy) -> update
    def test_normal_1(self, users, IbetStraightBond):
        issuer = users['issuer']

        # issue token
        deploy_args = init_args()
        bond_token = brownie_utils.force_deploy(issuer, IbetStraightBond, *deploy_args)

        # update
        bond_token.setContactInformation.transact(
            'updated contact information',
            {'from': issuer}
        )

        # assertion
        contact_information = bond_token.contactInformation()
        assert contact_information == 'updated contact information'

    #######################################
    # Error
    #######################################

    # Error_1
    # Not authorized
    def test_error_1(self, users, IbetStraightBond):
        issuer = users['issuer']

        # issue token
        deploy_args = init_args()
        bond_token = brownie_utils.force_deploy(issuer, IbetStraightBond, *deploy_args)

        # update from a non-owner address -> revert
        with brownie.reverts():
            bond_token.setContactInformation.transact(
                'updated contact information',
                {'from': users['user1']}
            )

        # assertion: still at the default (empty string)
        contact_information = bond_token.contactInformation()
        assert contact_information == ''
# TEST_setPrivacyPolicy
class TestSetPrivacyPolicy:
    """Tests for `setPrivacyPolicy` (owner-only metadata update)."""

    #######################################
    # Normal
    #######################################

    # Normal_1
    def test_normal_1(self, users, IbetStraightBond):
        issuer = users['issuer']

        # issue token
        deploy_args = init_args()
        bond_token = brownie_utils.force_deploy(issuer, IbetStraightBond, *deploy_args)

        # update
        bond_token.setPrivacyPolicy.transact(
            'updated privacy policy',
            {'from': issuer}
        )

        # assertion
        privacy_policy = bond_token.privacyPolicy()
        assert privacy_policy == 'updated privacy policy'

    #######################################
    # Error
    #######################################

    # Error_1
    # Not authorized
    def test_error_1(self, users, IbetStraightBond):
        issuer = users['issuer']

        # issue token
        deploy_args = init_args()
        bond_token = brownie_utils.force_deploy(issuer, IbetStraightBond, *deploy_args)

        # update from a non-owner address -> revert
        with brownie.reverts():
            bond_token.setPrivacyPolicy.transact(
                'updated privacy policy',
                {'from': users['user1']}
            )

        # assertion: still at the default (empty string)
        privacy_policy = bond_token.privacyPolicy()
        assert privacy_policy == ''
# TEST_setMemo
class TestSetMemo:
    """Tests for `setMemo` (owner-only metadata update)."""

    #######################################
    # Normal
    #######################################

    # Normal_1
    def test_normal_1(self, users, IbetStraightBond):
        issuer = users['issuer']

        # issue token
        deploy_args = init_args()
        bond_token = brownie_utils.force_deploy(issuer, IbetStraightBond, *deploy_args)

        # set memo
        bond_token.setMemo.transact(
            'updated memo',
            {'from': issuer}
        )

        # assertion
        memo = bond_token.memo()
        assert memo == 'updated memo'

    #######################################
    # Error
    #######################################

    # Error_1
    # Not authorized
    def test_error_1(self, users, IbetStraightBond):
        issuer = users['issuer']

        # issue token
        deploy_args = init_args()
        bond_token = brownie_utils.force_deploy(issuer, IbetStraightBond, *deploy_args)

        # set memo from a non-owner address -> revert
        with brownie.reverts():
            bond_token.setMemo.transact(
                'updated memo',
                {'from': users['user1']}
            )

        # assertion: still at the default (empty string)
        memo = bond_token.memo()
        assert memo == ''
# TEST_setInterestRate
class TestSetInterestRate:
    """Tests for `setInterestRate` (owner-only attribute update).

    NOTE(review): the method names (`test_setInterestRate_normal_1`,
    `test_setInterestRate_error_2`) deviate from the file's
    `test_normal_N` / `test_error_N` convention; kept as-is because test
    names are part of the selection interface.
    """

    #######################################
    # Normal
    #######################################

    # Normal_1
    def test_setInterestRate_normal_1(self, users, IbetStraightBond):
        issuer = users['issuer']

        # issue token
        deploy_args = init_args()
        bond_token = brownie_utils.force_deploy(issuer, IbetStraightBond, *deploy_args)

        # update
        bond_token.setInterestRate.transact(
            123,
            {'from': issuer}
        )

        # assertion
        interest_rate = bond_token.interestRate()
        assert interest_rate == 123

    #######################################
    # Error
    #######################################

    # Error_1
    # Not authorized
    def test_setInterestRate_error_2(self, users, IbetStraightBond):
        issuer = users['issuer']

        # issue token
        deploy_args = init_args()
        bond_token = brownie_utils.force_deploy(issuer, IbetStraightBond, *deploy_args)

        # update from a non-owner address -> revert
        with brownie.reverts():
            bond_token.setInterestRate.transact(
                123,
                {'from': users['user1']}
            )

        # assertion: still at the default (0)
        interest_rate = bond_token.interestRate()
        assert interest_rate == 0
# TEST_setInterestPaymentDate
class TestSetInterestPaymentDate:
    """Tests for `setInterestPaymentDate` (owner-only attribute update;
    the value is stored as a JSON string)."""

    #######################################
    # Normal
    #######################################

    # Normal_1
    def test_normal_1(self, users, IbetStraightBond):
        issuer = users['issuer']

        # issue token
        deploy_args = init_args()
        bond_token = brownie_utils.force_deploy(issuer, IbetStraightBond, *deploy_args)

        # update
        bond_token.setInterestPaymentDate.transact(
            '{"interestPaymentDate1":"0331","interestPaymentDate2":"0930"}',
            {'from': issuer}
        )

        # assertion
        interest_payment_date = bond_token.interestPaymentDate()
        assert interest_payment_date == '{"interestPaymentDate1":"0331","interestPaymentDate2":"0930"}'

    #######################################
    # Error
    #######################################

    # Not authorized
    def test_error_1(self, users, IbetStraightBond):
        issuer = users['issuer']

        # issue token
        deploy_args = init_args()
        bond_token = brownie_utils.force_deploy(issuer, IbetStraightBond, *deploy_args)

        # update from a non-owner address -> Failure
        with brownie.reverts():
            bond_token.setInterestPaymentDate.transact(
                '{"interestPaymentDate1":"0331","interestPaymentDate2":"0930"}',
                {'from': users['user1']}
            )

        # assertion: still at the default (empty string)
        interest_payment_date = bond_token.interestPaymentDate()
        assert interest_payment_date == ''
# TEST_setTransferable
class TestSetTransferable:
    """Tests for `setTransferable` (owner-only flag update)."""

    #######################################
    # Normal
    #######################################

    # Normal_1
    def test_normal_1(self, users, IbetStraightBond):
        issuer = users['issuer']

        # issue token (transferable defaults to False, see TestDeploy)
        deploy_args = init_args()
        bond_token = brownie_utils.force_deploy(issuer, IbetStraightBond, *deploy_args)

        # update
        bond_token.setTransferable.transact(True, {'from': issuer})

        # assertion
        assert bond_token.transferable() is True

    #######################################
    # Error
    #######################################

    # Error_1
    # Not authorized
    def test_error_1(self, users, IbetStraightBond):
        issuer = users['issuer']

        # issue token
        deploy_args = init_args()
        bond_token = brownie_utils.force_deploy(issuer, IbetStraightBond, *deploy_args)

        # update from a non-owner address -> revert
        with brownie.reverts():
            bond_token.setTransferable.transact(True, {'from': users['user1']})

        # assertion: still at the default (False)
        assert bond_token.transferable() is False
# TEST_setStatus
class TestSetStatus:
    """Tests for `setStatus` (owner-only token validity flag)."""

    #######################################
    # Normal
    #######################################

    # Normal_1
    def test_normal_1(self, users, IbetStraightBond):
        issuer = users['issuer']

        # issue token (status defaults to True, see TestDeploy)
        deploy_args = init_args()
        bond_token = brownie_utils.force_deploy(issuer, IbetStraightBond, *deploy_args)

        # update
        bond_token.setStatus(False, {'from': issuer})

        # assertion
        assert bond_token.status() is False

    #######################################
    # Error
    #######################################

    # Error_1
    # Not authorized
    def test_error_1(self, users, IbetStraightBond):
        issuer = users['issuer']

        # issue token
        deploy_args = init_args()
        bond_token = brownie_utils.force_deploy(issuer, IbetStraightBond, *deploy_args)

        # update from a non-owner address -> revert
        with brownie.reverts():
            bond_token.setStatus(False, {'from': users['user1']})

        # assertion: status must remain at its default (True).
        # (Added: the original error case asserted nothing after the revert,
        # unlike every sibling error test in this file.)
        assert bond_token.status() is True
# TEST_changeOfferingStatus
class TestChangeOfferingStatus:
    """Tests for `changeOfferingStatus` (owner-only initial offering flag)."""

    #######################################
    # Normal
    #######################################

    # Normal_1
    def test_normal_1(self, users, IbetStraightBond):
        issuer = users['issuer']

        # issue token (initialOfferingStatus defaults to False, see TestDeploy)
        deploy_args = init_args()
        bond_token = brownie_utils.force_deploy(issuer, IbetStraightBond, *deploy_args)

        # update
        bond_token.changeOfferingStatus(True, {'from': issuer})

        # assertion: the offering flag itself must flip.
        # (Fixed: the original asserted `status()`, which is True by default
        # and therefore never verified the change.)
        assert bond_token.initialOfferingStatus() is True

    #######################################
    # Error
    #######################################

    # Error_1
    # Not authorized
    def test_error_1(self, users, IbetStraightBond):
        issuer = users['issuer']

        # issue token
        deploy_args = init_args()
        bond_token = brownie_utils.force_deploy(issuer, IbetStraightBond, *deploy_args)

        # update from a non-owner address -> revert
        with brownie.reverts():
            bond_token.changeOfferingStatus(True, {'from': users['user1']})

        # assertion: flag must remain at its default (False).
        # (Added: the original error case asserted nothing after the revert.)
        assert bond_token.initialOfferingStatus() is False
# TEST_applyForOffering
class TestApplyForOffering:
    """Tests for `applyForOffering` (investor application during an offering).

    An application record is the tuple
    (requested amount, allotted amount, application data string).
    """

    #######################################
    # Normal
    #######################################

    # Normal_1
    # Default value
    def test_normal_1(self, users, personal_info):
        issuer = users['issuer']

        # issue token
        bond_token, deploy_args = issue_transferable_bond_token(
            issuer=issuer,
            exchange_address=brownie.ZERO_ADDRESS,
            personal_info_address=personal_info.address
        )

        # assertion: an address that never applied has an empty record
        application = bond_token.applicationsForOffering(brownie.ETH_ADDRESS)
        assert application[0] == 0
        assert application[1] == 0
        assert application[2] == ''

    # Normal_2
    def test_normal_2(self, users, personal_info):
        issuer = users['issuer']
        applicant = users['user1']

        # issue token
        bond_token, deploy_args = issue_transferable_bond_token(
            issuer=issuer,
            exchange_address=brownie.ZERO_ADDRESS,
            personal_info_address=personal_info.address
        )

        # update offering status
        bond_token.changeOfferingStatus.transact(True, {'from': issuer})

        # register personal info of applicant
        personal_info.register.transact(
            issuer,
            "encrypted_message",
            {'from': applicant}
        )

        # apply for offering
        bond_token.applyForOffering.transact(
            10,
            'abcdefgh',
            {'from': applicant}
        )

        # assertion
        application = bond_token.applicationsForOffering(applicant)
        assert application[0] == 10
        assert application[1] == 0
        assert application[2] == 'abcdefgh'

    # Normal_3
    # Multiple applications
    def test_normal_3(self, users, personal_info):
        issuer = users['issuer']
        applicant = users['user1']

        # issue token
        bond_token, deploy_args = issue_transferable_bond_token(
            issuer=issuer,
            exchange_address=brownie.ZERO_ADDRESS,
            personal_info_address=personal_info.address
        )

        # update offering status
        bond_token.changeOfferingStatus.transact(True, {'from': issuer})

        # register personal info of applicant
        personal_info.register.transact(
            issuer,
            "encrypted_message",
            {'from': applicant}
        )

        # apply for offering (1)
        bond_token.applyForOffering.transact(
            10,
            'abcdefgh',
            {'from': applicant}
        )

        # apply for offering (2)
        bond_token.applyForOffering.transact(
            20,
            'vwxyz',
            {'from': applicant}
        )

        # assertion: the second application overwrites the first
        application = bond_token.applicationsForOffering(applicant)
        assert application[0] == 20
        assert application[1] == 0
        assert application[2] == 'vwxyz'

    #######################################
    # Error
    #######################################

    # Error_1
    # The offering status must be true.
    def test_error_1(self, users, personal_info):
        issuer = users['issuer']
        applicant = users['user1']

        # issue token (offering status left at default False)
        bond_token, deploy_args = issue_transferable_bond_token(
            issuer=issuer,
            exchange_address=brownie.ZERO_ADDRESS,
            personal_info_address=personal_info.address
        )

        # apply for offering -> revert
        with brownie.reverts():
            bond_token.applyForOffering.transact(
                10,
                'abcdefgh',
                {'from': applicant}
            )

        # assertion: no application recorded
        application = bond_token.applicationsForOffering(applicant)
        assert application[0] == 0
        assert application[1] == 0
        assert application[2] == ''

    # Error_2
    # Applicant need to register personal information.
    def test_error_2(self, users, personal_info):
        issuer = users['issuer']
        applicant = users['user1']

        # issue token
        bond_token, deploy_args = issue_transferable_bond_token(
            issuer=issuer,
            exchange_address=brownie.ZERO_ADDRESS,
            personal_info_address=personal_info.address
        )

        # update offering status
        bond_token.changeOfferingStatus.transact(True, {'from': issuer})

        # apply for offering without registering personal info -> revert
        with brownie.reverts():
            bond_token.applyForOffering.transact(
                10,
                'abcdefgh',
                {'from': applicant}
            )

        # assertion: no application recorded
        application = bond_token.applicationsForOffering(applicant)
        assert application[0] == 0
        assert application[1] == 0
        assert application[2] == ''
# TEST_allot
class TestAllot:
    """Tests for `allot` (owner assigns an allotment to an applicant)."""

    #######################################
    # Normal
    #######################################

    # Normal_1
    def test_normal_1(self, users, personal_info):
        issuer = users['issuer']
        applicant = users['user1']

        # issue token
        bond_token, deploy_args = issue_transferable_bond_token(
            issuer=issuer,
            exchange_address=brownie.ZERO_ADDRESS,
            personal_info_address=personal_info.address
        )

        # update offering status
        bond_token.changeOfferingStatus.transact(True, {'from': issuer})

        # register personal info of applicant
        personal_info.register.transact(
            issuer,
            "encrypted_message",
            {'from': applicant}
        )

        # apply for offering
        bond_token.applyForOffering.transact(
            10,
            'abcdefgh',
            {'from': applicant}
        )

        # allot 5 of the requested 10
        bond_token.allot.transact(
            applicant,
            5,
            {'from': issuer}
        )

        # assertion: (requested, allotted, data)
        application = bond_token.applicationsForOffering(applicant)
        assert application[0] == 10
        assert application[1] == 5
        assert application[2] == 'abcdefgh'

    #######################################
    # Error
    #######################################

    # Error_1
    # Not authorized
    def test_error_1(self, users, personal_info):
        issuer = users['issuer']
        applicant = users['user1']

        # issue token
        bond_token, deploy_args = issue_transferable_bond_token(
            issuer=issuer,
            exchange_address=brownie.ZERO_ADDRESS,
            personal_info_address=personal_info.address
        )

        # update offering status
        bond_token.changeOfferingStatus.transact(True, {'from': issuer})

        # allot from a non-owner address -> revert
        with brownie.reverts():
            bond_token.allot.transact(applicant, 5, {'from': applicant})

        # assertion: record unchanged (empty)
        application = bond_token.applicationsForOffering(applicant)
        assert application[0] == 0
        assert application[1] == 0
        assert application[2] == ''
# TEST_changeToRedeemed
class TestChangeToRedeemed:
    """Tests for `changeToRedeemed` (owner-only redemption flag)."""

    #######################################
    # Normal
    #######################################

    # Normal_1
    def test_redeem_normal_1(self, users, IbetStraightBond):
        issuer = users['issuer']

        # issue token (isRedeemed defaults to False, see TestDeploy)
        deploy_args = init_args()
        bond_token = brownie_utils.force_deploy(issuer, IbetStraightBond, *deploy_args)

        # redeem
        bond_token.changeToRedeemed.transact({'from': issuer})

        # assertion
        is_redeemed = bond_token.isRedeemed()
        assert is_redeemed is True

    #######################################
    # Error
    #######################################

    # Error_1
    # Not authorized
    def test_redeem_error_1(self, users, IbetStraightBond):
        issuer = users['issuer']

        # issue token
        deploy_args = init_args()
        bond_token = brownie_utils.force_deploy(issuer, IbetStraightBond, *deploy_args)

        # redeem from a non-owner address -> revert
        with brownie.reverts():
            bond_token.changeToRedeemed.transact({'from': users['user1']})

        # assertion: still at the default (False)
        is_redeemed = bond_token.isRedeemed()
        assert is_redeemed is False
# TEST_authorizeLockAddress
class TestAuthorizeLockAddress:
    """Tests for `authorizeLockAddress` (owner whitelists lock targets)."""

    #######################################
    # Normal
    #######################################

    # Normal_1
    def test_normal_1(self, users, IbetStraightBond):
        issuer = users['issuer']

        # issue token
        deploy_args = init_args()
        bond_token = brownie_utils.force_deploy(issuer, IbetStraightBond, *deploy_args)

        # authorize
        bond_token.authorizeLockAddress.transact(
            brownie.ETH_ADDRESS,
            True,
            {'from': issuer}
        )

        # assertion: only the authorized address is flagged
        assert bond_token.authorizedLockAddress(brownie.ETH_ADDRESS) is True
        assert bond_token.authorizedLockAddress(brownie.ZERO_ADDRESS) is False

    #######################################
    # Error
    #######################################

    # Error_1
    # Not authorized
    def test_error_1(self, users, IbetStraightBond):
        issuer = users['issuer']

        # issue token
        deploy_args = init_args()
        bond_token = brownie_utils.force_deploy(issuer, IbetStraightBond, *deploy_args)

        # authorize from a non-owner address -> revert
        with brownie.reverts():
            bond_token.authorizeLockAddress.transact(
                brownie.ETH_ADDRESS,
                True,
                {'from': users['user1']}
            )

        # assertion: still unauthorized
        assert bond_token.authorizedLockAddress(brownie.ETH_ADDRESS) is False
# TEST_lock/lockedOf
class TestLock:
    """Tests for lock / lockedOf."""

    #######################################
    # Normal
    #######################################

    # Normal_1
    # Lock assets to authorized addresses
    def test_normal_1(self, users, IbetStraightBond):
        issuer = users['issuer']
        holder = users['user1']
        lock_target = users['user2']
        transfer_amount = 30
        lock_amount = 10

        # issue token
        args = init_args()
        token = brownie_utils.force_deploy(issuer, IbetStraightBond, *args)

        # distribute tokens to the holder
        token.transferFrom.transact(issuer, holder, transfer_amount, {'from': issuer})

        # authorize the lock target address
        token.authorizeLockAddress.transact(lock_target, True, {'from': issuer})

        # lock
        tx = token.lock.transact(lock_target, lock_amount, {'from': holder})

        # assertion: balance moved into the locked position and the event fired
        assert token.balanceOf(holder) == transfer_amount - lock_amount
        assert token.lockedOf(lock_target, holder) == lock_amount
        assert tx.events["Lock"]["accountAddress"] == holder
        assert tx.events["Lock"]["lockAddress"] == lock_target
        assert tx.events["Lock"]["value"] == lock_amount

    # Normal_2
    # Lock assets to issuer addresses
    def test_normal_2(self, users, IbetStraightBond):
        issuer = users['issuer']
        holder = users['user1']
        transfer_amount = 30
        lock_amount = 10

        # issue token
        args = init_args()
        token = brownie_utils.force_deploy(issuer, IbetStraightBond, *args)

        # distribute tokens to the holder
        token.transferFrom.transact(issuer, holder, transfer_amount, {'from': issuer})

        # lock to the issuer address (no prior authorization step)
        token.lock.transact(issuer, lock_amount, {'from': holder})

        # assertion
        assert token.balanceOf(holder) == transfer_amount - lock_amount
        assert token.lockedOf(issuer, holder) == lock_amount

    #######################################
    # Error
    #######################################

    # Error_1
    # Insufficient balance
    def test_error_1(self, users, IbetStraightBond):
        issuer = users['issuer']
        holder = users['user1']
        transfer_amount = 30
        lock_amount = 40  # more than the holder owns

        # issue token
        args = init_args()
        token = brownie_utils.force_deploy(issuer, IbetStraightBond, *args)

        # distribute tokens to the holder
        token.transferFrom.transact(issuer, holder, transfer_amount, {'from': issuer})

        # lock beyond the balance must revert
        with brownie.reverts():
            token.lock.transact(issuer, lock_amount, {'from': holder})

        # assertion: nothing changed
        assert token.balanceOf(holder) == transfer_amount
        assert token.lockedOf(issuer, holder) == 0

    # Error_2
    # Lock assets to not authorized address
    def test_error_2(self, users, IbetStraightBond):
        issuer = users['issuer']
        holder = users['user1']
        unauthorized = users['user2']
        transfer_amount = 30
        lock_amount = 10

        # issue token
        args = init_args()
        token = brownie_utils.force_deploy(issuer, IbetStraightBond, *args)

        # distribute tokens to the holder
        token.transferFrom.transact(issuer, holder, transfer_amount, {'from': issuer})

        # lock to an address that was never authorized must revert
        with brownie.reverts():
            token.lock.transact(unauthorized, lock_amount, {'from': holder})

        # assertion: nothing changed
        assert token.balanceOf(holder) == transfer_amount
        assert token.lockedOf(issuer, holder) == 0
# TEST_unlock
class TestUnlock:
    """Tests for unlock."""

    #######################################
    # Normal
    #######################################

    # Normal_1
    # authorized addresses
    def test_normal_1(self, users, IbetStraightBond):
        issuer = users['issuer']
        user1 = users['user1']
        user2 = users['user2']
        lock_target = users['agent']
        transfer_amount = 30
        lock_amount = 20
        unlock_amount = 10

        # issue token
        args = init_args()
        token = brownie_utils.force_deploy(issuer, IbetStraightBond, *args)

        # distribute tokens and authorize the lock target
        token.transferFrom.transact(issuer, user1, transfer_amount, {'from': issuer})
        token.authorizeLockAddress.transact(lock_target, True, {'from': issuer})

        # lock
        token.lock.transact(lock_target, lock_amount, {'from': user1})

        # the lock target releases part of the locked amount to user2
        tx = token.unlock.transact(user1, user2, unlock_amount, {'from': lock_target})

        # assertion
        assert token.balanceOf(user1) == transfer_amount - lock_amount
        assert token.balanceOf(user2) == unlock_amount
        assert token.lockedOf(lock_target, user1) == lock_amount - unlock_amount
        assert tx.events["Unlock"]["accountAddress"] == user1.address
        assert tx.events["Unlock"]["lockAddress"] == lock_target.address
        assert tx.events["Unlock"]["recipientAddress"] == user2.address
        assert tx.events["Unlock"]["value"] == unlock_amount

    # Normal_2
    # issuer
    def test_normal_2(self, users, IbetStraightBond):
        issuer = users['issuer']
        user1 = users['user1']
        user2 = users['user2']
        transfer_amount = 30
        lock_amount = 20
        unlock_amount = 10

        # issue token
        args = init_args()
        token = brownie_utils.force_deploy(issuer, IbetStraightBond, *args)

        # distribute tokens to user1
        token.transferFrom.transact(issuer, user1, transfer_amount, {'from': issuer})

        # lock to the issuer
        token.lock.transact(issuer, lock_amount, {'from': user1})

        # the issuer releases part of the locked amount to user2
        tx = token.unlock.transact(user1, user2, unlock_amount, {'from': issuer})

        # assertion
        assert token.balanceOf(user1) == transfer_amount - lock_amount
        assert token.balanceOf(user2) == unlock_amount
        assert token.lockedOf(issuer, user1) == lock_amount - unlock_amount
        assert tx.events["Unlock"]["accountAddress"] == user1.address
        assert tx.events["Unlock"]["lockAddress"] == issuer.address
        assert tx.events["Unlock"]["recipientAddress"] == user2.address
        assert tx.events["Unlock"]["value"] == unlock_amount

    #######################################
    # Error
    #######################################

    # Error_1
    # Cannot unlock a quantity that exceeds the lock quantity
    def test_error_1(self, users, IbetStraightBond):
        issuer = users['issuer']
        user1 = users['user1']
        user2 = users['user2']
        transfer_amount = 30
        lock_amount = 10
        unlock_amount = 11  # one more than is locked

        # issue token
        args = init_args()
        token = brownie_utils.force_deploy(issuer, IbetStraightBond, *args)

        # distribute and lock
        token.transferFrom.transact(issuer, user1, transfer_amount, {'from': issuer})
        token.lock.transact(issuer, lock_amount, {'from': user1})

        # unlocking more than the locked quantity must revert
        with brownie.reverts():
            token.unlock.transact(user1, user2, unlock_amount, {'from': issuer})

        # assertion: locked position is untouched
        assert token.balanceOf(user1) == transfer_amount - lock_amount
        assert token.balanceOf(user2) == 0
        assert token.lockedOf(issuer, user1) == lock_amount

    # Error_2
    # Not authorized
    def test_error_2(self, users, IbetStraightBond):
        issuer = users['issuer']
        user1 = users['user1']
        user2 = users['user2']
        transfer_amount = 30
        lock_amount = 10
        unlock_amount = 3

        # issue token
        args = init_args()
        token = brownie_utils.force_deploy(issuer, IbetStraightBond, *args)

        # distribute and lock
        token.transferFrom.transact(issuer, user1, transfer_amount, {'from': issuer})
        token.lock.transact(issuer, lock_amount, {'from': user1})

        # a sender other than the lock address cannot unlock
        with brownie.reverts():
            token.unlock.transact(user1, user2, unlock_amount, {'from': user2})

        # assertion: locked position is untouched
        assert token.balanceOf(user1) == transfer_amount - lock_amount
        assert token.balanceOf(user2) == 0
        assert token.lockedOf(issuer, user1) == lock_amount
# TEST_issueFrom
class TestIssueFrom:
    """Tests for issueFrom (additional issuance)."""

    #######################################
    # Normal
    #######################################

    # Normal_1
    # Issue from issuer address
    def test_normal_1(self, users, IbetStraightBond):
        issuer = users['issuer']
        issue_amount = 10

        # issue token with a small initial supply
        args = init_args()
        args[2] = 1000
        token = brownie_utils.force_deploy(issuer, IbetStraightBond, *args)

        # additional issuance credited to the issuer's own balance
        token.issueFrom.transact(issuer, brownie.ZERO_ADDRESS, issue_amount, {'from': issuer})

        # assertion
        assert token.totalSupply() == args[2] + issue_amount
        assert token.balanceOf(issuer) == args[2] + issue_amount

    # Normal_2
    # Issue from EOA
    def test_normal_2(self, users, IbetStraightBond):
        issuer = users['issuer']
        issue_amount = 10

        # issue token with a small initial supply
        args = init_args()
        args[2] = 1000
        token = brownie_utils.force_deploy(issuer, IbetStraightBond, *args)

        # additional issuance credited to an external account
        token.issueFrom.transact(brownie.ETH_ADDRESS, brownie.ZERO_ADDRESS, issue_amount, {'from': issuer})

        # assertion: issuer balance unchanged, EOA credited
        assert token.totalSupply() == args[2] + issue_amount
        assert token.balanceOf(issuer) == args[2]
        assert token.balanceOf(brownie.ETH_ADDRESS) == issue_amount

    # Normal_3
    # Issue from locked address
    def test_normal_3(self, users, IbetStraightBond):
        issuer = users['issuer']
        lock_address = users['user1']
        lock_amount = 10
        issue_amount = 10

        # issue token with a small initial supply
        args = init_args()
        args[2] = 1000
        token = brownie_utils.force_deploy(issuer, IbetStraightBond, *args)

        # authorize the lock address and lock some of the issuer's balance
        token.authorizeLockAddress.transact(lock_address, True, {'from': issuer})
        token.lock.transact(lock_address, lock_amount, {'from': issuer})

        # additional issuance credited directly into the locked position
        token.issueFrom.transact(issuer, lock_address, issue_amount, {'from': issuer})

        # assertion
        assert token.totalSupply() == args[2] + issue_amount
        assert token.balanceOf(issuer) == args[2] - lock_amount
        assert token.lockedOf(lock_address, issuer) == lock_amount + issue_amount

    #######################################
    # Error
    #######################################

    # Error_1_1
    # Over the limit
    # issuer address
    def test_error_1_1(self, users, IbetStraightBond):
        issuer = users['issuer']

        # issue token with the default (maximum) supply
        args = init_args()
        token = brownie_utils.force_deploy(issuer, IbetStraightBond, *args)

        # issuing even one more unit must revert
        with brownie.reverts():
            token.issueFrom.transact(issuer, brownie.ZERO_ADDRESS, 1, {'from': issuer})

    # Error_1_2
    # Over the limit
    # locked address
    def test_error_1_2(self, users, IbetStraightBond):
        issuer = users['issuer']
        lock_address = users['user1']
        lock_amount = 2 ** 256 - 1
        issue_amount = 1

        # issue token with the default (maximum) supply
        args = init_args()
        token = brownie_utils.force_deploy(issuer, IbetStraightBond, *args)

        # authorize the lock address and lock the full balance
        token.authorizeLockAddress.transact(lock_address, True, {'from': issuer})
        token.lock.transact(lock_address, lock_amount, {'from': issuer})

        # issuing into the saturated locked position must revert
        with brownie.reverts():
            token.issueFrom.transact(issuer, lock_address, issue_amount, {'from': issuer})

        # assertion: locked position is untouched
        assert token.balanceOf(issuer) == args[2] - lock_amount
        assert token.lockedOf(lock_address, issuer) == lock_amount

    # Error_2
    # Not authorized
    def test_error_2(self, users, IbetStraightBond):
        issuer = users['issuer']

        # issue token
        args = init_args()
        token = brownie_utils.force_deploy(issuer, IbetStraightBond, *args)

        # a non-issuer account cannot issue
        with brownie.reverts():
            token.issueFrom.transact(issuer, brownie.ZERO_ADDRESS, 1, {'from': users['user1']})
# TEST_setFaceValue
class TestSetFaceValue:
    """Tests for setFaceValue."""

    #######################################
    # Normal
    #######################################

    # Normal_1
    def test_normal_1(self, users, IbetStraightBond):
        issuer = users['issuer']

        # issue token
        args = init_args()
        token = brownie_utils.force_deploy(issuer, IbetStraightBond, *args)

        # issuer updates the face value
        token.setFaceValue(10001, {'from': issuer})

        # assertion
        assert token.faceValue() == 10001

    #######################################
    # Error
    #######################################

    # Error_1
    # Not authorized
    def test_error_1(self, users, IbetStraightBond):
        issuer = users['issuer']

        # issue token
        args = init_args()
        token = brownie_utils.force_deploy(issuer, IbetStraightBond, *args)

        # a non-issuer account cannot update the face value
        with brownie.reverts():
            token.setFaceValue(10001, {'from': users['user1']})

        # assertion: face value keeps its deployed value
        assert token.faceValue() == args[3]
# TEST_redeemFrom
class TestRedeemFrom:
    """Tests for redeemFrom (forced redemption / burn)."""

    #######################################
    # Normal
    #######################################

    # Normal_1
    # Redeem from issuer address
    def test_normal_1(self, users, IbetStraightBond):
        issuer = users['issuer']
        redeem_amount = 10

        # issue token with a small initial supply
        args = init_args()
        args[2] = 1000
        token = brownie_utils.force_deploy(issuer, IbetStraightBond, *args)

        # redeem out of the issuer's own balance
        token.redeemFrom.transact(issuer, brownie.ZERO_ADDRESS, redeem_amount, {'from': issuer})

        # assertion: supply and issuer balance both shrink
        assert token.totalSupply() == args[2] - redeem_amount
        assert token.balanceOf(issuer) == args[2] - redeem_amount

    # Normal_2
    # Redeem from EOA
    def test_normal_2(self, users, IbetStraightBond):
        issuer = users['issuer']
        user = users['user1']
        transfer_amount = 20
        redeem_amount = 10

        # issue token with a small initial supply
        args = init_args()
        args[2] = 1000
        token = brownie_utils.force_deploy(issuer, IbetStraightBond, *args)

        # distribute tokens to the user
        token.transferFrom.transact(issuer, user, transfer_amount, {'from': issuer})

        # redeem out of the user's balance
        token.redeemFrom.transact(user, brownie.ZERO_ADDRESS, redeem_amount, {'from': issuer})

        # assertion
        assert token.totalSupply() == args[2] - redeem_amount
        assert token.balanceOf(issuer) == args[2] - transfer_amount
        assert token.balanceOf(user) == transfer_amount - redeem_amount

    # Normal_3
    # Redeem from locked address
    def test_normal_3(self, users, IbetStraightBond):
        issuer = users['issuer']
        lock_address = users['user1']
        lock_amount = 20
        redeem_amount = 10

        # issue token with a small initial supply
        args = init_args()
        args[2] = 1000
        token = brownie_utils.force_deploy(issuer, IbetStraightBond, *args)

        # authorize the lock address and lock part of the issuer's balance
        token.authorizeLockAddress.transact(lock_address, True, {'from': issuer})
        token.lock.transact(lock_address, lock_amount, {'from': issuer})

        # redeem directly out of the locked position
        token.redeemFrom.transact(issuer, lock_address, redeem_amount, {'from': issuer})

        # assertion
        assert token.totalSupply() == args[2] - redeem_amount
        assert token.balanceOf(issuer) == args[2] - lock_amount
        assert token.lockedOf(lock_address, issuer) == lock_amount - redeem_amount

    #######################################
    # Error
    #######################################

    # Error_1
    # Exceeds balance
    def test_error_1(self, users, IbetStraightBond):
        issuer = users['issuer']
        redeem_amount = 101  # one more than the supply

        # issue token
        args = init_args()
        args[2] = 100
        token = brownie_utils.force_deploy(issuer, IbetStraightBond, *args)

        # redeeming more than the balance must revert
        with brownie.reverts():
            token.redeemFrom.transact(issuer, brownie.ZERO_ADDRESS, redeem_amount, {'from': issuer})

        # assertion: nothing changed
        assert token.totalSupply() == args[2]
        assert token.balanceOf(issuer) == args[2]

    # Error_2
    # Exceeds locked quantity
    def test_error_2(self, users, IbetStraightBond):
        issuer = users['issuer']
        lock_address = users['user1']
        lock_amount = 20
        redeem_amount = 21  # one more than is locked

        # issue token
        args = init_args()
        args[2] = 100
        token = brownie_utils.force_deploy(issuer, IbetStraightBond, *args)

        # authorize the lock address and lock part of the issuer's balance
        token.authorizeLockAddress.transact(lock_address, True, {'from': issuer})
        token.lock.transact(lock_address, lock_amount, {'from': issuer})

        # redeeming more than the locked quantity must revert
        with brownie.reverts():
            token.redeemFrom.transact(issuer, lock_address, redeem_amount, {'from': issuer})

        # assertion: locked position is untouched
        assert token.totalSupply() == args[2]
        assert token.balanceOf(issuer) == args[2] - lock_amount
        assert token.lockedOf(lock_address, issuer) == lock_amount

    # Error_3
    # Not authorized
    def test_error_3(self, users, IbetStraightBond):
        issuer = users['issuer']
        redeem_amount = 100

        # issue token
        args = init_args()
        args[2] = 100
        token = brownie_utils.force_deploy(issuer, IbetStraightBond, *args)

        # a non-issuer account cannot redeem
        with brownie.reverts():
            token.redeemFrom.transact(issuer, brownie.ZERO_ADDRESS, redeem_amount, {'from': users['user1']})

        # assertion: nothing changed
        assert token.totalSupply() == args[2]
        assert token.balanceOf(issuer) == args[2]
# TEST_setRedemptionValue
class TestSetRedemptionValue:
    """Tests for setRedemptionValue."""

    #######################################
    # Normal
    #######################################

    # Normal_1
    def test_normal_1(self, users, IbetStraightBond):
        issuer = users['issuer']

        # issue token
        args = init_args()
        token = brownie_utils.force_deploy(issuer, IbetStraightBond, *args)

        # issuer updates the redemption value
        token.setRedemptionValue(10000, {'from': issuer})

        # assertion
        assert token.redemptionValue() == 10000

    #######################################
    # Error
    #######################################

    # Error_1
    # Not authorized
    def test_error_1(self, users, IbetStraightBond):
        issuer = users['issuer']

        # issue token
        args = init_args()
        token = brownie_utils.force_deploy(issuer, IbetStraightBond, *args)

        # a non-issuer account cannot update the redemption value
        with brownie.reverts():
            token.setRedemptionValue(10000, {'from': users['user1']})

        # assertion: redemption value keeps its deployed value
        assert token.redemptionValue() == args[5]
# TEST_setTransferApprovalRequired
class TestSetTransferApprovalRequired:
    """Tests for setTransferApprovalRequired."""

    #######################################
    # Normal
    #######################################

    # Normal_1
    # Default value
    def test_normal_1(self, users):
        issuer = users["issuer"]

        # issue token
        token, args = issue_transferable_bond_token(
            issuer=issuer,
            exchange_address=brownie.ZERO_ADDRESS,
            personal_info_address=brownie.ZERO_ADDRESS
        )

        # assertion: approval is not required by default
        assert token.transferApprovalRequired() == False

    # Normal_2
    def test_normal_2(self, users):
        issuer = users["issuer"]

        # issue token
        token, args = issue_transferable_bond_token(
            issuer=issuer,
            exchange_address=brownie.ZERO_ADDRESS,
            personal_info_address=brownie.ZERO_ADDRESS
        )

        # issuer enables transfer approval
        tx = token.setTransferApprovalRequired(True, {"from": issuer})

        # assertion: flag set and event emitted
        assert token.transferApprovalRequired() == True
        assert tx.events["ChangeTransferApprovalRequired"]["required"] == True

    #######################################
    # Error
    #######################################

    # Error_1
    # Not authorized
    def test_error_1(self, users, personal_info):
        issuer = users["issuer"]

        # issue token
        token, args = issue_transferable_bond_token(
            issuer=issuer,
            exchange_address=brownie.ZERO_ADDRESS,
            personal_info_address=personal_info.address
        )

        # a non-issuer account cannot change the flag
        with brownie.reverts():
            token.setTransferApprovalRequired(True, {"from": users["user1"]})

        # assertion: flag is unchanged
        assert token.transferApprovalRequired() == False
# TEST_applyForTransfer
class TestApplyForTransfer:
    """Tests for applyForTransfer."""

    #######################################
    # Normal
    #######################################

    # Normal_1
    def test_normal_1(self, users, personal_info):
        issuer = users["issuer"]
        to_address = users["user1"]
        transfer_amount = 100
        transfer_data = "test_data"

        # issue token with transfer approval enabled
        token, args = issue_transferable_bond_token(
            issuer=issuer,
            exchange_address=brownie.ZERO_ADDRESS,
            personal_info_address=personal_info.address
        )
        token.setTransferApprovalRequired(True, {"from": issuer})

        # register personal information (to_address)
        personal_info.register(issuer, "encrypted_message", {"from": to_address})

        # apply for transfer
        tx = token.applyForTransfer(to_address, transfer_amount, transfer_data, {"from": issuer})

        # assertion: amount moved to pending, application recorded, event fired
        assert token.balances(issuer) == args[3] - transfer_amount
        assert token.balances(to_address) == 0
        assert token.pendingTransfer(issuer) == transfer_amount
        assert token.applicationsForTransfer(0) == (
            issuer,
            to_address,
            transfer_amount,
            True
        )
        assert tx.events["ApplyForTransfer"]["index"] == 0
        assert tx.events["ApplyForTransfer"]["from"] == issuer
        assert tx.events["ApplyForTransfer"]["to"] == to_address
        assert tx.events["ApplyForTransfer"]["value"] == transfer_amount
        assert tx.events["ApplyForTransfer"]["data"] == transfer_data

    # Normal_2
    # Multiple execution
    def test_normal_2(self, users, personal_info):
        issuer = users["issuer"]
        to_address = users["user1"]
        transfer_amount = 100
        transfer_data = "test_data"

        # issue token with transfer approval enabled
        token, args = issue_transferable_bond_token(
            issuer=issuer,
            exchange_address=brownie.ZERO_ADDRESS,
            personal_info_address=personal_info.address
        )
        token.setTransferApprovalRequired(True, {"from": issuer})

        # register personal information (to_address)
        personal_info.register(issuer, "encrypted_message", {"from": to_address})

        # apply for transfer twice
        for _ in range(2):
            token.applyForTransfer(to_address, transfer_amount, transfer_data, {"from": issuer})

        # assertion: pending amount accumulates and both applications exist
        assert token.balances(issuer) == args[3] - transfer_amount * 2
        assert token.balances(to_address) == 0
        assert token.pendingTransfer(issuer) == transfer_amount * 2
        for index in range(2):
            assert token.applicationsForTransfer(index) == (
                issuer,
                to_address,
                transfer_amount,
                True
            )

    # Normal_3
    # Transfer to issuer
    # No need to register personal information
    def test_normal_3(self, users, personal_info):
        issuer = users["issuer"]
        to_address = issuer
        transfer_amount = 100
        transfer_data = "test_data"

        # issue token with transfer approval enabled
        token, args = issue_transferable_bond_token(
            issuer=issuer,
            exchange_address=brownie.ZERO_ADDRESS,
            personal_info_address=personal_info.address
        )
        token.setTransferApprovalRequired(True, {"from": issuer})

        # apply for transfer to the issuer itself (no personal info needed)
        token.applyForTransfer(to_address, transfer_amount, transfer_data, {"from": issuer})

        # assertion
        assert token.balances(issuer) == args[3] - transfer_amount
        assert token.pendingTransfer(issuer) == transfer_amount
        assert token.applicationsForTransfer(0) == (
            issuer,
            to_address,
            transfer_amount,
            True
        )

    #######################################
    # Error
    #######################################

    # Error_1
    # transferApprovalRequired = false
    def test_error_1(self, users):
        issuer = users["issuer"]
        to_address = users["user1"]
        transfer_amount = 100
        transfer_data = "test_data"

        # issue token (approval flag left at default: off)
        token, args = issue_transferable_bond_token(
            issuer=issuer,
            exchange_address=brownie.ZERO_ADDRESS,
            personal_info_address=brownie.ZERO_ADDRESS
        )

        # applying while approval is disabled must revert
        with brownie.reverts():
            token.applyForTransfer(to_address, transfer_amount, transfer_data, {"from": issuer})

        # assertion: nothing changed
        assert token.balances(issuer) == args[3]
        assert token.balances(to_address) == 0
        assert token.pendingTransfer(issuer) == 0

    # Error_2
    # transferable = false
    def test_error_2(self, users):
        issuer = users["issuer"]
        to_address = users["user1"]
        transfer_amount = 100
        transfer_data = "test_data"

        # issue token, enable approval, then disable transferability
        token, args = issue_transferable_bond_token(
            issuer=issuer,
            exchange_address=brownie.ZERO_ADDRESS,
            personal_info_address=brownie.ZERO_ADDRESS
        )
        token.setTransferApprovalRequired(True, {"from": issuer})
        token.setTransferable(False, {"from": issuer})

        # applying while the token is non-transferable must revert
        with brownie.reverts():
            token.applyForTransfer(to_address, transfer_amount, transfer_data, {"from": issuer})

        # assertion: nothing changed
        assert token.balances(issuer) == args[3]
        assert token.balances(to_address) == 0
        assert token.pendingTransfer(issuer) == 0

    # Error_3
    # Insufficient balance
    def test_error_3(self, users):
        issuer = users["issuer"]
        to_address = users["user1"]
        transfer_data = "test_data"

        # issue token with transfer approval enabled
        token, args = issue_transferable_bond_token(
            issuer=issuer,
            exchange_address=brownie.ZERO_ADDRESS,
            personal_info_address=brownie.ZERO_ADDRESS
        )
        token.setTransferApprovalRequired(True, {"from": issuer})

        # applying for one more than the full balance must revert
        with brownie.reverts():
            token.applyForTransfer(to_address, args[3] + 1, transfer_data, {"from": issuer})

        # assertion: nothing changed
        assert token.balances(issuer) == args[3]
        assert token.balances(to_address) == 0
        assert token.pendingTransfer(issuer) == 0

    # Error_4
    # Personal information is not registered
    def test_error_4(self, users, personal_info):
        issuer = users["issuer"]
        to_address = users["user1"]
        transfer_amount = 100
        transfer_data = "test_data"

        # issue token with transfer approval enabled
        token, args = issue_transferable_bond_token(
            issuer=issuer,
            exchange_address=brownie.ZERO_ADDRESS,
            personal_info_address=personal_info.address
        )
        token.setTransferApprovalRequired(True, {"from": issuer})

        # to_address never registered personal info -> must revert
        with brownie.reverts():
            token.applyForTransfer(to_address, transfer_amount, transfer_data, {"from": issuer})

        # assertion: nothing changed
        assert token.balances(issuer) == args[3]
        assert token.balances(to_address) == 0
        assert token.pendingTransfer(issuer) == 0
# TEST_cancelTransfer
class TestCancelTransfer:
    """Tests for cancelTransfer."""

    #######################################
    # Normal
    #######################################

    # Normal_1
    # Cancel by applicant
    def test_normal_1(self, users, personal_info):
        issuer = users["issuer"]
        user1 = users["user1"]
        user2 = users["user2"]
        transfer_amount = 100

        # issue token and distribute to user1
        token, args = issue_transferable_bond_token(
            issuer=issuer,
            exchange_address=brownie.ZERO_ADDRESS,
            personal_info_address=personal_info.address
        )
        token.transferFrom(issuer, user1, transfer_amount, {"from": issuer})
        token.setTransferApprovalRequired(True, {"from": issuer})

        # register personal information (to_address)
        personal_info.register(issuer, "encrypted_message", {"from": user2})

        # user1 applies to transfer to user2
        token.applyForTransfer(user2, transfer_amount, "test_data", {"from": user1})

        # the applicant cancels their own application
        tx = token.cancelTransfer(0, "test_data", {"from": user1})

        # assertion: balance restored, application marked invalid, event fired
        assert token.balances(issuer) == args[3] - transfer_amount
        assert token.balances(user1) == transfer_amount
        assert token.pendingTransfer(user1) == 0
        assert token.applicationsForTransfer(0) == (
            user1,
            user2,
            transfer_amount,
            False
        )
        assert tx.events["CancelTransfer"]["index"] == 0
        assert tx.events["CancelTransfer"]["from"] == user1
        assert tx.events["CancelTransfer"]["to"] == user2
        assert tx.events["CancelTransfer"]["data"] == "test_data"

    # Normal_2
    # Cancel by issuer
    def test_normal_2(self, users, personal_info):
        issuer = users["issuer"]
        user1 = users["user1"]
        user2 = users["user2"]
        transfer_amount = 100

        # issue token and distribute to user1
        token, args = issue_transferable_bond_token(
            issuer=issuer,
            exchange_address=brownie.ZERO_ADDRESS,
            personal_info_address=personal_info.address
        )
        token.transferFrom(issuer, user1, transfer_amount, {"from": issuer})
        token.setTransferApprovalRequired(True, {"from": issuer})

        # register personal information (to_address)
        personal_info.register(issuer, "encrypted_message", {"from": user2})

        # user1 applies to transfer to user2
        token.applyForTransfer(user2, transfer_amount, "test_data", {"from": user1})

        # the issuer cancels the application on the applicant's behalf
        tx = token.cancelTransfer(0, "test_data", {"from": issuer})

        # assertion: balance restored, application marked invalid, event fired
        assert token.balances(issuer) == args[3] - transfer_amount
        assert token.balances(user1) == transfer_amount
        assert token.pendingTransfer(user1) == 0
        assert token.applicationsForTransfer(0) == (
            user1,
            user2,
            transfer_amount,
            False
        )
        assert tx.events["CancelTransfer"]["index"] == 0
        assert tx.events["CancelTransfer"]["from"] == user1
        assert tx.events["CancelTransfer"]["to"] == user2
        assert tx.events["CancelTransfer"]["data"] == "test_data"

    #######################################
    # Error
    #######################################

    # Error_1
    # Not authorized
    def test_error_1(self, users, personal_info):
        issuer = users["issuer"]
        user1 = users["user1"]
        user2 = users["user2"]
        transfer_amount = 100

        # issue token and distribute to user1
        token, args = issue_transferable_bond_token(
            issuer=issuer,
            exchange_address=brownie.ZERO_ADDRESS,
            personal_info_address=personal_info.address
        )
        token.transferFrom(issuer, user1, transfer_amount, {"from": issuer})
        token.setTransferApprovalRequired(True, {"from": issuer})

        # register personal information (to_address)
        personal_info.register(issuer, "encrypted_message", {"from": user2})

        # user1 applies to transfer to user2
        token.applyForTransfer(user2, transfer_amount, "test_data", {"from": user1})

        # the recipient is neither applicant nor issuer -> must revert
        with brownie.reverts():
            token.cancelTransfer(0, "test_data", {"from": user2})

        # assertion: application still pending and valid
        assert token.balances(issuer) == args[3] - transfer_amount
        assert token.balances(user1) == 0
        assert token.pendingTransfer(user1) == transfer_amount
        assert token.applicationsForTransfer(0) == (
            user1,
            user2,
            transfer_amount,
            True
        )

    # Error_2
    # Applications that have already been cancelled cannot be cancelled.
    def test_error_2(self, users, personal_info):
        issuer = users["issuer"]
        user1 = users["user1"]
        user2 = users["user2"]
        transfer_amount = 100

        # issue token and distribute to user1
        token, args = issue_transferable_bond_token(
            issuer=issuer,
            exchange_address=brownie.ZERO_ADDRESS,
            personal_info_address=personal_info.address
        )
        token.transferFrom(issuer, user1, transfer_amount, {"from": issuer})
        token.setTransferApprovalRequired(True, {"from": issuer})

        # register personal information (to_address)
        personal_info.register(issuer, "encrypted_message", {"from": user2})

        # user1 applies to transfer to user2
        token.applyForTransfer(user2, transfer_amount, "test_data", {"from": user1})

        # first cancellation succeeds
        token.cancelTransfer(0, "test_data", {"from": user1})

        # a second cancellation of the same application must revert
        with brownie.reverts():
            token.cancelTransfer(0, "test_data", {"from": user1})

        # assertion: state reflects the single successful cancellation
        assert token.balances(issuer) == args[3] - transfer_amount
        assert token.balances(user1) == transfer_amount
        assert token.pendingTransfer(user1) == 0
        assert token.applicationsForTransfer(0) == (
            user1,
            user2,
            transfer_amount,
            False
        )
# TEST_approveTransfer
class TestApproveTransfer:
#######################################
# Normal
#######################################
# Normal_1
def test_normal_1(self, users, personal_info):
issuer = users["issuer"]
user1 = users["user1"]
transfer_amount = 100
# issue token
bond_token, deploy_args = issue_transferable_bond_token(
issuer=issuer,
exchange_address=brownie.ZERO_ADDRESS,
personal_info_address=personal_info.address
)
bond_token.setTransferApprovalRequired(
True,
{"from": issuer}
)
# register personal information (to_address)
personal_info.register(
issuer,
"encrypted_message",
{"from": user1}
)
# apply for transfer
bond_token.applyForTransfer(
user1,
transfer_amount,
"test_data",
{"from": issuer} # from issuer to user1
)
# approve transfer
tx = bond_token.approveTransfer(
0,
"test_data",
{"from": issuer}
)
# assertion
assert bond_token.balances(issuer) == deploy_args[3] - transfer_amount
assert bond_token.balances(user1) == transfer_amount
assert bond_token.pendingTransfer(issuer) == 0
assert bond_token.applicationsForTransfer(0) == (
issuer,
user1,
transfer_amount,
False
)
assert tx.events["ApproveTransfer"]["index"] == 0
assert tx.events["ApproveTransfer"]["from"] == issuer
assert tx.events["ApproveTransfer"]["to"] == user1
assert tx.events["ApproveTransfer"]["data"] == "test_data"
assert tx.events["Transfer"]["from"] == issuer
assert tx.events["Transfer"]["to"] == user1
assert tx.events["Transfer"]["value"] == transfer_amount
#######################################
# Error
#######################################
# Error_1
# Not authorized
def test_error_1(self, users, personal_info):
issuer = users["issuer"]
user1 = users["user1"]
transfer_amount = 100
# issue token
bond_token, deploy_args = issue_transferable_bond_token(
issuer=issuer,
exchange_address=brownie.ZERO_ADDRESS,
personal_info_address=personal_info.address
)
bond_token.setTransferApprovalRequired(
True,
{"from": issuer}
)
# register personal information (to_address)
personal_info.register(
issuer,
"encrypted_message",
{"from": user1}
)
# apply for transfer
bond_token.applyForTransfer(
user1,
transfer_amount,
"test_data",
{"from": issuer} # from issuer to user1
)
# approve transfer
with brownie.reverts():
bond_token.approveTransfer(
0,
"test_data",
{"from": user1}
)
# assertion
assert bond_token.balances(issuer) == deploy_args[3] - transfer_amount
assert bond_token.balances(user1) == 0
assert bond_token.pendingTransfer(issuer) == transfer_amount
assert bond_token.applicationsForTransfer(0) == (
issuer,
user1,
transfer_amount,
True
)
# Error_2
# transferable = false
def test_error_2(self, users, personal_info):
issuer = users["issuer"]
user1 = users["user1"]
transfer_amount = 100
# issue token
bond_token, deploy_args = issue_transferable_bond_token(
issuer=issuer,
exchange_address=brownie.ZERO_ADDRESS,
personal_info_address=personal_info.address
)
bond_token.setTransferApprovalRequired(
True,
{"from": issuer}
)
# register personal information (to_address)
personal_info.register(
issuer,
"encrypted_message",
{"from": user1}
)
# apply for transfer
bond_token.applyForTransfer(
user1,
transfer_amount,
"test_data",
{"from": issuer} # from issuer to user1
)
# approve transfer
bond_token.setTransferable(
False,
{"from": issuer}
)
with brownie.reverts():
bond_token.approveTransfer(
0,
"test_data",
{"from": issuer}
)
# assertion
assert bond_token.balances(issuer) == deploy_args[3] - transfer_amount
assert bond_token.balances(user1) == 0
assert bond_token.pendingTransfer(issuer) == transfer_amount
assert bond_token.applicationsForTransfer(0) == (
issuer,
user1,
transfer_amount,
True
)
# Error_3
# Applications that have already been approved cannot be approved.
def test_error_3(self, users, personal_info):
issuer = users["issuer"]
user1 = users["user1"]
transfer_amount = 100
# issue token
bond_token, deploy_args = issue_transferable_bond_token(
issuer=issuer,
exchange_address=brownie.ZERO_ADDRESS,
personal_info_address=personal_info.address
)
bond_token.setTransferApprovalRequired(
True,
{"from": issuer}
)
# register personal information (to_address)
personal_info.register(
issuer,
"encrypted_message",
{"from": user1}
)
# apply for transfer
bond_token.applyForTransfer(
user1,
transfer_amount,
"test_data",
{"from": issuer} # from issuer to user1
)
# approve transfer (1)
bond_token.approveTransfer(
0,
"test_data",
{"from": issuer}
)
# approve transfer (2)
with brownie.reverts():
bond_token.approveTransfer(
0,
"test_data",
{"from": issuer}
)
# assertion
assert bond_token.balances(issuer) == deploy_args[3] - transfer_amount
assert bond_token.balances(user1) == transfer_amount
assert bond_token.pendingTransfer(issuer) == 0
assert bond_token.applicationsForTransfer(0) == (
issuer,
user1,
transfer_amount,
False
)
| 29.061554
| 114
| 0.562471
| 8,173
| 88,289
| 5.823321
| 0.039153
| 0.08604
| 0.042232
| 0.040425
| 0.885427
| 0.855738
| 0.833719
| 0.813485
| 0.794827
| 0.770412
| 0
| 0.01467
| 0.308238
| 88,289
| 3,037
| 115
| 29.071123
| 0.764601
| 0.100533
| 0
| 0.769748
| 0
| 0
| 0.047933
| 0.002857
| 0
| 0
| 0
| 0
| 0.132213
| 1
| 0.052101
| false
| 0
| 0.002241
| 0
| 0.071709
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
3c438c9244dd67513a0dec4e97ffbc1e1aa32338
| 16,359
|
py
|
Python
|
tests/utils/test_jacobian.py
|
i-jones/captum
|
567ec6fc67ab85ce07d075b25428be22bb65e31b
|
[
"BSD-3-Clause"
] | null | null | null |
tests/utils/test_jacobian.py
|
i-jones/captum
|
567ec6fc67ab85ce07d075b25428be22bb65e31b
|
[
"BSD-3-Clause"
] | null | null | null |
tests/utils/test_jacobian.py
|
i-jones/captum
|
567ec6fc67ab85ce07d075b25428be22bb65e31b
|
[
"BSD-3-Clause"
] | null | null | null |
#!/usr/bin/env python3
import torch
import torch.nn as nn
from captum._utils.gradient import (
_compute_jacobian_wrt_params,
_compute_jacobian_wrt_params_with_sample_wise_trick,
)
from tests.helpers.basic import BaseTest, assertTensorAlmostEqual
from tests.helpers.basic_models import BasicLinearModel2, BasicLinearModel_Multilayer
class Test(BaseTest):
    """Tests for ``_compute_jacobian_wrt_params`` and its sample-wise-trick
    variant.

    Each test fixes deterministic weights on a small linear model
    (``BasicLinearModel2`` or ``BasicLinearModel_Multilayer``), feeds a known
    input, and compares the per-sample parameter gradients returned by both
    implementations against hand-derived closed-form values.  The result is
    indexed as ``grads[param_idx][sample_idx]``.  For the loss-based tests,
    the direct implementation uses ``reduction="none"`` while the sample-wise
    trick requires ``reduction="sum"`` (a wrong reduction raises
    AssertionError — see the last test).
    """

    def test_jacobian_scores_single_scalar(self) -> None:
        # scalar output, single sample: d(out)/d(w) is just the input row
        model = BasicLinearModel2(5, 1)
        model.linear.weight = nn.Parameter(torch.arange(0, 5).float().reshape(1, 5))

        a = torch.ones(5).unsqueeze(0)

        grads = _compute_jacobian_wrt_params(model, (a,))
        assertTensorAlmostEqual(self, grads[0][0], a)

        # the sample-wise trick must produce the same jacobian
        grads = _compute_jacobian_wrt_params_with_sample_wise_trick(model, (a,))
        assertTensorAlmostEqual(self, grads[0][0], a)

    def test_jacobian_scores_single_vector(self) -> None:
        # vector output: one copy of the input row per output unit
        model = BasicLinearModel2(5, 2)
        model.linear.weight = nn.Parameter(torch.arange(0, 10).view(2, 5).float())

        a = torch.ones(5).unsqueeze(0)

        grads = _compute_jacobian_wrt_params(model, (a,))
        assertTensorAlmostEqual(self, grads[0][0], torch.cat((a, a)))

        grads = _compute_jacobian_wrt_params_with_sample_wise_trick(model, (a,))
        assertTensorAlmostEqual(self, grads[0][0], torch.cat((a, a)))

    def test_jacobian_scores_single_scalar_multilayer(self) -> None:
        model = BasicLinearModel_Multilayer(5, 2, 1)
        model.linear1.weight = nn.Parameter(torch.arange(0, 10).view(2, 5).float())
        model.linear2.weight = nn.Parameter(torch.arange(1, 3).view(1, 2).float())

        a = torch.ones(5).unsqueeze(0)

        grads = _compute_jacobian_wrt_params(model, (a,))
        # linear1 gradient scales by the downstream linear2 weights (1, 2);
        # linear2 gradient is the hidden activation (10, 35)
        assertTensorAlmostEqual(self, grads[0][0], torch.cat((a, 2 * a)))
        assertTensorAlmostEqual(self, grads[1][0], torch.Tensor([[10, 35]]))

        grads = _compute_jacobian_wrt_params_with_sample_wise_trick(model, (a,))
        assertTensorAlmostEqual(self, grads[0][0], torch.cat((a, 2 * a)))
        assertTensorAlmostEqual(self, grads[1][0], torch.Tensor([[10, 35]]))

    def test_jacobian_scores_single_vector_multilayer(self) -> None:
        model = BasicLinearModel_Multilayer(5, 2, 2)
        model.linear1.weight = nn.Parameter(torch.arange(0, 10).view(2, 5).float())
        model.linear2.weight = nn.Parameter(torch.arange(0, 4).view(2, 2).float())

        a = torch.ones(5).unsqueeze(0)

        grads = _compute_jacobian_wrt_params(model, (a,))
        # column sums of linear2 weights (0+2, 1+3) scale the linear1 rows
        assertTensorAlmostEqual(self, grads[0][0], torch.cat((2 * a, 4 * a)))
        assertTensorAlmostEqual(self, grads[1][0], torch.Tensor([[10, 35], [10, 35]]))

        grads = _compute_jacobian_wrt_params_with_sample_wise_trick(model, (a,))
        assertTensorAlmostEqual(self, grads[0][0], torch.cat((2 * a, 4 * a)))
        assertTensorAlmostEqual(self, grads[1][0], torch.Tensor([[10, 35], [10, 35]]))

    def test_jacobian_scores_batch_scalar(self) -> None:
        # batch of two samples: each sample's gradient is its own input row
        model = BasicLinearModel2(5, 1)
        model.linear.weight = nn.Parameter(torch.arange(0, 5).float().reshape(1, 5))

        a = torch.stack((torch.ones(5), torch.ones(5) * 2))

        grads = _compute_jacobian_wrt_params(model, (a,))
        assertTensorAlmostEqual(self, grads[0][0], a[0:1])
        assertTensorAlmostEqual(self, grads[0][1], a[1:2])

        grads = _compute_jacobian_wrt_params_with_sample_wise_trick(model, (a,))
        assertTensorAlmostEqual(self, grads[0][0], a[0:1])
        assertTensorAlmostEqual(self, grads[0][1], a[1:2])

    def test_jacobian_scores_batch_vector(self) -> None:
        model = BasicLinearModel2(5, 2)
        model.linear.weight = nn.Parameter(torch.arange(0, 10).view(2, 5).float())

        a = torch.stack((torch.ones(5), torch.ones(5) * 2))

        grads = _compute_jacobian_wrt_params(model, (a,))
        assertTensorAlmostEqual(self, grads[0][0], torch.stack((a[0], a[0])))
        assertTensorAlmostEqual(self, grads[0][1], torch.stack((a[1], a[1])))

        grads = _compute_jacobian_wrt_params_with_sample_wise_trick(model, (a,))
        assertTensorAlmostEqual(self, grads[0][0], torch.stack((a[0], a[0])))
        assertTensorAlmostEqual(self, grads[0][1], torch.stack((a[1], a[1])))

    def test_jacobian_scores_batch_scalar_multilayer(self) -> None:
        model = BasicLinearModel_Multilayer(5, 2, 1)
        model.linear1.weight = nn.Parameter(torch.arange(0, 10).view(2, 5).float())
        model.linear2.weight = nn.Parameter(torch.arange(1, 3).view(1, 2).float())

        a = torch.stack((torch.ones(5), torch.ones(5) * 2))

        grads = _compute_jacobian_wrt_params(model, (a,))
        assertTensorAlmostEqual(self, grads[0][0], torch.stack((a[0], 2 * a[0])))
        assertTensorAlmostEqual(self, grads[1][0], torch.Tensor([[10, 35]]))
        # second sample doubles the input, doubling the hidden activations
        assertTensorAlmostEqual(self, grads[0][1], torch.stack((a[1], 2 * a[1])))
        assertTensorAlmostEqual(self, grads[1][1], torch.Tensor([[20, 70]]))

        grads = _compute_jacobian_wrt_params_with_sample_wise_trick(model, (a,))
        assertTensorAlmostEqual(self, grads[0][0], torch.stack((a[0], 2 * a[0])))
        assertTensorAlmostEqual(self, grads[1][0], torch.Tensor([[10, 35]]))
        assertTensorAlmostEqual(self, grads[0][1], torch.stack((a[1], 2 * a[1])))
        assertTensorAlmostEqual(self, grads[1][1], torch.Tensor([[20, 70]]))

    def test_jacobian_scores_batch_vector_multilayer(self) -> None:
        model = BasicLinearModel_Multilayer(5, 2, 2)
        model.linear1.weight = nn.Parameter(torch.arange(0, 10).view(2, 5).float())
        model.linear2.weight = nn.Parameter(torch.arange(0, 4).view(2, 2).float())

        a = torch.stack((torch.ones(5), torch.ones(5) * 2))

        grads = _compute_jacobian_wrt_params(model, (a,))
        assertTensorAlmostEqual(self, grads[0][0], torch.stack((2 * a[0], 4 * a[0])))
        assertTensorAlmostEqual(self, grads[1][0], torch.Tensor([[10, 35], [10, 35]]))
        assertTensorAlmostEqual(self, grads[0][1], torch.stack((2 * a[1], 4 * a[1])))
        assertTensorAlmostEqual(self, grads[1][1], torch.Tensor([[20, 70], [20, 70]]))

        grads = _compute_jacobian_wrt_params_with_sample_wise_trick(model, (a,))
        assertTensorAlmostEqual(self, grads[0][0], torch.stack((2 * a[0], 4 * a[0])))
        assertTensorAlmostEqual(self, grads[1][0], torch.Tensor([[10, 35], [10, 35]]))
        assertTensorAlmostEqual(self, grads[0][1], torch.stack((2 * a[1], 4 * a[1])))
        assertTensorAlmostEqual(self, grads[1][1], torch.Tensor([[20, 70], [20, 70]]))

    def test_jacobian_loss_single_scalar(self) -> None:
        # MSE loss gradient: 2 * (prediction - label) * d(out)/d(w)
        model = BasicLinearModel2(5, 1)
        model.linear.weight = nn.Parameter(torch.arange(0, 5).view(1, 5).float())

        a = torch.ones(5).unsqueeze(0)
        label = torch.Tensor([9])

        loss_fn = nn.MSELoss(reduction="none")
        grads = _compute_jacobian_wrt_params(model, (a,), label, loss_fn)
        assertTensorAlmostEqual(self, grads[0][0], 2 * (10 - 9) * a)

        # the sample-wise trick needs a "sum" reduction to match
        loss_fn = nn.MSELoss(reduction="sum")
        grads = _compute_jacobian_wrt_params_with_sample_wise_trick(
            model, (a,), label, loss_fn
        )
        assertTensorAlmostEqual(self, grads[0][0], 2 * (10 - 9) * a)

    def test_jacobian_loss_single_vector(self) -> None:
        model = BasicLinearModel2(5, 2)
        model.linear.weight = nn.Parameter(torch.arange(0, 10).view(2, 5).float())

        a = torch.ones(5).unsqueeze(0)
        label = torch.Tensor([[9, 38]])

        loss_fn = nn.MSELoss(reduction="none")
        grads = _compute_jacobian_wrt_params(model, (a,), label, loss_fn)
        assertTensorAlmostEqual(
            self, grads[0][0], torch.cat((2 * (10 - 9) * a, 2 * (35 - 38) * a))
        )

        loss_fn = nn.MSELoss(reduction="sum")
        grads = _compute_jacobian_wrt_params_with_sample_wise_trick(
            model, (a,), label, loss_fn
        )
        assertTensorAlmostEqual(
            self, grads[0][0], torch.cat((2 * (10 - 9) * a, 2 * (35 - 38) * a))
        )

    def test_jacobian_loss_batch_scalar(self) -> None:
        model = BasicLinearModel2(5, 1)
        model.linear.weight = nn.Parameter(torch.arange(0, 5).float().reshape(1, 5))

        a = torch.stack((torch.ones(5), torch.ones(5) * 2))
        label = torch.Tensor([[9], [18]])

        loss_fn = nn.MSELoss(reduction="none")
        grads = _compute_jacobian_wrt_params(model, (a,), label, loss_fn)
        assertTensorAlmostEqual(self, grads[0][0], 2 * (10 - 9) * a[0:1])
        assertTensorAlmostEqual(self, grads[0][1], 2 * (20 - 18) * a[1:2])

        loss_fn = nn.MSELoss(reduction="sum")
        grads = _compute_jacobian_wrt_params_with_sample_wise_trick(
            model, (a,), label, loss_fn
        )
        assertTensorAlmostEqual(self, grads[0][0], 2 * (10 - 9) * a[0:1])
        assertTensorAlmostEqual(self, grads[0][1], 2 * (20 - 18) * a[1:2])

    def test_jacobian_loss_batch_vector(self) -> None:
        model = BasicLinearModel2(5, 2)
        model.linear.weight = nn.Parameter(torch.arange(0, 10).view(2, 5).float())

        a = torch.stack((torch.ones(5), torch.ones(5) * 2))
        label = torch.Tensor([[9, 38], [18, 74]])

        loss_fn = nn.MSELoss(reduction="none")
        grads = _compute_jacobian_wrt_params(model, (a,), label, loss_fn)
        assertTensorAlmostEqual(
            self, grads[0][0], torch.stack((2 * (10 - 9) * a[0], 2 * (35 - 38) * a[0]))
        )
        assertTensorAlmostEqual(
            self, grads[0][1], torch.stack((2 * (20 - 18) * a[1], 2 * (70 - 74) * a[1]))
        )

        loss_fn = nn.MSELoss(reduction="sum")
        grads = _compute_jacobian_wrt_params_with_sample_wise_trick(
            model, (a,), label, loss_fn
        )
        assertTensorAlmostEqual(
            self, grads[0][0], torch.stack((2 * (10 - 9) * a[0], 2 * (35 - 38) * a[0]))
        )
        assertTensorAlmostEqual(
            self, grads[0][1], torch.stack((2 * (20 - 18) * a[1], 2 * (70 - 74) * a[1]))
        )

    def test_jacobian_loss_single_scalar_multilayer(self) -> None:
        model = BasicLinearModel_Multilayer(5, 2, 1)
        model.linear1.weight = nn.Parameter(torch.arange(0, 10).view(2, 5).float())
        model.linear2.weight = nn.Parameter(torch.arange(1, 3).view(1, 2).float())

        a = torch.ones(5).unsqueeze(0)
        label = torch.Tensor([[78]])

        loss_fn = nn.MSELoss(reduction="none")
        grads = _compute_jacobian_wrt_params(model, (a,), label, loss_fn)
        assertTensorAlmostEqual(
            self, grads[0][0], torch.cat((2 * (80 - 78) * a, 2 * 2 * (80 - 78) * a))
        )
        assertTensorAlmostEqual(
            self, grads[1][0], 2 * (80 - 78) * torch.Tensor([[10, 35]])
        )

        loss_fn = nn.MSELoss(reduction="sum")
        grads = _compute_jacobian_wrt_params_with_sample_wise_trick(
            model, (a,), label, loss_fn
        )
        assertTensorAlmostEqual(
            self, grads[0][0], torch.cat((2 * (80 - 78) * a, 2 * 2 * (80 - 78) * a))
        )
        assertTensorAlmostEqual(
            self, grads[1][0], 2 * (80 - 78) * torch.Tensor([[10, 35]])
        )

    def test_jacobian_loss_batch_vector_multilayer(self) -> None:
        model = BasicLinearModel_Multilayer(5, 2, 2)
        model.linear1.weight = nn.Parameter(torch.arange(0, 10).view(2, 5).float())
        model.linear2.weight = nn.Parameter(torch.arange(0, 4).view(2, 2).float())

        a = torch.stack((torch.ones(5), torch.ones(5) * 2))
        label = torch.Tensor([[33, 124], [69, 256]])

        loss_fn = nn.MSELoss(reduction="none")
        grads = _compute_jacobian_wrt_params(model, (a,), label, loss_fn)
        # linear1 gradients mix both output residuals through linear2's columns
        assertTensorAlmostEqual(
            self,
            grads[0][0],
            torch.stack(
                (
                    2 * (0 * (35 - 33) + 2 * (125 - 124)) * a[0],
                    2 * (1 * (35 - 33) + 3 * (125 - 124)) * a[0],
                )
            ),
        )
        assertTensorAlmostEqual(
            self,
            grads[1][0],
            torch.Tensor(
                [
                    [2 * (35 - 33) * 10, 2 * (35 - 33) * 35],
                    [2 * (125 - 124) * 10, 2 * (125 - 124) * 35],
                ]
            ),
        )
        assertTensorAlmostEqual(
            self,
            grads[0][1],
            torch.stack(
                (
                    2 * (0 * (70 - 69) + 2 * (250 - 256)) * a[1],
                    2 * (1 * (70 - 69) + 3 * (250 - 256)) * a[1],
                )
            ),
        )
        assertTensorAlmostEqual(
            self,
            grads[1][1],
            torch.Tensor(
                [
                    [2 * (70 - 69) * 10 * 2, 2 * (70 - 69) * 35 * 2],
                    [2 * (250 - 256) * 10 * 2, 2 * (250 - 256) * 35 * 2],
                ]
            ),
        )

        # the trick result is validated against the direct result above
        loss_fn = nn.MSELoss(reduction="sum")
        grads_h = _compute_jacobian_wrt_params_with_sample_wise_trick(
            model, (a,), label, loss_fn
        )
        assertTensorAlmostEqual(self, grads_h[0][0], grads[0][0])
        assertTensorAlmostEqual(self, grads_h[1][0], grads[1][0])
        assertTensorAlmostEqual(self, grads_h[0][1], grads[0][1])
        assertTensorAlmostEqual(self, grads_h[1][1], grads[1][1])

    def test_jacobian_loss_custom_correct(self) -> None:
        # a custom per-sample (unreduced) loss works with the direct path
        model = BasicLinearModel2(5, 2)
        model.linear.weight = nn.Parameter(torch.arange(0, 10).view(2, 5).float())

        def my_loss(out, label):
            return (out - label).pow(2)

        a = torch.stack((torch.ones(5), torch.ones(5) * 2))
        label = torch.Tensor([[9, 38], [18, 74]])

        grads = _compute_jacobian_wrt_params(model, (a,), label, my_loss)
        assertTensorAlmostEqual(
            self, grads[0][0], torch.stack((2 * (10 - 9) * a[0], 2 * (35 - 38) * a[0]))
        )
        assertTensorAlmostEqual(
            self, grads[0][1], torch.stack((2 * (20 - 18) * a[1], 2 * (70 - 74) * a[1]))
        )

    def test_jacobian_loss_custom_wrong(self) -> None:
        # a scalar (summed) custom loss is rejected by the direct path
        model = BasicLinearModel2(5, 2)
        model.linear.weight = nn.Parameter(torch.arange(0, 10).view(2, 5).float())

        def my_loss(out, label):
            return torch.sum((out - label).pow(2))

        a = torch.stack((torch.ones(5), torch.ones(5) * 2))
        label = torch.Tensor([[9, 38], [18, 74]])

        with self.assertRaises(AssertionError):
            _compute_jacobian_wrt_params(model, (a,), label, my_loss)

    def test_jacobian_loss_custom_correct_sample_wise_trick(self) -> None:
        # conversely, the trick path requires a scalar (summed) custom loss
        model = BasicLinearModel2(5, 2)
        model.linear.weight = nn.Parameter(torch.arange(0, 10).view(2, 5).float())

        def my_loss(out, label):
            return torch.sum((out - label).pow(2))

        a = torch.stack((torch.ones(5), torch.ones(5) * 2))
        label = torch.Tensor([[9, 38], [18, 74]])

        grads = _compute_jacobian_wrt_params_with_sample_wise_trick(
            model, (a,), label, my_loss  # type: ignore
        )
        assertTensorAlmostEqual(
            self, grads[0][0], torch.stack((2 * (10 - 9) * a[0], 2 * (35 - 38) * a[0]))
        )
        assertTensorAlmostEqual(
            self, grads[0][1], torch.stack((2 * (20 - 18) * a[1], 2 * (70 - 74) * a[1]))
        )

    def test_jacobian_loss_custom_wrong_sample_wise_trick(self) -> None:
        # an unreduced custom loss is rejected by the trick path
        model = BasicLinearModel2(5, 2)
        model.linear.weight = nn.Parameter(torch.arange(0, 10).view(2, 5).float())

        def my_loss(out, label):
            return (out - label).pow(2)

        a = torch.stack((torch.ones(5), torch.ones(5) * 2))
        label = torch.Tensor([[9, 38], [18, 74]])

        with self.assertRaises(AssertionError):
            _compute_jacobian_wrt_params_with_sample_wise_trick(
                model, (a,), label, my_loss  # type: ignore
            )

    def test_jacobian_loss_wrong_reduction_sample_wise_trick(self) -> None:
        # reduction="none" is incompatible with the sample-wise trick
        model = BasicLinearModel2(5, 2)
        model.linear.weight = nn.Parameter(torch.arange(0, 10).view(2, 5).float())

        loss_fn = nn.MSELoss(reduction="none")

        a = torch.stack((torch.ones(5), torch.ones(5) * 2))
        label = torch.Tensor([[9, 38], [18, 74]])

        with self.assertRaises(AssertionError):
            _compute_jacobian_wrt_params_with_sample_wise_trick(
                model, (a,), label, loss_fn
            )
| 42.601563
| 88
| 0.586588
| 2,130
| 16,359
| 4.340376
| 0.04507
| 0.186912
| 0.221525
| 0.157058
| 0.95165
| 0.935641
| 0.903299
| 0.900054
| 0.891833
| 0.872904
| 0
| 0.070764
| 0.251054
| 16,359
| 383
| 89
| 42.712794
| 0.683807
| 0.002873
| 0
| 0.711475
| 0
| 0
| 0.002821
| 0
| 0
| 0
| 0
| 0
| 0.222951
| 1
| 0.07541
| false
| 0
| 0.016393
| 0.013115
| 0.108197
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
b1f4fd54269bcee4790ce18282cadc8dd82a5fee
| 192
|
py
|
Python
|
pylurch/contract/database/exception.py
|
tingiskhan/pylurch
|
f94581f7e31fb32fd41d74e55d731147bf64e4ac
|
[
"MIT"
] | null | null | null |
pylurch/contract/database/exception.py
|
tingiskhan/pylurch
|
f94581f7e31fb32fd41d74e55d731147bf64e4ac
|
[
"MIT"
] | null | null | null |
pylurch/contract/database/exception.py
|
tingiskhan/pylurch
|
f94581f7e31fb32fd41d74e55d731147bf64e4ac
|
[
"MIT"
] | null | null | null |
from . import BaseMixin
from sqlalchemy import Column, String
class ExceptionTemplate(BaseMixin):
    """Declarative table mixin with columns for a persisted exception record.

    NOTE(review): presumably ``type_`` holds the exception's class name and
    ``message`` its text — confirm against the code that writes these rows.
    """

    # Trailing underscore avoids shadowing the ``type`` builtin.
    type_ = Column(String(), nullable=False)
    message = Column(String(), nullable=False)
| 24
| 46
| 0.75
| 21
| 192
| 6.809524
| 0.571429
| 0.251748
| 0.27972
| 0.34965
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.151042
| 192
| 7
| 47
| 27.428571
| 0.877301
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.4
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
591eaa4d528aa1b2b97070a162228f3303119764
| 103
|
py
|
Python
|
become_yukarin/dataset/__init__.py
|
nameless-writer/become-yukarin
|
90143c848ce87c04060b61c199a34fc1053f607f
|
[
"MIT"
] | 562
|
2017-12-12T04:29:47.000Z
|
2022-03-19T03:59:16.000Z
|
become_yukarin/dataset/__init__.py
|
ababa893/become-yukarin
|
7fe29af7a15dddcf8220733472cb985019aa1e87
|
[
"MIT"
] | 64
|
2018-03-10T17:26:29.000Z
|
2021-01-29T22:15:10.000Z
|
become_yukarin/dataset/__init__.py
|
ababa893/become-yukarin
|
7fe29af7a15dddcf8220733472cb985019aa1e87
|
[
"MIT"
] | 101
|
2018-02-12T03:03:09.000Z
|
2022-01-28T15:28:59.000Z
|
from . import dataset
from . import utility
from .dataset import create
from .dataset import create_sr
| 20.6
| 30
| 0.805825
| 15
| 103
| 5.466667
| 0.4
| 0.243902
| 0.414634
| 0.560976
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.15534
| 103
| 4
| 31
| 25.75
| 0.942529
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
a706beadb9cc407af83417215e6adeb8aa6887d7
| 13,509
|
py
|
Python
|
tests/test_app_routers_notifications_GET.py
|
BoostryJP/ibet-Prime
|
924e7f8da4f8feea0a572e8b5532e09bcdf2dc99
|
[
"Apache-2.0"
] | 2
|
2021-08-19T12:35:25.000Z
|
2022-02-16T04:13:38.000Z
|
tests/test_app_routers_notifications_GET.py
|
BoostryJP/ibet-Prime
|
924e7f8da4f8feea0a572e8b5532e09bcdf2dc99
|
[
"Apache-2.0"
] | 46
|
2021-09-02T03:22:05.000Z
|
2022-03-31T09:20:00.000Z
|
tests/test_app_routers_notifications_GET.py
|
BoostryJP/ibet-Prime
|
924e7f8da4f8feea0a572e8b5532e09bcdf2dc99
|
[
"Apache-2.0"
] | 1
|
2021-11-17T23:18:27.000Z
|
2021-11-17T23:18:27.000Z
|
"""
Copyright BOOSTRY Co., Ltd.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
SPDX-License-Identifier: Apache-2.0
"""
from datetime import datetime
from app.model.db import (
Notification,
NotificationType
)
from tests.account_config import config_eth_account
class TestAppRoutersNotificationsPOST:
    """Tests for GET /notifications.

    NOTE(review): the class name says POST, but every request below is a
    ``client.get`` — the name looks like a copy/paste slip; kept unchanged to
    avoid disturbing external references, confirm before renaming.
    """

    # target API endpoint
    base_url = "/notifications"

    @staticmethod
    def _prepare_notifications(db):
        """Insert the four fixture Notification rows shared by the normal-case
        tests and return ``(issuer_address_1, issuer_address_2)``.

        Rows 1-2 belong to issuer 1, rows 3-4 to issuer 2; ``created`` values
        are naive timestamps (the API responses render them as +09:00 JST).
        """
        user_1 = config_eth_account("user1")
        issuer_address_1 = user_1["address"]
        user_2 = config_eth_account("user2")
        issuer_address_2 = user_2["address"]

        # (notice_id, issuer_address, priority, type, code, metainfo, created)
        rows = [
            ("notice_id_1", issuer_address_1, 0,
             NotificationType.BULK_TRANSFER_ERROR, 0,
             {"test_1": "test_1"}, "2022/01/01 15:20:30"),  # JST 2022/01/02
            ("notice_id_2", issuer_address_1, 1,
             NotificationType.SCHEDULE_EVENT_ERROR, 1,
             {"test_2": "test_2"}, "2022/01/02 00:20:30"),  # JST 2022/01/02
            ("notice_id_3", issuer_address_2, 2,
             NotificationType.BULK_TRANSFER_ERROR, 2,
             {"test_3": "test_3"}, "2022/01/02 15:20:30"),  # JST 2022/01/03
            ("notice_id_4", issuer_address_2, 0,
             NotificationType.SCHEDULE_EVENT_ERROR, 3,
             {"test_4": "test_4"}, "2022/01/03 00:20:30"),  # JST 2022/01/03
        ]
        for notice_id, issuer_address, priority, type_, code, metainfo, created in rows:
            _notification = Notification()
            _notification.notice_id = notice_id
            _notification.issuer_address = issuer_address
            _notification.priority = priority
            _notification.type = type_
            _notification.code = code
            _notification.metainfo = metainfo
            _notification.created = datetime.strptime(created, '%Y/%m/%d %H:%M:%S')
            db.add(_notification)

        return issuer_address_1, issuer_address_2

    ###########################################################################
    # Normal Case
    ###########################################################################

    # <Normal_1>
    # Non filtered
    def test_normal_1(self, client, db):
        # prepare data
        issuer_address_1, issuer_address_2 = self._prepare_notifications(db)

        # request target API
        resp = client.get(
            self.base_url,
        )

        # assertion
        assert resp.status_code == 200
        assert resp.json() == {
            "result_set": {
                "count": 4,
                "offset": None,
                "limit": None,
                "total": 4
            },
            "notifications": [
                {
                    "notice_id": "notice_id_1",
                    "issuer_address": issuer_address_1,
                    "priority": 0,
                    "notice_type": NotificationType.BULK_TRANSFER_ERROR,
                    "notice_code": 0,
                    "metainfo": {
                        "test_1": "test_1"
                    },
                    "created": "2022-01-02T00:20:30+09:00"
                },
                {
                    "notice_id": "notice_id_2",
                    "issuer_address": issuer_address_1,
                    "priority": 1,
                    "notice_type": NotificationType.SCHEDULE_EVENT_ERROR,
                    "notice_code": 1,
                    "metainfo": {
                        "test_2": "test_2"
                    },
                    "created": "2022-01-02T09:20:30+09:00"
                },
                {
                    "notice_id": "notice_id_3",
                    "issuer_address": issuer_address_2,
                    "priority": 2,
                    "notice_type": NotificationType.BULK_TRANSFER_ERROR,
                    "notice_code": 2,
                    "metainfo": {
                        "test_3": "test_3"
                    },
                    "created": "2022-01-03T00:20:30+09:00"
                },
                {
                    "notice_id": "notice_id_4",
                    "issuer_address": issuer_address_2,
                    "priority": 0,
                    "notice_type": NotificationType.SCHEDULE_EVENT_ERROR,
                    "notice_code": 3,
                    "metainfo": {
                        "test_4": "test_4"
                    },
                    "created": "2022-01-03T09:20:30+09:00"
                },
            ]
        }

    # <Normal_2>
    # filtered
    def test_normal_2(self, client, db):
        # prepare data
        issuer_address_1, issuer_address_2 = self._prepare_notifications(db)

        # request target API: filter by notice_type and issuer-address header
        resp = client.get(
            self.base_url,
            params={
                "notice_type": NotificationType.SCHEDULE_EVENT_ERROR,
            },
            headers={
                "issuer-address": issuer_address_1,
            }
        )

        # assertion: only notice_id_2 matches both filters
        assert resp.status_code == 200
        assert resp.json() == {
            "result_set": {
                "count": 1,
                "offset": None,
                "limit": None,
                "total": 4
            },
            "notifications": [
                {
                    "notice_id": "notice_id_2",
                    "issuer_address": issuer_address_1,
                    "priority": 1,
                    "notice_type": NotificationType.SCHEDULE_EVENT_ERROR,
                    "notice_code": 1,
                    "metainfo": {
                        "test_2": "test_2"
                    },
                    "created": "2022-01-02T09:20:30+09:00"
                },
            ]
        }

    # <Normal_3>
    # limit-offset
    def test_normal_3(self, client, db):
        # prepare data
        issuer_address_1, issuer_address_2 = self._prepare_notifications(db)

        # request target API: skip the first row, take two
        resp = client.get(
            self.base_url,
            params={
                "offset": 1,
                "limit": 2
            }
        )

        # assertion
        assert resp.status_code == 200
        assert resp.json() == {
            "result_set": {
                "count": 4,
                "offset": 1,
                "limit": 2,
                "total": 4
            },
            "notifications": [
                {
                    "notice_id": "notice_id_2",
                    "issuer_address": issuer_address_1,
                    "priority": 1,
                    "notice_type": NotificationType.SCHEDULE_EVENT_ERROR,
                    "notice_code": 1,
                    "metainfo": {
                        "test_2": "test_2"
                    },
                    "created": "2022-01-02T09:20:30+09:00"
                },
                {
                    "notice_id": "notice_id_3",
                    "issuer_address": issuer_address_2,
                    "priority": 2,
                    "notice_type": NotificationType.BULK_TRANSFER_ERROR,
                    "notice_code": 2,
                    "metainfo": {
                        "test_3": "test_3"
                    },
                    "created": "2022-01-03T00:20:30+09:00"
                },
            ]
        }

    ###########################################################################
    # Error Case
    ###########################################################################

    # <Error_1>
    # Parameter Error
    def test_error_1(self, client, db):
        # request target API with a malformed issuer-address header
        resp = client.get(
            self.base_url,
            headers={
                "issuer-address": "test",
            }
        )

        # assertion
        assert resp.status_code == 422
        assert resp.json() == {
            "meta": {
                "code": 1,
                "title": "RequestValidationError"
            },
            "detail": [
                {
                    "loc": ["header", "issuer-address"],
                    "msg": "issuer-address is not a valid address",
                    "type": "value_error"
                }
            ]
        }
| 35.738095
| 113
| 0.525502
| 1,402
| 13,509
| 4.714693
| 0.116976
| 0.096369
| 0.057489
| 0.078669
| 0.845537
| 0.82829
| 0.810741
| 0.810741
| 0.790923
| 0.784871
| 0
| 0.076765
| 0.350063
| 13,509
| 377
| 114
| 35.832891
| 0.676082
| 0.078392
| 0
| 0.718033
| 0
| 0
| 0.157817
| 0.016269
| 0
| 0
| 0
| 0
| 0.02623
| 1
| 0.013115
| false
| 0
| 0.009836
| 0
| 0.029508
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
597921fe7d17176ed4cbb24e391f5261ea170710
| 12,622
|
py
|
Python
|
timelight_ai_python_api_client/api/source_group_api.py
|
timelight-ai/python-api-client
|
7e14341a89e8b7e1b4b0730416f6ddd3ef66ef39
|
[
"MIT"
] | null | null | null |
timelight_ai_python_api_client/api/source_group_api.py
|
timelight-ai/python-api-client
|
7e14341a89e8b7e1b4b0730416f6ddd3ef66ef39
|
[
"MIT"
] | null | null | null |
timelight_ai_python_api_client/api/source_group_api.py
|
timelight-ai/python-api-client
|
7e14341a89e8b7e1b4b0730416f6ddd3ef66ef39
|
[
"MIT"
] | null | null | null |
# coding: utf-8
"""
timelight
This is the timelight api. # noqa: E501
OpenAPI spec version: 1.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from timelight_ai_python_api_client.api_client import ApiClient
class SourceGroupApi(object):
    """Client for the `/v1/source-group/*` endpoints of the timelight API.

    NOTE: This class is auto generated by the swagger code generator program.
    Do not edit the class manually.
    Ref: https://github.com/swagger-api/swagger-codegen
    """

    def __init__(self, api_client=None):
        # Fall back to a default-configured ApiClient when none is injected.
        if api_client is None:
            api_client = ApiClient()
        self.api_client = api_client

    def v1_source_group_create_post(self, source_group_create_dto, **kwargs): # noqa: E501
        """Create a new source groups # noqa: E501

        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.v1_source_group_create_post(source_group_create_dto, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param SourceGroupCreateDto source_group_create_dto: (required)
        :return: SourceGroupDto
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # This convenience wrapper always returns only the deserialized body,
        # never the (data, status, headers) tuple.
        kwargs['_return_http_data_only'] = True
        if kwargs.get('async_req'):
            return self.v1_source_group_create_post_with_http_info(source_group_create_dto, **kwargs) # noqa: E501
        else:
            (data) = self.v1_source_group_create_post_with_http_info(source_group_create_dto, **kwargs) # noqa: E501
            return data

    def v1_source_group_create_post_with_http_info(self, source_group_create_dto, **kwargs): # noqa: E501
        """Create a new source groups # noqa: E501

        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.v1_source_group_create_post_with_http_info(source_group_create_dto, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param SourceGroupCreateDto source_group_create_dto: (required)
        :return: SourceGroupDto
                 If the method is called asynchronously,
                 returns the request thread.
        """
        all_params = ['source_group_create_dto'] # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # locals() snapshots self + declared args; unknown kwargs are rejected,
        # valid ones are folded into the same dict before 'kwargs' is dropped.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method v1_source_group_create_post" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'source_group_create_dto' is set
        if ('source_group_create_dto' not in params or
                params['source_group_create_dto'] is None):
            raise ValueError("Missing the required parameter `source_group_create_dto` when calling `v1_source_group_create_post`") # noqa: E501

        collection_formats = {}

        path_params = {}

        query_params = []

        header_params = {}

        form_params = []
        local_var_files = {}

        # The DTO is sent as the JSON request body.
        body_params = None
        if 'source_group_create_dto' in params:
            body_params = params['source_group_create_dto']
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json']) # noqa: E501

        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
            ['application/json']) # noqa: E501

        # Authentication setting
        auth_settings = ['bearer'] # noqa: E501

        return self.api_client.call_api(
            '/v1/source-group/create', 'POST',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='SourceGroupDto', # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)

    def v1_source_group_group_id_patch(self, source_group_patch_dto, group_id, **kwargs): # noqa: E501
        """Updates a group configuration # noqa: E501

        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.v1_source_group_group_id_patch(source_group_patch_dto, group_id, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param SourceGroupPatchDto source_group_patch_dto: (required)
        :param float group_id: (required)
        :return: SourceGroupDto
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Always return only the deserialized body (see create_post above).
        kwargs['_return_http_data_only'] = True
        if kwargs.get('async_req'):
            return self.v1_source_group_group_id_patch_with_http_info(source_group_patch_dto, group_id, **kwargs) # noqa: E501
        else:
            (data) = self.v1_source_group_group_id_patch_with_http_info(source_group_patch_dto, group_id, **kwargs) # noqa: E501
            return data

    def v1_source_group_group_id_patch_with_http_info(self, source_group_patch_dto, group_id, **kwargs): # noqa: E501
        """Updates a group configuration # noqa: E501

        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.v1_source_group_group_id_patch_with_http_info(source_group_patch_dto, group_id, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param SourceGroupPatchDto source_group_patch_dto: (required)
        :param float group_id: (required)
        :return: SourceGroupDto
                 If the method is called asynchronously,
                 returns the request thread.
        """
        all_params = ['source_group_patch_dto', 'group_id'] # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # Same locals()/kwargs validation pattern as the other generated methods.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method v1_source_group_group_id_patch" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'source_group_patch_dto' is set
        if ('source_group_patch_dto' not in params or
                params['source_group_patch_dto'] is None):
            raise ValueError("Missing the required parameter `source_group_patch_dto` when calling `v1_source_group_group_id_patch`") # noqa: E501
        # verify the required parameter 'group_id' is set
        if ('group_id' not in params or
                params['group_id'] is None):
            raise ValueError("Missing the required parameter `group_id` when calling `v1_source_group_group_id_patch`") # noqa: E501

        collection_formats = {}

        # group_id is substituted into the {groupId} path template.
        path_params = {}
        if 'group_id' in params:
            path_params['groupId'] = params['group_id'] # noqa: E501

        query_params = []

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        if 'source_group_patch_dto' in params:
            body_params = params['source_group_patch_dto']
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json']) # noqa: E501

        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
            ['application/json']) # noqa: E501

        # Authentication setting
        auth_settings = ['bearer'] # noqa: E501

        return self.api_client.call_api(
            '/v1/source-group/{groupId}', 'PATCH',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='SourceGroupDto', # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)

    def v1_source_group_list_get(self, **kwargs): # noqa: E501
        """All source groups # noqa: E501

        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.v1_source_group_list_get(async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :return: SourceGroupListDto
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Always return only the deserialized body (see create_post above).
        kwargs['_return_http_data_only'] = True
        if kwargs.get('async_req'):
            return self.v1_source_group_list_get_with_http_info(**kwargs) # noqa: E501
        else:
            (data) = self.v1_source_group_list_get_with_http_info(**kwargs) # noqa: E501
            return data

    def v1_source_group_list_get_with_http_info(self, **kwargs): # noqa: E501
        """All source groups # noqa: E501

        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.v1_source_group_list_get_with_http_info(async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :return: SourceGroupListDto
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # No positional parameters on this endpoint; only the generated
        # transport kwargs are accepted.
        all_params = [] # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method v1_source_group_list_get" % key
                )
            params[key] = val
        del params['kwargs']

        collection_formats = {}

        path_params = {}

        query_params = []

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json']) # noqa: E501

        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
            ['application/json']) # noqa: E501

        # Authentication setting
        auth_settings = ['bearer'] # noqa: E501

        return self.api_client.call_api(
            '/v1/source-group/list', 'GET',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='SourceGroupListDto', # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
| 38.717791
| 147
| 0.630883
| 1,497
| 12,622
| 4.985972
| 0.105544
| 0.084003
| 0.047026
| 0.040193
| 0.928457
| 0.910638
| 0.887594
| 0.867229
| 0.834673
| 0.817792
| 0
| 0.017466
| 0.283315
| 12,622
| 325
| 148
| 38.836923
| 0.80765
| 0.310252
| 0
| 0.71345
| 1
| 0
| 0.196764
| 0.088439
| 0
| 0
| 0
| 0
| 0
| 1
| 0.040936
| false
| 0
| 0.023392
| 0
| 0.122807
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
e6013582d2ce6243a1c1ff201793eb82b7bf2987
| 178
|
py
|
Python
|
suldocs/admin.py
|
suldoc/djangoAPI
|
dd85ffcec0fba7ec98097ebf41f374781c47f1cc
|
[
"Apache-2.0"
] | null | null | null |
suldocs/admin.py
|
suldoc/djangoAPI
|
dd85ffcec0fba7ec98097ebf41f374781c47f1cc
|
[
"Apache-2.0"
] | 1
|
2019-08-21T07:40:57.000Z
|
2019-08-21T07:40:57.000Z
|
suldocs/admin.py
|
suldoc/djangoAPI
|
dd85ffcec0fba7ec98097ebf41f374781c47f1cc
|
[
"Apache-2.0"
] | null | null | null |
from django.contrib import admin

from .models.taste_note import TasteNoteModel

# Register your models here.
# FIX: the duplicate `from django.contrib import admin` line was removed.
admin.site.register(TasteNoteModel)
| 25.428571
| 45
| 0.837079
| 24
| 178
| 6.166667
| 0.541667
| 0.135135
| 0.22973
| 0.310811
| 0.378378
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.106742
| 178
| 7
| 46
| 25.428571
| 0.930818
| 0.146067
| 0
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.75
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
e6259a113fd8677d6f9756f72ad843a860acc129
| 24,646
|
py
|
Python
|
SPConvNets/models/epn_gcn_netvlad.py
|
chienerh/EPN_PointCloud
|
d1488cf1ff82a5bc7ac89c28df30fa2f3f2e0e30
|
[
"MIT"
] | null | null | null |
SPConvNets/models/epn_gcn_netvlad.py
|
chienerh/EPN_PointCloud
|
d1488cf1ff82a5bc7ac89c28df30fa2f3f2e0e30
|
[
"MIT"
] | null | null | null |
SPConvNets/models/epn_gcn_netvlad.py
|
chienerh/EPN_PointCloud
|
d1488cf1ff82a5bc7ac89c28df30fa2f3f2e0e30
|
[
"MIT"
] | null | null | null |
"""
network architechture for place recognition (Oxford dataset) with attention
"""
import math
import os
import numpy as np
import torch
import torch.nn as nn
import torch.nn.functional as F
from torch.nn.modules.batchnorm import _BatchNorm
import time
from collections import OrderedDict
import json
import vgtk
import SPConvNets.utils as M
import vgtk.spconv.functional as L
import SPConvNets.models.pr_so3net_pn as frontend
import SPConvNets.models.gcn as attention
from SPConvNets.models.pointnet_epn_netvlad import STN3d
import config as cfg
def split_train(x, model):
    """Run ``model`` on each partition of a training tuple and concatenate.

    The leading batch dimension of ``x`` encodes the tuple layout
    (sizes come from cfg — presumably 1 query, TRAIN_POSITIVES_PER_QUERY
    positives, TRAIN_NEGATIVES_PER_QUERY negatives, 1 other-negative;
    confirm against the data loader):
        B >= 4 -> [query | positives | negatives | other-negative]
        B == 3 -> [query | positives | negatives]
        B == 2 -> [query | positives]
        B == 1 -> single cloud, no splitting

    :param x: point-cloud batch tensor, shape (B, N, D)
    :param model: callable returning a ``(features, aux)`` pair per sub-batch
    :return: frontend features concatenated in the original batch order
    :raises ValueError: for an unsupported batch size (B == 0).
        Previously this case only printed a message and then crashed with
        UnboundLocalError on the return statement.
    """
    if x.shape[0] >= 4:
        query_pcd, pos_pcd, neg_pcd, otherneg_pcd = torch.split(
            x, [1, cfg.TRAIN_POSITIVES_PER_QUERY, cfg.TRAIN_NEGATIVES_PER_QUERY, 1], dim=0)
        x_query, _ = model(query_pcd)
        x_pos, _ = model(pos_pcd)
        x_neg, _ = model(neg_pcd)
        x_otherneg, _ = model(otherneg_pcd)
        x_frontend = torch.cat((x_query, x_pos, x_neg, x_otherneg), 0)
    elif x.shape[0] == 3:
        query_pcd, pos_pcd, neg_pcd = torch.split(
            x, [1, cfg.TRAIN_POSITIVES_PER_QUERY, cfg.TRAIN_NEGATIVES_PER_QUERY], dim=0)
        x_query, _ = model(query_pcd)
        x_pos, _ = model(pos_pcd)
        x_neg, _ = model(neg_pcd)
        x_frontend = torch.cat((x_query, x_pos, x_neg), 0)
    elif x.shape[0] == 2:
        query_pcd, pos_pcd = torch.split(
            x, [1, cfg.TRAIN_POSITIVES_PER_QUERY], dim=0)
        x_query, _ = model(query_pcd)
        x_pos, _ = model(pos_pcd)
        x_frontend = torch.cat((x_query, x_pos), 0)
    elif x.shape[0] == 1:
        x_frontend, _ = model(x)
    else:
        # Fail loudly instead of print + UnboundLocalError.
        raise ValueError('split_train: unsupported batch size %d' % x.shape[0])
    return x_frontend
class EPN_GCN_NetVLAD(nn.Module):
    """Place-recognition network: EPN frontend -> GCN attention -> NetVLAD.

    forward() returns (global descriptor, frontend local features).
    """
    def __init__(self, opt):
        super(EPN_GCN_NetVLAD, self).__init__()
        self.opt = opt
        # epn param
        mlps=[[64,64], [128, 128]]
        out_mlps=[128, self.opt.model.output_num]
        strides=[1, 1]
        self.epn = frontend.build_model(self.opt, mlps, out_mlps, strides)
        # GCN with alternating self/cross/self attention layers.
        self.gnn = attention.GCN(4, self.opt.model.output_num, 10, ['self','cross','self'])
        self.netvlad = M.NetVLADLoupe(feature_size=self.opt.model.output_num, max_samples=self.opt.num_selected_points, cluster_size=64,
                                      output_dim=self.opt.global_feature_dim, gating=True, add_batch_norm=True,
                                      is_training=True)

    def forward(self, x):
        '''
        INPUT: B, N, D
        Local Feature: B, 128, self.opt.model.output_num
        Global Feature: B, self.opt.global_feature_dim
        '''
        # Evenly spaced downsampling indices; assumes cfg.NUM_POINTS is an
        # exact multiple of cfg.NUM_SELECTED_POINTS -- TODO confirm.
        select_index = np.arange(0, cfg.NUM_POINTS, cfg.NUM_POINTS//cfg.NUM_SELECTED_POINTS)
        # select_index = torch.randint(0, cfg.NUM_POINTS, (cfg.NUM_SELECTED_POINTS,))
        if x.shape[0] >=4:
            ####################################################
            # STEP 1: Frontend, learn invariant local features #
            ####################################################
            # input point cloud with shape [B, N, 3]
            query_pcd, pos_pcd, neg_pcd, otherneg_pcd = torch.split(
                x, [1, cfg.TRAIN_POSITIVES_PER_QUERY, cfg.TRAIN_NEGATIVES_PER_QUERY, 1], dim=0)
            # reduce size of point cloud
            query_pcd = query_pcd[:,select_index, :]
            pos_pcd = pos_pcd[:,select_index, :]
            neg_pcd = neg_pcd[:,select_index, :]
            otherneg_pcd = otherneg_pcd[:,select_index, :]
            x_downsize = torch.cat((query_pcd, pos_pcd, neg_pcd, otherneg_pcd), 0)
            # print('x after downsize', x_downsize.shape)
            # Each partition goes through the EPN frontend separately.
            x_query, _ = self.epn(query_pcd)
            x_pos, _ = self.epn(pos_pcd)
            x_neg, _ = self.epn(neg_pcd)
            x_otherneg, _ = self.epn(otherneg_pcd)
            x_frontend = torch.cat((x_query, x_pos, x_neg, x_otherneg), 0)
            # local features with shape [B, C, N]
            # print('x after epn', x_frontend.shape)
        elif x.shape[0] == 3:
            ####################################################
            # STEP 1: Frontend, learn invariant local features #
            ####################################################
            # input point cloud with shape [B, N, 3]
            query_pcd, pos_pcd, neg_pcd = torch.split(
                x, [1, cfg.TRAIN_POSITIVES_PER_QUERY, cfg.TRAIN_NEGATIVES_PER_QUERY], dim=0)
            # reduce size of point cloud
            query_pcd = query_pcd[:,select_index, :]
            pos_pcd = pos_pcd[:,select_index, :]
            neg_pcd = neg_pcd[:,select_index, :]
            x_downsize = torch.cat((query_pcd, pos_pcd, neg_pcd), 0)
            # print('x after downsize', x_downsize.shape)
            x_query, _ = self.epn(query_pcd)
            x_pos, _ = self.epn(pos_pcd)
            x_neg, _ = self.epn(neg_pcd)
            x_frontend = torch.cat((x_query, x_pos, x_neg), 0)
            # local features with shape [B, C, N]
            # print('x after epn', x_frontend.shape)
        elif x.shape[0] == 2:
            ####################################################
            # STEP 1: Frontend, learn invariant local features #
            ####################################################
            # input point cloud with shape [B, N, 3]
            query_pcd, pos_pcd = torch.split(
                x, [1, cfg.TRAIN_POSITIVES_PER_QUERY], dim=0)
            # reduce size of point cloud
            query_pcd = query_pcd[:,select_index, :]
            pos_pcd = pos_pcd[:,select_index, :]
            x_downsize = torch.cat((query_pcd, pos_pcd), 0)
            # print('x after downsize', x_downsize.shape)
            x_query, _ = self.epn(query_pcd)
            x_pos, _ = self.epn(pos_pcd)
            x_frontend = torch.cat((x_query, x_pos), 0)
            # local features with shape [B, C, N]
            # print('x after epn', x_frontend.shape)
        elif x.shape[0] == 1:
            x_downsize = x[:,select_index, :]
            # print('x after downsize', x_downsize.shape)
            x_frontend, _ = self.epn(x_downsize)
            # print('x after epn', x_frontend.shape)
        else:
            # NOTE(review): batch size 0 only prints here, then the next line
            # crashes on undefined x_downsize/x_frontend -- confirm intent.
            print('x.shape[0]', x.shape[0])
        ######################################################
        # STEP 2: Attention, learn co-contextual information #
        ######################################################
        x_gcn = self.gnn(x_downsize, x_frontend)
        # print('x after gcn', x_gcn.shape)
        ###################################################################
        # STEP 3: NetVLAD, learn global descriptors for place recognition #
        ###################################################################
        x = self.netvlad(x_gcn)
        return x, x_frontend
class EPN_Atten_NetVLAD(nn.Module):
    """EPN local features refined by multi-head self-attention, pooled by NetVLAD.

    forward() returns (global descriptor, frontend local features).
    """

    def __init__(self, opt):
        super(EPN_Atten_NetVLAD, self).__init__()
        self.opt = opt
        feature_dim = self.opt.model.output_num
        # EPN frontend configuration.
        mlp_spec = [[64]]
        out_spec = [64, feature_dim]
        stride_spec = [1,1]
        self.epn = frontend.build_model(self.opt, mlp_spec, out_spec, stride_spec, downsample=False, outblock='linear')
        # 4-head self-attention over the frontend feature dimension.
        self.atten = torch.nn.MultiheadAttention(feature_dim, 4)
        self.netvlad = M.NetVLADLoupe(feature_size=feature_dim, max_samples=self.opt.num_selected_points, cluster_size=64,
                                      output_dim=self.opt.global_feature_dim, gating=True, add_batch_norm=True,
                                      is_training=True)

    def forward(self, x):
        """Input x: (B, N, D) point clouds; output (global feature, local features)."""
        local_feat = split_train(x, self.epn)
        attended, _weights = self.atten(local_feat, local_feat, local_feat)
        global_feat = self.netvlad(attended)
        return global_feat, local_feat
class Atten_EPN_NetVLAD(nn.Module):
    """Self-attention on raw 3-D points, then EPN frontend, then NetVLAD.

    forward() returns (global descriptor, frontend local features).
    """

    def __init__(self, opt):
        super(Atten_EPN_NetVLAD, self).__init__()
        self.opt = opt
        out_dim = self.opt.model.output_num
        # Attention over raw coordinates: embed dim 3, 3 heads.
        self.atten = torch.nn.MultiheadAttention(3, 3)
        # EPN frontend configuration (no downsampling, linear out-block).
        self.epn = frontend.build_model(self.opt, [[64]], [64, out_dim], [1,1], downsample=False, outblock='linear')
        self.netvlad = M.NetVLADLoupe(feature_size=out_dim, max_samples=self.opt.num_selected_points, cluster_size=64,
                                      output_dim=self.opt.global_feature_dim, gating=True, add_batch_norm=True,
                                      is_training=True)

    def forward(self, x):
        """Input x: (B, N, D) point clouds; output (global feature, local features)."""
        attended, _weights = self.atten(x, x, x)
        local_feat = split_train(attended, self.epn)
        global_feat = self.netvlad(local_feat)
        return global_feat, local_feat
class Atten_EPN_NetVLAD_select(nn.Module):
    """Attention-guided point selection, then EPN frontend, then NetVLAD.

    Points with the highest summed attention weight are kept; forward()
    returns (global descriptor, summed attention weights).
    """
    def __init__(self, opt):
        super(Atten_EPN_NetVLAD_select, self).__init__()
        self.opt = opt
        # epn param
        mlps=[[64]]
        out_mlps=[64, self.opt.model.output_num]
        strides=[1, 1]
        # Attention over raw coordinates (embed dim 3, 3 heads, batch-first).
        self.atten = torch.nn.MultiheadAttention(3, 3, batch_first=True)
        self.epn = frontend.build_model(self.opt, mlps, out_mlps, strides, downsample=False, outblock='linear')
        self.netvlad = M.NetVLADLoupe(feature_size=self.opt.model.output_num, max_samples=self.opt.num_selected_points, cluster_size=64,
                                      output_dim=self.opt.global_feature_dim, gating=True, add_batch_norm=True,
                                      is_training=True)

    def forward(self, x):
        '''
        INPUT: B, N, D
        Local Feature: B, 128, self.opt.model.output_num
        Global Feature: B, self.opt.global_feature_dim
        '''
        # Use attention to choose points and downsample
        # print('input x', x.shape)
        x_atten, atten_weight = self.atten(x, x, x)
        # print('x_weight', x_atten.shape)
        # print('x_atten', atten_weight.shape)
        # Per-point saliency score: sum of attention weights over dim 2.
        atten_weight_sum = torch.sum(atten_weight, 2)
        # print('atten_weight', atten_weight.shape)
        # select_index = torch.randint(0, cfg.NUM_POINTS, (cfg.NUM_SELECTED_POINTS,))
        # print('input x', x.shape)
        if x.shape[0] >=4:
            # Batch layout: [query | positives | negatives | other-negative].
            query_pcd, pos_pcd, neg_pcd, otherneg_pcd = torch.split(
                x, [1, cfg.TRAIN_POSITIVES_PER_QUERY, cfg.TRAIN_NEGATIVES_PER_QUERY, 1], dim=0)
            query_attn, pos_attn, neg_attn, otherneg_attn = torch.split(
                atten_weight_sum, [1, cfg.TRAIN_POSITIVES_PER_QUERY, cfg.TRAIN_NEGATIVES_PER_QUERY, 1], dim=0)
            # reduce size of point cloud: keep the top-k points by attention.
            query_pcd = query_pcd[:, torch.topk(query_attn, cfg.NUM_SELECTED_POINTS, 1)[1].squeeze(), :]
            pos_pcd = pos_pcd[:, torch.topk(pos_attn, cfg.NUM_SELECTED_POINTS, 1)[1].squeeze(), :]
            neg_pcd = neg_pcd[:, torch.topk(neg_attn, cfg.NUM_SELECTED_POINTS, 1)[1].squeeze(), :]
            otherneg_pcd = otherneg_pcd[:,torch.topk(otherneg_attn, cfg.NUM_SELECTED_POINTS, 1)[1].squeeze(), :]
            x_query, _ = self.epn(query_pcd)
            x_pos, _ = self.epn(pos_pcd)
            x_neg, _ = self.epn(neg_pcd)
            x_otherneg, _ = self.epn(otherneg_pcd)
            x_frontend = torch.cat((x_query, x_pos, x_neg, x_otherneg), 0)
        elif x.shape[0] == 3:
            query_pcd, pos_pcd, neg_pcd = torch.split(
                x, [1, cfg.TRAIN_POSITIVES_PER_QUERY, cfg.TRAIN_NEGATIVES_PER_QUERY], dim=0)
            query_attn, pos_attn, neg_attn = torch.split(
                atten_weight_sum, [1, cfg.TRAIN_POSITIVES_PER_QUERY, cfg.TRAIN_NEGATIVES_PER_QUERY], dim=0)
            # reduce size of point cloud
            query_pcd = query_pcd[:, torch.topk(query_attn, cfg.NUM_SELECTED_POINTS, 1)[1].squeeze(), :]
            pos_pcd = pos_pcd[:, torch.topk(pos_attn, cfg.NUM_SELECTED_POINTS, 1)[1].squeeze(), :]
            neg_pcd = neg_pcd[:, torch.topk(neg_attn, cfg.NUM_SELECTED_POINTS, 1)[1].squeeze(), :]
            x_query, _ = self.epn(query_pcd)
            x_pos, _ = self.epn(pos_pcd)
            x_neg, _ = self.epn(neg_pcd)
            x_frontend = torch.cat((x_query, x_pos, x_neg), 0)
        elif x.shape[0] == 2:
            query_pcd, pos_pcd = torch.split(
                x, [1, cfg.TRAIN_POSITIVES_PER_QUERY], dim=0)
            query_attn, pos_attn = torch.split(
                atten_weight_sum, [1, cfg.TRAIN_POSITIVES_PER_QUERY], dim=0)
            # reduce size of point cloud
            query_pcd = query_pcd[:,torch.topk(query_attn, cfg.NUM_SELECTED_POINTS, 1)[1].squeeze(), :]
            pos_pcd = pos_pcd[:,torch.topk(pos_attn, cfg.NUM_SELECTED_POINTS, 1)[1].squeeze(), :]
            x_query, _ = self.epn(query_pcd)
            x_pos, _ = self.epn(pos_pcd)
            x_frontend = torch.cat((x_query, x_pos), 0)
        elif x.shape[0] == 1:
            query_pcd = x[:,torch.topk(atten_weight_sum, cfg.NUM_SELECTED_POINTS, 1)[1].squeeze(), :]
            x_frontend, _ = self.epn(query_pcd)
        else:
            # NOTE(review): batch size 0 only prints and then crashes on
            # undefined x_frontend below -- confirm intent.
            print('x.shape[0]', x.shape[0])
        # print('x_frontend', x_frontend.shape)
        x = self.netvlad(x_frontend)
        return x, atten_weight_sum
class EPN_CA_NetVLAD(nn.Module):
    """EPN frontend pooled directly by NetVLAD.

    NOTE(review): ``self.atten`` (cross attention) is constructed but never
    applied in forward(); presumably an experiment in progress -- confirm.

    forward() returns (global descriptor, frontend local features).
    """
    def __init__(self, opt):
        super(EPN_CA_NetVLAD, self).__init__()
        self.opt = opt
        # epn param
        mlps=[[64]]
        out_mlps=[64, self.opt.model.output_num]
        strides=[1,1]
        self.epn = frontend.build_model(self.opt, mlps, out_mlps, strides, downsample=False, outblock='linear')
        # self.atten = attention.CrossAttnetion(4, self.opt.model.output_num)
        self.atten = attention.CrossAttnetion_all(4, self.opt.model.output_num)
        self.netvlad = M.NetVLADLoupe(feature_size=self.opt.model.output_num, max_samples=self.opt.num_selected_points, cluster_size=64,
                                      output_dim=self.opt.global_feature_dim, gating=True, add_batch_norm=True,
                                      is_training=True)

    def forward(self, x):
        '''
        INPUT: B, N, D
        Local Feature: B, 128, self.opt.model.output_num
        Global Feature: B, self.opt.global_feature_dim
        '''
        x_frontend = split_train(x, self.epn)
        x = self.netvlad(x_frontend)
        # BUG FIX: the original built
        #     front_end = {'invariant': x_frontend, 'attention': x_atten}
        # but ``x_atten`` was never defined in this method, so every call
        # raised NameError. The dict was also unused, so the line is removed.
        return x, x_frontend
class EPN_CA_NetVLAD_select(nn.Module):
    """Random point selection, EPN frontend, cross-attention, then NetVLAD.

    forward() returns (global descriptor, attended local features).
    """
    def __init__(self, opt):
        super(EPN_CA_NetVLAD_select, self).__init__()
        self.opt = opt
        # transformation: STN is built but only applied when self.trans is True
        # (hard-coded False here).
        self.trans = False
        self.stn = STN3d(num_points=cfg.NUM_POINTS, k=3, use_bn=False)
        # epn param
        mlps=[[64]]
        out_mlps=[64, self.opt.model.output_num]
        strides=[1,1]
        self.epn = frontend.build_model(self.opt, mlps, out_mlps, strides, downsample=False, outblock='linear')
        self.atten = attention.CrossAttnetion_all(4, self.opt.model.output_num)
        self.netvlad = M.NetVLADLoupe(feature_size=self.opt.model.output_num, max_samples=self.opt.num_selected_points, cluster_size=64,
                                      output_dim=self.opt.global_feature_dim, gating=True, add_batch_norm=True,
                                      is_training=True)

    def forward(self, x):
        '''
        INPUT: B, N, D
        Local Feature: B, N', self.opt.model.output_num
        Global Feature: B, self.opt.global_feature_dim
        '''
        # transformation
        if self.trans:
            x = x.unsqueeze(1) # (B, 1, N, D)
            trans = self.stn(x) # B, 3, 3
            x = torch.matmul(torch.squeeze(x), trans) # B, N, 3
        # Random (non-deterministic) downsampling indices, shared by all
        # partitions of the batch.
        select_index = torch.randint(0, cfg.NUM_POINTS, (cfg.NUM_SELECTED_POINTS,))
        # print('input x', x.shape)
        if x.shape[0] >=4:
            # Batch layout: [query | positives | negatives | other-negative].
            query_pcd, pos_pcd, neg_pcd, otherneg_pcd = torch.split(
                x, [1, cfg.TRAIN_POSITIVES_PER_QUERY, cfg.TRAIN_NEGATIVES_PER_QUERY, 1], dim=0)
            # reduce size of point cloud
            query_pcd = query_pcd[:,select_index, :]
            pos_pcd = pos_pcd[:,select_index, :]
            neg_pcd = neg_pcd[:,select_index, :]
            otherneg_pcd = otherneg_pcd[:,select_index, :]
            x_query, _ = self.epn(query_pcd)
            x_pos, _ = self.epn(pos_pcd)
            x_neg, _ = self.epn(neg_pcd)
            x_otherneg, _ = self.epn(otherneg_pcd)
            x_frontend = torch.cat((x_query, x_pos, x_neg, x_otherneg), 0)
        elif x.shape[0] == 3:
            query_pcd, pos_pcd, neg_pcd = torch.split(
                x, [1, cfg.TRAIN_POSITIVES_PER_QUERY, cfg.TRAIN_NEGATIVES_PER_QUERY], dim=0)
            # reduce size of point cloud
            query_pcd = query_pcd[:,select_index, :]
            pos_pcd = pos_pcd[:,select_index, :]
            neg_pcd = neg_pcd[:,select_index, :]
            x_query, _ = self.epn(query_pcd)
            x_pos, _ = self.epn(pos_pcd)
            x_neg, _ = self.epn(neg_pcd)
            x_frontend = torch.cat((x_query, x_pos, x_neg), 0)
        elif x.shape[0] == 2:
            query_pcd, pos_pcd = torch.split(
                x, [1, cfg.TRAIN_POSITIVES_PER_QUERY], dim=0)
            # reduce size of point cloud
            query_pcd = query_pcd[:,select_index, :]
            pos_pcd = pos_pcd[:,select_index, :]
            x_query, _ = self.epn(query_pcd)
            x_pos, _ = self.epn(pos_pcd)
            x_frontend = torch.cat((x_query, x_pos), 0)
        elif x.shape[0] == 1:
            query_pcd = x[:,select_index, :]
            x_frontend, _ = self.epn(query_pcd)
        else:
            # NOTE(review): batch size 0 only prints and then crashes on
            # undefined x_frontend below -- confirm intent.
            print('x.shape[0]', x.shape[0])
        x_gcn = self.atten(x_frontend)
        x = self.netvlad(x_gcn)
        # NOTE(review): front_end is assigned but never used (dead code).
        front_end = {'invariant':x_frontend, 'attention':x_gcn}
        return x, x_gcn
class CA_EPN_NetVLAD_select(nn.Module):
    """Cross-attention-guided point selection, EPN frontend, then NetVLAD.

    Points with the highest summed attention response are kept; forward()
    returns (global descriptor, summed attention scores).
    """
    def __init__(self, opt):
        super(CA_EPN_NetVLAD_select, self).__init__()
        self.opt = opt
        # self.atten = attention.CrossAttnetion(3, 3)
        # self.atten = attention.CrossAttnetion_all(3, 3)
        self.atten = attention.CrossAttnetion_all_weights(3, 3)
        # epn param
        mlps=[[128]]
        out_mlps=[128, self.opt.model.output_num]
        strides=[1,1]
        self.epn = frontend.build_model(self.opt, mlps, out_mlps, strides, downsample=False, outblock='linear')
        # self.epn = frontend.build_model(self.opt, mlps, out_mlps, strides, downsample=False)
        self.netvlad = M.NetVLADLoupe(feature_size=self.opt.model.output_num, max_samples=self.opt.num_selected_points, cluster_size=64,
                                      output_dim=self.opt.global_feature_dim, gating=True, add_batch_norm=True,
                                      is_training=True)

    def forward(self, x):
        '''
        INPUT: B, NUM_POINTS, D
        Local Feature: B, NUM_SELECTED_POINTS, self.opt.model.output_num
        Global Feature: B, self.opt.global_feature_dim
        '''
        # Use attention to choose points and downsample
        # Per-point saliency score: attention output summed over dim 2.
        x_gcn = self.atten(x)
        x_gcn = torch.sum(x_gcn, 2)
        # print('x_gcn', x_gcn.shape)
        # select_index = torch.randint(0, cfg.NUM_POINTS, (cfg.NUM_SELECTED_POINTS,))
        # print('input x', x.shape)
        if x.shape[0] >=4:
            # Batch layout: [query | positives | negatives | other-negative].
            query_pcd, pos_pcd, neg_pcd, otherneg_pcd = torch.split(
                x, [1, cfg.TRAIN_POSITIVES_PER_QUERY, cfg.TRAIN_NEGATIVES_PER_QUERY, 1], dim=0)
            query_attn, pos_attn, neg_attn, otherneg_attn = torch.split(
                x_gcn, [1, cfg.TRAIN_POSITIVES_PER_QUERY, cfg.TRAIN_NEGATIVES_PER_QUERY, 1], dim=0)
            # reduce size of point cloud: keep the top-k points by attention.
            query_pcd = query_pcd[:, torch.topk(query_attn, cfg.NUM_SELECTED_POINTS, 1)[1].squeeze(), :]
            pos_pcd = pos_pcd[:, torch.topk(pos_attn, cfg.NUM_SELECTED_POINTS, 1)[1].squeeze(), :]
            neg_pcd = neg_pcd[:, torch.topk(neg_attn, cfg.NUM_SELECTED_POINTS, 1)[1].squeeze(), :]
            otherneg_pcd = otherneg_pcd[:,torch.topk(otherneg_attn, cfg.NUM_SELECTED_POINTS, 1)[1].squeeze(), :]
            x_query, _ = self.epn(query_pcd)
            x_pos, _ = self.epn(pos_pcd)
            x_neg, _ = self.epn(neg_pcd)
            x_otherneg, _ = self.epn(otherneg_pcd)
            x_frontend = torch.cat((x_query, x_pos, x_neg, x_otherneg), 0)
        elif x.shape[0] == 3:
            query_pcd, pos_pcd, neg_pcd = torch.split(
                x, [1, cfg.TRAIN_POSITIVES_PER_QUERY, cfg.TRAIN_NEGATIVES_PER_QUERY], dim=0)
            query_attn, pos_attn, neg_attn = torch.split(
                x_gcn, [1, cfg.TRAIN_POSITIVES_PER_QUERY, cfg.TRAIN_NEGATIVES_PER_QUERY], dim=0)
            # reduce size of point cloud
            query_pcd = query_pcd[:, torch.topk(query_attn, cfg.NUM_SELECTED_POINTS, 1)[1].squeeze(), :]
            pos_pcd = pos_pcd[:, torch.topk(pos_attn, cfg.NUM_SELECTED_POINTS, 1)[1].squeeze(), :]
            neg_pcd = neg_pcd[:, torch.topk(neg_attn, cfg.NUM_SELECTED_POINTS, 1)[1].squeeze(), :]
            x_query, _ = self.epn(query_pcd)
            x_pos, _ = self.epn(pos_pcd)
            x_neg, _ = self.epn(neg_pcd)
            x_frontend = torch.cat((x_query, x_pos, x_neg), 0)
        elif x.shape[0] == 2:
            query_pcd, pos_pcd = torch.split(
                x, [1, cfg.TRAIN_POSITIVES_PER_QUERY], dim=0)
            query_attn, pos_attn = torch.split(
                x_gcn, [1, cfg.TRAIN_POSITIVES_PER_QUERY], dim=0)
            # reduce size of point cloud
            query_pcd = query_pcd[:,torch.topk(query_attn, cfg.NUM_SELECTED_POINTS, 1)[1].squeeze(), :]
            pos_pcd = pos_pcd[:,torch.topk(pos_attn, cfg.NUM_SELECTED_POINTS, 1)[1].squeeze(), :]
            x_query, _ = self.epn(query_pcd)
            x_pos, _ = self.epn(pos_pcd)
            x_frontend = torch.cat((x_query, x_pos), 0)
        elif x.shape[0] == 1:
            query_pcd = x[:,torch.topk(x_gcn, cfg.NUM_SELECTED_POINTS, 1)[1].squeeze(), :]
            x_frontend, _ = self.epn(query_pcd)
        else:
            # NOTE(review): batch size 0 only prints and then crashes on
            # undefined x_frontend below -- confirm intent.
            print('x.shape[0]', x.shape[0])
        # print('x_frontend', x_frontend.shape)
        x = self.netvlad(x_frontend)
        # NOTE(review): front_end is assigned but never used (dead code).
        front_end = {'invariant':x_frontend, 'attention':x_gcn}
        return x, x_gcn
class EPN_Transformer_NetVLAD(nn.Module):
    """EPN frontend + transformer encoder, concatenated and fused, then NetVLAD.

    forward() returns (global descriptor, frontend local features).
    """
    def __init__(self, opt):
        super(EPN_Transformer_NetVLAD, self).__init__()
        self.opt = opt
        # epn param
        mlps=[[64,64], [128, 128]]
        out_mlps=[128, self.opt.model.output_num]
        self.epn = frontend.build_model(self.opt, mlps, out_mlps)
        # transformer: single encoder layer over the frontend feature dim.
        encoder_layer = nn.TransformerEncoderLayer(d_model=self.opt.model.output_num, nhead=4, \
            dim_feedforward=1024, activation='relu', batch_first=False, dropout=0.)
        self.transformer_encoder = torch.nn.TransformerEncoder(encoder_layer, num_layers=1)
        # 1x1 conv fusing the concatenated (frontend + attended) channels
        # back down; assumes output_num == 1024 so the cat is 2048 -- TODO confirm.
        self.conv_after_cat = nn.Conv2d(2048, 1024, kernel_size=(1,1), stride=(1,1), bias=False)
        self.relu = nn.ReLU(inplace=True)
        # netvlad
        self.netvlad = M.NetVLADLoupe(feature_size=self.opt.model.output_num, max_samples=self.opt.num_selected_points, cluster_size=64,
                                      output_dim=self.opt.global_feature_dim, gating=True, add_batch_norm=True,
                                      is_training=True)

    def forward(self, x):
        '''
        INPUT: B, N, D
        Local Feature: B, 128, self.opt.model.output_num
        Global Feature: B, self.opt.global_feature_dim
        '''
        x_frontend = split_train(x, self.epn)
        # print('x_frontend', x_frontend.shape)
        x_atten = self.transformer_encoder(x_frontend)
        # Concatenate raw and attended features along the feature dim.
        x_encoder = torch.cat((x_frontend, x_atten), dim=2)
        # print('x_encoder', x_encoder.shape)
        # Reshape to (B, C, N, 1) for the 1x1 conv, fuse, normalize, and
        # reshape back to (B, N, C).
        x_encoder = x_encoder.transpose(1, 2)
        x_encoder = x_encoder.unsqueeze(3)
        # print('x_encoder', x_encoder.shape)
        x_encoder = self.relu(self.conv_after_cat(x_encoder))
        x_encoder = F.normalize(x_encoder, dim=2)
        x_encoder = torch.squeeze(x_encoder, 3)
        x_encoder = x_encoder.transpose(1, 2)
        # print('x_encoder', x_encoder.shape)
        x_output = self.netvlad(x_encoder)
        # print('x_output', x_output.shape)
        return x_output, x_frontend
| 42.202055
| 136
| 0.581595
| 3,305
| 24,646
| 4.042057
| 0.05416
| 0.041395
| 0.043267
| 0.040422
| 0.865409
| 0.850438
| 0.842353
| 0.816378
| 0.794895
| 0.780822
| 0
| 0.018372
| 0.280045
| 24,646
| 584
| 137
| 42.202055
| 0.734502
| 0.13771
| 0
| 0.745042
| 0
| 0
| 0.007732
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.048159
| false
| 0
| 0.048159
| 0
| 0.144476
| 0.014164
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
0588db1b0f5521bbb7c3c6ad34f7b007dd06dc43
| 588
|
py
|
Python
|
experiments/directory_paths.py
|
ashudeep/ml-fairness-gym
|
8eb4ad99ac8588f315072d9c451f55ce7a03e326
|
[
"Apache-2.0"
] | null | null | null |
experiments/directory_paths.py
|
ashudeep/ml-fairness-gym
|
8eb4ad99ac8588f315072d9c451f55ce7a03e326
|
[
"Apache-2.0"
] | null | null | null |
experiments/directory_paths.py
|
ashudeep/ml-fairness-gym
|
8eb4ad99ac8588f315072d9c451f55ce7a03e326
|
[
"Apache-2.0"
] | null | null | null |
# Default locations of the pretrained embedding factorization, experiment
# output directory and embedding data directory used by the experiment
# scripts.  The commented-out values below are an earlier (local-machine)
# configuration kept for reference.
# DEFAULT_EMBEDDING_PATH = '/home/as3354/ml-fairness-gym/environments/recommenders/one_hot_embeddings_1000/factorization.json'
# DEFAULT_OUTPUT_DIRECTORY = '/share/thorsten/as3354/saferecs/results_0828/'
# DEFAULT_DATA_DIRECTORY = '/home/as3354/ml-fairness-gym/environments/recommenders/one_hot_embeddings_1000'
DEFAULT_EMBEDDING_PATH = '/share/thorsten/as3354/saferecs/data/ml-data/one_hot_embeddings/factorization.json'
DEFAULT_OUTPUT_DIRECTORY = '/share/thorsten/as3354/saferecs/results_0914/'
DEFAULT_DATA_DIRECTORY = '/share/thorsten/as3354/saferecs/data/ml-data/one_hot_embeddings/'
| 98
| 126
| 0.846939
| 76
| 588
| 6.236842
| 0.328947
| 0.050633
| 0.135021
| 0.227848
| 0.833333
| 0.814346
| 0.814346
| 0.814346
| 0.814346
| 0.814346
| 0
| 0.070423
| 0.034014
| 588
| 6
| 127
| 98
| 0.764085
| 0.518707
| 0
| 0
| 0
| 0
| 0.682143
| 0.682143
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
e9d130daed8dd61edf87bc9864a35e76f2e27ffc
| 59
|
py
|
Python
|
theBroker/venv/Lib/site-packages/ttn/github_com/TheThingsNetwork/api/monitor/__init__.py
|
emirgo/WeatherStation
|
f0f8c3464470991fc962d83cea20f3bcfd6a04b6
|
[
"MIT"
] | 32
|
2017-11-01T16:03:48.000Z
|
2021-11-16T12:35:34.000Z
|
theBroker/venv/Lib/site-packages/ttn/github_com/TheThingsNetwork/api/monitor/__init__.py
|
emirgo/WeatherStation
|
f0f8c3464470991fc962d83cea20f3bcfd6a04b6
|
[
"MIT"
] | 28
|
2017-11-20T09:45:59.000Z
|
2021-12-14T09:31:24.000Z
|
theBroker/venv/Lib/site-packages/ttn/github_com/TheThingsNetwork/api/monitor/__init__.py
|
emirgo/WeatherStation
|
f0f8c3464470991fc962d83cea20f3bcfd6a04b6
|
[
"MIT"
] | 22
|
2017-11-03T10:21:50.000Z
|
2021-04-08T05:20:51.000Z
|
from .monitor_pb2_grpc import *
from .monitor_pb2 import *
| 19.666667
| 31
| 0.79661
| 9
| 59
| 4.888889
| 0.555556
| 0.5
| 0.636364
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.039216
| 0.135593
| 59
| 2
| 32
| 29.5
| 0.823529
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
75729febd35882b2931863d92e4655bcbd21d492
| 16,179
|
py
|
Python
|
tests/subaligner/test_predictor.py
|
baxtree/subaligner
|
0a72a54d5271c6ce86b556860992584ef4a51012
|
[
"MIT"
] | 227
|
2020-01-05T18:47:15.000Z
|
2022-03-19T20:19:33.000Z
|
tests/subaligner/test_predictor.py
|
baxtree/subaligner
|
0a72a54d5271c6ce86b556860992584ef4a51012
|
[
"MIT"
] | 43
|
2019-12-16T09:15:38.000Z
|
2022-03-21T10:39:40.000Z
|
tests/subaligner/test_predictor.py
|
baxtree/subaligner
|
0a72a54d5271c6ce86b556860992584ef4a51012
|
[
"MIT"
] | 9
|
2020-11-16T03:33:11.000Z
|
2022-03-11T22:12:38.000Z
|
import unittest
import os
import sys
from mock import patch
from parameterized import parameterized
from subaligner.exception import TerminalException
from subaligner.logger import Logger
from subaligner.media_helper import MediaHelper
from subaligner.predictor import Predictor as Undertest
class PredictorTests(unittest.TestCase):
    """Tests for subaligner's Predictor: single-pass and dual-pass alignment
    across a range of media/subtitle formats, plus error and timeout paths.
    """

    def setUp(self):
        # Lift the shift cap so alignment results are not clipped in tests.
        Undertest._Predictor__MAX_SHIFT_IN_SECS = sys.maxsize
        self.weights_dir = self._resource("resource/models/training/weights")
        self.video_file_path = self._resource("resource/test.mp4")
        self.audio_file_path = self._resource("resource/test.wav")
        self.srt_file_path = self._resource("resource/test.srt")
        self.ttml_file_path = self._resource("resource/test.xml")
        self.vtt_file_path = self._resource("resource/test.vtt")
        self.ass_file_path = self._resource("resource/test.ass")
        self.ssa_file_path = self._resource("resource/test.ssa")
        self.microdvd_file_path = self._resource("resource/test.sub")
        self.mpl2_file_path = self._resource("resource/test_mpl2.txt")
        self.tmp_file_path = self._resource("resource/test.tmp")
        self.sami_file_path = self._resource("resource/test.smi")
        self.stl_file_path = self._resource("resource/test.stl")
        self.sbv_file_path = self._resource("resource/test.sbv")
        self.ytt_file_path = self._resource("resource/test.ytt")
        self.long_subtitle_file_path = self._resource("resource/test_too_long.srt")
        self.plain_text_file_path = self._resource("resource/test_plain.txt")
        # Audio files extracted during a test are registered here for cleanup.
        self.audio_file_paths = []

    @staticmethod
    def _resource(relative_path):
        """Return the absolute path of a test resource located next to this file."""
        return os.path.join(os.path.dirname(os.path.abspath(__file__)), relative_path)

    def tearDown(self):
        # Remove any audio files extracted during the test.
        for file in self.audio_file_paths:
            if os.path.isfile(file):
                os.remove(file)

    def test_predict_single_pass_with_fps(self):
        """Single pass on video reports the video frame rate (24 fps fixture)."""
        subs, audio_file_path, voice_probabilities, frame_rate = Undertest(n_mfcc=20, step_sample=0.02).predict_single_pass(
            self.video_file_path, self.srt_file_path, self.weights_dir
        )
        self.audio_file_paths.append(audio_file_path)
        self.assertGreater(len(subs), 0)
        self.assertIsNotNone(audio_file_path)
        self.assertGreater(len(voice_probabilities), 0)
        self.assertEqual(24.0, frame_rate)

    @parameterized.expand([
        ["video_file_path", "srt_file_path"],
        ["video_file_path", "ttml_file_path"],
        ["video_file_path", "vtt_file_path"],
        ["video_file_path", "ass_file_path"],
        ["video_file_path", "ssa_file_path"],
        ["video_file_path", "microdvd_file_path"],
        ["video_file_path", "mpl2_file_path"],
        ["video_file_path", "tmp_file_path"],
        ["video_file_path", "sami_file_path"],
        ["video_file_path", "stl_file_path"],
        ["video_file_path", "sbv_file_path"],
        ["video_file_path", "ytt_file_path"],
    ])
    def test_predict_single_pass_on_video(self, media_file_path, subtitle_file_path):
        """Single pass on video works for every supported subtitle format."""
        subs, audio_file_path, voice_probabilities, frame_rate = Undertest(n_mfcc=20).predict_single_pass(
            getattr(self, media_file_path), getattr(self, subtitle_file_path), self.weights_dir
        )
        self.audio_file_paths.append(audio_file_path)
        self.assertGreater(len(subs), 0)
        self.assertIsNotNone(audio_file_path)
        self.assertGreater(len(voice_probabilities), 0)
        self.assertEqual(24.0, frame_rate)

    @parameterized.expand([
        ["audio_file_path", "srt_file_path"],
        ["audio_file_path", "ttml_file_path"],
        ["audio_file_path", "vtt_file_path"],
        ["audio_file_path", "ass_file_path"],
        ["audio_file_path", "ssa_file_path"],
        ["audio_file_path", "microdvd_file_path"],
        ["audio_file_path", "mpl2_file_path"],
        ["audio_file_path", "tmp_file_path"],
        ["audio_file_path", "sami_file_path"],
        ["audio_file_path", "stl_file_path"],
        ["audio_file_path", "sbv_file_path"],
        ["audio_file_path", "ytt_file_path"],
    ])
    def test_predict_single_pass_on_audio(self, media_file_path, subtitle_file_path):
        """Single pass on pure audio yields no frame rate."""
        subs, audio_file_path, voice_probabilities, frame_rate = Undertest(n_mfcc=20).predict_single_pass(
            getattr(self, media_file_path), getattr(self, subtitle_file_path), self.weights_dir
        )
        self.audio_file_paths.append(audio_file_path)
        self.assertGreater(len(subs), 0)
        self.assertIsNotNone(audio_file_path)
        self.assertGreater(len(voice_probabilities), 0)
        self.assertIsNone(frame_rate)

    # TODO(review): the next two tests are currently byte-identical; the
    # "above threshold" case presumably should lower the max-shift cap set in
    # setUp so the threshold is actually exceeded — confirm intended behavior.
    def test_predict_on_subtitle_longer_than_audio_within_threshold(self):
        subs, audio_file_path, _, _ = Undertest(n_mfcc=20).predict_single_pass(
            self.video_file_path, self.long_subtitle_file_path, self.weights_dir
        )
        self.audio_file_paths.append(audio_file_path)
        self.assertGreater(len(subs), 0)
        self.assertIsNotNone(audio_file_path)

    def test_predict_on_subtitle_longer_than_audio_above_threshold(self):
        subs, audio_file_path, _, _ = Undertest(n_mfcc=20).predict_single_pass(
            self.video_file_path, self.long_subtitle_file_path, self.weights_dir
        )
        self.audio_file_paths.append(audio_file_path)
        self.assertGreater(len(subs), 0)
        self.assertIsNotNone(audio_file_path)

    @parameterized.expand([
        ["video_file_path", "srt_file_path"],
        ["video_file_path", "ttml_file_path"],
        ["video_file_path", "vtt_file_path"],
        ["video_file_path", "ass_file_path"],
        ["video_file_path", "ssa_file_path"],
        ["video_file_path", "microdvd_file_path"],
        ["video_file_path", "mpl2_file_path"],
        ["video_file_path", "tmp_file_path"],
        ["video_file_path", "sami_file_path"],
        ["video_file_path", "stl_file_path"],
        ["video_file_path", "sbv_file_path"],
        ["video_file_path", "ytt_file_path"],
    ])
    def test_predict_dual_pass_on_video(self, media_file_path, subtitle_file_path):
        """Dual pass on video works for every supported subtitle format."""
        undertest_obj = Undertest(n_mfcc=20)
        new_subs, subs, voice_probabilities, frame_rate = undertest_obj.predict_dual_pass(
            getattr(self, media_file_path), getattr(self, subtitle_file_path), self.weights_dir
        )
        self.assertGreater(len(new_subs), 0)
        self.assertEqual(len(new_subs), len(subs))
        self.assertGreater(len(voice_probabilities), 0)
        self.assertEqual(24.0, frame_rate)

    @parameterized.expand([
        ["audio_file_path", "srt_file_path"],
        ["audio_file_path", "ttml_file_path"],
        ["audio_file_path", "vtt_file_path"],
        ["audio_file_path", "ass_file_path"],
        ["audio_file_path", "ssa_file_path"],
        ["audio_file_path", "microdvd_file_path"],
        ["audio_file_path", "mpl2_file_path"],
        ["audio_file_path", "tmp_file_path"],
        ["audio_file_path", "sami_file_path"],
        ["audio_file_path", "stl_file_path"],
        ["audio_file_path", "sbv_file_path"],
        ["audio_file_path", "ytt_file_path"],
    ])
    def test_predict_dual_pass_on_audio(self, media_file_path, subtitle_file_path):
        """Dual pass on pure audio yields no frame rate.

        BUGFIX: this method was previously also named
        test_predict_dual_pass_on_video, which shadowed the video variant so
        the video test never ran; renamed to match its audio parameters.
        """
        undertest_obj = Undertest(n_mfcc=20)
        new_subs, subs, voice_probabilities, frame_rate = undertest_obj.predict_dual_pass(
            getattr(self, media_file_path), getattr(self, subtitle_file_path), self.weights_dir
        )
        self.assertGreater(len(new_subs), 0)
        self.assertEqual(len(new_subs), len(subs))
        self.assertGreater(len(voice_probabilities), 0)
        self.assertIsNone(frame_rate)

    def test_predict_dual_pass_without_stretching_logs(self):
        """Dual pass works with logging silenced; Logger state is restored."""
        quiet = Logger.QUIET
        Logger.QUIET = True
        undertest_obj = Undertest(n_mfcc=20)
        new_subs, subs, voice_probabilities, frame_rate = undertest_obj.predict_dual_pass(
            self.audio_file_path, self.srt_file_path, self.weights_dir
        )
        self.assertGreater(len(new_subs), 0)
        self.assertEqual(len(new_subs), len(subs))
        self.assertGreater(len(voice_probabilities), 0)
        self.assertIsNone(frame_rate)
        Logger.QUIET = quiet

    def test_predict_dual_pass_with_stretching(self):
        """With stretch=True at least one cue duration should change."""
        undertest_obj = Undertest(n_mfcc=20)
        new_subs, subs, voice_probabilities, frame_rate = undertest_obj.predict_dual_pass(
            self.video_file_path, self.srt_file_path, self.weights_dir, stretch=True
        )
        stretched = False
        for index, sub in enumerate(new_subs):
            if sub.duration != subs[index].duration:
                stretched = True
                break
        self.assertGreater(len(new_subs), 0)
        self.assertEqual(len(new_subs), len(subs))
        self.assertTrue(stretched)
        self.assertGreater(len(voice_probabilities), 0)
        self.assertEqual(24.0, frame_rate)

    def test_predict_dual_pass_with_specified_language(self):
        """Stretching with an explicit language code behaves like default stretch."""
        undertest_obj = Undertest(n_mfcc=20)
        new_subs, subs, voice_probabilities, frame_rate = undertest_obj.predict_dual_pass(
            self.video_file_path, self.srt_file_path, self.weights_dir, stretch=True, stretch_in_lang="zho"
        )
        stretched = False
        for index, sub in enumerate(new_subs):
            if sub.duration != subs[index].duration:
                stretched = True
                break
        self.assertGreater(len(new_subs), 0)
        self.assertEqual(len(new_subs), len(subs))
        self.assertTrue(stretched)
        self.assertGreater(len(voice_probabilities), 0)
        self.assertEqual(24.0, frame_rate)

    def test_predict_plain_text(self):
        """Plain-text prediction yields subtitles but no audio path/probabilities."""
        subs, audio_file_path, voice_probabilities, frame_rate = Undertest(n_mfcc=20, step_sample=0.02).predict_plain_text(
            self.video_file_path, self.plain_text_file_path
        )
        self.assertGreater(len(subs), 0)
        self.assertIsNone(audio_file_path)
        self.assertIsNone(voice_probabilities)
        self.assertEqual(24.0, frame_rate)

    def test_get_log_loss(self):
        undertest_obj = Undertest(n_mfcc=20)
        subs, audio_file_path, voice_probabilities, frame_rate = undertest_obj.predict_single_pass(
            self.video_file_path, self.srt_file_path, self.weights_dir
        )
        log_loss = undertest_obj.get_log_loss(voice_probabilities, subs)
        self.assertGreater(log_loss, 0)
        self.assertEqual(24.0, frame_rate)

    def test_get_log_loss_on_speech_shorter_than_subtitle(self):
        """Log loss is still computable when the audio is shorter than the subtitle."""
        undertest_obj = Undertest(n_mfcc=20)
        shorter_audio_file_path, _ = MediaHelper().extract_audio_from_start_to_end(self.audio_file_path, "00:00:00,000", "00:00:32,797")
        self.audio_file_paths.append(shorter_audio_file_path)
        subs, audio_file_path, voice_probabilities, frame_rate = undertest_obj.predict_single_pass(
            shorter_audio_file_path, self.srt_file_path, self.weights_dir
        )
        log_loss = undertest_obj.get_log_loss(voice_probabilities, subs)
        self.assertGreater(log_loss, 0)
        self.assertIsNone(frame_rate)

    def test_get_min_log_loss_and_index(self):
        undertest_obj = Undertest(n_mfcc=20)
        subs, audio_file_path, voice_probabilities, frame_rate = undertest_obj.predict_single_pass(
            self.video_file_path, self.srt_file_path, self.weights_dir
        )
        min_log_loss, min_log_loss_pos = undertest_obj.get_min_log_loss_and_index(
            voice_probabilities, subs
        )
        self.assertGreater(min_log_loss, 0)
        self.assertGreaterEqual(min_log_loss_pos, 0)
        self.assertEqual(24.0, frame_rate)

    def test_throw_terminal_exception_on_missing_video(self):
        try:
            subs, audio_file_path, _, _ = Undertest(n_mfcc=20).predict_single_pass(None, self.srt_file_path, self.weights_dir)
        except Exception as e:
            self.assertTrue(isinstance(e, TerminalException))
        else:
            self.fail("Should have thrown exception")

    @unittest.skip("Mocking does not work for spawned processes")
    @patch("subaligner.media_helper.MediaHelper.extract_audio_from_start_to_end", side_effect=Exception("exception"))
    def test_not_throw_exception_on_segment_alignment_failure(self, mock_time_to_sec):
        undertest_obj = Undertest(n_mfcc=20)
        new_subs, subs, voice_probabilities, frame_rate = undertest_obj.predict_dual_pass(
            self.video_file_path, self.srt_file_path, self.weights_dir
        )
        self.assertGreater(len(new_subs), 0)
        self.assertEqual(len(new_subs), len(subs))
        self.assertGreater(len(voice_probabilities), 0)
        self.assertTrue(mock_time_to_sec.called)
        self.assertEqual(24.0, frame_rate)

    @unittest.skip("Mocking does not work for spawned processes")
    @patch("subaligner.media_helper.MediaHelper.extract_audio_from_start_to_end", side_effect=Exception("exception"))
    def test_throw_exception_on_segment_alignment_failure_when_flag_on(self, mock_time_to_sec):
        try:
            undertest_obj = Undertest(n_mfcc=20)
            undertest_obj.predict_dual_pass(self.video_file_path, self.srt_file_path, self.weights_dir, exit_segfail=True)
        except Exception as e:
            self.assertTrue(mock_time_to_sec.called)
            self.assertTrue(isinstance(e, TerminalException))
            self.assertTrue("At least one of the segments failed on alignment. Exiting..." in str(e))
        else:
            self.fail("Should have thrown exception")

    @patch("concurrent.futures._base.Future.result", side_effect=KeyboardInterrupt)
    def test_throw_exception_on_predict_interrupted(self, mock_result):
        try:
            undertest_obj = Undertest(n_mfcc=20)
            undertest_obj.predict_dual_pass(
                self.video_file_path, self.srt_file_path, self.weights_dir
            )
        except Exception as e:
            self.assertTrue(mock_result.called)
            self.assertTrue(isinstance(e, TerminalException))
            self.assertTrue("interrupted" in str(e))
        else:
            self.fail("Should have thrown exception")

    def test_throw_terminal_exception_on_missing_subtitle(self):
        try:
            subs, audio_file_path, _, _ = Undertest(n_mfcc=20).predict_single_pass(self.video_file_path, None, self.weights_dir)
            self.fail("Should not have reached here")
        except Exception as e:
            self.assertTrue(isinstance(e, TerminalException))
        else:
            self.fail("Should have thrown exception")

    def test_throw_terminal_exception_on_timeout(self):
        """A tiny segment timeout must surface as TerminalException; the
        original timeout is restored afterwards."""
        backup = Undertest._Predictor__SEGMENT_PREDICTION_TIMEOUT
        Undertest._Predictor__SEGMENT_PREDICTION_TIMEOUT = 0.05
        try:
            undertest_obj = Undertest(n_mfcc=20)
            undertest_obj.predict_dual_pass(self.video_file_path, self.srt_file_path, self.weights_dir)
        except Exception as e:
            self.assertTrue(isinstance(e, TerminalException))
        else:
            self.fail("Should have thrown exception")
        finally:
            Undertest._Predictor__SEGMENT_PREDICTION_TIMEOUT = backup
# Allow running this test module directly with `python test_predictor.py`.
if __name__ == "__main__":
    unittest.main()
| 44.084469
| 136
| 0.67736
| 2,084
| 16,179
| 4.846449
| 0.09453
| 0.144951
| 0.066931
| 0.03703
| 0.865446
| 0.832871
| 0.815743
| 0.798614
| 0.770396
| 0.766931
| 0
| 0.009955
| 0.21769
| 16,179
| 366
| 137
| 44.204918
| 0.788022
| 0
| 0
| 0.584098
| 0
| 0
| 0.139069
| 0.016997
| 0
| 0
| 0
| 0
| 0.192661
| 1
| 0.067278
| false
| 0.082569
| 0.027523
| 0
| 0.097859
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
759be8842f0229e65b5cb805ee9ea90e1d47b80c
| 181
|
py
|
Python
|
bc/family_information/utils.py
|
Buckinghamshire-Digital-Service/buckinghamshire-council
|
bbbdb52b515bcdfc79a2bd9198dfa4828405370e
|
[
"BSD-3-Clause"
] | 1
|
2021-02-27T07:27:17.000Z
|
2021-02-27T07:27:17.000Z
|
bc/family_information/utils.py
|
Buckinghamshire-Digital-Service/buckinghamshire-council
|
bbbdb52b515bcdfc79a2bd9198dfa4828405370e
|
[
"BSD-3-Clause"
] | null | null | null |
bc/family_information/utils.py
|
Buckinghamshire-Digital-Service/buckinghamshire-council
|
bbbdb52b515bcdfc79a2bd9198dfa4828405370e
|
[
"BSD-3-Clause"
] | 1
|
2021-06-09T15:56:54.000Z
|
2021-06-09T15:56:54.000Z
|
from bc.family_information.models import FamilyInformationHomePage
def is_family_information_site(site):
    """Return True when the site's root page is a FamilyInformationHomePage."""
    root_specific = site.root_page.specific
    return isinstance(root_specific, FamilyInformationHomePage)
| 30.166667
| 73
| 0.861878
| 20
| 181
| 7.55
| 0.75
| 0.225166
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.082873
| 181
| 5
| 74
| 36.2
| 0.909639
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
75acfb09e963c141fb6707f71b4f34844c7d4b0a
| 830
|
py
|
Python
|
deathstartblocks.py
|
dragonfly-ai/MineCraft-pi-py-pie
|
333c171170852332f0bfc4bddc9650e2b5e9dcb8
|
[
"Apache-2.0"
] | null | null | null |
deathstartblocks.py
|
dragonfly-ai/MineCraft-pi-py-pie
|
333c171170852332f0bfc4bddc9650e2b5e9dcb8
|
[
"Apache-2.0"
] | null | null | null |
deathstartblocks.py
|
dragonfly-ai/MineCraft-pi-py-pie
|
333c171170852332f0bfc4bddc9650e2b5e9dcb8
|
[
"Apache-2.0"
] | null | null | null |
# Clear a 5-wide by 5-tall wall of blocks (block id 0 = air) at z = -87.
# Replaces 25 copy-pasted mc.setBlock calls with two loops, issuing the
# calls in exactly the same order: x fastest (-1..3) within each level
# y (38..42).
for y in range(38, 43):
    for x in range(-1, 4):
        mc.setBlock(Vec3(x, y, -87), 0, 0)
| 31.923077
| 33
| 0.63253
| 200
| 830
| 2.625
| 0.065
| 0.47619
| 0.666667
| 0.274286
| 0.988571
| 0.988571
| 0.988571
| 0.765714
| 0.727619
| 0.727619
| 0
| 0.25641
| 0.060241
| 830
| 25
| 34
| 33.2
| 0.416667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 12
|
75d68a1ab7cef61bd610c793d104ab239711faae
| 31,823
|
py
|
Python
|
from_config/dev/eval_model.py
|
astrockragh/IceCube
|
eba09e9f9a3c351dbf05496821bcd7d29ac0261c
|
[
"MIT"
] | null | null | null |
from_config/dev/eval_model.py
|
astrockragh/IceCube
|
eba09e9f9a3c351dbf05496821bcd7d29ac0261c
|
[
"MIT"
] | null | null | null |
from_config/dev/eval_model.py
|
astrockragh/IceCube
|
eba09e9f9a3c351dbf05496821bcd7d29ac0261c
|
[
"MIT"
] | 2
|
2021-03-03T20:39:38.000Z
|
2021-06-09T11:58:00.000Z
|
import tensorflow as tf
import tensorflow_probability as tfp
import numpy as np
import matplotlib.pyplot as plt
from tensorflow.math import sin, cos, acos, abs, reduce_mean, subtract, square
import warnings
from tqdm import tqdm
from scipy.stats import norm
# Silence library warnings (TF/TFP emit a lot of deprecation chatter) so
# evaluation output stays readable.
warnings.filterwarnings("ignore")
# Small constant used to keep acos/log arguments away from their domain edges.
eps=1e-5
def azi_alpha(y_true, y_reco):
    """Azimuth-error percentiles (16th/50th/84th/68th), in degrees.

    Column 2 holds the azimuth angle.  The residual is wrapped by taking
    min(|d|, |d| mod pi).
    """
    raw = abs(y_true[:, 2] - y_reco[:, 2])
    wrapped = tf.minimum(raw, raw % np.pi)
    percentiles = tfp.stats.percentile(wrapped, [16, 50, 84, 68])
    return (180 / np.pi * percentiles).numpy()
def zeni_alpha(y_true, y_reco):
    """Zenith-error percentiles (16th/50th/84th/68th), in degrees.

    Column 1 holds the zenith angle.  The residual is wrapped by taking
    min(|d|, |d| mod pi).
    """
    raw = abs(y_true[:, 1] - y_reco[:, 1])
    wrapped = tf.minimum(raw, raw % np.pi)
    percentiles = tfp.stats.percentile(wrapped, [16, 50, 84, 68])
    return (180 / np.pi * percentiles).numpy()
def azi_res(y_true, y_reco):
    """Per-event wrapped absolute azimuth residuals (radians) as a NumPy array."""
    raw = abs(y_true[:, 2] - y_reco[:, 2])
    return tf.minimum(raw, raw % np.pi).numpy()
def zeni_res(y_true, y_reco):
    """Per-event wrapped absolute zenith residuals (radians) as a NumPy array."""
    raw = abs(y_true[:, 1] - y_reco[:, 1])
    return tf.minimum(raw, raw % np.pi).numpy()
def mazi_res(y_true, y_reco):
    """Signed azimuth residuals, true minus reconstructed (no wrapping)."""
    return y_true[:, 2] - y_reco[:, 2]
def mzeni_res(y_true, y_reco):
    """Signed zenith residuals, true minus reconstructed (no wrapping)."""
    return y_true[:, 1] - y_reco[:, 1]
def alpha_from_angle(y_reco, y_true):
    """Opening angle (radians) between reconstructed and true directions.

    Built from the spherical-coordinate dot product of the two unit vectors;
    columns 1/2 are zenith/azimuth.  Note |sin(zenith)| is used for the
    reconstructed direction, matching the original implementation.
    """
    zen_r, zen_t = y_reco[:, 1], y_true[:, 1]
    azi_r, azi_t = y_reco[:, 2], y_true[:, 2]
    cos_alpha = (abs(sin(zen_r)) * cos(azi_r) * sin(zen_t) * cos(azi_t)
                 + abs(sin(zen_r)) * sin(azi_r) * sin(zen_t) * sin(azi_t)
                 + cos(zen_r) * cos(zen_t))
    # Nudge away from +/-1 so acos stays numerically stable.
    cos_alpha = cos_alpha - tf.math.sign(cos_alpha) * eps
    return acos(cos_alpha)
from scipy.special import iv
def approx(x):
    """Large-argument approximation of the Bessel ratio I1(x)/I0(x).

    Computes (1 - 3/(8x)) / (1 + 1/(8x)) elementwise.
    """
    x = np.array(x)
    numerator = 1 - 3 / (8 * x)
    denominator = 1 + 1 / (8 * x)
    return numerator / denominator
def kap_to_sig(kappa):
    """Map von Mises-Fisher concentration kappa to an angular sigma estimate.

    Uses the rational approximation of I1/I0 for kappa > 50 and the exact
    Bessel ratio otherwise.  Both branches are evaluated (np.where semantics),
    exactly as in the original.
    """
    kappa = np.sqrt(kappa ** 2) + eps  # |kappa| shifted away from zero
    large_branch = np.sqrt(abs(np.sqrt(-2 * np.log(approx(kappa)))))
    exact_branch = np.sqrt(abs(np.sqrt(-2 * np.log(iv(1, kappa) / iv(0, kappa)))))
    return np.where(kappa > 50, large_branch, exact_branch)
# Pre-computed sigmas for a coarse kappa grid (0, 20, 40, 60, 80).
sigs=kap_to_sig(np.arange(0,100,20))
# TODO: consider making a pull plot
def performance_e_alpha(loader, test_step, metrics, save=False, save_path=''):
    '''Function to test and plot performance of Graph DL
    input should be dom pos x,y,z , time, charge(log10)
    target should be energy(log10),zenith angle, azimuthal angle, NOT unit vec

    Runs test_step over every batch in loader, bins events in log-energy and
    draws a 4x3 grid of diagnostic panels: energy, opening angle, zenith and
    azimuth.  Returns the matplotlib (fig, ax) pair; when save is True the
    figure is also written to save_path.

    NOTE(review): assumes y_reco column 3 holds the predicted kappa
    (uncertainty) and y_true columns are [log10(E), zenith, azimuth] — confirm
    against the model definition.
    '''
    loss = 0
    prediction_list, target_list = [], []
    # Accumulate predictions and targets across the whole loader.
    for batch in loader:
        inputs, targets = batch
        predictions, targets, out = test_step(inputs, targets)
        loss += out
        prediction_list.append(predictions)
        target_list.append(targets)
    y_reco = tf.concat(prediction_list, axis = 0).numpy()
    y_true = tf.concat(target_list, axis = 0)
    y_true = tf.cast(y_true, tf.float32).numpy()
    energy = y_true[:, 0]
    # Bin events in log-energy; xs are the bin centres used as x-coordinates.
    counts, bins = np.histogram(energy, bins = 10)
    xs = (bins[1:] + bins[: -1]) / 2
    w_energies, u_angles = [], []
    e_sig, alpha_sig = [], []
    old_energy, old_alpha = [], []
    zenith, azimuth = [], []
    # Per-bin metrics: median plus lower/upper percentiles for energy and angle.
    for i in range(len(bins)-1):
        idx = np.logical_and(energy > bins[i], energy < bins[i + 1])
        w, u_angle, old = metrics(y_reco[idx, :], y_true[idx, :])
        old_energy.append(old[0])
        old_alpha.append(old[1])
        w_energies.append(w[1])
        u_angles.append(u_angle[1])
        e_sig.append([w[0], w[2]])
        alpha_sig.append([u_angle[0], u_angle[2]])
        zeni, azi=zeni_alpha(y_reco[idx,:], y_true[idx,:]), azi_alpha(y_reco[idx,:], y_true[idx,:])
        zenith.append(zeni)
        azimuth.append(azi)
    zenith, azimuth = np.array(zenith), np.array(azimuth)
    fig, ax = plt.subplots(ncols = 3, nrows = 4, figsize = (12, 20))
    # Overlay the per-bin event-count histogram (log scale, right axis) on the
    # left-column panels.
    axesback=[(0,0), (1,0), (2,0), (3,0)]
    for i,j in axesback:
        a_ = ax[i][j].twinx()
        a_.step(xs, counts, color = "gray", zorder = 10, alpha = 0.7, where = "mid")
        a_.set_yscale("log")
        ax[i][j].set_xlabel("Log(E)")
    # Row structure: new metrics, old metrics, correlation histogram.
    # Row 0: energy reconstruction.
    ax_top = ax[0]
    ax_top[0].errorbar(xs, w_energies,yerr=np.array(e_sig).T, fmt='k.',capsize=2,linewidth=1,ecolor='r',label='data')
    ax_top[0].plot(xs, old_energy, 'bo', label=r"$w(\Delta log(E))$"+'(old metric)')
    ax_top[0].set_title("Energy Performance")
    ax_top[0].set_ylabel(r"$\Delta log(E)$")
    # pull_e=(y_reco[:,0]-tf.reduce_mean(y_reco[:,0]))*np.sqrt(np.abs(y_reco[:,3]))
    # ax_top[1].hist(pull_e, label='Pull plot', bins=50, histtype='step')
    # ax_top[1].set_title("Solid angle pull plot)")
    # ax_top[1].set_title("Energy Performance (old metric)")
    # ax_top[1].set_ylabel(r"$w(\Delta log(E))$")
    ax_top[1].hist2d(y_true[:,0], y_reco[:,0], bins=100,\
        range=[np.percentile(y_true[:,0],[1,99]), np.percentile(y_reco[:,0],[1,99])])
    ax_top[1].set_title("ML Reco/True")
    ax_top[1].set(xlabel="Truth (log(E))", ylabel="ML Reco (log(E))")
    res_e=abs(y_true[:,0]-y_reco[:,0])
    ax_top[2].hist2d(np.abs(y_reco[:,3]), res_e, bins=100, \
        range=[np.percentile(np.abs(y_reco[:,3]),[1,99]), np.percentile(res_e,[1,99])])
    ax_top[2].set_title("ML Kappa correlation with Energy error")
    ax_top[2].set(xlabel=r"$\kappa$", ylabel=r"$\Delta E$")
    for axi in ax_top:
        axi.legend()
    # Row 1: opening-angle (alpha) reconstruction.
    ax_m=ax[1]
    ax_m[0].errorbar(xs, u_angles,yerr=np.array(alpha_sig).T, fmt='k.',capsize=2,linewidth=1,ecolor='r',label=r'Median $\pm \sigma$')
    ax_m[0].plot(xs, old_alpha, 'bo', label=r"$w(\Omega)$"+'(old metric)')
    ax_m[0].set_title("Angle Performance")
    ax_m[0].set_ylabel(r"$\Delta \Omega$")
    alphas=alpha_from_angle(y_reco, y_true)
    # Pull: residual scaled by sqrt(kappa); ~N(0,1) if uncertainties are calibrated.
    pull_alpha=np.array(alphas-tf.reduce_mean(alphas))*np.sqrt(np.abs(y_reco[:,3]))
    pull_alpha=np.reshape(pull_alpha, -1)
    vals, x , _ = ax_m[1].hist(pull_alpha, label='Pull plot', bins=50, histtype='step', density=1)
    ax_m[1].plot(x, norm.pdf(x,0,1))
    ax_m[1].set_title("Solid angle pull plot)")
    # ax_m[1].set_ylabel(r"$w(\Omega)$")
    ax_m[2].hist2d(np.abs(y_reco[:,3]), alphas, bins=100, \
        range=[np.percentile(np.abs(y_reco[:,3]),[1,99]), np.percentile(alphas,[1,99])])
    ax_m[2].set_title("ML Kappa correlation with angle error")
    ax_m[2].set(xlabel=r"$\kappa$", ylabel=r"$\Delta \Omega$")
    for axi in ax_m:
        axi.legend()
    # Row 2: zenith reconstruction.
    ax_z=ax[2]
    ax_z[0].errorbar(xs, zenith[:,1],yerr=[zenith[:,0], zenith[:,2]], fmt='k.',capsize=2,linewidth=1,ecolor='r',label=r'Median $\pm \sigma$')
    ax_z[0].set_title("Zenith Performance")
    ax_z[0].plot(xs, zenith[:,3], 'bo', label='68th')
    ax_z[0].set_ylabel(r"$\Delta \Theta$")
    # NOTE(review): zenith is taken modulo pi/2 here — verify this matches the
    # model's output convention.
    reszeni=np.abs(y_reco[:, 1]%(np.pi/2)-y_true[:,1])
    ax_z[1].hist(reszeni, label = "ML reco - Truth", histtype = "step", bins = 50)
    ax_z[1].hist(y_reco[:, 1]%(np.pi/2), label = "ML reco", histtype = "step", bins = 50)
    ax_z[1].hist(y_true[:, 1], label = "Truth", histtype = "step", bins = 50)
    ax_z[1].set_title("Zenith Perfomance")
    ax_z[1].set_ylabel(r"$\Theta$")
    ax_z[2].hist2d(np.abs(y_reco[:,3]), reszeni, bins=100,\
        range=[np.percentile(np.abs(y_reco[:,3]),[1,99]), np.percentile(reszeni,[1,99])])
    ax_z[2].set_title("ML Kappa correlation with zenith error")
    ax_z[2].set(xlabel=r"$\kappa$", ylabel=r"$\Delta \Theta$")
    for axi in ax_z:
        axi.legend()
    # Row 3: azimuth reconstruction.
    ax_az=ax[3]
    ax_az[0].errorbar(xs, azimuth[:,1],yerr=[azimuth[:,0], azimuth[:,2]], fmt='k.',capsize=2,linewidth=1,ecolor='r',label=r'Median $\pm \sigma$')
    ax_az[0].set_title("Azimuth Performance")
    ax_az[0].plot(xs, azimuth[:,3], 'bo', label='68th')
    ax_az[0].set_ylabel(r"$\Delta \phi$")
    resazi=np.abs(y_reco[:, 2]%(2*np.pi)-y_true[:,2])
    ax_az[1].hist(resazi, label = "ML reco - Truth", histtype = "step", bins = 50)
    ax_az[1].hist(y_reco[:, 2]%(2*np.pi), label = "ML reco", histtype = "step", bins = 50)
    ax_az[1].hist(y_true[:, 2], label = "Truth", histtype = "step", bins = 50)
    ax_az[1].set_title("Azimuth Perfomance")
    ax_az[1].set_ylabel(r"$\phi$")
    ax_az[2].hist2d(np.abs(y_reco[:,3]), resazi, bins=100,\
        range=[np.percentile(np.abs(y_reco[:,3]),[1,99]), np.percentile(resazi,[1,99])])
    ax_az[2].set_title("ML Kappa correlation with azimuth error")
    ax_az[2].set(xlabel=r"$\kappa$", ylabel=r"$\Delta \phi$")
    for axi in ax_az:
        axi.legend()
    fig.tight_layout()
    if save:
        plt.savefig(save_path)
    return fig, ax
def performance_zeniazi(loader, test_step, metrics, save=False, save_path=''):
'''Function to test and plot performance of Graph DL
input should be dom pos x,y,z , time, charge(log10)
target should be energy(log10),zenith angle, azimuthal angle, NOT unit vec
'''
loss = 0
prediction_list, target_list = [], []
for batch in loader:
inputs, targets = batch
predictions, targets, out = test_step(inputs, targets)
loss += out
prediction_list.append(predictions)
target_list.append(targets)
y_reco = tf.concat(prediction_list, axis = 0).numpy()
y_true = tf.concat(target_list, axis = 0)
y_true = tf.cast(y_true, tf.float32).numpy()
energy = y_true[:, 0]
counts, bins = np.histogram(energy, bins = 10)
xs = (bins[1:] + bins[: -1]) / 2
w_energies, u_angles = [], []
e_sig, alpha_sig = [], []
old_energy, old_alpha = [], []
zenith, azimuth = [], []
for i in range(len(bins)-1):
idx = np.logical_and(energy > bins[i], energy < bins[i + 1])
e, old_e, angle, zeni, azi = metrics(y_reco[idx, :], y_true[idx, :])
old_energy.append(old_e)
old_alpha.append(angle[3])
w_energies.append(e[1])
u_angles.append(angle[1])
e_sig.append([e[0], e[2]])
alpha_sig.append([angle[0], angle[2]])
zenith.append(zeni)
azimuth.append(azi)
zenith, azimuth = np.array(zenith), np.array(azimuth)
fig, ax = plt.subplots(ncols = 4, nrows = 3, figsize = (20, 15))
axesback=[(0,0), (0,2), (1,0), (2,0)]
for i,j in axesback:
a_ = ax[i][j].twinx()
a_.step(xs, counts, color = "gray", zorder = 10, alpha = 0.7, where = "mid")
a_.set_yscale("log")
ax[i][j].set_xlabel("Log(E)")
# Energy reconstruction
ax_top = ax[0]
ax_top[0].errorbar(xs, w_energies,yerr=np.array(e_sig).T, fmt='k.',capsize=2,linewidth=1,ecolor='r',label='data')
ax_top[0].plot(xs, old_energy, 'bo', label=r"$w(\Delta log(E))$"+'(old metric)')
ax_top[0].set_title("Energy Performance")
ax_top[0].set_ylabel(r"$\Delta log(E)$")
ax_top[1].hist2d(y_true[:,0], y_reco[:,0], bins=100,\
range=[np.percentile(y_true[:,0],[1,99]), np.percentile(y_reco[:,0],[1,99])])
ax_top[1].set_title("ML Reco/True")
ax_top[1].set(xlabel="Truth (log(E))", ylabel="ML Reco (log(E))")
ax_top[1].plot([np.percentile(y_true[:,0],[1]), np.percentile(y_true[:,0],[99])], [np.percentile(y_true[:,0],[1]), np.percentile(y_true[:,0],[99])], 'w--')
ax_top[2].errorbar(xs, u_angles,yerr=np.array(alpha_sig).T, fmt='k.',capsize=2,linewidth=1,ecolor='r',label=r'Median $\pm \sigma$')
ax_top[2].plot(xs, old_alpha, 'bo', label=r"$w(\Omega)$"+'(old metric)')
ax_top[2].set_title("Angle Performance")
ax_top[2].set_ylabel(r"$\Delta \Omega$")
for axi in ax_top:
axi.legend()
#Zenith reconstructi
ax_z=ax[1]
ax_z[0].errorbar(xs, zenith[:,1],yerr=[zenith[:,0], zenith[:,2]], fmt='k.',capsize=2,linewidth=1,ecolor='r',label=r'Median $\pm \sigma$')
ax_z[0].set_title("Zenith Performance")
ax_z[0].plot(xs, zenith[:,3], 'bo', label='68th')
ax_z[0].set_ylabel(r"$\Delta \Theta$")
reszeni=zeni_res(y_true, y_reco)
ax_z[1].hist(reszeni, label = "ML reco - Truth", histtype = "step", bins = 50)
ax_z[1].hist(y_reco[:, 1], label = "ML reco", histtype = "step", bins = 50)
ax_z[1].hist(y_true[:, 1], label = "Truth", histtype = "step", bins = 50)
ax_z[1].set_title("Zenith Perfomance")
ax_z[1].set_ylabel(r"$\Theta$")
ax_z[2].hist2d(y_true[:,1], y_reco[:,1], bins=100,range=[[0,np.pi], [0,np.pi]])
ax_z[2].set_title("Zenith truth/reco correlation")
ax_z[2].set(xlabel=r"True", ylabel=r"ML reco")
ax_z[2].plot([0, np.pi], [0,np.pi], 'w--')
ax_z[3].hist2d(np.abs(y_reco[:,3]), reszeni, bins=100,\
range=[np.percentile(np.abs(y_reco[:,3]),[0.1,99]), np.percentile(reszeni,[1,99])])
ax_z[3].set_title("ML Kappa correlation with zenith error")
ax_z[3].set(xlabel=r"$\kappa$", ylabel=r"$\Delta \Theta$")
for axi in ax_z:
axi.legend()
#Azimuth reconstruction
ax_az=ax[2]
ax_az[0].errorbar(xs, azimuth[:,1],yerr=[azimuth[:,0], azimuth[:,2]], fmt='k.',capsize=2,linewidth=1,ecolor='r',label=r'Median $\pm \sigma$')
ax_az[0].set_title("Azimuth Performance")
ax_az[0].plot(xs, azimuth[:,3], 'bo', label='68th')
ax_az[0].set_ylabel(r"$\Delta \phi$")
resazi=azi_res(y_true,y_reco)
ax_az[1].hist(resazi, label = "ML reco - Truth", histtype = "step", bins = 50)
ax_az[1].hist(y_reco[:, 2]%(2*np.pi), label = "ML reco", histtype = "step", bins = 50)
ax_az[1].hist(y_true[:, 2], label = "Truth", histtype = "step", bins = 50)
ax_az[1].set_title("Azimuth Perfomance")
ax_az[1].set_ylabel(r"$\phi$")
ax_az[2].hist2d(y_true[:,2], y_reco[:,2], bins=100,\
range=[[0,2*np.pi], [0,2*np.pi]])
ax_az[2].set_title("Zenith truth/reco correlation")
ax_az[2].set(xlabel=r"True", ylabel=r"ML reco")
ax_az[2].plot([0, 2*np.pi], [0,2*np.pi], 'w--')
ax_az[3].hist2d(np.abs(y_reco[:,3]), resazi, bins=100,\
range=[np.percentile(np.abs(y_reco[:,3]),[1,99]), np.percentile(resazi,[1,99])])
ax_az[3].set_title("ML Kappa correlation with azimuth error")
ax_az[3].set(xlabel=r"$\kappa$", ylabel=r"$\Delta \phi$")
for axi in ax_az:
axi.legend()
fig.tight_layout()
if save:
plt.savefig(save_path)
return fig, ax
def performance_vM2D(loader, test_step, metrics, bins=20, save=False, save_path=''):
    '''Function to test and plot performance of Graph DL.

    input should be dom pos x,y,z , time, charge(log10)
    target should be energy(log10),zenith angle, azimuthal angle, NOT unit vec

    Parameters
    ----------
    loader    : iterable yielding (inputs, targets) batches.
    test_step : callable returning (predictions, targets, loss) for a batch.
    metrics   : callable returning (e, old_e, angle, zeni, azi) summary
                statistics for the events of one energy bin.
    bins      : number of energy bins for the binned metrics.
    save      : if True, write the figure to save_path + '.png'.
    save_path : output path stem used when save is True.

    Returns
    -------
    (fig, ax) : matplotlib figure and its 3x4 axes array.
    '''
    loss = 0
    prediction_list, target_list = [], []
    # Run the model over the full loader, accumulating predictions/targets.
    for batch in loader:
        inputs, targets = batch
        predictions, targets, out = test_step(inputs, targets)
        loss += out
        prediction_list.append(predictions)
        target_list.append(targets)
    y_reco = tf.concat(prediction_list, axis=0).numpy()
    y_true = tf.concat(target_list, axis=0)
    y_true = tf.cast(y_true, tf.float32).numpy()
    energy = y_true[:, 0]
    counts, bins = np.histogram(energy, bins=bins)
    xs = (bins[1:] + bins[:-1]) / 2  # bin centres for the binned metrics
    w_energies, u_angles = [], []
    e_sig, alpha_sig = [], []
    old_energy, old_alpha = [], []
    zenith, azimuth = [], []
    # Per-energy-bin summary statistics.
    # NOTE(review): the strict '>' drops events lying exactly on bin edges
    # (in particular the lowest edge) -- confirm this is intended.
    for i in range(len(bins) - 1):
        idx = np.logical_and(energy > bins[i], energy < bins[i + 1])
        e, old_e, angle, zeni, azi = metrics(y_reco[idx, :], y_true[idx, :])
        old_energy.append(old_e)
        old_alpha.append(angle[3])
        w_energies.append(e[1])
        u_angles.append(angle[1])
        e_sig.append([e[0], e[2]])
        alpha_sig.append([angle[0], angle[2]])
        zenith.append(zeni)
        azimuth.append(azi)
    zenith, azimuth = np.array(zenith), np.array(azimuth)
    fig, ax = plt.subplots(ncols=4, nrows=3, figsize=(20, 15))
    # Overlay the per-bin event counts (log scale) on selected panels.
    axesback = [(0, 0), (0, 2), (1, 0), (2, 0)]
    for i, j in axesback:
        a_ = ax[i][j].twinx()
        a_.step(xs, counts, color="gray", zorder=10, alpha=0.7, where="mid")
        a_.set_yscale("log")
        ax[i][j].set_xlabel("Log(E)")
    # Energy reconstruction
    ax_top = ax[0]
    ax_top[0].errorbar(xs, w_energies, yerr=np.array(e_sig).T, fmt='k.', capsize=2, linewidth=1, ecolor='r', label='data')
    ax_top[0].plot(xs, old_energy, 'bo', label=r"$w(\Delta log(E))$" + '(old metric)')
    ax_top[0].set_title("Energy Performance")
    ax_top[0].set_ylabel(r"$\Delta log(E)$")
    ax_top[1].hist2d(y_true[:, 0], y_reco[:, 0], bins=100,
                     range=[np.percentile(y_true[:, 0], [1, 99]), np.percentile(y_reco[:, 0], [1, 99])])
    ax_top[1].set_title("ML Reco/True")
    ax_top[1].set(xlabel="Truth (log(E))", ylabel="ML Reco (log(E))")
    ax_top[1].plot([np.percentile(y_true[:, 0], [1]), np.percentile(y_true[:, 0], [99])], [np.percentile(y_true[:, 0], [1]), np.percentile(y_true[:, 0], [99])], 'w--')
    ax_top[2].errorbar(xs, u_angles, yerr=np.array(alpha_sig).T, fmt='k.', capsize=2, linewidth=1, ecolor='r', label=r'Median $\pm \sigma$')
    ax_top[2].plot(xs, old_alpha, 'bo', label=r"$w(\Omega)$" + '(old metric)')
    ax_top[2].set_title("Angle Performance")
    ax_top[2].set_ylabel(r"$\Delta \Omega$")
    for axi in ax_top:
        axi.legend()
    # Zenith reconstruction
    ax_z = ax[1]
    ax_z[0].errorbar(xs, zenith[:, 1], yerr=[zenith[:, 0], zenith[:, 2]], fmt='k.', capsize=2, linewidth=1, ecolor='r', label=r'Median $\pm \sigma$')
    ax_z[0].set_title("Zenith Performance")
    ax_z[0].plot(xs, zenith[:, 3], 'bo', label='68th')
    ax_z[0].set_ylabel(r"$\Delta \Theta$")
    reszeni = zeni_res(y_true, y_reco)
    ax_z[1].hist(reszeni, label="ML reco - Truth", histtype="step", bins=100)
    ax_z[1].hist(y_reco[:, 1], label="ML reco", histtype="step", bins=100)
    ax_z[1].hist(y_true[:, 1], label="Truth", histtype="step", bins=100)
    # BUGFIX: was "Zenith Perfomance" (typo in displayed title).
    ax_z[1].set_title("Zenith Performance")
    ax_z[1].set_ylabel(r"$\Theta$")
    ax_z[2].hist2d(y_true[:, 1], y_reco[:, 1], bins=100, range=[[0, np.pi], [0, np.pi]])
    ax_z[2].set_title("Zenith truth/reco correlation")
    ax_z[2].set(xlabel=r"True", ylabel=r"ML reco")
    ax_z[2].plot([0, np.pi], [0, np.pi], 'w--')
    ax_z[3].hist2d(np.abs(y_reco[:, 3]), reszeni, bins=100,
                   range=[np.percentile(np.abs(y_reco[:, 3]), [1, 99]), np.percentile(reszeni, [1, 99])])
    ax_z[3].set_title("ML Kappa correlation with zenith error")
    ax_z[3].set(xlabel=r"$\kappa$", ylabel=r"$\Delta \Theta$")
    for axi in ax_z:
        axi.legend()
    # Azimuth reconstruction
    ax_az = ax[2]
    ax_az[0].errorbar(xs, azimuth[:, 1], yerr=[azimuth[:, 0], azimuth[:, 2]], fmt='k.', capsize=2, linewidth=1, ecolor='r', label=r'Median $\pm \sigma$')
    ax_az[0].set_title("Azimuth Performance")
    ax_az[0].plot(xs, azimuth[:, 3], 'bo', label='68th')
    ax_az[0].set_ylabel(r"$\Delta \phi$")
    resazi = azi_res(y_true, y_reco)
    ax_az[1].hist(resazi, label="ML reco - Truth", histtype="step", bins=50)
    ax_az[1].hist(y_reco[:, 2] % (2 * np.pi), label="ML reco", histtype="step", bins=50)
    ax_az[1].hist(y_true[:, 2], label="Truth", histtype="step", bins=50)
    # BUGFIX: was "Azimuth Perfomance" (typo in displayed title).
    ax_az[1].set_title("Azimuth Performance")
    ax_az[1].set_ylabel(r"$\phi$")
    ax_az[2].hist2d(y_true[:, 2], y_reco[:, 2], bins=100,
                    range=[[0, 2 * np.pi], [0, 2 * np.pi]])
    # BUGFIX: title said "Zenith truth/reco correlation" (copy-paste from the
    # zenith row); this panel shows azimuth.
    ax_az[2].set_title("Azimuth truth/reco correlation")
    ax_az[2].set(xlabel=r"True", ylabel=r"ML reco")
    ax_az[2].plot([0, 2 * np.pi], [0, 2 * np.pi], 'w--')
    ax_az[3].hist2d(np.abs(y_reco[:, 4]), resazi, bins=100,
                    range=[np.percentile(np.abs(y_reco[:, 4]), [1, 99]), np.percentile(resazi, [1, 99])])
    ax_az[3].set_title("ML Kappa correlation with azimuth error")
    ax_az[3].set(xlabel=r"$\kappa$", ylabel=r"$\Delta \phi$")
    for axi in ax_az:
        axi.legend()
    fig.tight_layout()
    if save:
        plt.savefig(save_path + '.png')
    return fig, ax
def performance_vM2D_sig(loader, test_step, metrics, bins=20, save=False, save_path=''):
    '''Function to test and plot performance of Graph DL, with pull plots.

    target should be energy(log10),zenith angle, azimuthal angle, NOT unit vec

    Like performance_vM2D, but angles are shown in degrees, the predicted
    kappas are converted to angular sigmas via kap_to_sig, and the top-right
    panel shows the zenith/azimuth pull distributions against a unit Gaussian.

    Parameters
    ----------
    loader    : iterable yielding (inputs, targets) batches (tqdm-wrapped).
    test_step : callable returning (predictions, targets, loss) for a batch.
    metrics   : callable returning (e, old_e, angle, zeni, azi) per energy bin.
    bins      : number of energy bins for the binned metrics.
    save      : if True, write the figure to save_path + '.png'.
    save_path : output path stem used when save is True.

    Returns
    -------
    (fig, ax) : matplotlib figure and its 3x4 axes array.
    '''
    loss = 0
    prediction_list, target_list = [], []
    # Run the model over the full loader, accumulating predictions/targets.
    for batch in tqdm(loader, total=loader.steps_per_epoch):
        inputs, targets = batch
        predictions, targets, out = test_step(inputs, targets)
        loss += out
        prediction_list.append(predictions)
        target_list.append(targets)
    y_reco = tf.concat(prediction_list, axis=0).numpy()
    y_true = tf.concat(target_list, axis=0)
    y_true = tf.cast(y_true, tf.float32).numpy()
    energy = y_true[:, 0]
    counts, bins = np.histogram(energy, bins=bins)
    xs = (bins[1:] + bins[:-1]) / 2  # bin centres for the binned metrics
    w_energies, u_angles = [], []
    e_sig, alpha_sig = [], []
    old_energy, old_alpha = [], []
    zenith, azimuth = [], []
    # Per-energy-bin summary statistics.
    for i in range(len(bins) - 1):
        idx = np.logical_and(energy > bins[i], energy < bins[i + 1])
        e, old_e, angle, zeni, azi = metrics(y_reco[idx, :], y_true[idx, :])
        old_energy.append(old_e)
        old_alpha.append(angle[3])
        w_energies.append(e[1])
        u_angles.append(angle[1])
        e_sig.append([e[0], e[2]])
        alpha_sig.append([angle[0], angle[2]])
        zenith.append(zeni)
        azimuth.append(azi)
    zenith, azimuth = np.array(zenith), np.array(azimuth)
    fig, ax = plt.subplots(ncols=4, nrows=3, figsize=(20, 15))
    # Overlay the per-bin event counts (log scale) on selected panels.
    axesback = [(0, 0), (0, 2), (1, 0), (2, 0)]
    for i, j in axesback:
        a_ = ax[i][j].twinx()
        a_.step(xs, counts, color="gray", zorder=10, alpha=0.7, where="mid")
        a_.set_yscale("log")
        ax[i][j].set_xlabel("Log(E)")
    # Energy reconstruction
    ax_top = ax[0]
    ax_top[0].errorbar(xs, w_energies, yerr=np.array(e_sig).T, fmt='k.', capsize=2, linewidth=1, ecolor='r', label='data')
    ax_top[0].plot(xs, old_energy, 'bo', label=r"$w(\Delta log(E))$" + '(old metric)')
    ax_top[0].set_title("Energy Performance")
    ax_top[0].set_ylabel(r"$\Delta log(E)$")
    ax_top[1].hist2d(y_true[:, 0], y_reco[:, 0], bins=100,
                     range=[np.percentile(y_true[:, 0], [1, 99]), np.percentile(y_reco[:, 0], [1, 99])])
    ax_top[1].set_title("ML Reco/True")
    ax_top[1].set(xlabel="Truth (log(E))", ylabel="ML Reco (log(E))")
    ax_top[1].plot([np.percentile(y_true[:, 0], [1]), np.percentile(y_true[:, 0], [99])], [np.percentile(y_true[:, 0], [1]), np.percentile(y_true[:, 0], [99])], 'w--')
    ax_top[2].errorbar(xs, u_angles, yerr=np.array(alpha_sig).T, fmt='k.', capsize=2, linewidth=1, ecolor='r', label=r'Median $\pm \sigma$')
    ax_top[2].plot(xs, old_alpha, 'bo', label=r"$w(\Omega)$" + '(old metric)')
    ax_top[2].set_title("Angle Performance")
    ax_top[2].set_ylabel(r"$\Delta \Omega$")
    c = 180 / np.pi  # radians -> degrees conversion factor
    for axi in ax_top:
        axi.legend()
    # Zenith reconstruction (degrees)
    ax_z = ax[1]
    ax_z[0].errorbar(xs, zenith[:, 1], yerr=[zenith[:, 0], zenith[:, 2]], fmt='k.', capsize=2, linewidth=1, ecolor='r', label=r'Median $\pm \sigma$')
    ax_z[0].set_title("Zenith Performance")
    ax_z[0].plot(xs, zenith[:, 3], 'bo', label='68th')
    ax_z[0].set_ylabel(r"$\Delta \Theta$")
    reszeni = zeni_res(y_true, y_reco) * c
    ax_z[1].hist(reszeni, label="ML reco - Truth", histtype="step", bins=100)
    ax_z[1].hist(y_reco[:, 1] * c, label="ML reco", histtype="step", bins=100)
    ax_z[1].hist(y_true[:, 1] * c, label="Truth", histtype="step", bins=100)
    # BUGFIX: was "Zenith Perfomance" (typo in displayed title).
    ax_z[1].set_title("Zenith Performance")
    ax_z[1].set_ylabel(r"$\Theta$")
    ax_z[2].hist2d(y_true[:, 1] * c, y_reco[:, 1] * c, bins=100, range=[[0, np.pi * c], [0, np.pi * c]])
    ax_z[2].set_title("Zenith truth/reco correlation")
    ax_z[2].set(xlabel=r"True", ylabel=r"ML reco")
    ax_z[2].plot([0, np.pi * c], [0, np.pi * c], 'w--')
    sigz = kap_to_sig(y_reco[:, 3]) * c
    # Signed zenith residual (mzeni_res) for the sigma-correlation and pulls;
    # overwrites the unsigned residual used in the histograms above.
    reszeni = mzeni_res(y_true, y_reco) * c
    ax_z[3].hist2d(sigz, reszeni, bins=100,
                   range=[np.percentile(sigz, [1, 99]), np.percentile(reszeni, [1, 99])])
    ax_z[3].set_title("ML Kappa correlation with zenith error")
    ax_z[3].set(xlabel=r"$\sigma_{\theta}$", ylabel=r"$\Delta \Theta$")
    for axi in ax_z:
        axi.legend()
    # Azimuth reconstruction (degrees)
    ax_az = ax[2]
    ax_az[0].errorbar(xs, azimuth[:, 1], yerr=[azimuth[:, 0], azimuth[:, 2]], fmt='k.', capsize=2, linewidth=1, ecolor='r', label=r'Median $\pm \sigma$')
    ax_az[0].set_title("Azimuth Performance")
    ax_az[0].plot(xs, azimuth[:, 3], 'bo', label='68th')
    ax_az[0].set_ylabel(r"$\Delta \phi$")
    resazi = azi_res(y_true, y_reco) * c
    ax_az[1].hist(resazi, label="ML reco - Truth", histtype="step", bins=50)
    ax_az[1].hist((y_reco[:, 2] % (2 * np.pi)) * c, label="ML reco", histtype="step", bins=50)
    ax_az[1].hist(y_true[:, 2] * c, label="Truth", histtype="step", bins=50)
    # BUGFIX: was "Azimuth Perfomance" (typo in displayed title).
    ax_az[1].set_title("Azimuth Performance")
    ax_az[1].set_ylabel(r"$\phi$")
    ax_az[2].hist2d(y_true[:, 2] * c, y_reco[:, 2] * c, bins=100,
                    range=[[0, 2 * np.pi * c], [0, 2 * np.pi * c]])
    ax_az[2].set_title("Azimuth truth/reco correlation")
    ax_az[2].set(xlabel=r"True", ylabel=r"ML reco")
    # BUGFIX: the diagonal was drawn in radians ([0, 2*pi]) while both axes
    # are in degrees, so the reference line only covered ~6 degrees.
    ax_az[2].plot([0, 2 * np.pi * c], [0, 2 * np.pi * c], 'w--')
    sigaz = kap_to_sig(y_reco[:, 4]) * c
    resazi = mazi_res(y_true, y_reco) * c
    ax_az[3].hist2d(sigaz, resazi, bins=100,
                    range=[np.percentile(sigaz, [1, 99]), np.percentile(resazi, [1, 99])])
    ax_az[3].set_title("ML Kappa correlation with azimuth error")
    ax_az[3].set(xlabel=r"$\sigma_{\phi}$", ylabel=r"$\Delta \phi$")
    for axi in ax_az:
        axi.legend()
    # Pull distributions: residual / predicted sigma should be ~N(0, 1).
    ax_top[3].hist(resazi / sigaz, bins=100, density=1, alpha=0.4, range=[-6, 6], label='pull_azi')
    _, bins, _ = ax_top[3].hist(reszeni / sigz, bins=100, density=1, alpha=0.4, range=[-6, 6], label='pull_zeni')
    from scipy.stats import norm
    ax_top[3].plot(bins, norm.pdf(bins, 0, 1), label='True unit gauss')
    ax_top[3].legend()
    fig.tight_layout()
    if save:
        plt.savefig(save_path + '.png')
    return fig, ax
def performance_vM23D(loader, test_step, metrics, bins=20, save=False, save_path=''):
    '''Function to test and plot performance of Graph DL (3D von Mises head).

    input should be dom pos x,y,z , time, charge(log10)
    target should be energy(log10),zenith angle, azimuthal angle, NOT unit vec

    Like performance_vM2D, but additionally shows the solid-angle error
    against the third predicted kappa (y_reco[:, 5]) in the top-right panel.

    Parameters
    ----------
    loader    : iterable yielding (inputs, targets) batches.
    test_step : callable returning (predictions, targets, loss) for a batch.
    metrics   : callable returning (e, old_e, angle, zeni, azi) per energy bin.
    bins      : number of energy bins for the binned metrics.
    save      : if True, write the figure to save_path + '.png'.
    save_path : output path stem used when save is True.

    Returns
    -------
    (fig, ax) : matplotlib figure and its 3x4 axes array.
    '''
    loss = 0
    prediction_list, target_list = [], []
    # Run the model over the full loader, accumulating predictions/targets.
    for batch in loader:
        inputs, targets = batch
        predictions, targets, out = test_step(inputs, targets)
        loss += out
        prediction_list.append(predictions)
        target_list.append(targets)
    y_reco = tf.concat(prediction_list, axis=0).numpy()
    y_true = tf.concat(target_list, axis=0)
    y_true = tf.cast(y_true, tf.float32).numpy()
    energy = y_true[:, 0]
    counts, bins = np.histogram(energy, bins=bins)
    xs = (bins[1:] + bins[:-1]) / 2  # bin centres for the binned metrics
    w_energies, u_angles = [], []
    e_sig, alpha_sig = [], []
    old_energy, old_alpha = [], []
    zenith, azimuth = [], []
    # Per-energy-bin summary statistics.
    for i in range(len(bins) - 1):
        idx = np.logical_and(energy > bins[i], energy < bins[i + 1])
        e, old_e, angle, zeni, azi = metrics(y_reco[idx, :], y_true[idx, :])
        old_energy.append(old_e)
        old_alpha.append(angle[3])
        w_energies.append(e[1])
        u_angles.append(angle[1])
        e_sig.append([e[0], e[2]])
        alpha_sig.append([angle[0], angle[2]])
        zenith.append(zeni)
        azimuth.append(azi)
    zenith, azimuth = np.array(zenith), np.array(azimuth)
    fig, ax = plt.subplots(ncols=4, nrows=3, figsize=(25, 15))
    # Overlay the per-bin event counts (log scale) on selected panels.
    axesback = [(0, 0), (0, 2), (1, 0), (2, 0)]
    for i, j in axesback:
        a_ = ax[i][j].twinx()
        a_.step(xs, counts, color="gray", zorder=10, alpha=0.7, where="mid")
        a_.set_yscale("log")
        ax[i][j].set_xlabel("Log(E)")
    # Energy reconstruction
    ax_top = ax[0]
    ax_top[0].errorbar(xs, w_energies, yerr=np.array(e_sig).T, fmt='k.', capsize=2, linewidth=1, ecolor='r', label='data')
    ax_top[0].plot(xs, old_energy, 'bo', label=r"$w(\Delta log(E))$" + '(old metric)')
    ax_top[0].set_title("Energy Performance")
    ax_top[0].set_ylabel(r"$\Delta log(E)$")
    ax_top[1].hist2d(y_true[:, 0], y_reco[:, 0], bins=100,
                     range=[np.percentile(y_true[:, 0], [1, 99]), np.percentile(y_reco[:, 0], [1, 99])])
    ax_top[1].set_title("ML Reco/True")
    ax_top[1].set(xlabel="Truth (log(E))", ylabel="ML Reco (log(E))")
    ax_top[1].plot([np.percentile(y_true[:, 0], [1]), np.percentile(y_true[:, 0], [99])], [np.percentile(y_true[:, 0], [1]), np.percentile(y_true[:, 0], [99])], 'w--')
    ax_top[2].errorbar(xs, u_angles, yerr=np.array(alpha_sig).T, fmt='k.', capsize=2, linewidth=1, ecolor='r', label=r'Median $\pm \sigma$')
    ax_top[2].plot(xs, old_alpha, 'bo', label=r"$w(\Omega)$" + '(old metric)')
    ax_top[2].set_title("Angle Performance")
    ax_top[2].set_ylabel(r"$\Delta \Omega$")
    # Solid-angle error vs. the third predicted kappa.
    sa = alpha_from_angle(y_reco, y_true)
    ax_top[3].hist2d(np.abs(y_reco[:, 5]), sa, bins=100,
                     range=[np.percentile(np.abs(y_reco[:, 5]), [1, 99]), np.percentile(sa, [1, 99])])
    ax_top[3].set_title("ML Kappa correlation with solid angle error")
    ax_top[3].set(xlabel=r"$\kappa$", ylabel=r"$\Delta \Omega$")
    for axi in ax_top:
        axi.legend()
    # Zenith reconstruction
    ax_z = ax[1]
    ax_z[0].errorbar(xs, zenith[:, 1], yerr=[zenith[:, 0], zenith[:, 2]], fmt='k.', capsize=2, linewidth=1, ecolor='r', label=r'Median $\pm \sigma$')
    ax_z[0].set_title("Zenith Performance")
    ax_z[0].plot(xs, zenith[:, 3], 'bo', label='68th')
    ax_z[0].set_ylabel(r"$\Delta \Theta$")
    reszeni = zeni_res(y_true, y_reco)
    ax_z[1].hist(reszeni, label="ML reco - Truth", histtype="step", bins=50)
    ax_z[1].hist(y_reco[:, 1], label="ML reco", histtype="step", bins=50)
    ax_z[1].hist(y_true[:, 1], label="Truth", histtype="step", bins=50)
    # BUGFIX: was "Zenith Perfomance" (typo in displayed title).
    ax_z[1].set_title("Zenith Performance")
    ax_z[1].set_ylabel(r"$\Theta$")
    ax_z[2].hist2d(y_true[:, 1], y_reco[:, 1], bins=100, range=[[0, np.pi], [0, np.pi]])
    ax_z[2].set_title("Zenith truth/reco correlation")
    ax_z[2].set(xlabel=r"True", ylabel=r"ML reco")
    ax_z[2].plot([0, np.pi], [0, np.pi], 'w--')
    ax_z[3].hist2d(np.abs(y_reco[:, 3]), reszeni, bins=100,
                   range=[np.percentile(np.abs(y_reco[:, 3]), [1, 99]), np.percentile(reszeni, [1, 99])])
    ax_z[3].set_title("ML Kappa correlation with zenith error")
    ax_z[3].set(xlabel=r"$\kappa$", ylabel=r"$\Delta \Theta$")
    for axi in ax_z:
        axi.legend()
    # Azimuth reconstruction
    ax_az = ax[2]
    ax_az[0].errorbar(xs, azimuth[:, 1], yerr=[azimuth[:, 0], azimuth[:, 2]], fmt='k.', capsize=2, linewidth=1, ecolor='r', label=r'Median $\pm \sigma$')
    ax_az[0].set_title("Azimuth Performance")
    ax_az[0].plot(xs, azimuth[:, 3], 'bo', label='68th')
    ax_az[0].set_ylabel(r"$\Delta \phi$")
    resazi = azi_res(y_true, y_reco)
    ax_az[1].hist(resazi, label="ML reco - Truth", histtype="step", bins=50)
    ax_az[1].hist(y_reco[:, 2] % (2 * np.pi), label="ML reco", histtype="step", bins=50)
    ax_az[1].hist(y_true[:, 2], label="Truth", histtype="step", bins=50)
    # BUGFIX: was "Azimuth Perfomance" (typo in displayed title).
    ax_az[1].set_title("Azimuth Performance")
    ax_az[1].set_ylabel(r"$\phi$")
    ax_az[2].hist2d(y_true[:, 2], y_reco[:, 2], bins=100,
                    range=[[0, 2 * np.pi], [0, 2 * np.pi]])
    # BUGFIX: title said "Zenith truth/reco correlation" (copy-paste from the
    # zenith row); this panel shows azimuth.
    ax_az[2].set_title("Azimuth truth/reco correlation")
    ax_az[2].set(xlabel=r"True", ylabel=r"ML reco")
    ax_az[2].plot([0, 2 * np.pi], [0, 2 * np.pi], 'w--')
    ax_az[3].hist2d(np.abs(y_reco[:, 4]), resazi, bins=100,
                    range=[np.percentile(np.abs(y_reco[:, 4]), [1, 99]), np.percentile(resazi, [1, 99])])
    ax_az[3].set_title("ML Kappa correlation with azimuth error")
    ax_az[3].set(xlabel=r"$\kappa$", ylabel=r"$\Delta \phi$")
    for axi in ax_az:
        axi.legend()
    fig.tight_layout()
    if save:
        plt.savefig(save_path + '.png')
    return fig, ax
| 41.598693
| 159
| 0.598027
| 5,335
| 31,823
| 3.414433
| 0.044049
| 0.028821
| 0.021739
| 0.014273
| 0.916008
| 0.903382
| 0.891524
| 0.871706
| 0.85886
| 0.854798
| 0
| 0.04394
| 0.191874
| 31,823
| 765
| 160
| 41.598693
| 0.664385
| 0.050498
| 0
| 0.788194
| 0
| 0
| 0.11726
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.024306
| false
| 0
| 0.017361
| 0
| 0.065972
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
f9a5326f213ec7a1c96c7ce0ba919d7a7cd7fb74
| 5,036
|
py
|
Python
|
ppci/arch/riscv/rvc_relocations.py
|
jsdelivrbot/ppci-mirror
|
67195d628275e2332ceaf44c9e13fc58d0877157
|
[
"BSD-2-Clause"
] | null | null | null |
ppci/arch/riscv/rvc_relocations.py
|
jsdelivrbot/ppci-mirror
|
67195d628275e2332ceaf44c9e13fc58d0877157
|
[
"BSD-2-Clause"
] | null | null | null |
ppci/arch/riscv/rvc_relocations.py
|
jsdelivrbot/ppci-mirror
|
67195d628275e2332ceaf44c9e13fc58d0877157
|
[
"BSD-2-Clause"
] | null | null | null |
from ...utils.bitfun import wrap_negative, BitView
from ..encoding import Relocation
from .tokens import RiscvToken, RiscvcToken
import logging
class CRel(Relocation):
    """Base class for RISC-V compressed (RVC) relocations.

    Attributes:
        l: class-level list collecting the reloc addresses that were shrunk
           to a compressed (2-byte) encoding during relaxation. Shared by
           all subclasses via ``CRel.l``.
        name: relocation name used by the relocation registry.
    """
    l = []
    name = 'c_base'

    @classmethod
    def isinsrange(cls, bits, val):
        """Return True if val fits in a signed two's-complement field of
        `bits` bits, i.e. -(2**(bits-1)) <= val <= 2**(bits-1) - 1."""
        msb = 1 << (bits - 1)
        # Directly return the range test instead of an if/else returning
        # True/False (behavior unchanged).
        return -msb <= val <= msb - 1
class CBImm11Relocation(CRel):
    """Relaxable branch relocation: emits a 2-byte compressed jump when the
    11-bit target offset fits, otherwise patches a 4-byte J-type encoding.

    NOTE(review): the opcode bits written here (quadrant 0b01, funct3 0b101)
    look like the RVC c.j encoding -- confirm against the RISC-V C-extension
    spec before relying on this description.
    """
    name = 'cb_imm11'
    token = RiscvToken
    # Sticky per-instance flag: once a site has been relaxed to 2 bytes it
    # stays compressed on later apply() passes.
    changesize = False

    def apply(self, sym_value, data, reloc_value, opt = False):
        """Patch `data` with the branch offset sym_value - reloc_value.

        When `opt` is True, returns (data, rsize) where rsize is the chosen
        encoding size (2 or 4 bytes); otherwise returns data alone.
        Both addresses must be 2-byte aligned.
        """
        logger = logging.getLogger('linker')
        assert sym_value % 2 == 0
        assert reloc_value % 2 == 0
        offset = sym_value - reloc_value
        bv = BitView(data, 0, 4)
        # Parses as: ((not opt) and in-range) or already-relaxed.
        # NOTE(review): precedence makes the condition true for opt=True runs
        # only via self.changesize -- confirm this mixed grouping is intended.
        if not opt and CRel.isinsrange(12, offset) or self.changesize:
            if not opt:
                # Record this site so later passes know it was compressed.
                CRel.l.append(reloc_value)
                self.changesize = True
            # Scatter the 11-bit (halfword-granular) immediate into the
            # compressed instruction's non-contiguous bit fields.
            rel11 = wrap_negative(offset >> 1, 11)
            bv[0:2] = 0b01
            bv[2:3] = rel11 >> 4 & 0x1
            bv[3:6] = rel11 & 0x7
            bv[6:7] = rel11 >> 6 & 0x1
            bv[7:8] = rel11 >> 5 & 0x1
            bv[8:9] = rel11 >> 9 & 0x1
            bv[9:11] = rel11 >> 7 & 0x3
            bv[11:12] = rel11 >> 3 & 0x1
            bv[12:13] = rel11 >> 10 & 0x1
            bv[13:16] = 0b101
            # Upper halfword of the original 4-byte slot; presumably a
            # filler/marker value -- TODO confirm.
            bv[16:32] = 0x1
            rsize = 2
            logger.debug('CBRel inrange: symval:%08x,relocval:%08x,diff:%08x,%s' %(sym_value, reloc_value, offset,self.changesize))
        else:
            logger.debug('CBRel not in range: symval:%08x,relocval:%08x,diff:%08x,%s' %(sym_value, reloc_value, offset,self.changesize))
            # Fall back to the 4-byte encoding: 20-bit immediate scattered
            # into J-type fields (imm[10:1], imm[11], imm[19:12], imm[20]).
            rel20 = wrap_negative(offset >> 1, 20)
            bv[21:31] = rel20 & 0x3FF
            bv[20:21] = rel20 >> 10 & 0x1
            bv[12:20] = rel20 >> 11 & 0xFF
            bv[31:32] = rel20 >> 19 & 0x1
            rsize = 4
        if opt:
            return data, rsize
        else:
            return data
class CBlImm11Relocation(CRel):
    """Relaxable call/branch-and-link relocation: emits a 2-byte compressed
    encoding when the 11-bit target offset fits, otherwise a 4-byte J-type.

    Identical to CBImm11Relocation except for the funct3 bits written
    (0b001 here vs 0b101 there).
    NOTE(review): quadrant 0b01 with funct3 0b001 looks like RVC c.jal --
    confirm against the RISC-V C-extension spec.
    """
    name = 'cbl_imm11'
    token = RiscvToken
    # Sticky per-instance flag: once relaxed to 2 bytes, stays compressed.
    changesize = False

    def apply(self, sym_value, data, reloc_value, opt = False):
        """Patch `data` with the offset sym_value - reloc_value.

        When `opt` is True, returns (data, rsize) where rsize is the chosen
        encoding size (2 or 4 bytes); otherwise returns data alone.
        Both addresses must be 2-byte aligned.
        """
        logger = logging.getLogger('linker')
        assert sym_value % 2 == 0
        assert reloc_value % 2 == 0
        offset = sym_value - reloc_value
        bv = BitView(data, 0, 4)
        # Parses as: ((not opt) and in-range) or already-relaxed.
        if not opt and CRel.isinsrange(12, offset) or self.changesize:
            if not opt:
                # Record this site so later passes know it was compressed.
                CRel.l.append(reloc_value)
                self.changesize = True
            # Scatter the 11-bit (halfword-granular) immediate into the
            # compressed instruction's non-contiguous bit fields.
            rel11 = wrap_negative(offset >> 1, 11)
            bv[0:2] = 0b01
            bv[2:3] = rel11 >> 4 & 0x1
            bv[3:6] = rel11 & 0x7
            bv[6:7] = rel11 >> 6 & 0x1
            bv[7:8] = rel11 >> 5 & 0x1
            bv[8:9] = rel11 >> 9 & 0x1
            bv[9:11] = rel11 >> 7 & 0x3
            bv[11:12] = rel11 >> 3 & 0x1
            bv[12:13] = rel11 >> 10 & 0x1
            bv[13:16] = 0b001
            # Upper halfword of the original 4-byte slot; presumably a
            # filler/marker value -- TODO confirm.
            bv[16:32] = 0x1
            rsize = 2
            logger.debug('CRel in range: symval:%08x,relocval:%08x,diff:%08x,%s' %(sym_value, reloc_value, offset,self.changesize))
        else:
            logger.debug('CRel not in range: symval:%08x,relocval:%08x,diff:%08x,%s' %(sym_value, reloc_value, offset,self.changesize))
            # Fall back to the 4-byte encoding: 20-bit immediate scattered
            # into J-type fields (imm[10:1], imm[11], imm[19:12], imm[20]).
            rel20 = wrap_negative(offset >> 1, 20)
            bv[21:31] = rel20 & 0x3FF
            bv[20:21] = rel20 >> 10 & 0x1
            bv[12:20] = rel20 >> 11 & 0xFF
            bv[31:32] = rel20 >> 19 & 0x1
            rsize = 4
        if opt:
            return data, rsize
        else:
            return data
class BcImm11Relocation(CRel):
    """Non-relaxing 11-bit compressed-jump relocation.

    Patches only the scattered immediate bits of an already-compressed
    instruction; the opcode/funct bits are left untouched.
    """
    name = 'bc_imm11'
    token = RiscvcToken

    # (lo, hi, shift, mask): destination bit-slice in the instruction and
    # which bits of the 11-bit immediate land there.
    _IMM_FIELDS = (
        (2, 3, 4, 0x1),
        (3, 6, 0, 0x7),
        (6, 7, 6, 0x1),
        (7, 8, 5, 0x1),
        (8, 9, 9, 0x1),
        (9, 11, 7, 0x3),
        (11, 12, 3, 0x1),
        (12, 13, 10, 0x1),
    )

    def apply(self, sym_value, data, reloc_value, opt = False):
        """Write the halfword-granular offset sym_value - reloc_value into
        `data`; returns (data, 2) when `opt` is True, else data."""
        assert sym_value % 2 == 0
        assert reloc_value % 2 == 0
        delta = sym_value - reloc_value
        imm = wrap_negative(delta >> 1, 11)
        view = BitView(data, 0, 4)
        for lo, hi, shift, mask in self._IMM_FIELDS:
            view[lo:hi] = (imm >> shift) & mask
        return (data, 2) if opt else data
class BcImm8Relocation(CRel):
    """Non-relaxing 8-bit compressed-branch relocation.

    Patches only the scattered immediate bits of an already-compressed
    instruction; the opcode/funct bits are left untouched.
    """
    name = 'bc_imm8'
    token = RiscvcToken

    # (lo, hi, shift, mask): destination bit-slice in the instruction and
    # which bits of the 8-bit immediate land there.
    _IMM_FIELDS = (
        (2, 3, 4, 0x1),
        (3, 5, 0, 0x3),
        (5, 7, 5, 0x3),
        (10, 12, 2, 0x3),
        (12, 13, 7, 0x1),
    )

    def apply(self, sym_value, data, reloc_value, opt = False):
        """Write the halfword-granular offset sym_value - reloc_value into
        `data`; returns (data, 2) when `opt` is True, else data."""
        assert sym_value % 2 == 0
        assert reloc_value % 2 == 0
        delta = sym_value - reloc_value
        imm = wrap_negative(delta >> 1, 8)
        view = BitView(data, 0, 4)
        for lo, hi, shift, mask in self._IMM_FIELDS:
            view[lo:hi] = (imm >> shift) & mask
        return (data, 2) if opt else data
| 34.731034
| 136
| 0.492653
| 652
| 5,036
| 3.734663
| 0.156442
| 0.041068
| 0.022998
| 0.059138
| 0.802875
| 0.802875
| 0.802875
| 0.792197
| 0.756879
| 0.731006
| 0
| 0.139257
| 0.385425
| 5,036
| 144
| 137
| 34.972222
| 0.647496
| 0
| 0
| 0.746269
| 0
| 0
| 0.053813
| 0.030183
| 0
| 0
| 0.024424
| 0
| 0.059701
| 1
| 0.037313
| false
| 0
| 0.029851
| 0
| 0.268657
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
ddbca682cfec361e860f5c7c0f42794fc72ef6b1
| 83
|
py
|
Python
|
agents/__init__.py
|
arranbrown99/COVID_simulation
|
d6413b75d225717c4bb7ddfdd6a5a898fb95d4d4
|
[
"MIT"
] | null | null | null |
agents/__init__.py
|
arranbrown99/COVID_simulation
|
d6413b75d225717c4bb7ddfdd6a5a898fb95d4d4
|
[
"MIT"
] | null | null | null |
agents/__init__.py
|
arranbrown99/COVID_simulation
|
d6413b75d225717c4bb7ddfdd6a5a898fb95d4d4
|
[
"MIT"
] | null | null | null |
from agents.agents import DeterministicAgent
from agents.agents import RandomAgent
| 27.666667
| 44
| 0.879518
| 10
| 83
| 7.3
| 0.5
| 0.273973
| 0.438356
| 0.60274
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.096386
| 83
| 2
| 45
| 41.5
| 0.973333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
fb04f2816eb7439036aaa0ceb9b1a1406ed048ae
| 7,096
|
py
|
Python
|
otp/nametag/NametagConstants.py
|
ksmit799/Toontown-Source
|
aee8032089f60b528ca9571c0d7cc774d414c085
|
[
"MIT"
] | 8
|
2017-10-10T11:41:01.000Z
|
2021-02-23T12:55:47.000Z
|
otp/nametag/NametagConstants.py
|
ksmit799/Toontown-Source
|
aee8032089f60b528ca9571c0d7cc774d414c085
|
[
"MIT"
] | null | null | null |
otp/nametag/NametagConstants.py
|
ksmit799/Toontown-Source
|
aee8032089f60b528ca9571c0d7cc774d414c085
|
[
"MIT"
] | 2
|
2019-04-06T16:18:23.000Z
|
2021-02-25T06:25:01.000Z
|
# Chat-flag bitmask values (CF*) controlling chat-balloon behavior.
CFNoQuitButton = 256
CFPageButton = 16
CFQuicktalker = 4
CFQuitButton = 32
CFReversed = 64
CFSndOpenchat = 128
CFSpeech = 1
CFThought = 2
CFTimeout = 8
# Nametag color-code categories (CC*): index into NAMETAG_COLORS etc.
CCNormal = 0
CCNoChat = 1
CCNonPlayer = 2
CCSuit = 3
CCToonBuilding = 4
CCSuitBuilding = 5
CCHouseBuilding = 6
CCSpeedChat = 7
CCFreeChat = 8
# NAMETAG_COLORS[cc] -> 4 interaction states, each a 4-tuple of RGBA colors.
# NOTE(review): the meaning of each state/role slot (e.g. normal/hover/
# pressed/disabled; foreground/background) is not shown here -- confirm
# against the Nametag rendering code that consumes this table.
NAMETAG_COLORS = {CCNormal: (((0.3, 0.3, 0.7, 1.0),
                              (0.8, 0.8, 0.8, 0.5),
                              (0.0, 0.0, 0.0, 1.0),
                              (1.0, 1.0, 1.0, 1.0)),
                             ((0.3, 0.3, 0.7, 1.0),
                              (0.2, 0.2, 0.2, 0.6),
                              (1.0, 0.5, 0.5, 1.0),
                              (1.0, 1.0, 1.0, 1.0)),
                             ((0.5, 0.5, 1.0, 1.0),
                              (1.0, 1.0, 1.0, 1.0),
                              (0.0, 0.6, 0.6, 1.0),
                              (1.0, 1.0, 1.0, 1.0)),
                             ((0.3, 0.3, 0.7, 1.0),
                              (0.8, 0.8, 0.8, 0.5),
                              (0.0, 0.0, 0.0, 1.0),
                              (1.0, 1.0, 1.0, 1.0))),
                  CCNoChat: (((0.8, 0.4, 0.0, 1.0),
                              (0.8, 0.8, 0.8, 0.5),
                              (0.0, 0.0, 0.0, 1.0),
                              (1.0, 1.0, 1.0, 1.0)),
                             ((1.0, 0.5, 0.5, 1.0),
                              (0.2, 0.2, 0.2, 0.6),
                              (1.0, 0.5, 0.5, 1.0),
                              (1.0, 1.0, 1.0, 1.0)),
                             ((1.0, 0.5, 0.0, 1.0),
                              (1.0, 1.0, 1.0, 1.0),
                              (0.0, 0.6, 0.6, 1.0),
                              (1.0, 1.0, 1.0, 1.0)),
                             ((0.8, 0.4, 0.0, 1.0),
                              (0.8, 0.8, 0.8, 0.5),
                              (0.0, 0.0, 0.0, 1.0),
                              (1.0, 1.0, 1.0, 1.0))),
                  CCNonPlayer: (((0.8, 0.4, 0.0, 1.0),
                                 (0.8, 0.8, 0.8, 0.5),
                                 (0.0, 0.0, 0.0, 1.0),
                                 (1.0, 1.0, 1.0, 1.0)),
                                ((0.8, 0.4, 0.0, 1.0),
                                 (0.8, 0.8, 0.8, 0.5),
                                 (0.0, 0.0, 0.0, 1.0),
                                 (1.0, 1.0, 1.0, 1.0)),
                                ((0.8, 0.4, 0.0, 1.0),
                                 (0.8, 0.8, 0.8, 0.5),
                                 (0.0, 0.0, 0.0, 1.0),
                                 (1.0, 1.0, 1.0, 1.0)),
                                ((0.8, 0.4, 0.0, 1.0),
                                 (0.8, 0.8, 0.8, 0.5),
                                 (0.0, 0.0, 0.0, 1.0),
                                 (1.0, 1.0, 1.0, 1.0))),
                  CCSuit: (((0.2, 0.2, 0.2, 1.0),
                            (0.8, 0.8, 0.8, 0.5),
                            (0.0, 0.0, 0.0, 1.0),
                            (1.0, 1.0, 1.0, 1.0)),
                           ((0.2, 0.2, 0.2, 1.0),
                            (0.2, 0.2, 0.2, 0.6),
                            (1.0, 0.5, 0.5, 1.0),
                            (1.0, 1.0, 1.0, 1.0)),
                           ((0.4, 0.4, 0.4, 1.0),
                            (1.0, 1.0, 1.0, 0.7),
                            (0.0, 0.6, 0.6, 1.0),
                            (1.0, 1.0, 1.0, 1.0)),
                           ((0.2, 0.2, 0.2, 1.0),
                            (0.8, 0.8, 0.8, 0.5),
                            (0.0, 0.0, 0.0, 1.0),
                            (1.0, 1.0, 1.0, 1.0))),
                  CCSuitBuilding: (((0.5, 0.5, 0.5, 1.0),
                                    (0.8, 0.8, 0.8, 0.5),
                                    (0.0, 0.0, 0.0, 1.0),
                                    (1.0, 1.0, 1.0, 1.0)),
                                   ((0.5, 0.5, 0.5, 1.0),
                                    (0.2, 0.2, 0.2, 0.6),
                                    (1.0, 0.5, 0.5, 1.0),
                                    (1.0, 1.0, 1.0, 1.0)),
                                   ((0.7, 0.7, 0.7, 1.0),
                                    (1.0, 1.0, 1.0, 0.7),
                                    (0.0, 0.6, 0.6, 1.0),
                                    (1.0, 1.0, 1.0, 1.0)),
                                   ((0.5, 0.5, 0.5, 1.0),
                                    (0.8, 0.8, 0.8, 0.5),
                                    (0.0, 0.0, 0.0, 1.0),
                                    (1.0, 1.0, 1.0, 1.0))),
                  CCToonBuilding: (((0.2, 0.6, 0.9, 1.0),
                                    (0.8, 0.8, 0.8, 0.5),
                                    (0.0, 0.0, 0.0, 1.0),
                                    (1.0, 1.0, 1.0, 1.0)),
                                   ((0.2, 0.6, 0.9, 1.0),
                                    (0.8, 0.8, 0.8, 0.5),
                                    (0.0, 0.0, 0.0, 1.0),
                                    (1.0, 1.0, 1.0, 1.0)),
                                   ((0.2, 0.6, 0.9, 1.0),
                                    (0.8, 0.8, 0.8, 0.5),
                                    (0.0, 0.0, 0.0, 1.0),
                                    (1.0, 1.0, 1.0, 1.0)),
                                   ((0.2, 0.6, 0.9, 1.0),
                                    (0.8, 0.8, 0.8, 0.5),
                                    (0.0, 0.0, 0.0, 1.0),
                                    (1.0, 1.0, 1.0, 1.0))),
                  CCHouseBuilding: (((0.2, 0.6, 0.9, 1.0),
                                     (0.8, 0.8, 0.8, 0.5),
                                     (0.0, 0.0, 0.0, 1.0),
                                     (1.0, 1.0, 1.0, 1.0)),
                                    ((0.2, 0.2, 0.5, 1.0),
                                     (0.2, 0.2, 0.2, 0.6),
                                     (1.0, 0.5, 0.5, 1.0),
                                     (1.0, 1.0, 1.0, 1.0)),
                                    ((0.5, 0.5, 1.0, 1.0),
                                     (1.0, 1.0, 1.0, 1.0),
                                     (0.0, 0.6, 0.6, 1.0),
                                     (1.0, 1.0, 1.0, 1.0)),
                                    ((0.0, 0.6, 0.2, 1.0),
                                     (0.8, 0.8, 0.8, 0.5),
                                     (0.0, 0.0, 0.0, 1.0),
                                     (1.0, 1.0, 1.0, 1.0))),
                  CCSpeedChat: (((0.0, 0.6, 0.2, 1.0),
                                 (0.8, 0.8, 0.8, 0.5),
                                 (0.0, 0.0, 0.0, 1.0),
                                 (1.0, 1.0, 1.0, 1.0)),
                                ((0.0, 0.5, 0.0, 1.0),
                                 (0.5, 0.5, 0.5, 0.6),
                                 (1.0, 0.5, 0.5, 1.0),
                                 (1.0, 1.0, 1.0, 1.0)),
                                ((0.0, 0.7, 0.2, 1.0),
                                 (1.0, 1.0, 1.0, 0.7),
                                 (0.0, 0.6, 0.6, 1.0),
                                 (1.0, 1.0, 1.0, 1.0)),
                                ((0.0, 0.6, 0.2, 1.0),
                                 (0.8, 0.8, 0.8, 0.5),
                                 (0.0, 0.0, 0.0, 1.0),
                                 (1.0, 1.0, 1.0, 1.0))),
                  CCFreeChat: (((0.3, 0.3, 0.7, 1.0),
                                (0.8, 0.8, 0.8, 0.5),
                                (0.0, 0.0, 0.0, 1.0),
                                (1.0, 1.0, 1.0, 1.0)),
                               ((0.2, 0.2, 0.5, 1.0),
                                (0.2, 0.2, 0.2, 0.6),
                                (1.0, 0.5, 0.5, 1.0),
                                (1.0, 1.0, 1.0, 1.0)),
                               ((0.5, 0.5, 1.0, 1.0),
                                (1.0, 1.0, 1.0, 1.0),
                                (0.0, 0.6, 0.6, 1.0),
                                (1.0, 1.0, 1.0, 1.0)),
                               ((0.3, 0.3, 0.7, 1.0),
                                (0.8, 0.8, 0.8, 0.5),
                                (0.0, 0.0, 0.0, 1.0),
                                (1.0, 1.0, 1.0, 1.0)))}
# Arrow tint per color code; only CCSuit has a non-default entry.
ARROW_COLORS = {CCSuit: (0.8, 0.4, 0.0, 1.0)}
# Default text word-wrap width per color code.
DEFAULT_WORDWRAPS = {CCNormal: 7.5,
                     CCNoChat: 7.5,
                     CCNonPlayer: 7.5,
                     CCSuit: 7.5,
                     CCToonBuilding: 8.5,
                     CCSuitBuilding: 8.5,
                     CCHouseBuilding: 10.0,
                     CCSpeedChat: 7.5,
                     CCFreeChat: 7.5}
# Whisper types (WT*).
WTNormal = 0
WTQuickTalker = 1
WTSystem = 2
WTBattleSOS = 3
WTEmote = 4
WTToontownBoardingGroup = 5
# WHISPER_COLORS[wt] -> 4 interaction states, each a (text, background) RGBA
# pair. NOTE(review): WTBattleSOS and WTToontownBoardingGroup have no entry
# here -- confirm the consumer supplies a fallback for them.
WHISPER_COLORS = {WTNormal: (((0.0, 0.0, 0.0, 1.0), (0.2, 0.6, 0.8, 0.6)),
                             ((1.0, 0.5, 0.5, 1.0), (1.0, 1.0, 1.0, 0.8)),
                             ((0.0, 0.0, 0.0, 1.0), (0.2, 0.7, 0.9, 0.6)),
                             ((0.0, 0.0, 0.0, 1.0), (0.2, 0.7, 0.8, 0.6))),
                  WTQuickTalker: (((0.0, 0.0, 0.0, 1.0), (0.2, 0.6, 0.8, 0.6)),
                                  ((1.0, 0.5, 0.5, 1.0), (1.0, 1.0, 1.0, 0.8)),
                                  ((0.0, 0.0, 0.0, 1.0), (0.2, 0.7, 0.9, 0.6)),
                                  ((0.0, 0.0, 0.0, 1.0), (0.2, 0.7, 0.8, 0.6))),
                  WTSystem: (((0.0, 0.0, 0.0, 1.0), (0.8, 0.3, 0.6, 0.6)),
                             ((1.0, 0.5, 0.5, 1.0), (1.0, 1.0, 1.0, 0.8)),
                             ((0.0, 0.0, 0.0, 1.0), (0.8, 0.4, 1.0, 0.6)),
                             ((0.0, 0.0, 0.0, 1.0), (0.8, 0.3, 0.6, 0.6))),
                  WTEmote: (((0.0, 0.0, 0.0, 1.0), (0.9, 0.5, 0.1, 0.6)),
                            ((1.0, 0.5, 0.5, 1.0), (1.0, 1.0, 1.0, 0.8)),
                            ((0.0, 0.0, 0.0, 1.0), (0.9, 0.6, 0.2, 0.6)),
                            ((0.0, 0.0, 0.0, 1.0), (0.9, 0.6, 0.1, 0.6)))}
| 36.57732
| 74
| 0.285513
| 1,513
| 7,096
| 1.336418
| 0.030403
| 0.290801
| 0.34273
| 0.367953
| 0.6909
| 0.690406
| 0.690406
| 0.688922
| 0.684965
| 0.680514
| 0
| 0.366592
| 0.436443
| 7,096
| 194
| 75
| 36.57732
| 0.139035
| 0
| 0
| 0.695876
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
fb1685e45778c6773775cd09dd78f076980f151d
| 34
|
py
|
Python
|
src/pyoteapp/version.py
|
bob-anderson-ok/py-ote
|
d3474b3f80e4cc5760b7667214d02607609d4fab
|
[
"MIT"
] | 2
|
2019-01-28T03:58:49.000Z
|
2019-10-18T12:02:30.000Z
|
src/pyoteapp/version.py
|
bob-anderson-ok/py-ote
|
d3474b3f80e4cc5760b7667214d02607609d4fab
|
[
"MIT"
] | null | null | null |
src/pyoteapp/version.py
|
bob-anderson-ok/py-ote
|
d3474b3f80e4cc5760b7667214d02607609d4fab
|
[
"MIT"
] | 2
|
2019-08-15T13:52:16.000Z
|
2019-10-17T00:16:44.000Z
|
def version():
    """Return the current py-ote release identifier as a string."""
    current_release = "3.9.0"
    return current_release
| 11.333333
| 18
| 0.558824
| 6
| 34
| 3.166667
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.115385
| 0.235294
| 34
| 2
| 19
| 17
| 0.615385
| 0
| 0
| 0
| 0
| 0
| 0.147059
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| true
| 0
| 0
| 0.5
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
349bd7ae775811b8c0cf7fa48453d329c97d762e
| 101
|
py
|
Python
|
assets/on_sequence_start.py
|
mabdi/burp-workflows
|
4208a0d9509cea8331884a663b2de6becba544cd
|
[
"Apache-2.0"
] | 1
|
2019-09-21T08:32:32.000Z
|
2019-09-21T08:32:32.000Z
|
assets/on_sequence_start.py
|
mabdi/burp-workflows
|
4208a0d9509cea8331884a663b2de6becba544cd
|
[
"Apache-2.0"
] | null | null | null |
assets/on_sequence_start.py
|
mabdi/burp-workflows
|
4208a0d9509cea8331884a663b2de6becba544cd
|
[
"Apache-2.0"
] | 1
|
2021-05-19T08:55:30.000Z
|
2021-05-19T08:55:30.000Z
|
# on sequence start
def enbale(instance, seq):
    """Guard hook evaluated when a sequence starts; returning False means
    the start-of-sequence action is skipped.

    NOTE(review): "enbale" looks like a typo for "enable" -- kept as-is
    because the host framework presumably resolves hooks by this exact
    name; confirm before renaming.
    """
    return False
def action(instance, seq):
    """Action hook for the on-sequence-start workflow step.

    Intentionally a no-op; returns None like the original `pass` body.
    """
    return None
| 14.428571
| 26
| 0.693069
| 14
| 101
| 5
| 0.785714
| 0.314286
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.217822
| 101
| 7
| 27
| 14.428571
| 0.886076
| 0.168317
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0.25
| 0
| 0.25
| 0.75
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
|
0
| 7
|
34b1feea34c9a14a960b12cc474506d48972f839
| 83,795
|
py
|
Python
|
dohq_teamcity/api/vcs_root_instance_api.py
|
texpine-bgs/teamcity
|
ef379e73e08434bebafa9e3510428102e5927ae9
|
[
"MIT"
] | null | null | null |
dohq_teamcity/api/vcs_root_instance_api.py
|
texpine-bgs/teamcity
|
ef379e73e08434bebafa9e3510428102e5927ae9
|
[
"MIT"
] | null | null | null |
dohq_teamcity/api/vcs_root_instance_api.py
|
texpine-bgs/teamcity
|
ef379e73e08434bebafa9e3510428102e5927ae9
|
[
"MIT"
] | null | null | null |
# coding: utf-8
"""
TeamCity REST API
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen) # noqa: E501
OpenAPI spec version: 2018.1
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
from dohq_teamcity.custom.base_model import TeamCityObject
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from dohq_teamcity.models.entries import Entries # noqa: F401,E501
from dohq_teamcity.models.files import Files # noqa: F401,E501
from dohq_teamcity.models.properties import Properties # noqa: F401,E501
from dohq_teamcity.models.vcs_root_instance import VcsRootInstance # noqa: F401,E501
from dohq_teamcity.models.vcs_root_instances import VcsRootInstances # noqa: F401,E501
from dohq_teamcity.models.file import file # noqa: F401,E501
class VcsRootInstanceApi(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
base_name = 'VcsRootInstance'
def __init__(self, api_client=None):
    # Transport client is injected and merely stored; None is accepted
    # (presumably a default client is provided elsewhere — TODO confirm).
    self.api_client = api_client
def delete_instance_field(self, vcs_root_instance_locator, field, **kwargs):  # noqa: E501
    """delete_instance_field  # noqa: E501

    Synchronous by default; pass async_req=True to receive the request
    thread instead (call .get() on it for the result).

    :param async_req: bool
    :param str vcs_root_instance_locator: (required)
    :param str field: (required)
    :return: None
    """
    kwargs['_return_http_data_only'] = True
    # The generated original returned the helper's result from both the
    # async and sync branches, so a single delegation is equivalent.
    return self.__delete_instance_field_with_http_info(vcs_root_instance_locator, field, **kwargs)  # noqa: E501
def delete_repository_state(self, vcs_root_instance_locator, **kwargs):  # noqa: E501
    """delete_repository_state  # noqa: E501

    Synchronous by default; pass async_req=True to receive the request
    thread instead (call .get() on it for the result).

    :param async_req: bool
    :param str vcs_root_instance_locator: (required)
    :return: None
    """
    kwargs['_return_http_data_only'] = True
    # Both branches of the generated original returned the helper result.
    return self.__delete_repository_state_with_http_info(vcs_root_instance_locator, **kwargs)  # noqa: E501
def get_children(self, path, vcs_root_instance_locator, **kwargs):  # noqa: E501
    """get_children  # noqa: E501

    Synchronous by default; pass async_req=True to receive the request
    thread instead (call .get() on it for the result).

    :param async_req: bool
    :param str path: (required)
    :param str vcs_root_instance_locator: (required)
    :param str base_path:
    :param str locator:
    :param str fields:
    :return: Files
    """
    kwargs['_return_http_data_only'] = True
    # Both branches of the generated original returned the helper result.
    return self.__get_children_with_http_info(path, vcs_root_instance_locator, **kwargs)  # noqa: E501
def get_children_alias(self, path, vcs_root_instance_locator, **kwargs):  # noqa: E501
    """get_children_alias  # noqa: E501

    Synchronous by default; pass async_req=True to receive the request
    thread instead (call .get() on it for the result).

    :param async_req: bool
    :param str path: (required)
    :param str vcs_root_instance_locator: (required)
    :param str base_path:
    :param str locator:
    :param str fields:
    :return: Files
    """
    kwargs['_return_http_data_only'] = True
    # Both branches of the generated original returned the helper result.
    return self.__get_children_alias_with_http_info(path, vcs_root_instance_locator, **kwargs)  # noqa: E501
def get_content(self, path, vcs_root_instance_locator, **kwargs):  # noqa: E501
    """get_content  # noqa: E501

    Synchronous by default; pass async_req=True to receive the request
    thread instead (call .get() on it for the result).

    :param async_req: bool
    :param str path: (required)
    :param str vcs_root_instance_locator: (required)
    :param str response_builder:
    :return: None
    """
    kwargs['_return_http_data_only'] = True
    # Both branches of the generated original returned the helper result.
    return self.__get_content_with_http_info(path, vcs_root_instance_locator, **kwargs)  # noqa: E501
def get_content_alias(self, path, vcs_root_instance_locator, **kwargs):  # noqa: E501
    """get_content_alias  # noqa: E501

    Synchronous by default; pass async_req=True to receive the request
    thread instead (call .get() on it for the result).

    :param async_req: bool
    :param str path: (required)
    :param str vcs_root_instance_locator: (required)
    :return: None
    """
    kwargs['_return_http_data_only'] = True
    # Both branches of the generated original returned the helper result.
    return self.__get_content_alias_with_http_info(path, vcs_root_instance_locator, **kwargs)  # noqa: E501
def get_metadata(self, path, vcs_root_instance_locator, **kwargs):  # noqa: E501
    """get_metadata  # noqa: E501

    Synchronous by default; pass async_req=True to receive the request
    thread instead (call .get() on it for the result).

    :param async_req: bool
    :param str path: (required)
    :param str vcs_root_instance_locator: (required)
    :param str fields:
    :return: file
    """
    kwargs['_return_http_data_only'] = True
    # Both branches of the generated original returned the helper result.
    return self.__get_metadata_with_http_info(path, vcs_root_instance_locator, **kwargs)  # noqa: E501
def get_repository_state(self, vcs_root_instance_locator, **kwargs):  # noqa: E501
    """get_repository_state  # noqa: E501

    Synchronous by default; pass async_req=True to receive the request
    thread instead (call .get() on it for the result).

    :param async_req: bool
    :param str vcs_root_instance_locator: (required)
    :param str fields:
    :return: Entries
    """
    kwargs['_return_http_data_only'] = True
    # Both branches of the generated original returned the helper result.
    return self.__get_repository_state_with_http_info(vcs_root_instance_locator, **kwargs)  # noqa: E501
def get_repository_state_creation_date(self, vcs_root_instance_locator, **kwargs):  # noqa: E501
    """get_repository_state_creation_date  # noqa: E501

    Synchronous by default; pass async_req=True to receive the request
    thread instead (call .get() on it for the result).

    :param async_req: bool
    :param str vcs_root_instance_locator: (required)
    :return: str
    """
    kwargs['_return_http_data_only'] = True
    # Both branches of the generated original returned the helper result.
    return self.__get_repository_state_creation_date_with_http_info(vcs_root_instance_locator, **kwargs)  # noqa: E501
def get_root(self, vcs_root_instance_locator, **kwargs):  # noqa: E501
    """get_root  # noqa: E501

    Synchronous by default; pass async_req=True to receive the request
    thread instead (call .get() on it for the result).

    :param async_req: bool
    :param str vcs_root_instance_locator: (required)
    :param str base_path:
    :param str locator:
    :param str fields:
    :return: Files
    """
    kwargs['_return_http_data_only'] = True
    # Both branches of the generated original returned the helper result.
    return self.__get_root_with_http_info(vcs_root_instance_locator, **kwargs)  # noqa: E501
def get_zipped(self, path, vcs_root_instance_locator, **kwargs):  # noqa: E501
    """get_zipped  # noqa: E501

    Synchronous by default; pass async_req=True to receive the request
    thread instead (call .get() on it for the result).

    :param async_req: bool
    :param str path: (required)
    :param str vcs_root_instance_locator: (required)
    :param str base_path:
    :param str locator:
    :param str name:
    :return: None
    """
    kwargs['_return_http_data_only'] = True
    # Both branches of the generated original returned the helper result.
    return self.__get_zipped_with_http_info(path, vcs_root_instance_locator, **kwargs)  # noqa: E501
def schedule_checking_for_changes(self, **kwargs):  # noqa: E501
    """schedule_checking_for_changes  # noqa: E501

    Synchronous by default; pass async_req=True to receive the request
    thread instead (call .get() on it for the result).

    :param async_req: bool
    :param str locator:
    :param str requestor:
    :param str fields:
    :return: VcsRootInstances
    """
    kwargs['_return_http_data_only'] = True
    # Both branches of the generated original returned the helper result.
    return self.__schedule_checking_for_changes_with_http_info(**kwargs)  # noqa: E501
def schedule_checking_for_changes_0(self, **kwargs):  # noqa: E501
    """schedule_checking_for_changes_0  # noqa: E501

    Synchronous by default; pass async_req=True to receive the request
    thread instead (call .get() on it for the result).

    :param async_req: bool
    :param str locator:
    :param bool ok_on_nothing_found:
    :return: None
    """
    kwargs['_return_http_data_only'] = True
    # Both branches of the generated original returned the helper result.
    return self.__schedule_checking_for_changes_0_with_http_info(**kwargs)  # noqa: E501
def serve_instance(self, vcs_root_instance_locator, **kwargs):  # noqa: E501
    """serve_instance  # noqa: E501

    Synchronous by default; pass async_req=True to receive the request
    thread instead (call .get() on it for the result).

    :param async_req: bool
    :param str vcs_root_instance_locator: (required)
    :param str fields:
    :return: VcsRootInstance
    """
    kwargs['_return_http_data_only'] = True
    # Both branches of the generated original returned the helper result.
    return self.__serve_instance_with_http_info(vcs_root_instance_locator, **kwargs)  # noqa: E501
def serve_instance_field(self, vcs_root_instance_locator, field, **kwargs):  # noqa: E501
    """serve_instance_field  # noqa: E501

    Synchronous by default; pass async_req=True to receive the request
    thread instead (call .get() on it for the result).

    :param async_req: bool
    :param str vcs_root_instance_locator: (required)
    :param str field: (required)
    :return: str
    """
    kwargs['_return_http_data_only'] = True
    # Both branches of the generated original returned the helper result.
    return self.__serve_instance_field_with_http_info(vcs_root_instance_locator, field, **kwargs)  # noqa: E501
def serve_instances(self, **kwargs):  # noqa: E501
    """serve_instances  # noqa: E501

    Synchronous by default; pass async_req=True to receive the request
    thread instead (call .get() on it for the result).

    :param async_req: bool
    :param str locator:
    :param str fields:
    :return: VcsRootInstances
    """
    kwargs['_return_http_data_only'] = True
    # Both branches of the generated original returned the helper result.
    return self.__serve_instances_with_http_info(**kwargs)  # noqa: E501
def serve_root_instance_properties(self, vcs_root_instance_locator, **kwargs):  # noqa: E501
    """serve_root_instance_properties  # noqa: E501

    Synchronous by default; pass async_req=True to receive the request
    thread instead (call .get() on it for the result).

    :param async_req: bool
    :param str vcs_root_instance_locator: (required)
    :param str fields:
    :return: Properties
    """
    kwargs['_return_http_data_only'] = True
    # Both branches of the generated original returned the helper result.
    return self.__serve_root_instance_properties_with_http_info(vcs_root_instance_locator, **kwargs)  # noqa: E501
def set_instance_field(self, vcs_root_instance_locator, field, **kwargs):  # noqa: E501
    """set_instance_field  # noqa: E501

    Synchronous by default; pass async_req=True to receive the request
    thread instead (call .get() on it for the result).

    :param async_req: bool
    :param str vcs_root_instance_locator: (required)
    :param str field: (required)
    :param str body:
    :return: str
    """
    kwargs['_return_http_data_only'] = True
    # Both branches of the generated original returned the helper result.
    return self.__set_instance_field_with_http_info(vcs_root_instance_locator, field, **kwargs)  # noqa: E501
def set_repository_state(self, vcs_root_instance_locator, **kwargs):  # noqa: E501
    """set_repository_state  # noqa: E501

    Synchronous by default; pass async_req=True to receive the request
    thread instead (call .get() on it for the result).

    :param async_req: bool
    :param str vcs_root_instance_locator: (required)
    :param Entries body:
    :param str fields:
    :return: Entries
    """
    kwargs['_return_http_data_only'] = True
    # Both branches of the generated original returned the helper result.
    return self.__set_repository_state_with_http_info(vcs_root_instance_locator, **kwargs)  # noqa: E501
def __delete_instance_field_with_http_info(self, vcs_root_instance_locator, field, **kwargs):  # noqa: E501
    """delete_instance_field  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.__delete_instance_field_with_http_info(vcs_root_instance_locator, field, async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :param str vcs_root_instance_locator: (required)
    :param str field: (required)
    :return: None
    If the method is called asynchronously,
    returns the request thread.
    """
    # Whitelist of accepted kwargs: endpoint parameters plus the generic
    # transport options forwarded to api_client.call_api.
    all_params = ['vcs_root_instance_locator', 'field']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    params = locals()
    # Reject unknown keyword arguments early with a clear message.
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method delete_instance_field" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'vcs_root_instance_locator' is set
    if ('vcs_root_instance_locator' not in params or
            params['vcs_root_instance_locator'] is None):
        raise ValueError("Missing the required parameter `vcs_root_instance_locator` when calling `delete_instance_field`")  # noqa: E501
    # verify the required parameter 'field' is set
    if ('field' not in params or
            params['field'] is None):
        raise ValueError("Missing the required parameter `field` when calling `delete_instance_field`")  # noqa: E501
    collection_formats = {}
    path_params = {}
    # TeamCityObject arguments are reduced to their locator id before
    # being substituted into the URL path.
    if 'vcs_root_instance_locator' in params:
        if isinstance(params['vcs_root_instance_locator'], TeamCityObject):
            path_params['vcsRootInstanceLocator'] = params['vcs_root_instance_locator'].locator_id
        else:
            path_params['vcsRootInstanceLocator'] = params['vcs_root_instance_locator']  # noqa: E501
    if 'field' in params:
        if isinstance(params['field'], TeamCityObject):
            path_params['field'] = params['field'].locator_id
        else:
            path_params['field'] = params['field']  # noqa: E501
    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    # Authentication setting
    auth_settings = []  # noqa: E501
    # Delegate the actual HTTP DELETE to the shared transport client.
    return self.api_client.call_api(
        '/app/rest/vcs-root-instances/{vcsRootInstanceLocator}/{field}', 'DELETE',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type=None,  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def __delete_repository_state_with_http_info(self, vcs_root_instance_locator, **kwargs):  # noqa: E501
    """delete_repository_state  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.__delete_repository_state_with_http_info(vcs_root_instance_locator, async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :param str vcs_root_instance_locator: (required)
    :return: None
    If the method is called asynchronously,
    returns the request thread.
    """
    # Whitelist of accepted kwargs: endpoint parameters plus the generic
    # transport options forwarded to api_client.call_api.
    all_params = ['vcs_root_instance_locator']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    params = locals()
    # Reject unknown keyword arguments early with a clear message.
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method delete_repository_state" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'vcs_root_instance_locator' is set
    if ('vcs_root_instance_locator' not in params or
            params['vcs_root_instance_locator'] is None):
        raise ValueError("Missing the required parameter `vcs_root_instance_locator` when calling `delete_repository_state`")  # noqa: E501
    collection_formats = {}
    path_params = {}
    # TeamCityObject arguments are reduced to their locator id for the URL.
    if 'vcs_root_instance_locator' in params:
        if isinstance(params['vcs_root_instance_locator'], TeamCityObject):
            path_params['vcsRootInstanceLocator'] = params['vcs_root_instance_locator'].locator_id
        else:
            path_params['vcsRootInstanceLocator'] = params['vcs_root_instance_locator']  # noqa: E501
    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    # Authentication setting
    auth_settings = []  # noqa: E501
    # Delegate the actual HTTP DELETE to the shared transport client.
    return self.api_client.call_api(
        '/app/rest/vcs-root-instances/{vcsRootInstanceLocator}/repositoryState', 'DELETE',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type=None,  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def __get_children_with_http_info(self, path, vcs_root_instance_locator, **kwargs):  # noqa: E501
    """get_children  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.__get_children_with_http_info(path, vcs_root_instance_locator, async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :param str path: (required)
    :param str vcs_root_instance_locator: (required)
    :param str base_path:
    :param str locator:
    :param str fields:
    :return: Files
    If the method is called asynchronously,
    returns the request thread.
    """
    # Whitelist of accepted kwargs: endpoint parameters plus the generic
    # transport options forwarded to api_client.call_api.
    all_params = ['path', 'vcs_root_instance_locator', 'base_path', 'locator', 'fields']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    params = locals()
    # Reject unknown keyword arguments early with a clear message.
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_children" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'path' is set
    if ('path' not in params or
            params['path'] is None):
        raise ValueError("Missing the required parameter `path` when calling `get_children`")  # noqa: E501
    # verify the required parameter 'vcs_root_instance_locator' is set
    if ('vcs_root_instance_locator' not in params or
            params['vcs_root_instance_locator'] is None):
        raise ValueError("Missing the required parameter `vcs_root_instance_locator` when calling `get_children`")  # noqa: E501
    # NOTE(review): the whole pattern is optional ('?'), so re.search
    # always matches and this validation can never raise — generator artifact.
    if 'path' in params and not re.search(r'(\/.*)?', params['path']):  # noqa: E501
        raise ValueError(r"Invalid value for parameter `path` when calling `get_children`, must conform to the pattern `/(\/.*)?/`")  # noqa: E501
    collection_formats = {}
    path_params = {}
    # TeamCityObject arguments are reduced to their locator id for the URL.
    if 'path' in params:
        if isinstance(params['path'], TeamCityObject):
            path_params['path'] = params['path'].locator_id
        else:
            path_params['path'] = params['path']  # noqa: E501
    if 'vcs_root_instance_locator' in params:
        if isinstance(params['vcs_root_instance_locator'], TeamCityObject):
            path_params['vcsRootInstanceLocator'] = params['vcs_root_instance_locator'].locator_id
        else:
            path_params['vcsRootInstanceLocator'] = params['vcs_root_instance_locator']  # noqa: E501
    # Optional arguments travel as URL query parameters.
    query_params = []
    if 'base_path' in params:
        query_params.append(('basePath', params['base_path']))  # noqa: E501
    if 'locator' in params:
        query_params.append(('locator', params['locator']))  # noqa: E501
    if 'fields' in params:
        query_params.append(('fields', params['fields']))  # noqa: E501
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    # Authentication setting
    auth_settings = []  # noqa: E501
    # Delegate the actual HTTP GET to the shared transport client.
    return self.api_client.call_api(
        '/app/rest/vcs-root-instances/{vcsRootInstanceLocator}/files/latest/children{path}', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='Files',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def __get_children_alias_with_http_info(self, path, vcs_root_instance_locator, **kwargs):  # noqa: E501
    """get_children_alias  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.__get_children_alias_with_http_info(path, vcs_root_instance_locator, async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :param str path: (required)
    :param str vcs_root_instance_locator: (required)
    :param str base_path:
    :param str locator:
    :param str fields:
    :return: Files
    If the method is called asynchronously,
    returns the request thread.
    """
    # Whitelist of accepted kwargs: endpoint parameters plus the generic
    # transport options forwarded to api_client.call_api.
    all_params = ['path', 'vcs_root_instance_locator', 'base_path', 'locator', 'fields']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    params = locals()
    # Reject unknown keyword arguments early with a clear message.
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_children_alias" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'path' is set
    if ('path' not in params or
            params['path'] is None):
        raise ValueError("Missing the required parameter `path` when calling `get_children_alias`")  # noqa: E501
    # verify the required parameter 'vcs_root_instance_locator' is set
    if ('vcs_root_instance_locator' not in params or
            params['vcs_root_instance_locator'] is None):
        raise ValueError("Missing the required parameter `vcs_root_instance_locator` when calling `get_children_alias`")  # noqa: E501
    # NOTE(review): '(.*)?' matches any string (including empty), so this
    # validation can never raise — generator artifact.
    if 'path' in params and not re.search('(.*)?', params['path']):  # noqa: E501
        raise ValueError("Invalid value for parameter `path` when calling `get_children_alias`, must conform to the pattern `/(.*)?/`")  # noqa: E501
    collection_formats = {}
    path_params = {}
    # TeamCityObject arguments are reduced to their locator id for the URL.
    if 'path' in params:
        if isinstance(params['path'], TeamCityObject):
            path_params['path'] = params['path'].locator_id
        else:
            path_params['path'] = params['path']  # noqa: E501
    if 'vcs_root_instance_locator' in params:
        if isinstance(params['vcs_root_instance_locator'], TeamCityObject):
            path_params['vcsRootInstanceLocator'] = params['vcs_root_instance_locator'].locator_id
        else:
            path_params['vcsRootInstanceLocator'] = params['vcs_root_instance_locator']  # noqa: E501
    # Optional arguments travel as URL query parameters.
    query_params = []
    if 'base_path' in params:
        query_params.append(('basePath', params['base_path']))  # noqa: E501
    if 'locator' in params:
        query_params.append(('locator', params['locator']))  # noqa: E501
    if 'fields' in params:
        query_params.append(('fields', params['fields']))  # noqa: E501
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    # Authentication setting
    auth_settings = []  # noqa: E501
    # Delegate the actual HTTP GET to the shared transport client.
    return self.api_client.call_api(
        '/app/rest/vcs-root-instances/{vcsRootInstanceLocator}/files/latest/{path}', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='Files',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def __get_content_with_http_info(self, path, vcs_root_instance_locator, **kwargs):  # noqa: E501
    """get_content  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.__get_content_with_http_info(path, vcs_root_instance_locator, async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :param str path: (required)
    :param str vcs_root_instance_locator: (required)
    :param str response_builder:
    :return: None
    If the method is called asynchronously,
    returns the request thread.
    """
    # Whitelist of accepted kwargs: endpoint parameters plus the generic
    # transport options forwarded to api_client.call_api.
    all_params = ['path', 'vcs_root_instance_locator', 'response_builder']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    params = locals()
    # Reject unknown keyword arguments early with a clear message.
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_content" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'path' is set
    if ('path' not in params or
            params['path'] is None):
        raise ValueError("Missing the required parameter `path` when calling `get_content`")  # noqa: E501
    # verify the required parameter 'vcs_root_instance_locator' is set
    if ('vcs_root_instance_locator' not in params or
            params['vcs_root_instance_locator'] is None):
        raise ValueError("Missing the required parameter `vcs_root_instance_locator` when calling `get_content`")  # noqa: E501
    # NOTE(review): the whole pattern is optional ('?'), so re.search
    # always matches and this validation can never raise — generator artifact.
    if 'path' in params and not re.search(r'(\/.*)?', params['path']):  # noqa: E501
        raise ValueError(r"Invalid value for parameter `path` when calling `get_content`, must conform to the pattern `/(\/.*)?/`")  # noqa: E501
    collection_formats = {}
    path_params = {}
    # TeamCityObject arguments are reduced to their locator id for the URL.
    if 'path' in params:
        if isinstance(params['path'], TeamCityObject):
            path_params['path'] = params['path'].locator_id
        else:
            path_params['path'] = params['path']  # noqa: E501
    if 'vcs_root_instance_locator' in params:
        if isinstance(params['vcs_root_instance_locator'], TeamCityObject):
            path_params['vcsRootInstanceLocator'] = params['vcs_root_instance_locator'].locator_id
        else:
            path_params['vcsRootInstanceLocator'] = params['vcs_root_instance_locator']  # noqa: E501
    # Optional response builder travels as a URL query parameter.
    query_params = []
    if 'response_builder' in params:
        query_params.append(('responseBuilder', params['response_builder']))  # noqa: E501
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    # Authentication setting
    auth_settings = []  # noqa: E501
    # Delegate the actual HTTP GET to the shared transport client.
    return self.api_client.call_api(
        '/app/rest/vcs-root-instances/{vcsRootInstanceLocator}/files/latest/content{path}', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type=None,  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def __get_content_alias_with_http_info(self, path, vcs_root_instance_locator, **kwargs):  # noqa: E501
    """get_content_alias  # noqa: E501

    Synchronous by default; pass async_req=True to receive the request
    thread instead of the response.

    >>> thread = api.__get_content_alias_with_http_info(path, vcs_root_instance_locator, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str path: (required)
    :param str vcs_root_instance_locator: (required)
    :return: None
        If the method is called asynchronously,
        returns the request thread.
    """
    recognized = {
        'path', 'vcs_root_instance_locator', 'async_req',
        '_return_http_data_only', '_preload_content', '_request_timeout',
    }
    for key in kwargs:
        if key not in recognized:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_content_alias" % key
            )
    params = {'path': path,
              'vcs_root_instance_locator': vcs_root_instance_locator}
    params.update(kwargs)
    # Both path placeholders are mandatory.
    if params['path'] is None:
        raise ValueError("Missing the required parameter `path` when calling `get_content_alias`")  # noqa: E501
    if params['vcs_root_instance_locator'] is None:
        raise ValueError("Missing the required parameter `vcs_root_instance_locator` when calling `get_content_alias`")  # noqa: E501
    # NOTE(review): the whole pattern is optional, so this guard can never
    # trigger; retained verbatim from the generated client.
    if not re.search(r'(\/.*)?', params['path']):  # noqa: E501
        raise ValueError(r"Invalid value for parameter `path` when calling `get_content_alias`, must conform to the pattern `/(\/.*)?/`")  # noqa: E501

    def as_path_value(value):
        # TeamCity model objects are substituted by their locator id.
        return value.locator_id if isinstance(value, TeamCityObject) else value

    path_params = {
        'path': as_path_value(params['path']),
        'vcsRootInstanceLocator': as_path_value(params['vcs_root_instance_locator']),
    }
    # No query string, no body, no authentication for this endpoint.
    return self.api_client.call_api(
        '/app/rest/vcs-root-instances/{vcsRootInstanceLocator}/files/latest/files{path}', 'GET',
        path_params,
        [],
        {},
        body=None,
        post_params=[],
        files={},
        response_type=None,  # noqa: E501
        auth_settings=[],
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def __get_metadata_with_http_info(self, path, vcs_root_instance_locator, **kwargs):  # noqa: E501
    """get_metadata  # noqa: E501

    Synchronous by default; pass async_req=True to receive the request
    thread instead of the response.

    >>> thread = api.__get_metadata_with_http_info(path, vcs_root_instance_locator, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str path: (required)
    :param str vcs_root_instance_locator: (required)
    :param str fields:
    :return: file
        If the method is called asynchronously,
        returns the request thread.
    """
    recognized = {
        'path', 'vcs_root_instance_locator', 'fields', 'async_req',
        '_return_http_data_only', '_preload_content', '_request_timeout',
    }
    for key in kwargs:
        if key not in recognized:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_metadata" % key
            )
    params = {'path': path,
              'vcs_root_instance_locator': vcs_root_instance_locator}
    params.update(kwargs)
    # Both path placeholders are mandatory.
    if params['path'] is None:
        raise ValueError("Missing the required parameter `path` when calling `get_metadata`")  # noqa: E501
    if params['vcs_root_instance_locator'] is None:
        raise ValueError("Missing the required parameter `vcs_root_instance_locator` when calling `get_metadata`")  # noqa: E501
    # NOTE(review): the whole pattern is optional, so this guard can never
    # trigger; retained verbatim from the generated client.
    if not re.search(r'(\/.*)?', params['path']):  # noqa: E501
        raise ValueError(r"Invalid value for parameter `path` when calling `get_metadata`, must conform to the pattern `/(\/.*)?/`")  # noqa: E501

    def as_path_value(value):
        # TeamCity model objects are substituted by their locator id.
        return value.locator_id if isinstance(value, TeamCityObject) else value

    path_params = {
        'path': as_path_value(params['path']),
        'vcsRootInstanceLocator': as_path_value(params['vcs_root_instance_locator']),
    }
    # Optional 'fields' filter goes on the query string.
    query_params = [('fields', params['fields'])] if 'fields' in params else []
    # No body, no authentication for this endpoint.
    return self.api_client.call_api(
        '/app/rest/vcs-root-instances/{vcsRootInstanceLocator}/files/latest/metadata{path}', 'GET',
        path_params,
        query_params,
        {},
        body=None,
        post_params=[],
        files={},
        response_type='file',  # noqa: E501
        auth_settings=[],
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def __get_repository_state_with_http_info(self, vcs_root_instance_locator, **kwargs):  # noqa: E501
    """get_repository_state  # noqa: E501

    Synchronous by default; pass async_req=True to receive the request
    thread instead of the response.

    >>> thread = api.__get_repository_state_with_http_info(vcs_root_instance_locator, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str vcs_root_instance_locator: (required)
    :param str fields:
    :return: Entries
        If the method is called asynchronously,
        returns the request thread.
    """
    recognized = {
        'vcs_root_instance_locator', 'fields', 'async_req',
        '_return_http_data_only', '_preload_content', '_request_timeout',
    }
    for key in kwargs:
        if key not in recognized:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_repository_state" % key
            )
    params = {'vcs_root_instance_locator': vcs_root_instance_locator}
    params.update(kwargs)
    # The instance locator is mandatory.
    if params['vcs_root_instance_locator'] is None:
        raise ValueError("Missing the required parameter `vcs_root_instance_locator` when calling `get_repository_state`")  # noqa: E501

    locator = params['vcs_root_instance_locator']
    if isinstance(locator, TeamCityObject):
        # TeamCity model objects are substituted by their locator id.
        locator = locator.locator_id
    path_params = {'vcsRootInstanceLocator': locator}
    # Optional 'fields' filter goes on the query string.
    query_params = [('fields', params['fields'])] if 'fields' in params else []
    # No body, no authentication for this endpoint.
    return self.api_client.call_api(
        '/app/rest/vcs-root-instances/{vcsRootInstanceLocator}/repositoryState', 'GET',
        path_params,
        query_params,
        {},
        body=None,
        post_params=[],
        files={},
        response_type='Entries',  # noqa: E501
        auth_settings=[],
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def __get_repository_state_creation_date_with_http_info(self, vcs_root_instance_locator, **kwargs):  # noqa: E501
    """get_repository_state_creation_date  # noqa: E501

    Synchronous by default; pass async_req=True to receive the request
    thread instead of the response.

    >>> thread = api.__get_repository_state_creation_date_with_http_info(vcs_root_instance_locator, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str vcs_root_instance_locator: (required)
    :return: str
        If the method is called asynchronously,
        returns the request thread.
    """
    recognized = {
        'vcs_root_instance_locator', 'async_req',
        '_return_http_data_only', '_preload_content', '_request_timeout',
    }
    for key in kwargs:
        if key not in recognized:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_repository_state_creation_date" % key
            )
    params = {'vcs_root_instance_locator': vcs_root_instance_locator}
    params.update(kwargs)
    # The instance locator is mandatory.
    if params['vcs_root_instance_locator'] is None:
        raise ValueError("Missing the required parameter `vcs_root_instance_locator` when calling `get_repository_state_creation_date`")  # noqa: E501

    locator = params['vcs_root_instance_locator']
    if isinstance(locator, TeamCityObject):
        # TeamCity model objects are substituted by their locator id.
        locator = locator.locator_id
    path_params = {'vcsRootInstanceLocator': locator}
    # No query string, no body, no authentication for this endpoint.
    return self.api_client.call_api(
        '/app/rest/vcs-root-instances/{vcsRootInstanceLocator}/repositoryState/creationDate', 'GET',
        path_params,
        [],
        {},
        body=None,
        post_params=[],
        files={},
        response_type='str',  # noqa: E501
        auth_settings=[],
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def __get_root_with_http_info(self, vcs_root_instance_locator, **kwargs):  # noqa: E501
    """get_root  # noqa: E501

    Synchronous by default; pass async_req=True to receive the request
    thread instead of the response.

    >>> thread = api.__get_root_with_http_info(vcs_root_instance_locator, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str vcs_root_instance_locator: (required)
    :param str base_path:
    :param str locator:
    :param str fields:
    :return: Files
        If the method is called asynchronously,
        returns the request thread.
    """
    recognized = {
        'vcs_root_instance_locator', 'base_path', 'locator', 'fields',
        'async_req', '_return_http_data_only', '_preload_content',
        '_request_timeout',
    }
    for key in kwargs:
        if key not in recognized:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_root" % key
            )
    params = {'vcs_root_instance_locator': vcs_root_instance_locator}
    params.update(kwargs)
    # The instance locator is mandatory.
    if params['vcs_root_instance_locator'] is None:
        raise ValueError("Missing the required parameter `vcs_root_instance_locator` when calling `get_root`")  # noqa: E501

    instance = params['vcs_root_instance_locator']
    if isinstance(instance, TeamCityObject):
        # TeamCity model objects are substituted by their locator id.
        instance = instance.locator_id
    path_params = {'vcsRootInstanceLocator': instance}
    # Optional filters map from python_name to the wire name.
    query_params = []
    for wire_name, key in (('basePath', 'base_path'),
                           ('locator', 'locator'),
                           ('fields', 'fields')):
        if key in params:
            query_params.append((wire_name, params[key]))
    # No body, no authentication for this endpoint.
    return self.api_client.call_api(
        '/app/rest/vcs-root-instances/{vcsRootInstanceLocator}/files/latest', 'GET',
        path_params,
        query_params,
        {},
        body=None,
        post_params=[],
        files={},
        response_type='Files',  # noqa: E501
        auth_settings=[],
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def __get_zipped_with_http_info(self, path, vcs_root_instance_locator, **kwargs):  # noqa: E501
    """get_zipped  # noqa: E501

    Synchronous by default; pass async_req=True to receive the request
    thread instead of the response.

    >>> thread = api.__get_zipped_with_http_info(path, vcs_root_instance_locator, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str path: (required)
    :param str vcs_root_instance_locator: (required)
    :param str base_path:
    :param str locator:
    :param str name:
    :return: None
        If the method is called asynchronously,
        returns the request thread.
    """
    recognized = {
        'path', 'vcs_root_instance_locator', 'base_path', 'locator',
        'name', 'async_req', '_return_http_data_only',
        '_preload_content', '_request_timeout',
    }
    for key in kwargs:
        if key not in recognized:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_zipped" % key
            )
    params = {'path': path,
              'vcs_root_instance_locator': vcs_root_instance_locator}
    params.update(kwargs)
    # Both path placeholders are mandatory.
    if params['path'] is None:
        raise ValueError("Missing the required parameter `path` when calling `get_zipped`")  # noqa: E501
    if params['vcs_root_instance_locator'] is None:
        raise ValueError("Missing the required parameter `vcs_root_instance_locator` when calling `get_zipped`")  # noqa: E501
    # NOTE(review): the whole pattern is optional, so this guard can never
    # trigger; retained verbatim from the generated client.
    if not re.search(r'(\/.*)?', params['path']):  # noqa: E501
        raise ValueError(r"Invalid value for parameter `path` when calling `get_zipped`, must conform to the pattern `/(\/.*)?/`")  # noqa: E501

    def as_path_value(value):
        # TeamCity model objects are substituted by their locator id.
        return value.locator_id if isinstance(value, TeamCityObject) else value

    path_params = {
        'path': as_path_value(params['path']),
        'vcsRootInstanceLocator': as_path_value(params['vcs_root_instance_locator']),
    }
    # Optional filters map from python_name to the wire name.
    query_params = []
    for wire_name, key in (('basePath', 'base_path'),
                           ('locator', 'locator'),
                           ('name', 'name')):
        if key in params:
            query_params.append((wire_name, params[key]))
    # No body, no authentication for this endpoint.
    return self.api_client.call_api(
        '/app/rest/vcs-root-instances/{vcsRootInstanceLocator}/files/latest/archived{path}', 'GET',
        path_params,
        query_params,
        {},
        body=None,
        post_params=[],
        files={},
        response_type=None,  # noqa: E501
        auth_settings=[],
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def __schedule_checking_for_changes_with_http_info(self, **kwargs):  # noqa: E501
    """schedule_checking_for_changes  # noqa: E501

    Synchronous by default; pass async_req=True to receive the request
    thread instead of the response.

    >>> thread = api.__schedule_checking_for_changes_with_http_info(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str locator:
    :param str requestor:
    :param str fields:
    :return: VcsRootInstances
        If the method is called asynchronously,
        returns the request thread.
    """
    recognized = {
        'locator', 'requestor', 'fields', 'async_req',
        '_return_http_data_only', '_preload_content', '_request_timeout',
    }
    for key in kwargs:
        if key not in recognized:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method schedule_checking_for_changes" % key
            )
    params = dict(kwargs)
    # Every argument of this endpoint is an optional query parameter.
    query_params = []
    for name in ('locator', 'requestor', 'fields'):
        if name in params:
            query_params.append((name, params[name]))
    # POST with no path placeholders, no body, no authentication.
    return self.api_client.call_api(
        '/app/rest/vcs-root-instances/checkingForChangesQueue', 'POST',
        {},
        query_params,
        {},
        body=None,
        post_params=[],
        files={},
        response_type='VcsRootInstances',  # noqa: E501
        auth_settings=[],
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def __schedule_checking_for_changes_0_with_http_info(self, **kwargs):  # noqa: E501
    """schedule_checking_for_changes_0  # noqa: E501

    Synchronous by default; pass async_req=True to receive the request
    thread instead of the response.

    >>> thread = api.__schedule_checking_for_changes_0_with_http_info(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str locator:
    :param bool ok_on_nothing_found:
    :return: None
        If the method is called asynchronously,
        returns the request thread.
    """
    recognized = {
        'locator', 'ok_on_nothing_found', 'async_req',
        '_return_http_data_only', '_preload_content', '_request_timeout',
    }
    for key in kwargs:
        if key not in recognized:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method schedule_checking_for_changes_0" % key
            )
    params = dict(kwargs)
    # Every argument of this endpoint is an optional query parameter;
    # python_name maps to the camelCase wire name.
    query_params = []
    for wire_name, key in (('locator', 'locator'),
                           ('okOnNothingFound', 'ok_on_nothing_found')):
        if key in params:
            query_params.append((wire_name, params[key]))
    # POST with no path placeholders, no body, no authentication.
    return self.api_client.call_api(
        '/app/rest/vcs-root-instances/commitHookNotification', 'POST',
        {},
        query_params,
        {},
        body=None,
        post_params=[],
        files={},
        response_type=None,  # noqa: E501
        auth_settings=[],
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def __serve_instance_with_http_info(self, vcs_root_instance_locator, **kwargs):  # noqa: E501
    """serve_instance  # noqa: E501

    Synchronous by default; pass async_req=True to receive the request
    thread instead of the response.

    >>> thread = api.__serve_instance_with_http_info(vcs_root_instance_locator, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str vcs_root_instance_locator: (required)
    :param str fields:
    :return: VcsRootInstance
        If the method is called asynchronously,
        returns the request thread.
    """
    recognized = {
        'vcs_root_instance_locator', 'fields', 'async_req',
        '_return_http_data_only', '_preload_content', '_request_timeout',
    }
    for key in kwargs:
        if key not in recognized:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method serve_instance" % key
            )
    params = {'vcs_root_instance_locator': vcs_root_instance_locator}
    params.update(kwargs)
    # The instance locator is mandatory.
    if params['vcs_root_instance_locator'] is None:
        raise ValueError("Missing the required parameter `vcs_root_instance_locator` when calling `serve_instance`")  # noqa: E501

    locator = params['vcs_root_instance_locator']
    if isinstance(locator, TeamCityObject):
        # TeamCity model objects are substituted by their locator id.
        locator = locator.locator_id
    path_params = {'vcsRootInstanceLocator': locator}
    # Optional 'fields' filter goes on the query string.
    query_params = [('fields', params['fields'])] if 'fields' in params else []
    # No body, no authentication for this endpoint.
    return self.api_client.call_api(
        '/app/rest/vcs-root-instances/{vcsRootInstanceLocator}', 'GET',
        path_params,
        query_params,
        {},
        body=None,
        post_params=[],
        files={},
        response_type='VcsRootInstance',  # noqa: E501
        auth_settings=[],
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def __serve_instance_field_with_http_info(self, vcs_root_instance_locator, field, **kwargs):  # noqa: E501
    """serve_instance_field  # noqa: E501

    Synchronous by default; pass async_req=True to receive the request
    thread instead of the response.

    >>> thread = api.__serve_instance_field_with_http_info(vcs_root_instance_locator, field, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str vcs_root_instance_locator: (required)
    :param str field: (required)
    :return: str
        If the method is called asynchronously,
        returns the request thread.
    """
    recognized = {
        'vcs_root_instance_locator', 'field', 'async_req',
        '_return_http_data_only', '_preload_content', '_request_timeout',
    }
    for key in kwargs:
        if key not in recognized:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method serve_instance_field" % key
            )
    params = {'vcs_root_instance_locator': vcs_root_instance_locator,
              'field': field}
    params.update(kwargs)
    # Both path placeholders are mandatory.
    if params['vcs_root_instance_locator'] is None:
        raise ValueError("Missing the required parameter `vcs_root_instance_locator` when calling `serve_instance_field`")  # noqa: E501
    if params['field'] is None:
        raise ValueError("Missing the required parameter `field` when calling `serve_instance_field`")  # noqa: E501

    def as_path_value(value):
        # TeamCity model objects are substituted by their locator id.
        return value.locator_id if isinstance(value, TeamCityObject) else value

    path_params = {
        'vcsRootInstanceLocator': as_path_value(params['vcs_root_instance_locator']),
        'field': as_path_value(params['field']),
    }
    # No query string, no body, no authentication for this endpoint.
    return self.api_client.call_api(
        '/app/rest/vcs-root-instances/{vcsRootInstanceLocator}/{field}', 'GET',
        path_params,
        [],
        {},
        body=None,
        post_params=[],
        files={},
        response_type='str',  # noqa: E501
        auth_settings=[],
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def __serve_instances_with_http_info(self, **kwargs):  # noqa: E501
    """serve_instances  # noqa: E501

    Synchronous by default; pass async_req=True to receive the request
    thread instead of the response.

    >>> thread = api.__serve_instances_with_http_info(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str locator:
    :param str fields:
    :return: VcsRootInstances
        If the method is called asynchronously,
        returns the request thread.
    """
    recognized = {
        'locator', 'fields', 'async_req', '_return_http_data_only',
        '_preload_content', '_request_timeout',
    }
    for key in kwargs:
        if key not in recognized:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method serve_instances" % key
            )
    params = dict(kwargs)
    # Every argument of this endpoint is an optional query parameter.
    query_params = []
    for name in ('locator', 'fields'):
        if name in params:
            query_params.append((name, params[name]))
    # No path placeholders, no body, no authentication.
    return self.api_client.call_api(
        '/app/rest/vcs-root-instances', 'GET',
        {},
        query_params,
        {},
        body=None,
        post_params=[],
        files={},
        response_type='VcsRootInstances',  # noqa: E501
        auth_settings=[],
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def __serve_root_instance_properties_with_http_info(self, vcs_root_instance_locator, **kwargs):  # noqa: E501
    """serve_root_instance_properties  # noqa: E501

    Synchronous by default; pass async_req=True to receive the request
    thread instead of the response.

    >>> thread = api.__serve_root_instance_properties_with_http_info(vcs_root_instance_locator, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str vcs_root_instance_locator: (required)
    :param str fields:
    :return: Properties
        If the method is called asynchronously,
        returns the request thread.
    """
    recognized = {
        'vcs_root_instance_locator', 'fields', 'async_req',
        '_return_http_data_only', '_preload_content', '_request_timeout',
    }
    for key in kwargs:
        if key not in recognized:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method serve_root_instance_properties" % key
            )
    params = {'vcs_root_instance_locator': vcs_root_instance_locator}
    params.update(kwargs)
    # The instance locator is mandatory.
    if params['vcs_root_instance_locator'] is None:
        raise ValueError("Missing the required parameter `vcs_root_instance_locator` when calling `serve_root_instance_properties`")  # noqa: E501

    locator = params['vcs_root_instance_locator']
    if isinstance(locator, TeamCityObject):
        # TeamCity model objects are substituted by their locator id.
        locator = locator.locator_id
    path_params = {'vcsRootInstanceLocator': locator}
    # Optional 'fields' filter goes on the query string.
    query_params = [('fields', params['fields'])] if 'fields' in params else []
    # No body, no authentication for this endpoint.
    return self.api_client.call_api(
        '/app/rest/vcs-root-instances/{vcsRootInstanceLocator}/properties', 'GET',
        path_params,
        query_params,
        {},
        body=None,
        post_params=[],
        files={},
        response_type='Properties',  # noqa: E501
        auth_settings=[],
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def __set_instance_field_with_http_info(self, vcs_root_instance_locator, field, **kwargs):  # noqa: E501
    """set_instance_field  # noqa: E501

    Synchronous by default; pass async_req=True to receive the request
    thread instead of the response.

    >>> thread = api.__set_instance_field_with_http_info(vcs_root_instance_locator, field, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str vcs_root_instance_locator: (required)
    :param str field: (required)
    :param str body:
    :return: str
        If the method is called asynchronously,
        returns the request thread.
    """
    recognized = {
        'vcs_root_instance_locator', 'field', 'body', 'async_req',
        '_return_http_data_only', '_preload_content', '_request_timeout',
    }
    for key in kwargs:
        if key not in recognized:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method set_instance_field" % key
            )
    params = {'vcs_root_instance_locator': vcs_root_instance_locator,
              'field': field}
    params.update(kwargs)
    # Both path placeholders are mandatory.
    if params['vcs_root_instance_locator'] is None:
        raise ValueError("Missing the required parameter `vcs_root_instance_locator` when calling `set_instance_field`")  # noqa: E501
    if params['field'] is None:
        raise ValueError("Missing the required parameter `field` when calling `set_instance_field`")  # noqa: E501

    def as_path_value(value):
        # TeamCity model objects are substituted by their locator id.
        return value.locator_id if isinstance(value, TeamCityObject) else value

    path_params = {
        'vcsRootInstanceLocator': as_path_value(params['vcs_root_instance_locator']),
        'field': as_path_value(params['field']),
    }
    # PUT carries the optional request body; no authentication configured.
    return self.api_client.call_api(
        '/app/rest/vcs-root-instances/{vcsRootInstanceLocator}/{field}', 'PUT',
        path_params,
        [],
        {},
        body=params.get('body'),
        post_params=[],
        files={},
        response_type='str',  # noqa: E501
        auth_settings=[],
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def __set_repository_state_with_http_info(self, vcs_root_instance_locator, **kwargs):  # noqa: E501
    """set_repository_state  # noqa: E501

    PUT the repository state of a VCS root instance
    (``/app/rest/vcs-root-instances/{vcsRootInstanceLocator}/repositoryState``).

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.__set_repository_state_with_http_info(vcs_root_instance_locator, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str vcs_root_instance_locator: (required)
    :param Entries body: new repository state to set
    :param str fields: optional response field filter
    :return: Entries
             If the method is called asynchronously,
             returns the request thread.
    :raises TypeError: if an unknown keyword argument is supplied
    :raises ValueError: if ``vcs_root_instance_locator`` is missing/None
    """
    # Endpoint parameters plus the generic request-control kwargs accepted
    # by every generated API method.
    all_params = ['vcs_root_instance_locator', 'body', 'fields']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Fold **kwargs into the locals() dict, rejecting anything unexpected.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method set_repository_state" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'vcs_root_instance_locator' is set
    if ('vcs_root_instance_locator' not in params or
            params['vcs_root_instance_locator'] is None):
        raise ValueError("Missing the required parameter `vcs_root_instance_locator` when calling `set_repository_state`")  # noqa: E501

    collection_formats = {}

    path_params = {}
    if 'vcs_root_instance_locator' in params:
        # A TeamCityObject is unwrapped to its locator id; plain strings are
        # used verbatim as the path segment.
        if isinstance(params['vcs_root_instance_locator'], TeamCityObject):
            path_params['vcsRootInstanceLocator'] = params['vcs_root_instance_locator'].locator_id
        else:
            path_params['vcsRootInstanceLocator'] = params['vcs_root_instance_locator']  # noqa: E501

    query_params = []
    if 'fields' in params:
        query_params.append(('fields', params['fields']))  # noqa: E501

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    if 'body' in params:
        body_params = params['body']
    # Authentication setting
    auth_settings = []  # noqa: E501

    return self.api_client.call_api(
        '/app/rest/vcs-root-instances/{vcsRootInstanceLocator}/repositoryState', 'PUT',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='Entries',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
| 42.449341
| 155
| 0.623665
| 9,532
| 83,795
| 5.163764
| 0.020772
| 0.041669
| 0.083196
| 0.121574
| 0.97944
| 0.976148
| 0.973142
| 0.967067
| 0.966539
| 0.962455
| 0
| 0.013915
| 0.285578
| 83,795
| 1,973
| 156
| 42.470857
| 0.808285
| 0.275756
| 0
| 0.823789
| 1
| 0.004405
| 0.237017
| 0.121363
| 0
| 0
| 0
| 0
| 0
| 1
| 0.034361
| false
| 0
| 0.008811
| 0
| 0.095154
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
34cb793a27a320ceff0981379e37f6a86c619100
| 2,608
|
py
|
Python
|
Python_lib/wechat_project/ref/cron_manager.py
|
mndarren/CPP-Lib
|
d821e5468d7c76f5a28db28b12670d6a7bce7360
|
[
"Apache-2.0"
] | 5
|
2017-10-25T15:34:37.000Z
|
2020-06-28T17:58:55.000Z
|
Python_lib/wechat_project/ref/cron_manager.py
|
mndarren/CPP-Lib
|
d821e5468d7c76f5a28db28b12670d6a7bce7360
|
[
"Apache-2.0"
] | null | null | null |
Python_lib/wechat_project/ref/cron_manager.py
|
mndarren/CPP-Lib
|
d821e5468d7c76f5a28db28b12670d6a7bce7360
|
[
"Apache-2.0"
] | 4
|
2018-05-19T20:43:31.000Z
|
2020-06-28T17:09:16.000Z
|
import argparse
import os ,sys
import logging
from crontab import CronTab
"""
Task Scheduler
==========
This module manages periodic tasks using cron.
"""
class CronManager:
    """Manage periodic tasks in the current user's crontab.

    Each ``add_*`` method creates a new cron job for ``command``, sets its
    schedule, writes the crontab and returns True.  The ``name`` and
    ``environment`` parameters are accepted for interface compatibility but
    are currently unused.
    """

    def __init__(self):
        # Operate on the crontab of the invoking user.
        self.cron = CronTab(user=True)

    def _install(self, cron_job):
        """Enable *cron_job*, persist the crontab, echo it and return True."""
        cron_job.enable()
        self.cron.write()
        rendered = self.cron.render()
        if rendered:
            print(rendered)
        return True

    def add_minutely(self, name, user, command, environment=None):
        """
        Add a cron task that runs every 2 minutes
        """
        cron_job = self.cron.new(command=command, user=user)
        cron_job.minute.every(2)
        return self._install(cron_job)

    def add_hourly(self, name, user, command, environment=None):
        """
        Add an hourly cron task (minute 0 of every hour)
        """
        cron_job = self.cron.new(command=command, user=user)
        cron_job.minute.on(0)
        cron_job.hour.during(0, 23)
        return self._install(cron_job)

    def add_daily(self, name, user, command, environment=None):
        """
        Add a daily cron task (midnight every day)
        """
        cron_job = self.cron.new(command=command, user=user)
        cron_job.minute.on(0)
        cron_job.hour.on(0)
        return self._install(cron_job)

    def add_weekly(self, name, user, command, environment=None):
        """
        Add a weekly cron task (midnight every Monday)
        """
        # Fix: pass user=user like the other add_* methods (was dropped).
        cron_job = self.cron.new(command=command, user=user)
        cron_job.minute.on(0)
        cron_job.hour.on(0)
        cron_job.dow.on(1)
        return self._install(cron_job)

    def add_monthly(self, name, user, command, environment=None):
        """
        Add a monthly cron task (midnight on the 1st of every month)
        """
        # Fix: pass user=user like the other add_* methods (was dropped).
        cron_job = self.cron.new(command=command, user=user)
        cron_job.minute.on(0)
        cron_job.hour.on(0)
        cron_job.day.on(1)
        cron_job.month.during(1, 12)
        return self._install(cron_job)

    def add_quarterly(self, name, user, command, environment=None):
        """
        Add a quarterly cron task (midnight on the 1st of Mar/Jun/Sep/Dec)
        """
        # Fix: pass user=user like the other add_* methods (was dropped).
        cron_job = self.cron.new(command=command, user=user)
        cron_job.minute.on(0)
        cron_job.hour.on(0)
        cron_job.day.on(1)
        cron_job.month.on(3, 6, 9, 12)
        return self._install(cron_job)

    def add_anually(self, name, user, command, environment=None):
        """
        Add a yearly cron task (midnight on December 1st)
        """
        # Fix: pass user=user like the other add_* methods (was dropped).
        cron_job = self.cron.new(command=command, user=user)
        cron_job.minute.on(0)
        cron_job.hour.on(0)
        # Fix: without day.on(1) the job fired every day of December
        # instead of once a year.
        cron_job.day.on(1)
        cron_job.month.on(12)
        return self._install(cron_job)
| 24.148148
| 63
| 0.656442
| 395
| 2,608
| 4.222785
| 0.156962
| 0.138489
| 0.117506
| 0.065947
| 0.82554
| 0.82554
| 0.82554
| 0.82554
| 0.711631
| 0.711631
| 0
| 0.013474
| 0.203221
| 2,608
| 108
| 64
| 24.148148
| 0.78922
| 0
| 0
| 0.733333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.053333
| null | null | 0.093333
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
34e017617dd39a43c8f06cf8ac2b4dfddf9dc80b
| 194
|
py
|
Python
|
fairseq/models/wav2bart/__init__.py
|
hbr690188270/wav2bart_fairseq
|
b5e03749dd2d3688f0c623a65626478ad8b5e03d
|
[
"MIT"
] | null | null | null |
fairseq/models/wav2bart/__init__.py
|
hbr690188270/wav2bart_fairseq
|
b5e03749dd2d3688f0c623a65626478ad8b5e03d
|
[
"MIT"
] | null | null | null |
fairseq/models/wav2bart/__init__.py
|
hbr690188270/wav2bart_fairseq
|
b5e03749dd2d3688f0c623a65626478ad8b5e03d
|
[
"MIT"
] | null | null | null |
from .wav2bart import *
from .wav2bart_v2 import *
from .wav2gpt2 import *
from .wav2gpt2_random import *
from .wav2bart_random import *
from .wav2transformer import *
from .prompt_gpt2 import *
| 27.714286
| 30
| 0.78866
| 25
| 194
| 5.96
| 0.36
| 0.402685
| 0.241611
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.05988
| 0.139175
| 194
| 7
| 31
| 27.714286
| 0.832335
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
9b3f67946d36d4be2f9ca0ddc3fc19bd162e0579
| 13,068
|
py
|
Python
|
tests/corpus/io/test_speech_commands.py
|
toddrme2178/audiomate
|
14e932ce9c0b0bebb895d496cb6054521fc80ab1
|
[
"MIT"
] | null | null | null |
tests/corpus/io/test_speech_commands.py
|
toddrme2178/audiomate
|
14e932ce9c0b0bebb895d496cb6054521fc80ab1
|
[
"MIT"
] | null | null | null |
tests/corpus/io/test_speech_commands.py
|
toddrme2178/audiomate
|
14e932ce9c0b0bebb895d496cb6054521fc80ab1
|
[
"MIT"
] | null | null | null |
import os
import unittest
from audiomate import corpus
from audiomate.corpus.io import SpeechCommandsReader
from tests import resources
class SpeechCommandsReaderTest(unittest.TestCase):
    """Tests for SpeechCommandsReader against the bundled sample corpus.

    The expectations are data-driven: EXPECTED_FILES lists every
    (issuer, take-index, word) triple present in the sample corpus, and the
    file/utterance/label tests iterate over it instead of repeating the
    same assertion block per file.
    """

    # (issuer id, nohash take index, word == containing folder) for every
    # wav file in the sample corpus.
    EXPECTED_FILES = [
        ('0b77ee66', 0, 'bed'),
        ('0b77ee66', 1, 'bed'),
        ('0b77ee66', 2, 'bed'),
        ('0bde966a', 0, 'bed'),
        ('0bde966a', 1, 'bed'),
        ('0c40e715', 0, 'bed'),
        ('d5c41d6a', 0, 'marvin'),
        ('d7a58714', 0, 'marvin'),
        ('d8a5ace5', 0, 'marvin'),
        ('0a7c2a8d', 0, 'one'),
        ('0b77ee66', 0, 'one'),
        ('c1b7c224', 0, 'one'),
        ('c1b7c224', 1, 'one'),
    ]

    EXPECTED_ISSUERS = ['0b77ee66', '0bde966a', '0c40e715', 'd5c41d6a',
                        'd7a58714', 'd8a5ace5', '0a7c2a8d', 'c1b7c224']

    EXPECTED_SUBVIEWS = {
        'train': ['0b77ee66_nohash_0_bed', '0b77ee66_nohash_1_bed',
                  '0b77ee66_nohash_2_bed', 'd5c41d6a_nohash_0_marvin',
                  'c1b7c224_nohash_0_one', 'c1b7c224_nohash_1_one'],
        'dev': ['0c40e715_nohash_0_bed', 'd8a5ace5_nohash_0_marvin',
                '0a7c2a8d_nohash_0_one'],
        'test': ['0bde966a_nohash_0_bed', '0bde966a_nohash_1_bed',
                 'd7a58714_nohash_0_marvin', '0b77ee66_nohash_0_one'],
    }

    @staticmethod
    def _file_idx(issuer, take, word):
        # idx convention used by the reader: <issuer>_nohash_<take>_<word>
        return '{}_nohash_{}_{}'.format(issuer, take, word)

    def setUp(self):
        self.reader = SpeechCommandsReader()
        self.ds_path = resources.sample_corpus_path('speech_commands')

    def test_read_files(self):
        ds = self.reader.load(self.ds_path)

        assert ds.num_files == len(self.EXPECTED_FILES)

        for issuer, take, word in self.EXPECTED_FILES:
            idx = self._file_idx(issuer, take, word)
            expected_path = os.path.join(
                self.ds_path, word, '{}_nohash_{}.wav'.format(issuer, take))
            assert ds.files[idx].idx == idx
            assert ds.files[idx].path == expected_path

    def test_read_issuers(self):
        ds = self.reader.load(self.ds_path)

        assert ds.num_issuers == len(self.EXPECTED_ISSUERS)

        for issuer in self.EXPECTED_ISSUERS:
            assert ds.issuers[issuer].idx == issuer

    def test_read_utterances(self):
        ds = self.reader.load(self.ds_path)

        assert ds.num_utterances == len(self.EXPECTED_FILES)

        for issuer, take, word in self.EXPECTED_FILES:
            idx = self._file_idx(issuer, take, word)
            utt = ds.utterances[idx]
            assert utt.idx == idx
            assert utt.file.idx == idx
            # Every utterance spans its whole file: start 0, open end (-1).
            assert utt.start == 0
            assert utt.end == -1

    def test_read_labels(self):
        ds = self.reader.load(self.ds_path)

        for issuer, take, word in self.EXPECTED_FILES:
            idx = self._file_idx(issuer, take, word)
            label_lists = ds.utterances[idx].label_lists
            assert len(label_lists) == 1
            word_labels = label_lists[corpus.LL_WORD_TRANSCRIPT]
            assert len(word_labels.labels) == 1
            # The single word label is the name of the containing folder.
            assert word_labels[0].value == word

    def test_read_subviews(self):
        # NOTE: renamed from test_read_subvies (typo); the method is only
        # invoked by test discovery, never referenced by name elsewhere.
        ds = self.reader.load(self.ds_path)

        assert ds.num_subviews == len(self.EXPECTED_SUBVIEWS)

        for subview_name, utterance_ids in self.EXPECTED_SUBVIEWS.items():
            subview = ds.subviews[subview_name]
            assert subview.num_utterances == len(utterance_ids)
            for utterance_id in utterance_ids:
                assert utterance_id in subview.utterances.keys()
| 64.374384
| 116
| 0.719314
| 1,853
| 13,068
| 4.713977
| 0.037777
| 0.100973
| 0.133944
| 0.083343
| 0.900973
| 0.891128
| 0.872009
| 0.813623
| 0.701088
| 0.673268
| 0
| 0.106427
| 0.148684
| 13,068
| 202
| 117
| 64.693069
| 0.678742
| 0
| 0
| 0.030303
| 0
| 0
| 0.323921
| 0.301423
| 0
| 0
| 0
| 0
| 0.878788
| 1
| 0.036364
| false
| 0
| 0.030303
| 0
| 0.072727
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
9b3f9dfba4cee8aff400bcb4fc16adb7d0330c38
| 169
|
py
|
Python
|
atlas/foundations_events/src/test/consumers/jobs/__init__.py
|
DeepLearnI/atlas
|
8aca652d7e647b4e88530b93e265b536de7055ed
|
[
"Apache-2.0"
] | 296
|
2020-03-16T19:55:00.000Z
|
2022-01-10T19:46:05.000Z
|
atlas/foundations_events/src/test/consumers/jobs/__init__.py
|
DeepLearnI/atlas
|
8aca652d7e647b4e88530b93e265b536de7055ed
|
[
"Apache-2.0"
] | 57
|
2020-03-17T11:15:57.000Z
|
2021-07-10T14:42:27.000Z
|
atlas/foundations_events/src/test/consumers/jobs/__init__.py
|
DeepLearnI/atlas
|
8aca652d7e647b4e88530b93e265b536de7055ed
|
[
"Apache-2.0"
] | 38
|
2020-03-17T21:06:05.000Z
|
2022-02-08T03:19:34.000Z
|
from test.consumers.jobs.queued import *
from test.consumers.jobs.running import *
from test.consumers.jobs.completed import *
from test.consumers.jobs.failed import *
| 28.166667
| 43
| 0.804734
| 24
| 169
| 5.666667
| 0.375
| 0.235294
| 0.5
| 0.617647
| 0.595588
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.100592
| 169
| 5
| 44
| 33.8
| 0.894737
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
32e193b81aea972ce8ff62c88ee32b6ec13070b3
| 89
|
py
|
Python
|
Ex13/app/history/action.py
|
jvpersuhn/vasconcelos
|
156934614c5ff43bb28ce5342cca644bfe2ff168
|
[
"MIT"
] | null | null | null |
Ex13/app/history/action.py
|
jvpersuhn/vasconcelos
|
156934614c5ff43bb28ce5342cca644bfe2ff168
|
[
"MIT"
] | null | null | null |
Ex13/app/history/action.py
|
jvpersuhn/vasconcelos
|
156934614c5ff43bb28ce5342cca644bfe2ff168
|
[
"MIT"
] | null | null | null |
from Ex13.app.history.model import Empresa
def get_all():
    """Return every Empresa record from the database."""
    empresas = Empresa.query
    return empresas.all()
| 22.25
| 42
| 0.752809
| 14
| 89
| 4.714286
| 0.857143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.025974
| 0.134831
| 89
| 4
| 43
| 22.25
| 0.831169
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
fd261e9a24799ec5b0cf6639aa77754c46d05332
| 102
|
py
|
Python
|
bempp/api/linalg/__init__.py
|
pescap/bempp-cl
|
3a68666e8db0e873d418b734289067483f68f12e
|
[
"MIT"
] | null | null | null |
bempp/api/linalg/__init__.py
|
pescap/bempp-cl
|
3a68666e8db0e873d418b734289067483f68f12e
|
[
"MIT"
] | null | null | null |
bempp/api/linalg/__init__.py
|
pescap/bempp-cl
|
3a68666e8db0e873d418b734289067483f68f12e
|
[
"MIT"
] | null | null | null |
from .iterative_solvers import gmres
from .iterative_solvers import cg
from .direct_solvers import lu
| 25.5
| 36
| 0.852941
| 15
| 102
| 5.6
| 0.533333
| 0.464286
| 0.47619
| 0.619048
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.117647
| 102
| 3
| 37
| 34
| 0.933333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
1ff8caec490eb01de66bebf1f98201f841b398dc
| 103
|
py
|
Python
|
TI/__init__.py
|
Dustin-Grandret/LouisChaKnowledgeGraph
|
0e111b40871d31d1716ad967128e3cdc5bc6b3ce
|
[
"Apache-2.0"
] | 4
|
2021-05-24T09:40:53.000Z
|
2021-05-24T13:47:15.000Z
|
TI/__init__.py
|
Dustin-Grandret/LouisChaKnowledgeGraph
|
0e111b40871d31d1716ad967128e3cdc5bc6b3ce
|
[
"Apache-2.0"
] | null | null | null |
TI/__init__.py
|
Dustin-Grandret/LouisChaKnowledgeGraph
|
0e111b40871d31d1716ad967128e3cdc5bc6b3ce
|
[
"Apache-2.0"
] | null | null | null |
from TI.rule_basic import *
from TI.rules import *
from TI.typeinference import *
from TI.test import *
| 25.75
| 30
| 0.776699
| 17
| 103
| 4.647059
| 0.470588
| 0.303797
| 0.455696
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.145631
| 103
| 4
| 31
| 25.75
| 0.897727
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
1f043dcf9d219e553c3c7bae788aefa9bb1b6ad3
| 25,961
|
py
|
Python
|
sdk/python/pulumi_oci/dns/record.py
|
EladGabay/pulumi-oci
|
6841e27d4a1a7e15c672306b769912efbfd3ba99
|
[
"ECL-2.0",
"Apache-2.0"
] | 5
|
2021-08-17T11:14:46.000Z
|
2021-12-31T02:07:03.000Z
|
sdk/python/pulumi_oci/dns/record.py
|
pulumi-oci/pulumi-oci
|
6841e27d4a1a7e15c672306b769912efbfd3ba99
|
[
"ECL-2.0",
"Apache-2.0"
] | 1
|
2021-09-06T11:21:29.000Z
|
2021-09-06T11:21:29.000Z
|
sdk/python/pulumi_oci/dns/record.py
|
pulumi-oci/pulumi-oci
|
6841e27d4a1a7e15c672306b769912efbfd3ba99
|
[
"ECL-2.0",
"Apache-2.0"
] | 2
|
2021-08-24T23:31:30.000Z
|
2022-01-02T19:26:54.000Z
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
__all__ = ['RecordArgs', 'Record']
@pulumi.input_type
class RecordArgs:
    # Generated input-argument class for the (deprecated) oci.dns.Record
    # resource. Required inputs: domain, rtype, zone_name_or_id; optional:
    # compartment_id, rdata, ttl.
    def __init__(__self__, *,
                 domain: pulumi.Input[str],
                 rtype: pulumi.Input[str],
                 zone_name_or_id: pulumi.Input[str],
                 compartment_id: Optional[pulumi.Input[str]] = None,
                 rdata: Optional[pulumi.Input[str]] = None,
                 ttl: Optional[pulumi.Input[int]] = None):
        """
        The set of arguments for constructing a Record resource.
        :param pulumi.Input[str] domain: The fully qualified domain name where the record can be located. Domain value is case insensitive.
        :param pulumi.Input[str] rtype: The canonical name for the record's type, such as A or CNAME. For more information, see [Resource Record (RR) TYPEs](https://www.iana.org/assignments/dns-parameters/dns-parameters.xhtml#dns-parameters-4).
        :param pulumi.Input[str] zone_name_or_id: The name or OCID of the target zone.
        :param pulumi.Input[str] compartment_id: (Updatable) The OCID of the compartment the resource belongs to. If supplied, it must match the Zone's compartment ocid.
        :param pulumi.Input[str] rdata: (Updatable) The record's data, as whitespace-delimited tokens in type-specific presentation format. All RDATA is normalized and the returned presentation of your RDATA may differ from its initial input. For more information about RDATA, see [Supported DNS Resource Record Types](https://docs.cloud.oracle.com/iaas/Content/DNS/Reference/supporteddnsresource.htm)
        :param pulumi.Input[int] ttl: (Updatable) The Time To Live for the record, in seconds.
        """
        # NOTE(review): domain/rtype/zone_name_or_id are required keyword-only
        # arguments, so the `is not None` guards below fire on every
        # construction and always emit the deprecation warnings.
        if domain is not None:
            warnings.warn("""The 'oci_dns_record' resource has been deprecated. Please use 'oci_dns_rrset' instead.""", DeprecationWarning)
            pulumi.log.warn("""domain is deprecated: The 'oci_dns_record' resource has been deprecated. Please use 'oci_dns_rrset' instead.""")
        pulumi.set(__self__, "domain", domain)
        if rtype is not None:
            warnings.warn("""The 'oci_dns_record' resource has been deprecated. Please use 'oci_dns_rrset' instead.""", DeprecationWarning)
            pulumi.log.warn("""rtype is deprecated: The 'oci_dns_record' resource has been deprecated. Please use 'oci_dns_rrset' instead.""")
        pulumi.set(__self__, "rtype", rtype)
        if zone_name_or_id is not None:
            warnings.warn("""The 'oci_dns_record' resource has been deprecated. Please use 'oci_dns_rrset' instead.""", DeprecationWarning)
            pulumi.log.warn("""zone_name_or_id is deprecated: The 'oci_dns_record' resource has been deprecated. Please use 'oci_dns_rrset' instead.""")
        pulumi.set(__self__, "zone_name_or_id", zone_name_or_id)
        # Optional inputs are only stored when explicitly provided.
        if compartment_id is not None:
            pulumi.set(__self__, "compartment_id", compartment_id)
        if rdata is not None:
            pulumi.set(__self__, "rdata", rdata)
        if ttl is not None:
            pulumi.set(__self__, "ttl", ttl)

    @property
    @pulumi.getter
    def domain(self) -> pulumi.Input[str]:
        """
        The fully qualified domain name where the record can be located. Domain value is case insensitive.
        """
        return pulumi.get(self, "domain")

    @domain.setter
    def domain(self, value: pulumi.Input[str]):
        pulumi.set(self, "domain", value)

    @property
    @pulumi.getter
    def rtype(self) -> pulumi.Input[str]:
        """
        The canonical name for the record's type, such as A or CNAME. For more information, see [Resource Record (RR) TYPEs](https://www.iana.org/assignments/dns-parameters/dns-parameters.xhtml#dns-parameters-4).
        """
        return pulumi.get(self, "rtype")

    @rtype.setter
    def rtype(self, value: pulumi.Input[str]):
        pulumi.set(self, "rtype", value)

    @property
    @pulumi.getter(name="zoneNameOrId")
    def zone_name_or_id(self) -> pulumi.Input[str]:
        """
        The name or OCID of the target zone.
        """
        return pulumi.get(self, "zone_name_or_id")

    @zone_name_or_id.setter
    def zone_name_or_id(self, value: pulumi.Input[str]):
        pulumi.set(self, "zone_name_or_id", value)

    @property
    @pulumi.getter(name="compartmentId")
    def compartment_id(self) -> Optional[pulumi.Input[str]]:
        """
        (Updatable) The OCID of the compartment the resource belongs to. If supplied, it must match the Zone's compartment ocid.
        """
        return pulumi.get(self, "compartment_id")

    @compartment_id.setter
    def compartment_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "compartment_id", value)

    @property
    @pulumi.getter
    def rdata(self) -> Optional[pulumi.Input[str]]:
        """
        (Updatable) The record's data, as whitespace-delimited tokens in type-specific presentation format. All RDATA is normalized and the returned presentation of your RDATA may differ from its initial input. For more information about RDATA, see [Supported DNS Resource Record Types](https://docs.cloud.oracle.com/iaas/Content/DNS/Reference/supporteddnsresource.htm)
        """
        return pulumi.get(self, "rdata")

    @rdata.setter
    def rdata(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "rdata", value)

    @property
    @pulumi.getter
    def ttl(self) -> Optional[pulumi.Input[int]]:
        """
        (Updatable) The Time To Live for the record, in seconds.
        """
        return pulumi.get(self, "ttl")

    @ttl.setter
    def ttl(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "ttl", value)
@pulumi.input_type
class _RecordState:
    # State-lookup input type for the (deprecated) oci_dns_record resource.
    # All fields are optional because any subset may be used to filter.
    def __init__(__self__, *,
                 compartment_id: Optional[pulumi.Input[str]] = None,
                 domain: Optional[pulumi.Input[str]] = None,
                 is_protected: Optional[pulumi.Input[bool]] = None,
                 rdata: Optional[pulumi.Input[str]] = None,
                 record_hash: Optional[pulumi.Input[str]] = None,
                 rrset_version: Optional[pulumi.Input[str]] = None,
                 rtype: Optional[pulumi.Input[str]] = None,
                 ttl: Optional[pulumi.Input[int]] = None,
                 zone_name_or_id: Optional[pulumi.Input[str]] = None):
        """
        Input properties used for looking up and filtering Record resources.

        :param pulumi.Input[str] compartment_id: (Updatable) The OCID of the compartment the resource belongs to. If supplied, it must match the Zone's compartment ocid.
        :param pulumi.Input[str] domain: The fully qualified domain name where the record can be located. Domain value is case insensitive.
        :param pulumi.Input[bool] is_protected: A Boolean flag indicating whether or not parts of the record are unable to be explicitly managed.
        :param pulumi.Input[str] rdata: (Updatable) The record's data, as whitespace-delimited tokens in type-specific presentation format. All RDATA is normalized and the returned presentation of your RDATA may differ from its initial input. For more information about RDATA, see [Supported DNS Resource Record Types](https://docs.cloud.oracle.com/iaas/Content/DNS/Reference/supporteddnsresource.htm)
        :param pulumi.Input[str] record_hash: A unique identifier for the record within its zone.
        :param pulumi.Input[str] rrset_version: The latest version of the record's zone in which its RRSet differs from the preceding version.
        :param pulumi.Input[str] rtype: The canonical name for the record's type, such as A or CNAME. For more information, see [Resource Record (RR) TYPEs](https://www.iana.org/assignments/dns-parameters/dns-parameters.xhtml#dns-parameters-4).
        :param pulumi.Input[int] ttl: (Updatable) The Time To Live for the record, in seconds.
        :param pulumi.Input[str] zone_name_or_id: The name or OCID of the target zone.
        """
        if compartment_id is not None:
            pulumi.set(__self__, "compartment_id", compartment_id)
        # For domain, rtype and zone_name_or_id: emit the resource-level
        # deprecation warning first, then store the value.  The duplicated
        # `is not None` guard is a generated-code pattern, not a typo.
        if domain is not None:
            warnings.warn("""The 'oci_dns_record' resource has been deprecated. Please use 'oci_dns_rrset' instead.""", DeprecationWarning)
            pulumi.log.warn("""domain is deprecated: The 'oci_dns_record' resource has been deprecated. Please use 'oci_dns_rrset' instead.""")
        if domain is not None:
            pulumi.set(__self__, "domain", domain)
        if is_protected is not None:
            pulumi.set(__self__, "is_protected", is_protected)
        if rdata is not None:
            pulumi.set(__self__, "rdata", rdata)
        if record_hash is not None:
            pulumi.set(__self__, "record_hash", record_hash)
        if rrset_version is not None:
            pulumi.set(__self__, "rrset_version", rrset_version)
        if rtype is not None:
            warnings.warn("""The 'oci_dns_record' resource has been deprecated. Please use 'oci_dns_rrset' instead.""", DeprecationWarning)
            pulumi.log.warn("""rtype is deprecated: The 'oci_dns_record' resource has been deprecated. Please use 'oci_dns_rrset' instead.""")
        if rtype is not None:
            pulumi.set(__self__, "rtype", rtype)
        if ttl is not None:
            pulumi.set(__self__, "ttl", ttl)
        if zone_name_or_id is not None:
            warnings.warn("""The 'oci_dns_record' resource has been deprecated. Please use 'oci_dns_rrset' instead.""", DeprecationWarning)
            pulumi.log.warn("""zone_name_or_id is deprecated: The 'oci_dns_record' resource has been deprecated. Please use 'oci_dns_rrset' instead.""")
        if zone_name_or_id is not None:
            pulumi.set(__self__, "zone_name_or_id", zone_name_or_id)

    # Accessors below delegate to pulumi.get/pulumi.set; the getter `name=`
    # argument maps the Python snake_case field to its camelCase wire name.
    @property
    @pulumi.getter(name="compartmentId")
    def compartment_id(self) -> Optional[pulumi.Input[str]]:
        """
        (Updatable) The OCID of the compartment the resource belongs to. If supplied, it must match the Zone's compartment ocid.
        """
        return pulumi.get(self, "compartment_id")

    @compartment_id.setter
    def compartment_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "compartment_id", value)

    @property
    @pulumi.getter
    def domain(self) -> Optional[pulumi.Input[str]]:
        """
        The fully qualified domain name where the record can be located. Domain value is case insensitive.
        """
        return pulumi.get(self, "domain")

    @domain.setter
    def domain(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "domain", value)

    @property
    @pulumi.getter(name="isProtected")
    def is_protected(self) -> Optional[pulumi.Input[bool]]:
        """
        A Boolean flag indicating whether or not parts of the record are unable to be explicitly managed.
        """
        return pulumi.get(self, "is_protected")

    @is_protected.setter
    def is_protected(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "is_protected", value)

    @property
    @pulumi.getter
    def rdata(self) -> Optional[pulumi.Input[str]]:
        """
        (Updatable) The record's data, as whitespace-delimited tokens in type-specific presentation format. All RDATA is normalized and the returned presentation of your RDATA may differ from its initial input. For more information about RDATA, see [Supported DNS Resource Record Types](https://docs.cloud.oracle.com/iaas/Content/DNS/Reference/supporteddnsresource.htm)
        """
        return pulumi.get(self, "rdata")

    @rdata.setter
    def rdata(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "rdata", value)

    @property
    @pulumi.getter(name="recordHash")
    def record_hash(self) -> Optional[pulumi.Input[str]]:
        """
        A unique identifier for the record within its zone.
        """
        return pulumi.get(self, "record_hash")

    @record_hash.setter
    def record_hash(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "record_hash", value)

    @property
    @pulumi.getter(name="rrsetVersion")
    def rrset_version(self) -> Optional[pulumi.Input[str]]:
        """
        The latest version of the record's zone in which its RRSet differs from the preceding version.
        """
        return pulumi.get(self, "rrset_version")

    @rrset_version.setter
    def rrset_version(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "rrset_version", value)

    @property
    @pulumi.getter
    def rtype(self) -> Optional[pulumi.Input[str]]:
        """
        The canonical name for the record's type, such as A or CNAME. For more information, see [Resource Record (RR) TYPEs](https://www.iana.org/assignments/dns-parameters/dns-parameters.xhtml#dns-parameters-4).
        """
        return pulumi.get(self, "rtype")

    @rtype.setter
    def rtype(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "rtype", value)

    @property
    @pulumi.getter
    def ttl(self) -> Optional[pulumi.Input[int]]:
        """
        (Updatable) The Time To Live for the record, in seconds.
        """
        return pulumi.get(self, "ttl")

    @ttl.setter
    def ttl(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "ttl", value)

    @property
    @pulumi.getter(name="zoneNameOrId")
    def zone_name_or_id(self) -> Optional[pulumi.Input[str]]:
        """
        The name or OCID of the target zone.
        """
        return pulumi.get(self, "zone_name_or_id")

    @zone_name_or_id.setter
    def zone_name_or_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "zone_name_or_id", value)
class Record(pulumi.CustomResource):
    # Deprecated oci_dns_record resource wrapper.  Callers may construct it
    # either with keyword properties or with a RecordArgs bundle; the two
    # @overload stubs below document those call shapes and the real
    # __init__ dispatches between them.
    @overload
    def __init__(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 compartment_id: Optional[pulumi.Input[str]] = None,
                 domain: Optional[pulumi.Input[str]] = None,
                 rdata: Optional[pulumi.Input[str]] = None,
                 rtype: Optional[pulumi.Input[str]] = None,
                 ttl: Optional[pulumi.Input[int]] = None,
                 zone_name_or_id: Optional[pulumi.Input[str]] = None,
                 __props__=None):
        """
        ## Import

        Import is not supported for this resource.

        :param str resource_name: The name of the resource.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[str] compartment_id: (Updatable) The OCID of the compartment the resource belongs to. If supplied, it must match the Zone's compartment ocid.
        :param pulumi.Input[str] domain: The fully qualified domain name where the record can be located. Domain value is case insensitive.
        :param pulumi.Input[str] rdata: (Updatable) The record's data, as whitespace-delimited tokens in type-specific presentation format. All RDATA is normalized and the returned presentation of your RDATA may differ from its initial input. For more information about RDATA, see [Supported DNS Resource Record Types](https://docs.cloud.oracle.com/iaas/Content/DNS/Reference/supporteddnsresource.htm)
        :param pulumi.Input[str] rtype: The canonical name for the record's type, such as A or CNAME. For more information, see [Resource Record (RR) TYPEs](https://www.iana.org/assignments/dns-parameters/dns-parameters.xhtml#dns-parameters-4).
        :param pulumi.Input[int] ttl: (Updatable) The Time To Live for the record, in seconds.
        :param pulumi.Input[str] zone_name_or_id: The name or OCID of the target zone.
        """
        ...

    @overload
    def __init__(__self__,
                 resource_name: str,
                 args: RecordArgs,
                 opts: Optional[pulumi.ResourceOptions] = None):
        """
        ## Import

        Import is not supported for this resource.

        :param str resource_name: The name of the resource.
        :param RecordArgs args: The arguments to use to populate this resource's properties.
        :param pulumi.ResourceOptions opts: Options for the resource.
        """
        ...

    def __init__(__self__, resource_name: str, *args, **kwargs):
        # Resolve which overload was used; get_resource_args_opts returns a
        # RecordArgs instance when the args-bundle form was called, else None.
        resource_args, opts = _utilities.get_resource_args_opts(RecordArgs, pulumi.ResourceOptions, *args, **kwargs)
        if resource_args is not None:
            __self__._internal_init(resource_name, opts, **resource_args.__dict__)
        else:
            __self__._internal_init(resource_name, *args, **kwargs)

    def _internal_init(__self__,
                       resource_name: str,
                       opts: Optional[pulumi.ResourceOptions] = None,
                       compartment_id: Optional[pulumi.Input[str]] = None,
                       domain: Optional[pulumi.Input[str]] = None,
                       rdata: Optional[pulumi.Input[str]] = None,
                       rtype: Optional[pulumi.Input[str]] = None,
                       ttl: Optional[pulumi.Input[int]] = None,
                       zone_name_or_id: Optional[pulumi.Input[str]] = None,
                       __props__=None):
        """Register the resource with the Pulumi engine (shared by both overloads)."""
        if opts is None:
            opts = pulumi.ResourceOptions()
        if not isinstance(opts, pulumi.ResourceOptions):
            raise TypeError('Expected resource options to be a ResourceOptions instance')
        if opts.version is None:
            opts.version = _utilities.get_version()
        # opts.id set means "adopt existing resource": then __props__ carries
        # its state; otherwise build props from the given arguments.
        if opts.id is None:
            if __props__ is not None:
                raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
            __props__ = RecordArgs.__new__(RecordArgs)

            __props__.__dict__["compartment_id"] = compartment_id
            # Required properties are only enforced when not rehydrating from
            # a URN; each also triggers the resource deprecation warning.
            if domain is None and not opts.urn:
                raise TypeError("Missing required property 'domain'")
            if domain is not None and not opts.urn:
                warnings.warn("""The 'oci_dns_record' resource has been deprecated. Please use 'oci_dns_rrset' instead.""", DeprecationWarning)
                pulumi.log.warn("""domain is deprecated: The 'oci_dns_record' resource has been deprecated. Please use 'oci_dns_rrset' instead.""")
            __props__.__dict__["domain"] = domain
            __props__.__dict__["rdata"] = rdata
            if rtype is None and not opts.urn:
                raise TypeError("Missing required property 'rtype'")
            if rtype is not None and not opts.urn:
                warnings.warn("""The 'oci_dns_record' resource has been deprecated. Please use 'oci_dns_rrset' instead.""", DeprecationWarning)
                pulumi.log.warn("""rtype is deprecated: The 'oci_dns_record' resource has been deprecated. Please use 'oci_dns_rrset' instead.""")
            __props__.__dict__["rtype"] = rtype
            __props__.__dict__["ttl"] = ttl
            if zone_name_or_id is None and not opts.urn:
                raise TypeError("Missing required property 'zone_name_or_id'")
            if zone_name_or_id is not None and not opts.urn:
                warnings.warn("""The 'oci_dns_record' resource has been deprecated. Please use 'oci_dns_rrset' instead.""", DeprecationWarning)
                pulumi.log.warn("""zone_name_or_id is deprecated: The 'oci_dns_record' resource has been deprecated. Please use 'oci_dns_rrset' instead.""")
            __props__.__dict__["zone_name_or_id"] = zone_name_or_id
            # Output-only properties: populated by the provider, never by callers.
            __props__.__dict__["is_protected"] = None
            __props__.__dict__["record_hash"] = None
            __props__.__dict__["rrset_version"] = None
        super(Record, __self__).__init__(
            'oci:dns/record:Record',
            resource_name,
            __props__,
            opts)

    @staticmethod
    def get(resource_name: str,
            id: pulumi.Input[str],
            opts: Optional[pulumi.ResourceOptions] = None,
            compartment_id: Optional[pulumi.Input[str]] = None,
            domain: Optional[pulumi.Input[str]] = None,
            is_protected: Optional[pulumi.Input[bool]] = None,
            rdata: Optional[pulumi.Input[str]] = None,
            record_hash: Optional[pulumi.Input[str]] = None,
            rrset_version: Optional[pulumi.Input[str]] = None,
            rtype: Optional[pulumi.Input[str]] = None,
            ttl: Optional[pulumi.Input[int]] = None,
            zone_name_or_id: Optional[pulumi.Input[str]] = None) -> 'Record':
        """
        Get an existing Record resource's state with the given name, id, and optional extra
        properties used to qualify the lookup.

        :param str resource_name: The unique name of the resulting resource.
        :param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[str] compartment_id: (Updatable) The OCID of the compartment the resource belongs to. If supplied, it must match the Zone's compartment ocid.
        :param pulumi.Input[str] domain: The fully qualified domain name where the record can be located. Domain value is case insensitive.
        :param pulumi.Input[bool] is_protected: A Boolean flag indicating whether or not parts of the record are unable to be explicitly managed.
        :param pulumi.Input[str] rdata: (Updatable) The record's data, as whitespace-delimited tokens in type-specific presentation format. All RDATA is normalized and the returned presentation of your RDATA may differ from its initial input. For more information about RDATA, see [Supported DNS Resource Record Types](https://docs.cloud.oracle.com/iaas/Content/DNS/Reference/supporteddnsresource.htm)
        :param pulumi.Input[str] record_hash: A unique identifier for the record within its zone.
        :param pulumi.Input[str] rrset_version: The latest version of the record's zone in which its RRSet differs from the preceding version.
        :param pulumi.Input[str] rtype: The canonical name for the record's type, such as A or CNAME. For more information, see [Resource Record (RR) TYPEs](https://www.iana.org/assignments/dns-parameters/dns-parameters.xhtml#dns-parameters-4).
        :param pulumi.Input[int] ttl: (Updatable) The Time To Live for the record, in seconds.
        :param pulumi.Input[str] zone_name_or_id: The name or OCID of the target zone.
        """
        opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))

        __props__ = _RecordState.__new__(_RecordState)

        __props__.__dict__["compartment_id"] = compartment_id
        __props__.__dict__["domain"] = domain
        __props__.__dict__["is_protected"] = is_protected
        __props__.__dict__["rdata"] = rdata
        __props__.__dict__["record_hash"] = record_hash
        __props__.__dict__["rrset_version"] = rrset_version
        __props__.__dict__["rtype"] = rtype
        __props__.__dict__["ttl"] = ttl
        __props__.__dict__["zone_name_or_id"] = zone_name_or_id
        return Record(resource_name, opts=opts, __props__=__props__)

    # Read-only outputs resolved by the engine at deployment time.
    @property
    @pulumi.getter(name="compartmentId")
    def compartment_id(self) -> pulumi.Output[Optional[str]]:
        """
        (Updatable) The OCID of the compartment the resource belongs to. If supplied, it must match the Zone's compartment ocid.
        """
        return pulumi.get(self, "compartment_id")

    @property
    @pulumi.getter
    def domain(self) -> pulumi.Output[str]:
        """
        The fully qualified domain name where the record can be located. Domain value is case insensitive.
        """
        return pulumi.get(self, "domain")

    @property
    @pulumi.getter(name="isProtected")
    def is_protected(self) -> pulumi.Output[bool]:
        """
        A Boolean flag indicating whether or not parts of the record are unable to be explicitly managed.
        """
        return pulumi.get(self, "is_protected")

    @property
    @pulumi.getter
    def rdata(self) -> pulumi.Output[Optional[str]]:
        """
        (Updatable) The record's data, as whitespace-delimited tokens in type-specific presentation format. All RDATA is normalized and the returned presentation of your RDATA may differ from its initial input. For more information about RDATA, see [Supported DNS Resource Record Types](https://docs.cloud.oracle.com/iaas/Content/DNS/Reference/supporteddnsresource.htm)
        """
        return pulumi.get(self, "rdata")

    @property
    @pulumi.getter(name="recordHash")
    def record_hash(self) -> pulumi.Output[str]:
        """
        A unique identifier for the record within its zone.
        """
        return pulumi.get(self, "record_hash")

    @property
    @pulumi.getter(name="rrsetVersion")
    def rrset_version(self) -> pulumi.Output[str]:
        """
        The latest version of the record's zone in which its RRSet differs from the preceding version.
        """
        return pulumi.get(self, "rrset_version")

    @property
    @pulumi.getter
    def rtype(self) -> pulumi.Output[str]:
        """
        The canonical name for the record's type, such as A or CNAME. For more information, see [Resource Record (RR) TYPEs](https://www.iana.org/assignments/dns-parameters/dns-parameters.xhtml#dns-parameters-4).
        """
        return pulumi.get(self, "rtype")

    @property
    @pulumi.getter
    def ttl(self) -> pulumi.Output[Optional[int]]:
        """
        (Updatable) The Time To Live for the record, in seconds.
        """
        return pulumi.get(self, "ttl")

    @property
    @pulumi.getter(name="zoneNameOrId")
    def zone_name_or_id(self) -> pulumi.Output[str]:
        """
        The name or OCID of the target zone.
        """
        return pulumi.get(self, "zone_name_or_id")
| 51.818363
| 401
| 0.663303
| 3,361
| 25,961
| 4.939899
| 0.066349
| 0.066253
| 0.066614
| 0.058303
| 0.889177
| 0.863519
| 0.841354
| 0.819972
| 0.810396
| 0.783955
| 0
| 0.000402
| 0.233812
| 25,961
| 500
| 402
| 51.922
| 0.834297
| 0.357613
| 0
| 0.682692
| 1
| 0
| 0.182995
| 0.001339
| 0
| 0
| 0
| 0
| 0
| 1
| 0.147436
| false
| 0.003205
| 0.016026
| 0
| 0.253205
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
1f45ed1bf894b7256b419983df7642f12d27f0eb
| 5,674
|
py
|
Python
|
populus-tests/timestamp-scheduling/test_timestamp_execution_window_enforcement.py
|
romil797/ethereum-alarm-clock
|
b2710fb9ff24794fdb1100cdb80acee7efaeb94c
|
[
"MIT"
] | 15
|
2017-09-19T20:54:00.000Z
|
2018-12-09T16:09:22.000Z
|
populus-tests/timestamp-scheduling/test_timestamp_execution_window_enforcement.py
|
romil797/ethereum-alarm-clock
|
b2710fb9ff24794fdb1100cdb80acee7efaeb94c
|
[
"MIT"
] | null | null | null |
populus-tests/timestamp-scheduling/test_timestamp_execution_window_enforcement.py
|
romil797/ethereum-alarm-clock
|
b2710fb9ff24794fdb1100cdb80acee7efaeb94c
|
[
"MIT"
] | 5
|
2017-11-17T20:18:06.000Z
|
2018-10-10T13:55:46.000Z
|
import pytest
def test_execution_rejected_if_before_execution_window_for_timestamps(chain,
                                                                      web3,
                                                                      RequestData,
                                                                      txn_recorder,
                                                                      get_execute_data,
                                                                      get_abort_data,
                                                                      AbortReasons):
    """An execute() sent before windowStart must abort with BeforeCallWindow."""
    scheduled = RequestData(
        toAddress=txn_recorder.address,
        temporalUnit=2,
    ).direct_deploy()
    state = RequestData.from_contract(scheduled)

    # Nothing has executed yet.
    assert txn_recorder.call().wasCalled() is False
    assert state.meta.wasCalled is False

    # The chain clock is still ahead of the execution window.
    assert web3.eth.getBlock('latest')['timestamp'] < state.schedule.windowStart

    exec_hash = scheduled.transact({'gas': 3000000}).execute()
    chain.wait.for_receipt(exec_hash)
    state.refresh()

    # The call must have been rejected, so no Execute event exists...
    assert txn_recorder.call().wasCalled() is False
    assert state.meta.wasCalled is False
    with pytest.raises(AssertionError):
        get_execute_data(exec_hash)

    # ...and the Aborted event must carry the expected reason.
    logged = {entry['args']['reason'] for entry in get_abort_data(exec_hash)}
    assert AbortReasons.BeforeCallWindow in logged
def test_execution_rejected_if_after_execution_window_for_timestamps(chain,
                                                                     web3,
                                                                     RequestData,
                                                                     txn_recorder,
                                                                     get_execute_data,
                                                                     set_timestamp,
                                                                     get_abort_data,
                                                                     AbortReasons):
    """An execute() sent after the window closes must abort with AfterCallWindow."""
    scheduled = RequestData(
        toAddress=txn_recorder.address,
        temporalUnit=2,
    ).direct_deploy()
    state = RequestData.from_contract(scheduled)

    # Nothing has executed yet.
    assert txn_recorder.call().wasCalled() is False
    assert state.meta.wasCalled is False

    # Push the chain clock one second past the end of the window.
    window_end = state.schedule.windowStart + state.schedule.windowSize
    set_timestamp(window_end + 1)
    assert web3.eth.getBlock('latest')['timestamp'] > window_end

    exec_hash = scheduled.transact({'gas': 3000000}).execute()
    chain.wait.for_receipt(exec_hash)
    state.refresh()

    # The call must have been rejected, so no Execute event exists...
    assert txn_recorder.call().wasCalled() is False
    assert state.meta.wasCalled is False
    with pytest.raises(AssertionError):
        get_execute_data(exec_hash)

    # ...and the Aborted event must carry the expected reason.
    logged = {entry['args']['reason'] for entry in get_abort_data(exec_hash)}
    assert AbortReasons.AfterCallWindow in logged
def test_execution_allowed_at_start_execution_window_for_timestamps(chain,
                                                                    web3,
                                                                    RequestData,
                                                                    txn_recorder,
                                                                    get_execute_data,
                                                                    set_timestamp,
                                                                    get_abort_data,
                                                                    AbortReasons):
    """execute() exactly at windowStart must succeed."""
    scheduled = RequestData(
        toAddress=txn_recorder.address,
        temporalUnit=2,
    ).direct_deploy()
    state = RequestData.from_contract(scheduled)

    # Nothing has executed yet.
    assert txn_recorder.call().wasCalled() is False
    assert state.meta.wasCalled is False

    # Jump the chain clock to the first valid second of the window.
    set_timestamp(state.schedule.windowStart)

    exec_hash = scheduled.transact({'gas': 3000000}).execute()
    chain.wait.for_receipt(exec_hash)
    state.refresh()

    # Both the target contract and the request record the call.
    assert txn_recorder.call().wasCalled() is True
    assert state.meta.wasCalled is True
def test_execution_allowed_at_end_execution_window_for_timestamps(chain,
                                                                  web3,
                                                                  RequestData,
                                                                  txn_recorder,
                                                                  get_execute_data,
                                                                  set_timestamp,
                                                                  get_abort_data,
                                                                  AbortReasons):
    """execute() at the very last second of the window must still succeed."""
    scheduled = RequestData(
        toAddress=txn_recorder.address,
        temporalUnit=2,
    ).direct_deploy()
    state = RequestData.from_contract(scheduled)

    # Nothing has executed yet.
    assert txn_recorder.call().wasCalled() is False
    assert state.meta.wasCalled is False

    # Jump the chain clock to the inclusive end of the window.
    set_timestamp(state.schedule.windowStart + state.schedule.windowSize)

    exec_hash = scheduled.transact({'gas': 3000000}).execute()
    chain.wait.for_receipt(exec_hash)
    state.refresh()

    # Both the target contract and the request record the call.
    assert txn_recorder.call().wasCalled() is True
    assert state.meta.wasCalled is True
| 43.312977
| 95
| 0.519739
| 484
| 5,674
| 5.75
| 0.140496
| 0.086957
| 0.06899
| 0.060367
| 0.958678
| 0.915559
| 0.876752
| 0.876752
| 0.876752
| 0.876752
| 0
| 0.011905
| 0.42263
| 5,674
| 130
| 96
| 43.646154
| 0.837607
| 0
| 0
| 0.867347
| 0
| 0
| 0.010927
| 0
| 0
| 0
| 0
| 0
| 0.22449
| 1
| 0.040816
| false
| 0
| 0.010204
| 0
| 0.05102
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
1f6b0532cc54f05cd978f3bfbfe3640d2e92f97f
| 72
|
py
|
Python
|
mcd_changelog/__init__.py
|
jernejml/mcd_changelog
|
db2a429ef8a15921da17b7d2a2e4f9cbe066f47e
|
[
"Unlicense"
] | null | null | null |
mcd_changelog/__init__.py
|
jernejml/mcd_changelog
|
db2a429ef8a15921da17b7d2a2e4f9cbe066f47e
|
[
"Unlicense"
] | null | null | null |
mcd_changelog/__init__.py
|
jernejml/mcd_changelog
|
db2a429ef8a15921da17b7d2a2e4f9cbe066f47e
|
[
"Unlicense"
] | null | null | null |
from .mcd_changelog import fetch
from .mcd_changelog import get_releases
| 36
| 39
| 0.875
| 11
| 72
| 5.454545
| 0.636364
| 0.233333
| 0.533333
| 0.733333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.097222
| 72
| 2
| 39
| 36
| 0.923077
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
2f103d7a6e1d8c28c2c8e4a4ee1cb252086c76ad
| 3,896
|
py
|
Python
|
test/test_recovery_project_backup_api.py
|
hyperonecom/h1-client-python
|
4ce355852ba3120ec1b8f509ab5894a5c08da730
|
[
"MIT"
] | null | null | null |
test/test_recovery_project_backup_api.py
|
hyperonecom/h1-client-python
|
4ce355852ba3120ec1b8f509ab5894a5c08da730
|
[
"MIT"
] | null | null | null |
test/test_recovery_project_backup_api.py
|
hyperonecom/h1-client-python
|
4ce355852ba3120ec1b8f509ab5894a5c08da730
|
[
"MIT"
] | null | null | null |
"""
HyperOne
HyperOne API # noqa: E501
The version of the OpenAPI document: 0.1.0
Generated by: https://openapi-generator.tech
"""
import unittest
import h1
from h1.api.recovery_project_backup_api import RecoveryProjectBackupApi # noqa: E501
class TestRecoveryProjectBackupApi(unittest.TestCase):
"""RecoveryProjectBackupApi unit test stubs"""
def setUp(self):
self.api = RecoveryProjectBackupApi() # noqa: E501
def tearDown(self):
pass
def test_recovery_project_backup_create(self):
"""Test case for recovery_project_backup_create
Create recovery/backup # noqa: E501
"""
pass
def test_recovery_project_backup_delete(self):
"""Test case for recovery_project_backup_delete
Delete recovery/backup # noqa: E501
"""
pass
def test_recovery_project_backup_event_get(self):
"""Test case for recovery_project_backup_event_get
Get recovery/backup.event # noqa: E501
"""
pass
def test_recovery_project_backup_event_list(self):
"""Test case for recovery_project_backup_event_list
List recovery/backup.event # noqa: E501
"""
pass
def test_recovery_project_backup_export(self):
"""Test case for recovery_project_backup_export
Export recovery/backup # noqa: E501
"""
pass
def test_recovery_project_backup_get(self):
"""Test case for recovery_project_backup_get
Get recovery/backup # noqa: E501
"""
pass
def test_recovery_project_backup_list(self):
"""Test case for recovery_project_backup_list
List recovery/backup # noqa: E501
"""
pass
def test_recovery_project_backup_metric_get(self):
"""Test case for recovery_project_backup_metric_get
Get recovery/backup.metric # noqa: E501
"""
pass
def test_recovery_project_backup_metric_list(self):
"""Test case for recovery_project_backup_metric_list
List recovery/backup.metric # noqa: E501
"""
pass
def test_recovery_project_backup_metric_point_list(self):
"""Test case for recovery_project_backup_metric_point_list
List recovery/backup.point # noqa: E501
"""
pass
def test_recovery_project_backup_service_get(self):
"""Test case for recovery_project_backup_service_get
Get recovery/backup.service # noqa: E501
"""
pass
def test_recovery_project_backup_service_list(self):
"""Test case for recovery_project_backup_service_list
List recovery/backup.service # noqa: E501
"""
pass
def test_recovery_project_backup_tag_create(self):
"""Test case for recovery_project_backup_tag_create
Create recovery/backup.tag # noqa: E501
"""
pass
def test_recovery_project_backup_tag_delete(self):
"""Test case for recovery_project_backup_tag_delete
Delete recovery/backup.tag # noqa: E501
"""
pass
def test_recovery_project_backup_tag_get(self):
"""Test case for recovery_project_backup_tag_get
Get recovery/backup.tag # noqa: E501
"""
pass
def test_recovery_project_backup_tag_list(self):
"""Test case for recovery_project_backup_tag_list
List recovery/backup.tag # noqa: E501
"""
pass
def test_recovery_project_backup_tag_put(self):
"""Test case for recovery_project_backup_tag_put
Replace recovery/backup.tag # noqa: E501
"""
pass
def test_recovery_project_backup_update(self):
"""Test case for recovery_project_backup_update
Update recovery/backup # noqa: E501
"""
pass
# Allow running this test module directly (e.g. `python test_recovery_project_backup_api.py`).
if __name__ == '__main__':
    unittest.main()
| 25.135484
| 85
| 0.659138
| 458
| 3,896
| 5.251092
| 0.115721
| 0.230769
| 0.323077
| 0.142204
| 0.789189
| 0.759252
| 0.740125
| 0.710187
| 0.459044
| 0.390437
| 0
| 0.023885
| 0.269251
| 3,896
| 154
| 86
| 25.298701
| 0.820864
| 0.454312
| 0
| 0.413043
| 1
| 0
| 0.004706
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.434783
| false
| 0.413043
| 0.065217
| 0
| 0.521739
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
| 0
|
0
| 9
|
2f119e2a3bd498812eca56cfce70cd1092612bf5
| 108
|
py
|
Python
|
Codewars/8kyu/find-the-integral/Python/test.py
|
RevansChen/online-judge
|
ad1b07fee7bd3c49418becccda904e17505f3018
|
[
"MIT"
] | 7
|
2017-09-20T16:40:39.000Z
|
2021-08-31T18:15:08.000Z
|
Codewars/8kyu/find-the-integral/Python/test.py
|
RevansChen/online-judge
|
ad1b07fee7bd3c49418becccda904e17505f3018
|
[
"MIT"
] | null | null | null |
Codewars/8kyu/find-the-integral/Python/test.py
|
RevansChen/online-judge
|
ad1b07fee7bd3c49418becccda904e17505f3018
|
[
"MIT"
] | null | null | null |
# Python - 3.6.0
# Codewars kata assertions for `integrate(coefficient, exponent)`, which is
# expected to return the integral term as a string: the two cases below show
# 3x^2 -> '1x^3' and 9x^5 -> '1.5x^6'.
# NOTE(review): `test` and `integrate` are provided by the Codewars runtime,
# not defined in this snippet — this file only runs on that platform.
test.assert_equals(integrate(3, 2), '1x^3')
test.assert_equals(integrate(9, 5), '1.5x^6')
| 21.6
| 45
| 0.666667
| 21
| 108
| 3.333333
| 0.666667
| 0.285714
| 0.457143
| 0.714286
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.123711
| 0.101852
| 108
| 4
| 46
| 27
| 0.597938
| 0.12963
| 0
| 0
| 0
| 0
| 0.108696
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
2f253227018691635b8d0cc8a20e42f500312e55
| 152
|
py
|
Python
|
package_one/module_one.py
|
afaquejam/AwesomeApp
|
d86cc29b4047d9341e6e368530e922f28daad309
|
[
"MIT"
] | 4
|
2018-07-28T01:18:55.000Z
|
2019-04-04T09:32:19.000Z
|
package_one/module_one.py
|
afaquejam/AwesomeApp
|
d86cc29b4047d9341e6e368530e922f28daad309
|
[
"MIT"
] | null | null | null |
package_one/module_one.py
|
afaquejam/AwesomeApp
|
d86cc29b4047d9341e6e368530e922f28daad309
|
[
"MIT"
] | null | null | null |
class IntegerAdder(object):
    """Adds pairs of operands.

    Note: despite the name, ``add`` performs no integer check — it works for
    any operands supporting ``+`` (ints, floats, strings, ...).
    """
    # The original no-op __init__ (body was just `pass`) has been removed:
    # the implicit default constructor is identical, so `IntegerAdder()`
    # behaves exactly as before.

    def add(self, operand_one, operand_two):
        """Return ``operand_one + operand_two``."""
        return operand_one + operand_two
| 21.714286
| 44
| 0.671053
| 19
| 152
| 4.947368
| 0.631579
| 0.212766
| 0.361702
| 0.425532
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.25
| 152
| 6
| 45
| 25.333333
| 0.824561
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.4
| false
| 0.2
| 0
| 0.2
| 0.8
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
|
0
| 9
|
2f7b76a7501beb7fc4649f1bb4f05f273a0a54e6
| 15,081
|
py
|
Python
|
ATSAMD51P19A/libsrc/ATSAMD51P19A/TC0_.py
|
t-ikegami/WioTerminal-CircuitPython
|
efbdc2e13ad969fe009d88f7ec4b836ca61ae973
|
[
"MIT"
] | null | null | null |
ATSAMD51P19A/libsrc/ATSAMD51P19A/TC0_.py
|
t-ikegami/WioTerminal-CircuitPython
|
efbdc2e13ad969fe009d88f7ec4b836ca61ae973
|
[
"MIT"
] | 1
|
2022-01-19T00:16:02.000Z
|
2022-01-26T03:43:34.000Z
|
ATSAMD51P19A/libsrc/ATSAMD51P19A/TC0_.py
|
t-ikegami/WioTerminal-CircuitPython
|
efbdc2e13ad969fe009d88f7ec4b836ca61ae973
|
[
"MIT"
] | null | null | null |
import uctypes as ct
# uctypes layout descriptor for the TC (Timer/Counter) peripheral when it is
# configured in 8-bit counter mode (MODE = COUNT8).
#
# Descriptor conventions (see the MicroPython ``uctypes`` module):
#   - scalar register:   ``offset | TYPE``
#   - bit-field:         ``offset | BFUINTxx | pos << BF_POS | len << BF_LEN``
#   - nested struct:     ``(offset, {field descriptors})``
# Each nested register dict also exposes ``'reg'`` for whole-register access.
TC_COUNT8 = {
    # CTRLA @ 0x00 - Control A, 32-bit, with per-bit/field access.
    'CTRLA' : ( 0x00, {
        'reg' : 0x00 | ct.UINT32,
        'SWRST' : 0x00 | ct.BFUINT32 | 0 << ct.BF_POS | 1 << ct.BF_LEN,
        'ENABLE' : 0x00 | ct.BFUINT32 | 1 << ct.BF_POS | 1 << ct.BF_LEN,
        'MODE' : 0x00 | ct.BFUINT32 | 2 << ct.BF_POS | 2 << ct.BF_LEN,
        'PRESCSYNC' : 0x00 | ct.BFUINT32 | 4 << ct.BF_POS | 2 << ct.BF_LEN,
        'RUNSTDBY' : 0x00 | ct.BFUINT32 | 6 << ct.BF_POS | 1 << ct.BF_LEN,
        'ONDEMAND' : 0x00 | ct.BFUINT32 | 7 << ct.BF_POS | 1 << ct.BF_LEN,
        'PRESCALER' : 0x00 | ct.BFUINT32 | 8 << ct.BF_POS | 3 << ct.BF_LEN,
        'ALOCK' : 0x00 | ct.BFUINT32 | 11 << ct.BF_POS | 1 << ct.BF_LEN,
        'CAPTEN0' : 0x00 | ct.BFUINT32 | 16 << ct.BF_POS | 1 << ct.BF_LEN,
        'CAPTEN1' : 0x00 | ct.BFUINT32 | 17 << ct.BF_POS | 1 << ct.BF_LEN,
        'COPEN0' : 0x00 | ct.BFUINT32 | 20 << ct.BF_POS | 1 << ct.BF_LEN,
        'COPEN1' : 0x00 | ct.BFUINT32 | 21 << ct.BF_POS | 1 << ct.BF_LEN,
        'CAPTMODE0' : 0x00 | ct.BFUINT32 | 24 << ct.BF_POS | 2 << ct.BF_LEN,
        'CAPTMODE1' : 0x00 | ct.BFUINT32 | 27 << ct.BF_POS | 2 << ct.BF_LEN,
    }),
    # CTRLBCLR @ 0x04 - Control B Clear, 8-bit.
    'CTRLBCLR' : ( 0x04, {
        'reg' : 0x00 | ct.UINT8,
        'DIR' : 0x00 | ct.BFUINT8 | 0 << ct.BF_POS | 1 << ct.BF_LEN,
        'LUPD' : 0x00 | ct.BFUINT8 | 1 << ct.BF_POS | 1 << ct.BF_LEN,
        'ONESHOT' : 0x00 | ct.BFUINT8 | 2 << ct.BF_POS | 1 << ct.BF_LEN,
        'CMD' : 0x00 | ct.BFUINT8 | 5 << ct.BF_POS | 3 << ct.BF_LEN,
    }),
    # CTRLBSET @ 0x05 - Control B Set, 8-bit (same field layout as CTRLBCLR).
    'CTRLBSET' : ( 0x05, {
        'reg' : 0x00 | ct.UINT8,
        'DIR' : 0x00 | ct.BFUINT8 | 0 << ct.BF_POS | 1 << ct.BF_LEN,
        'LUPD' : 0x00 | ct.BFUINT8 | 1 << ct.BF_POS | 1 << ct.BF_LEN,
        'ONESHOT' : 0x00 | ct.BFUINT8 | 2 << ct.BF_POS | 1 << ct.BF_LEN,
        'CMD' : 0x00 | ct.BFUINT8 | 5 << ct.BF_POS | 3 << ct.BF_LEN,
    }),
    # EVCTRL @ 0x06 - Event Control, 16-bit.
    'EVCTRL' : ( 0x06, {
        'reg' : 0x00 | ct.UINT16,
        'EVACT' : 0x00 | ct.BFUINT16 | 0 << ct.BF_POS | 3 << ct.BF_LEN,
        'TCINV' : 0x00 | ct.BFUINT16 | 4 << ct.BF_POS | 1 << ct.BF_LEN,
        'TCEI' : 0x00 | ct.BFUINT16 | 5 << ct.BF_POS | 1 << ct.BF_LEN,
        'OVFEO' : 0x00 | ct.BFUINT16 | 8 << ct.BF_POS | 1 << ct.BF_LEN,
        'MCEO0' : 0x00 | ct.BFUINT16 | 12 << ct.BF_POS | 1 << ct.BF_LEN,
        'MCEO1' : 0x00 | ct.BFUINT16 | 13 << ct.BF_POS | 1 << ct.BF_LEN,
    }),
    # INTENCLR @ 0x08 - Interrupt Enable Clear, 8-bit.
    'INTENCLR' : ( 0x08, {
        'reg' : 0x00 | ct.UINT8,
        'OVF' : 0x00 | ct.BFUINT8 | 0 << ct.BF_POS | 1 << ct.BF_LEN,
        'ERR' : 0x00 | ct.BFUINT8 | 1 << ct.BF_POS | 1 << ct.BF_LEN,
        'MC0' : 0x00 | ct.BFUINT8 | 4 << ct.BF_POS | 1 << ct.BF_LEN,
        'MC1' : 0x00 | ct.BFUINT8 | 5 << ct.BF_POS | 1 << ct.BF_LEN,
    }),
    # INTENSET @ 0x09 - Interrupt Enable Set, 8-bit (same fields as INTENCLR).
    'INTENSET' : ( 0x09, {
        'reg' : 0x00 | ct.UINT8,
        'OVF' : 0x00 | ct.BFUINT8 | 0 << ct.BF_POS | 1 << ct.BF_LEN,
        'ERR' : 0x00 | ct.BFUINT8 | 1 << ct.BF_POS | 1 << ct.BF_LEN,
        'MC0' : 0x00 | ct.BFUINT8 | 4 << ct.BF_POS | 1 << ct.BF_LEN,
        'MC1' : 0x00 | ct.BFUINT8 | 5 << ct.BF_POS | 1 << ct.BF_LEN,
    }),
    # INTFLAG @ 0x0A - Interrupt Flag Status/Clear, 8-bit.
    'INTFLAG' : ( 0x0A, {
        'reg' : 0x00 | ct.UINT8,
        'OVF' : 0x00 | ct.BFUINT8 | 0 << ct.BF_POS | 1 << ct.BF_LEN,
        'ERR' : 0x00 | ct.BFUINT8 | 1 << ct.BF_POS | 1 << ct.BF_LEN,
        'MC0' : 0x00 | ct.BFUINT8 | 4 << ct.BF_POS | 1 << ct.BF_LEN,
        'MC1' : 0x00 | ct.BFUINT8 | 5 << ct.BF_POS | 1 << ct.BF_LEN,
    }),
    # STATUS @ 0x0B - Status, 8-bit.
    'STATUS' : ( 0x0B, {
        'reg' : 0x00 | ct.UINT8,
        'STOP' : 0x00 | ct.BFUINT8 | 0 << ct.BF_POS | 1 << ct.BF_LEN,
        'SLAVE' : 0x00 | ct.BFUINT8 | 1 << ct.BF_POS | 1 << ct.BF_LEN,
        'PERBUFV' : 0x00 | ct.BFUINT8 | 3 << ct.BF_POS | 1 << ct.BF_LEN,
        'CCBUFV0' : 0x00 | ct.BFUINT8 | 4 << ct.BF_POS | 1 << ct.BF_LEN,
        'CCBUFV1' : 0x00 | ct.BFUINT8 | 5 << ct.BF_POS | 1 << ct.BF_LEN,
    }),
    # WAVE @ 0x0C - Waveform generation mode, 8-bit.
    'WAVE' : ( 0x0C, {
        'reg' : 0x00 | ct.UINT8,
        'WAVEGEN' : 0x00 | ct.BFUINT8 | 0 << ct.BF_POS | 2 << ct.BF_LEN,
    }),
    # DRVCTRL @ 0x0D - Driver Control (output inversion), 8-bit.
    'DRVCTRL' : ( 0x0D, {
        'reg' : 0x00 | ct.UINT8,
        'INVEN0' : 0x00 | ct.BFUINT8 | 0 << ct.BF_POS | 1 << ct.BF_LEN,
        'INVEN1' : 0x00 | ct.BFUINT8 | 1 << ct.BF_POS | 1 << ct.BF_LEN,
    }),
    # DBGCTRL @ 0x0F - Debug Control, 8-bit.
    'DBGCTRL' : ( 0x0F, {
        'reg' : 0x00 | ct.UINT8,
        'DBGRUN' : 0x00 | ct.BFUINT8 | 0 << ct.BF_POS | 1 << ct.BF_LEN,
    }),
    # SYNCBUSY @ 0x10 - Synchronization Busy flags, 32-bit.
    'SYNCBUSY' : ( 0x10, {
        'reg' : 0x00 | ct.UINT32,
        'SWRST' : 0x00 | ct.BFUINT32 | 0 << ct.BF_POS | 1 << ct.BF_LEN,
        'ENABLE' : 0x00 | ct.BFUINT32 | 1 << ct.BF_POS | 1 << ct.BF_LEN,
        'CTRLB' : 0x00 | ct.BFUINT32 | 2 << ct.BF_POS | 1 << ct.BF_LEN,
        'STATUS' : 0x00 | ct.BFUINT32 | 3 << ct.BF_POS | 1 << ct.BF_LEN,
        'COUNT' : 0x00 | ct.BFUINT32 | 4 << ct.BF_POS | 1 << ct.BF_LEN,
        'PER' : 0x00 | ct.BFUINT32 | 5 << ct.BF_POS | 1 << ct.BF_LEN,
        'CC0' : 0x00 | ct.BFUINT32 | 6 << ct.BF_POS | 1 << ct.BF_LEN,
        'CC1' : 0x00 | ct.BFUINT32 | 7 << ct.BF_POS | 1 << ct.BF_LEN,
    }),
    # In 8-bit mode the count/period/compare registers are one byte wide,
    # and PER/PERBUF exist (they do not in the 16/32-bit layouts below).
    'COUNT' : 0x14 | ct.UINT8,
    'PER' : 0x1B | ct.UINT8,
    'CC' : ( 0x1C | ct.ARRAY, 2 | ct.UINT8 ),
    'PERBUF' : 0x2F | ct.UINT8,
    'CCBUF' : ( 0x30 | ct.ARRAY, 2 | ct.UINT8 ),
}
# uctypes layout descriptor for the TC peripheral in 16-bit counter mode
# (MODE = COUNT16).  The control/status registers are identical to the
# 8-bit layout; only COUNT/CC/CCBUF widen to 16 bits, and there is no
# PER/PERBUF register in this mode.
TC_COUNT16 = {
    # CTRLA @ 0x00 - Control A, 32-bit.
    'CTRLA' : ( 0x00, {
        'reg' : 0x00 | ct.UINT32,
        'SWRST' : 0x00 | ct.BFUINT32 | 0 << ct.BF_POS | 1 << ct.BF_LEN,
        'ENABLE' : 0x00 | ct.BFUINT32 | 1 << ct.BF_POS | 1 << ct.BF_LEN,
        'MODE' : 0x00 | ct.BFUINT32 | 2 << ct.BF_POS | 2 << ct.BF_LEN,
        'PRESCSYNC' : 0x00 | ct.BFUINT32 | 4 << ct.BF_POS | 2 << ct.BF_LEN,
        'RUNSTDBY' : 0x00 | ct.BFUINT32 | 6 << ct.BF_POS | 1 << ct.BF_LEN,
        'ONDEMAND' : 0x00 | ct.BFUINT32 | 7 << ct.BF_POS | 1 << ct.BF_LEN,
        'PRESCALER' : 0x00 | ct.BFUINT32 | 8 << ct.BF_POS | 3 << ct.BF_LEN,
        'ALOCK' : 0x00 | ct.BFUINT32 | 11 << ct.BF_POS | 1 << ct.BF_LEN,
        'CAPTEN0' : 0x00 | ct.BFUINT32 | 16 << ct.BF_POS | 1 << ct.BF_LEN,
        'CAPTEN1' : 0x00 | ct.BFUINT32 | 17 << ct.BF_POS | 1 << ct.BF_LEN,
        'COPEN0' : 0x00 | ct.BFUINT32 | 20 << ct.BF_POS | 1 << ct.BF_LEN,
        'COPEN1' : 0x00 | ct.BFUINT32 | 21 << ct.BF_POS | 1 << ct.BF_LEN,
        'CAPTMODE0' : 0x00 | ct.BFUINT32 | 24 << ct.BF_POS | 2 << ct.BF_LEN,
        'CAPTMODE1' : 0x00 | ct.BFUINT32 | 27 << ct.BF_POS | 2 << ct.BF_LEN,
    }),
    # CTRLBCLR @ 0x04 - Control B Clear, 8-bit.
    'CTRLBCLR' : ( 0x04, {
        'reg' : 0x00 | ct.UINT8,
        'DIR' : 0x00 | ct.BFUINT8 | 0 << ct.BF_POS | 1 << ct.BF_LEN,
        'LUPD' : 0x00 | ct.BFUINT8 | 1 << ct.BF_POS | 1 << ct.BF_LEN,
        'ONESHOT' : 0x00 | ct.BFUINT8 | 2 << ct.BF_POS | 1 << ct.BF_LEN,
        'CMD' : 0x00 | ct.BFUINT8 | 5 << ct.BF_POS | 3 << ct.BF_LEN,
    }),
    # CTRLBSET @ 0x05 - Control B Set, 8-bit.
    'CTRLBSET' : ( 0x05, {
        'reg' : 0x00 | ct.UINT8,
        'DIR' : 0x00 | ct.BFUINT8 | 0 << ct.BF_POS | 1 << ct.BF_LEN,
        'LUPD' : 0x00 | ct.BFUINT8 | 1 << ct.BF_POS | 1 << ct.BF_LEN,
        'ONESHOT' : 0x00 | ct.BFUINT8 | 2 << ct.BF_POS | 1 << ct.BF_LEN,
        'CMD' : 0x00 | ct.BFUINT8 | 5 << ct.BF_POS | 3 << ct.BF_LEN,
    }),
    # EVCTRL @ 0x06 - Event Control, 16-bit.
    'EVCTRL' : ( 0x06, {
        'reg' : 0x00 | ct.UINT16,
        'EVACT' : 0x00 | ct.BFUINT16 | 0 << ct.BF_POS | 3 << ct.BF_LEN,
        'TCINV' : 0x00 | ct.BFUINT16 | 4 << ct.BF_POS | 1 << ct.BF_LEN,
        'TCEI' : 0x00 | ct.BFUINT16 | 5 << ct.BF_POS | 1 << ct.BF_LEN,
        'OVFEO' : 0x00 | ct.BFUINT16 | 8 << ct.BF_POS | 1 << ct.BF_LEN,
        'MCEO0' : 0x00 | ct.BFUINT16 | 12 << ct.BF_POS | 1 << ct.BF_LEN,
        'MCEO1' : 0x00 | ct.BFUINT16 | 13 << ct.BF_POS | 1 << ct.BF_LEN,
    }),
    # INTENCLR @ 0x08 - Interrupt Enable Clear, 8-bit.
    'INTENCLR' : ( 0x08, {
        'reg' : 0x00 | ct.UINT8,
        'OVF' : 0x00 | ct.BFUINT8 | 0 << ct.BF_POS | 1 << ct.BF_LEN,
        'ERR' : 0x00 | ct.BFUINT8 | 1 << ct.BF_POS | 1 << ct.BF_LEN,
        'MC0' : 0x00 | ct.BFUINT8 | 4 << ct.BF_POS | 1 << ct.BF_LEN,
        'MC1' : 0x00 | ct.BFUINT8 | 5 << ct.BF_POS | 1 << ct.BF_LEN,
    }),
    # INTENSET @ 0x09 - Interrupt Enable Set, 8-bit.
    'INTENSET' : ( 0x09, {
        'reg' : 0x00 | ct.UINT8,
        'OVF' : 0x00 | ct.BFUINT8 | 0 << ct.BF_POS | 1 << ct.BF_LEN,
        'ERR' : 0x00 | ct.BFUINT8 | 1 << ct.BF_POS | 1 << ct.BF_LEN,
        'MC0' : 0x00 | ct.BFUINT8 | 4 << ct.BF_POS | 1 << ct.BF_LEN,
        'MC1' : 0x00 | ct.BFUINT8 | 5 << ct.BF_POS | 1 << ct.BF_LEN,
    }),
    # INTFLAG @ 0x0A - Interrupt Flag Status/Clear, 8-bit.
    'INTFLAG' : ( 0x0A, {
        'reg' : 0x00 | ct.UINT8,
        'OVF' : 0x00 | ct.BFUINT8 | 0 << ct.BF_POS | 1 << ct.BF_LEN,
        'ERR' : 0x00 | ct.BFUINT8 | 1 << ct.BF_POS | 1 << ct.BF_LEN,
        'MC0' : 0x00 | ct.BFUINT8 | 4 << ct.BF_POS | 1 << ct.BF_LEN,
        'MC1' : 0x00 | ct.BFUINT8 | 5 << ct.BF_POS | 1 << ct.BF_LEN,
    }),
    # STATUS @ 0x0B - Status, 8-bit.
    'STATUS' : ( 0x0B, {
        'reg' : 0x00 | ct.UINT8,
        'STOP' : 0x00 | ct.BFUINT8 | 0 << ct.BF_POS | 1 << ct.BF_LEN,
        'SLAVE' : 0x00 | ct.BFUINT8 | 1 << ct.BF_POS | 1 << ct.BF_LEN,
        'PERBUFV' : 0x00 | ct.BFUINT8 | 3 << ct.BF_POS | 1 << ct.BF_LEN,
        'CCBUFV0' : 0x00 | ct.BFUINT8 | 4 << ct.BF_POS | 1 << ct.BF_LEN,
        'CCBUFV1' : 0x00 | ct.BFUINT8 | 5 << ct.BF_POS | 1 << ct.BF_LEN,
    }),
    # WAVE @ 0x0C - Waveform generation mode, 8-bit.
    'WAVE' : ( 0x0C, {
        'reg' : 0x00 | ct.UINT8,
        'WAVEGEN' : 0x00 | ct.BFUINT8 | 0 << ct.BF_POS | 2 << ct.BF_LEN,
    }),
    # DRVCTRL @ 0x0D - Driver Control, 8-bit.
    'DRVCTRL' : ( 0x0D, {
        'reg' : 0x00 | ct.UINT8,
        'INVEN0' : 0x00 | ct.BFUINT8 | 0 << ct.BF_POS | 1 << ct.BF_LEN,
        'INVEN1' : 0x00 | ct.BFUINT8 | 1 << ct.BF_POS | 1 << ct.BF_LEN,
    }),
    # DBGCTRL @ 0x0F - Debug Control, 8-bit.
    'DBGCTRL' : ( 0x0F, {
        'reg' : 0x00 | ct.UINT8,
        'DBGRUN' : 0x00 | ct.BFUINT8 | 0 << ct.BF_POS | 1 << ct.BF_LEN,
    }),
    # SYNCBUSY @ 0x10 - Synchronization Busy flags, 32-bit.
    'SYNCBUSY' : ( 0x10, {
        'reg' : 0x00 | ct.UINT32,
        'SWRST' : 0x00 | ct.BFUINT32 | 0 << ct.BF_POS | 1 << ct.BF_LEN,
        'ENABLE' : 0x00 | ct.BFUINT32 | 1 << ct.BF_POS | 1 << ct.BF_LEN,
        'CTRLB' : 0x00 | ct.BFUINT32 | 2 << ct.BF_POS | 1 << ct.BF_LEN,
        'STATUS' : 0x00 | ct.BFUINT32 | 3 << ct.BF_POS | 1 << ct.BF_LEN,
        'COUNT' : 0x00 | ct.BFUINT32 | 4 << ct.BF_POS | 1 << ct.BF_LEN,
        'PER' : 0x00 | ct.BFUINT32 | 5 << ct.BF_POS | 1 << ct.BF_LEN,
        'CC0' : 0x00 | ct.BFUINT32 | 6 << ct.BF_POS | 1 << ct.BF_LEN,
        'CC1' : 0x00 | ct.BFUINT32 | 7 << ct.BF_POS | 1 << ct.BF_LEN,
    }),
    # 16-bit-wide count and compare registers; no PER/PERBUF in this mode.
    'COUNT' : 0x14 | ct.UINT16,
    'CC' : ( 0x1C | ct.ARRAY, 2 | ct.UINT16 ),
    'CCBUF' : ( 0x30 | ct.ARRAY, 2 | ct.UINT16 ),
}
# uctypes layout descriptor for the TC peripheral in 32-bit counter mode
# (MODE = COUNT32).  Control/status registers match the other two layouts;
# COUNT/CC/CCBUF are 32 bits wide and there is no PER/PERBUF register.
TC_COUNT32 = {
    # CTRLA @ 0x00 - Control A, 32-bit.
    'CTRLA' : ( 0x00, {
        'reg' : 0x00 | ct.UINT32,
        'SWRST' : 0x00 | ct.BFUINT32 | 0 << ct.BF_POS | 1 << ct.BF_LEN,
        'ENABLE' : 0x00 | ct.BFUINT32 | 1 << ct.BF_POS | 1 << ct.BF_LEN,
        'MODE' : 0x00 | ct.BFUINT32 | 2 << ct.BF_POS | 2 << ct.BF_LEN,
        'PRESCSYNC' : 0x00 | ct.BFUINT32 | 4 << ct.BF_POS | 2 << ct.BF_LEN,
        'RUNSTDBY' : 0x00 | ct.BFUINT32 | 6 << ct.BF_POS | 1 << ct.BF_LEN,
        'ONDEMAND' : 0x00 | ct.BFUINT32 | 7 << ct.BF_POS | 1 << ct.BF_LEN,
        'PRESCALER' : 0x00 | ct.BFUINT32 | 8 << ct.BF_POS | 3 << ct.BF_LEN,
        'ALOCK' : 0x00 | ct.BFUINT32 | 11 << ct.BF_POS | 1 << ct.BF_LEN,
        'CAPTEN0' : 0x00 | ct.BFUINT32 | 16 << ct.BF_POS | 1 << ct.BF_LEN,
        'CAPTEN1' : 0x00 | ct.BFUINT32 | 17 << ct.BF_POS | 1 << ct.BF_LEN,
        'COPEN0' : 0x00 | ct.BFUINT32 | 20 << ct.BF_POS | 1 << ct.BF_LEN,
        'COPEN1' : 0x00 | ct.BFUINT32 | 21 << ct.BF_POS | 1 << ct.BF_LEN,
        'CAPTMODE0' : 0x00 | ct.BFUINT32 | 24 << ct.BF_POS | 2 << ct.BF_LEN,
        'CAPTMODE1' : 0x00 | ct.BFUINT32 | 27 << ct.BF_POS | 2 << ct.BF_LEN,
    }),
    # CTRLBCLR @ 0x04 - Control B Clear, 8-bit.
    'CTRLBCLR' : ( 0x04, {
        'reg' : 0x00 | ct.UINT8,
        'DIR' : 0x00 | ct.BFUINT8 | 0 << ct.BF_POS | 1 << ct.BF_LEN,
        'LUPD' : 0x00 | ct.BFUINT8 | 1 << ct.BF_POS | 1 << ct.BF_LEN,
        'ONESHOT' : 0x00 | ct.BFUINT8 | 2 << ct.BF_POS | 1 << ct.BF_LEN,
        'CMD' : 0x00 | ct.BFUINT8 | 5 << ct.BF_POS | 3 << ct.BF_LEN,
    }),
    # CTRLBSET @ 0x05 - Control B Set, 8-bit.
    'CTRLBSET' : ( 0x05, {
        'reg' : 0x00 | ct.UINT8,
        'DIR' : 0x00 | ct.BFUINT8 | 0 << ct.BF_POS | 1 << ct.BF_LEN,
        'LUPD' : 0x00 | ct.BFUINT8 | 1 << ct.BF_POS | 1 << ct.BF_LEN,
        'ONESHOT' : 0x00 | ct.BFUINT8 | 2 << ct.BF_POS | 1 << ct.BF_LEN,
        'CMD' : 0x00 | ct.BFUINT8 | 5 << ct.BF_POS | 3 << ct.BF_LEN,
    }),
    # EVCTRL @ 0x06 - Event Control, 16-bit.
    'EVCTRL' : ( 0x06, {
        'reg' : 0x00 | ct.UINT16,
        'EVACT' : 0x00 | ct.BFUINT16 | 0 << ct.BF_POS | 3 << ct.BF_LEN,
        'TCINV' : 0x00 | ct.BFUINT16 | 4 << ct.BF_POS | 1 << ct.BF_LEN,
        'TCEI' : 0x00 | ct.BFUINT16 | 5 << ct.BF_POS | 1 << ct.BF_LEN,
        'OVFEO' : 0x00 | ct.BFUINT16 | 8 << ct.BF_POS | 1 << ct.BF_LEN,
        'MCEO0' : 0x00 | ct.BFUINT16 | 12 << ct.BF_POS | 1 << ct.BF_LEN,
        'MCEO1' : 0x00 | ct.BFUINT16 | 13 << ct.BF_POS | 1 << ct.BF_LEN,
    }),
    # INTENCLR @ 0x08 - Interrupt Enable Clear, 8-bit.
    'INTENCLR' : ( 0x08, {
        'reg' : 0x00 | ct.UINT8,
        'OVF' : 0x00 | ct.BFUINT8 | 0 << ct.BF_POS | 1 << ct.BF_LEN,
        'ERR' : 0x00 | ct.BFUINT8 | 1 << ct.BF_POS | 1 << ct.BF_LEN,
        'MC0' : 0x00 | ct.BFUINT8 | 4 << ct.BF_POS | 1 << ct.BF_LEN,
        'MC1' : 0x00 | ct.BFUINT8 | 5 << ct.BF_POS | 1 << ct.BF_LEN,
    }),
    # INTENSET @ 0x09 - Interrupt Enable Set, 8-bit.
    'INTENSET' : ( 0x09, {
        'reg' : 0x00 | ct.UINT8,
        'OVF' : 0x00 | ct.BFUINT8 | 0 << ct.BF_POS | 1 << ct.BF_LEN,
        'ERR' : 0x00 | ct.BFUINT8 | 1 << ct.BF_POS | 1 << ct.BF_LEN,
        'MC0' : 0x00 | ct.BFUINT8 | 4 << ct.BF_POS | 1 << ct.BF_LEN,
        'MC1' : 0x00 | ct.BFUINT8 | 5 << ct.BF_POS | 1 << ct.BF_LEN,
    }),
    # INTFLAG @ 0x0A - Interrupt Flag Status/Clear, 8-bit.
    'INTFLAG' : ( 0x0A, {
        'reg' : 0x00 | ct.UINT8,
        'OVF' : 0x00 | ct.BFUINT8 | 0 << ct.BF_POS | 1 << ct.BF_LEN,
        'ERR' : 0x00 | ct.BFUINT8 | 1 << ct.BF_POS | 1 << ct.BF_LEN,
        'MC0' : 0x00 | ct.BFUINT8 | 4 << ct.BF_POS | 1 << ct.BF_LEN,
        'MC1' : 0x00 | ct.BFUINT8 | 5 << ct.BF_POS | 1 << ct.BF_LEN,
    }),
    # STATUS @ 0x0B - Status, 8-bit.
    'STATUS' : ( 0x0B, {
        'reg' : 0x00 | ct.UINT8,
        'STOP' : 0x00 | ct.BFUINT8 | 0 << ct.BF_POS | 1 << ct.BF_LEN,
        'SLAVE' : 0x00 | ct.BFUINT8 | 1 << ct.BF_POS | 1 << ct.BF_LEN,
        'PERBUFV' : 0x00 | ct.BFUINT8 | 3 << ct.BF_POS | 1 << ct.BF_LEN,
        'CCBUFV0' : 0x00 | ct.BFUINT8 | 4 << ct.BF_POS | 1 << ct.BF_LEN,
        'CCBUFV1' : 0x00 | ct.BFUINT8 | 5 << ct.BF_POS | 1 << ct.BF_LEN,
    }),
    # WAVE @ 0x0C - Waveform generation mode, 8-bit.
    'WAVE' : ( 0x0C, {
        'reg' : 0x00 | ct.UINT8,
        'WAVEGEN' : 0x00 | ct.BFUINT8 | 0 << ct.BF_POS | 2 << ct.BF_LEN,
    }),
    # DRVCTRL @ 0x0D - Driver Control, 8-bit.
    'DRVCTRL' : ( 0x0D, {
        'reg' : 0x00 | ct.UINT8,
        'INVEN0' : 0x00 | ct.BFUINT8 | 0 << ct.BF_POS | 1 << ct.BF_LEN,
        'INVEN1' : 0x00 | ct.BFUINT8 | 1 << ct.BF_POS | 1 << ct.BF_LEN,
    }),
    # DBGCTRL @ 0x0F - Debug Control, 8-bit.
    'DBGCTRL' : ( 0x0F, {
        'reg' : 0x00 | ct.UINT8,
        'DBGRUN' : 0x00 | ct.BFUINT8 | 0 << ct.BF_POS | 1 << ct.BF_LEN,
    }),
    # SYNCBUSY @ 0x10 - Synchronization Busy flags, 32-bit.
    'SYNCBUSY' : ( 0x10, {
        'reg' : 0x00 | ct.UINT32,
        'SWRST' : 0x00 | ct.BFUINT32 | 0 << ct.BF_POS | 1 << ct.BF_LEN,
        'ENABLE' : 0x00 | ct.BFUINT32 | 1 << ct.BF_POS | 1 << ct.BF_LEN,
        'CTRLB' : 0x00 | ct.BFUINT32 | 2 << ct.BF_POS | 1 << ct.BF_LEN,
        'STATUS' : 0x00 | ct.BFUINT32 | 3 << ct.BF_POS | 1 << ct.BF_LEN,
        'COUNT' : 0x00 | ct.BFUINT32 | 4 << ct.BF_POS | 1 << ct.BF_LEN,
        'PER' : 0x00 | ct.BFUINT32 | 5 << ct.BF_POS | 1 << ct.BF_LEN,
        'CC0' : 0x00 | ct.BFUINT32 | 6 << ct.BF_POS | 1 << ct.BF_LEN,
        'CC1' : 0x00 | ct.BFUINT32 | 7 << ct.BF_POS | 1 << ct.BF_LEN,
    }),
    # 32-bit-wide count and compare registers; no PER/PERBUF in this mode.
    'COUNT' : 0x14 | ct.UINT32,
    'CC' : ( 0x1C | ct.ARRAY, 2 | ct.UINT32 ),
    'CCBUF' : ( 0x30 | ct.ARRAY, 2 | ct.UINT32 ),
}
# Top-level TC descriptor: all three counter-mode layouts are overlaid at
# offset 0x00, so the caller picks the view matching the configured
# CTRLA.MODE (e.g. TC0.COUNT16.COUNT).
TC_ = {
    'COUNT8' : ( 0x00, TC_COUNT8 ),
    'COUNT16' : ( 0x00, TC_COUNT16 ),
    'COUNT32' : ( 0x00, TC_COUNT32 ),
}
# Map the descriptor onto the TC0 peripheral's MMIO base address.
TC0 = ct.struct(0x40003800, TC_)
| 48.805825
| 75
| 0.499503
| 2,358
| 15,081
| 3.046226
| 0.048346
| 0.19045
| 0.166643
| 0.160379
| 0.975776
| 0.971043
| 0.956425
| 0.956425
| 0.956425
| 0.956425
| 0
| 0.144137
| 0.297063
| 15,081
| 308
| 76
| 48.964286
| 0.53344
| 0
| 0
| 0.920792
| 0
| 0
| 0.085074
| 0
| 0
| 0
| 0.068828
| 0
| 0
| 1
| 0
| false
| 0
| 0.0033
| 0
| 0.0033
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
2f9d0399adb0f8ade7650f173e68f1d85409fdba
| 452,049
|
py
|
Python
|
cisco-ios-xr/ydk/models/cisco_ios_xr/Cisco_IOS_XR_sysmgr_oper.py
|
CiscoDevNet/ydk-py
|
073731fea50694d0bc6cd8ebf10fec308dcc0aa9
|
[
"ECL-2.0",
"Apache-2.0"
] | 177
|
2016-03-15T17:03:51.000Z
|
2022-03-18T16:48:44.000Z
|
cisco-ios-xr/ydk/models/cisco_ios_xr/Cisco_IOS_XR_sysmgr_oper.py
|
CiscoDevNet/ydk-py
|
073731fea50694d0bc6cd8ebf10fec308dcc0aa9
|
[
"ECL-2.0",
"Apache-2.0"
] | 18
|
2016-03-30T10:45:22.000Z
|
2020-07-14T16:28:13.000Z
|
cisco-ios-xr/ydk/models/cisco_ios_xr/Cisco_IOS_XR_sysmgr_oper.py
|
CiscoDevNet/ydk-py
|
073731fea50694d0bc6cd8ebf10fec308dcc0aa9
|
[
"ECL-2.0",
"Apache-2.0"
] | 85
|
2016-03-16T20:38:57.000Z
|
2022-02-22T04:26:02.000Z
|
""" Cisco_IOS_XR_sysmgr_oper
This module contains a collection of YANG definitions
for Cisco IOS\-XR sysmgr package operational data.
This module contains definitions
for the following management objects\:
system\-process\: Process information
Copyright (c) 2013\-2018 by Cisco Systems, Inc.
All rights reserved.
"""
import sys
from collections import OrderedDict
from ydk.types import Entity as _Entity_
from ydk.types import EntityPath, Identity, Enum, YType, YLeaf, YLeafList, YList, LeafDataList, Bits, Empty, Decimal64
from ydk.types import Entity, EntityPath, Identity, Enum, YType, YLeaf, YLeafList, YList, LeafDataList, Bits, Empty, Decimal64
from ydk.filters import YFilter
from ydk.errors import YError, YModelError
from ydk.errors.error_handler import handle_type_error as _handle_type_error
class PlacementState(Enum):
    """
    PlacementState (Enum Class)

    Process placement state.

    .. data:: place_null = 0

        Process runs everywhere (ubiquitous)

    .. data:: place_placeable = 1

        Process runs on node chosen by PlaceD

    .. data:: place_dlrsc_tracker = 2

        Process runs on dSDRSC only

    .. data:: place_rack_centric = 3

        Process runs on RP of each rack

    .. data:: place_dsc_tracker = 4

        Process runs on DSC only

    """

    # Enum members pair the YANG numeric value with its YANG identifier string.
    place_null = Enum.YLeaf(0, "place-null")

    place_placeable = Enum.YLeaf(1, "place-placeable")

    place_dlrsc_tracker = Enum.YLeaf(2, "place-dlrsc-tracker")

    place_rack_centric = Enum.YLeaf(3, "place-rack-centric")

    place_dsc_tracker = Enum.YLeaf(4, "place-dsc-tracker")

    @staticmethod
    def _meta_info():
        # Import is deliberately local: the generated _meta module is large
        # and only needed when metadata is actually requested.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_sysmgr_oper as meta
        return meta._meta_table['PlacementState']
class ProcessState(Enum):
    """
    ProcessState (Enum Class)

    Process state.

    .. data:: none = 0

        NONE

    .. data:: run = 1

        RUN

    .. data:: exited = 2

        EXITED

    .. data:: hold = 3

        HOLD

    .. data:: wait = 4

        WAIT

    .. data:: restart = 5

        RESTART

    .. data:: initializing = 6

        INITIALIZING

    .. data:: killed = 7

        KILLED

    .. data:: queued = 8

        QUEUED

    .. data:: error = 9

        ERROR

    .. data:: tuple_set = 10

        TUPLESET

    .. data:: unknown = 11

        UNKNOWN

    """

    # Enum members pair the YANG numeric value with its YANG identifier string.
    none = Enum.YLeaf(0, "none")

    run = Enum.YLeaf(1, "run")

    exited = Enum.YLeaf(2, "exited")

    hold = Enum.YLeaf(3, "hold")

    wait = Enum.YLeaf(4, "wait")

    restart = Enum.YLeaf(5, "restart")

    initializing = Enum.YLeaf(6, "initializing")

    killed = Enum.YLeaf(7, "killed")

    queued = Enum.YLeaf(8, "queued")

    error = Enum.YLeaf(9, "error")

    tuple_set = Enum.YLeaf(10, "tuple-set")

    unknown = Enum.YLeaf(11, "unknown")

    @staticmethod
    def _meta_info():
        # Import is deliberately local: the generated _meta module is large
        # and only needed when metadata is actually requested.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_sysmgr_oper as meta
        return meta._meta_table['ProcessState']
class SystemProcess(_Entity_):
"""
Process information
.. attribute:: node_table
List of nodes
**type**\: :py:class:`NodeTable <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysmgr_oper.SystemProcess.NodeTable>`
**config**\: False
"""
_prefix = 'sysmgr-oper'
_revision = '2015-11-09'
def __init__(self):
if sys.version_info > (3,):
super().__init__()
else:
super(SystemProcess, self).__init__()
self._top_entity = None
self.yang_name = "system-process"
self.yang_parent_name = "Cisco-IOS-XR-sysmgr-oper"
self.is_top_level_class = True
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([("node-table", ("node_table", SystemProcess.NodeTable))])
self._leafs = OrderedDict()
self.node_table = SystemProcess.NodeTable()
self.node_table.parent = self
self._children_name_map["node_table"] = "node-table"
self._segment_path = lambda: "Cisco-IOS-XR-sysmgr-oper:system-process"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(SystemProcess, [], name, value)
class NodeTable(_Entity_):
"""
List of nodes
.. attribute:: node
Process information per node
**type**\: list of :py:class:`Node <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysmgr_oper.SystemProcess.NodeTable.Node>`
**config**\: False
"""
_prefix = 'sysmgr-oper'
_revision = '2015-11-09'
def __init__(self):
if sys.version_info > (3,):
super().__init__()
else:
super(SystemProcess.NodeTable, self).__init__()
self.yang_name = "node-table"
self.yang_parent_name = "system-process"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([("node", ("node", SystemProcess.NodeTable.Node))])
self._leafs = OrderedDict()
self.node = YList(self)
self._segment_path = lambda: "node-table"
self._absolute_path = lambda: "Cisco-IOS-XR-sysmgr-oper:system-process/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(SystemProcess.NodeTable, [], name, value)
class Node(_Entity_):
"""
Process information per node
.. attribute:: node_name (key)
The node name
**type**\: str
**pattern:** ([a\-zA\-Z0\-9\_]\*\\d+/){1,2}([a\-zA\-Z0\-9\_]\*\\d+)
**config**\: False
.. attribute:: name
Process <WORD> information
**type**\: :py:class:`Name <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysmgr_oper.SystemProcess.NodeTable.Node.Name>`
**config**\: False
.. attribute:: jids
Process job id information
**type**\: :py:class:`Jids <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysmgr_oper.SystemProcess.NodeTable.Node.Jids>`
**config**\: False
.. attribute:: dynamic
Process Dynamic information
**type**\: :py:class:`Dynamic <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysmgr_oper.SystemProcess.NodeTable.Node.Dynamic>`
**config**\: False
.. attribute:: boot_stalled
Process Boot Stalled information
**type**\: :py:class:`BootStalled <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysmgr_oper.SystemProcess.NodeTable.Node.BootStalled>`
**config**\: False
.. attribute:: processes
Process all information
**type**\: :py:class:`Processes <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysmgr_oper.SystemProcess.NodeTable.Node.Processes>`
**config**\: False
.. attribute:: startup
Process Startup information
**type**\: :py:class:`Startup <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysmgr_oper.SystemProcess.NodeTable.Node.Startup>`
**config**\: False
.. attribute:: mandatory
Mandatory Process information
**type**\: :py:class:`Mandatory <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysmgr_oper.SystemProcess.NodeTable.Node.Mandatory>`
**config**\: False
.. attribute:: abort
Process Abort information
**type**\: :py:class:`Abort <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysmgr_oper.SystemProcess.NodeTable.Node.Abort>`
**config**\: False
.. attribute:: failover
Process Failover information
**type**\: :py:class:`Failover <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysmgr_oper.SystemProcess.NodeTable.Node.Failover>`
**config**\: False
.. attribute:: boot
Process Boot information
**type**\: :py:class:`Boot <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysmgr_oper.SystemProcess.NodeTable.Node.Boot>`
**config**\: False
.. attribute:: logs
Process Log information
**type**\: :py:class:`Logs <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysmgr_oper.SystemProcess.NodeTable.Node.Logs>`
**config**\: False
.. attribute:: searchpath
Process Searchpath information
**type**\: :py:class:`Searchpath <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysmgr_oper.SystemProcess.NodeTable.Node.Searchpath>`
**config**\: False
"""
_prefix = 'sysmgr-oper'
_revision = '2015-11-09'
def __init__(self):
if sys.version_info > (3,):
super().__init__()
else:
super(SystemProcess.NodeTable.Node, self).__init__()
self.yang_name = "node"
self.yang_parent_name = "node-table"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = ['node_name']
self._child_classes = OrderedDict([("name", ("name", SystemProcess.NodeTable.Node.Name)), ("jids", ("jids", SystemProcess.NodeTable.Node.Jids)), ("dynamic", ("dynamic", SystemProcess.NodeTable.Node.Dynamic)), ("boot-stalled", ("boot_stalled", SystemProcess.NodeTable.Node.BootStalled)), ("processes", ("processes", SystemProcess.NodeTable.Node.Processes)), ("startup", ("startup", SystemProcess.NodeTable.Node.Startup)), ("mandatory", ("mandatory", SystemProcess.NodeTable.Node.Mandatory)), ("abort", ("abort", SystemProcess.NodeTable.Node.Abort)), ("failover", ("failover", SystemProcess.NodeTable.Node.Failover)), ("boot", ("boot", SystemProcess.NodeTable.Node.Boot)), ("logs", ("logs", SystemProcess.NodeTable.Node.Logs)), ("searchpath", ("searchpath", SystemProcess.NodeTable.Node.Searchpath))])
self._leafs = OrderedDict([
('node_name', (YLeaf(YType.str, 'node-name'), ['str'])),
])
self.node_name = None
self.name = SystemProcess.NodeTable.Node.Name()
self.name.parent = self
self._children_name_map["name"] = "name"
self.jids = SystemProcess.NodeTable.Node.Jids()
self.jids.parent = self
self._children_name_map["jids"] = "jids"
self.dynamic = SystemProcess.NodeTable.Node.Dynamic()
self.dynamic.parent = self
self._children_name_map["dynamic"] = "dynamic"
self.boot_stalled = SystemProcess.NodeTable.Node.BootStalled()
self.boot_stalled.parent = self
self._children_name_map["boot_stalled"] = "boot-stalled"
self.processes = SystemProcess.NodeTable.Node.Processes()
self.processes.parent = self
self._children_name_map["processes"] = "processes"
self.startup = SystemProcess.NodeTable.Node.Startup()
self.startup.parent = self
self._children_name_map["startup"] = "startup"
self.mandatory = SystemProcess.NodeTable.Node.Mandatory()
self.mandatory.parent = self
self._children_name_map["mandatory"] = "mandatory"
self.abort = SystemProcess.NodeTable.Node.Abort()
self.abort.parent = self
self._children_name_map["abort"] = "abort"
self.failover = SystemProcess.NodeTable.Node.Failover()
self.failover.parent = self
self._children_name_map["failover"] = "failover"
self.boot = SystemProcess.NodeTable.Node.Boot()
self.boot.parent = self
self._children_name_map["boot"] = "boot"
self.logs = SystemProcess.NodeTable.Node.Logs()
self.logs.parent = self
self._children_name_map["logs"] = "logs"
self.searchpath = SystemProcess.NodeTable.Node.Searchpath()
self.searchpath.parent = self
self._children_name_map["searchpath"] = "searchpath"
self._segment_path = lambda: "node" + "[node-name='" + str(self.node_name) + "']"
self._absolute_path = lambda: "Cisco-IOS-XR-sysmgr-oper:system-process/node-table/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(SystemProcess.NodeTable.Node, ['node_name'], name, value)
class Name(_Entity_):
"""
Process <WORD> information
.. attribute:: process_name_run_infos
Process <WORD> information
**type**\: :py:class:`ProcessNameRunInfos <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysmgr_oper.SystemProcess.NodeTable.Node.Name.ProcessNameRunInfos>`
**config**\: False
.. attribute:: process_name_infos
Process <WORD> information
**type**\: :py:class:`ProcessNameInfos <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysmgr_oper.SystemProcess.NodeTable.Node.Name.ProcessNameInfos>`
**config**\: False
.. attribute:: process_name_run_details
Process <WORD> information
**type**\: :py:class:`ProcessNameRunDetails <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysmgr_oper.SystemProcess.NodeTable.Node.Name.ProcessNameRunDetails>`
**config**\: False
.. attribute:: process_name_runverboses
Process <WORD> information
**type**\: :py:class:`ProcessNameRunverboses <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysmgr_oper.SystemProcess.NodeTable.Node.Name.ProcessNameRunverboses>`
**config**\: False
.. attribute:: process_name_details
Process <WORD> information
**type**\: :py:class:`ProcessNameDetails <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysmgr_oper.SystemProcess.NodeTable.Node.Name.ProcessNameDetails>`
**config**\: False
.. attribute:: process_name_verboses
Process <WORD> information
**type**\: :py:class:`ProcessNameVerboses <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysmgr_oper.SystemProcess.NodeTable.Node.Name.ProcessNameVerboses>`
**config**\: False
"""
_prefix = 'sysmgr-oper'
_revision = '2015-11-09'
def __init__(self):
if sys.version_info > (3,):
super().__init__()
else:
super(SystemProcess.NodeTable.Node.Name, self).__init__()
self.yang_name = "name"
self.yang_parent_name = "node"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([("process-name-run-infos", ("process_name_run_infos", SystemProcess.NodeTable.Node.Name.ProcessNameRunInfos)), ("process-name-infos", ("process_name_infos", SystemProcess.NodeTable.Node.Name.ProcessNameInfos)), ("process-name-run-details", ("process_name_run_details", SystemProcess.NodeTable.Node.Name.ProcessNameRunDetails)), ("process-name-runverboses", ("process_name_runverboses", SystemProcess.NodeTable.Node.Name.ProcessNameRunverboses)), ("process-name-details", ("process_name_details", SystemProcess.NodeTable.Node.Name.ProcessNameDetails)), ("process-name-verboses", ("process_name_verboses", SystemProcess.NodeTable.Node.Name.ProcessNameVerboses))])
self._leafs = OrderedDict()
self.process_name_run_infos = SystemProcess.NodeTable.Node.Name.ProcessNameRunInfos()
self.process_name_run_infos.parent = self
self._children_name_map["process_name_run_infos"] = "process-name-run-infos"
self.process_name_infos = SystemProcess.NodeTable.Node.Name.ProcessNameInfos()
self.process_name_infos.parent = self
self._children_name_map["process_name_infos"] = "process-name-infos"
self.process_name_run_details = SystemProcess.NodeTable.Node.Name.ProcessNameRunDetails()
self.process_name_run_details.parent = self
self._children_name_map["process_name_run_details"] = "process-name-run-details"
self.process_name_runverboses = SystemProcess.NodeTable.Node.Name.ProcessNameRunverboses()
self.process_name_runverboses.parent = self
self._children_name_map["process_name_runverboses"] = "process-name-runverboses"
self.process_name_details = SystemProcess.NodeTable.Node.Name.ProcessNameDetails()
self.process_name_details.parent = self
self._children_name_map["process_name_details"] = "process-name-details"
self.process_name_verboses = SystemProcess.NodeTable.Node.Name.ProcessNameVerboses()
self.process_name_verboses.parent = self
self._children_name_map["process_name_verboses"] = "process-name-verboses"
self._segment_path = lambda: "name"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(SystemProcess.NodeTable.Node.Name, [], name, value)
class ProcessNameRunInfos(_Entity_):
"""
Process <WORD> information
.. attribute:: process_name_run_info
Process <WORD> run information
**type**\: list of :py:class:`ProcessNameRunInfo <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysmgr_oper.SystemProcess.NodeTable.Node.Name.ProcessNameRunInfos.ProcessNameRunInfo>`
**config**\: False
"""
_prefix = 'sysmgr-oper'
_revision = '2015-11-09'
def __init__(self):
if sys.version_info > (3,):
super().__init__()
else:
super(SystemProcess.NodeTable.Node.Name.ProcessNameRunInfos, self).__init__()
self.yang_name = "process-name-run-infos"
self.yang_parent_name = "name"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([("process-name-run-info", ("process_name_run_info", SystemProcess.NodeTable.Node.Name.ProcessNameRunInfos.ProcessNameRunInfo))])
self._leafs = OrderedDict()
self.process_name_run_info = YList(self)
self._segment_path = lambda: "process-name-run-infos"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(SystemProcess.NodeTable.Node.Name.ProcessNameRunInfos, [], name, value)
class ProcessNameRunInfo(_Entity_):
"""
Process <WORD> run information
.. attribute:: proc_name (key)
Process Name
**type**\: str
**pattern:** [\\w\\\-\\.\:,\_@#%$\\+=\\\|;]+
**config**\: False
.. attribute:: proc_cpu_time
Proces cpu time
**type**\: :py:class:`ProcCpuTime <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysmgr_oper.SystemProcess.NodeTable.Node.Name.ProcessNameRunInfos.ProcessNameRunInfo.ProcCpuTime>`
**config**\: False
.. attribute:: job_id_xr
Job ID
**type**\: int
**range:** 0..4294967295
**config**\: False
.. attribute:: process_id
PID
**type**\: int
**range:** 0..4294967295
**config**\: False
.. attribute:: process_name
Process name
**type**\: str
**config**\: False
.. attribute:: executable
Executable name or path
**type**\: str
**config**\: False
.. attribute:: active_path
Active Path
**type**\: str
**config**\: False
.. attribute:: instance_id
Instance ID
**type**\: int
**range:** \-2147483648..2147483647
**config**\: False
.. attribute:: args
Args
**type**\: str
**config**\: False
.. attribute:: version_id
Version ID
**type**\: str
**config**\: False
.. attribute:: respawn
Respawn on/off
**type**\: str
**config**\: False
.. attribute:: respawn_count
Respawn Count
**type**\: int
**range:** \-2147483648..2147483647
**config**\: False
.. attribute:: last_started
Last Started
**type**\: str
**config**\: False
.. attribute:: process_state
Process State
**type**\: str
**config**\: False
.. attribute:: last_exit_status
Last Exit status
**type**\: int
**range:** \-2147483648..2147483647
**config**\: False
.. attribute:: last_exit_reason
Last Exit due to
**type**\: str
**config**\: False
.. attribute:: package_state
Package State
**type**\: str
**config**\: False
.. attribute:: started_on_config
Started on Config
**type**\: str
**config**\: False
.. attribute:: feature_name
Feature Name
**type**\: str
**config**\: False
.. attribute:: tag
Tag
**type**\: str
**config**\: False
.. attribute:: group
Process Group
**type**\: str
**config**\: False
.. attribute:: core
Core
**type**\: str
**config**\: False
.. attribute:: max_core
Max core
**type**\: int
**range:** \-2147483648..2147483647
**config**\: False
.. attribute:: level
Level
**type**\: str
**config**\: False
.. attribute:: mandatory
Is mandatory?
**type**\: bool
**config**\: False
.. attribute:: maint_mode_proc
Is admin mode process?
**type**\: bool
**config**\: False
.. attribute:: placement_state
Placement State
**type**\: str
**config**\: False
.. attribute:: start_up_path
Startup Path
**type**\: str
**config**\: False
.. attribute:: memory_limit
Memory Limit
**type**\: int
**range:** 0..4294967295
**config**\: False
.. attribute:: ready
Elapsed Ready
**type**\: str
**config**\: False
.. attribute:: available
Elapsed Available
**type**\: str
**config**\: False
.. attribute:: registered_item
Registered Items
**type**\: list of :py:class:`RegisteredItem <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysmgr_oper.SystemProcess.NodeTable.Node.Name.ProcessNameRunInfos.ProcessNameRunInfo.RegisteredItem>`
**config**\: False
"""
_prefix = 'sysmgr-oper'
_revision = '2015-11-09'
def __init__(self):
    # Build the static YANG metadata (child-class map, leaf map) and
    # initialize every leaf to None.  NOTE: statement order matters --
    # the overridden __setattr__ routes through _perform_setattr, so
    # self._is_frozen = True must remain the LAST assignment to keep
    # construction-time writes permitted.
    if sys.version_info > (3,):
        super().__init__()
    else:
        # Python 2 requires the explicit class reference in super().
        super(SystemProcess.NodeTable.Node.Name.ProcessNameRunInfos.ProcessNameRunInfo, self).__init__()
    self.yang_name = "process-name-run-info"
    self.yang_parent_name = "process-name-run-infos"
    self.is_top_level_class = False
    self.has_list_ancestor = True
    self.ylist_key_names = ['proc_name']
    # YANG child node name -> (python attribute name, python class).
    self._child_classes = OrderedDict([("proc-cpu-time", ("proc_cpu_time", SystemProcess.NodeTable.Node.Name.ProcessNameRunInfos.ProcessNameRunInfo.ProcCpuTime)), ("registered-item", ("registered_item", SystemProcess.NodeTable.Node.Name.ProcessNameRunInfos.ProcessNameRunInfo.RegisteredItem))])
    # Python leaf name -> (YLeaf descriptor, accepted python types).
    self._leafs = OrderedDict([
        ('proc_name', (YLeaf(YType.str, 'proc-name'), ['str'])),
        ('job_id_xr', (YLeaf(YType.uint32, 'job-id-xr'), ['int'])),
        ('process_id', (YLeaf(YType.uint32, 'process-id'), ['int'])),
        ('process_name', (YLeaf(YType.str, 'process-name'), ['str'])),
        ('executable', (YLeaf(YType.str, 'executable'), ['str'])),
        ('active_path', (YLeaf(YType.str, 'active-path'), ['str'])),
        ('instance_id', (YLeaf(YType.int32, 'instance-id'), ['int'])),
        ('args', (YLeaf(YType.str, 'args'), ['str'])),
        ('version_id', (YLeaf(YType.str, 'version-id'), ['str'])),
        ('respawn', (YLeaf(YType.str, 'respawn'), ['str'])),
        ('respawn_count', (YLeaf(YType.int32, 'respawn-count'), ['int'])),
        ('last_started', (YLeaf(YType.str, 'last-started'), ['str'])),
        ('process_state', (YLeaf(YType.str, 'process-state'), ['str'])),
        ('last_exit_status', (YLeaf(YType.int32, 'last-exit-status'), ['int'])),
        ('last_exit_reason', (YLeaf(YType.str, 'last-exit-reason'), ['str'])),
        ('package_state', (YLeaf(YType.str, 'package-state'), ['str'])),
        ('started_on_config', (YLeaf(YType.str, 'started-on-config'), ['str'])),
        ('feature_name', (YLeaf(YType.str, 'feature-name'), ['str'])),
        ('tag', (YLeaf(YType.str, 'tag'), ['str'])),
        ('group', (YLeaf(YType.str, 'group'), ['str'])),
        ('core', (YLeaf(YType.str, 'core'), ['str'])),
        ('max_core', (YLeaf(YType.int32, 'max-core'), ['int'])),
        ('level', (YLeaf(YType.str, 'level'), ['str'])),
        ('mandatory', (YLeaf(YType.boolean, 'mandatory'), ['bool'])),
        ('maint_mode_proc', (YLeaf(YType.boolean, 'maint-mode-proc'), ['bool'])),
        ('placement_state', (YLeaf(YType.str, 'placement-state'), ['str'])),
        ('start_up_path', (YLeaf(YType.str, 'start-up-path'), ['str'])),
        ('memory_limit', (YLeaf(YType.uint32, 'memory-limit'), ['int'])),
        ('ready', (YLeaf(YType.str, 'ready'), ['str'])),
        ('available', (YLeaf(YType.str, 'available'), ['str'])),
    ])
    # Leaf values default to None (unset) until read/assigned.
    self.proc_name = None
    self.job_id_xr = None
    self.process_id = None
    self.process_name = None
    self.executable = None
    self.active_path = None
    self.instance_id = None
    self.args = None
    self.version_id = None
    self.respawn = None
    self.respawn_count = None
    self.last_started = None
    self.process_state = None
    self.last_exit_status = None
    self.last_exit_reason = None
    self.package_state = None
    self.started_on_config = None
    self.feature_name = None
    self.tag = None
    self.group = None
    self.core = None
    self.max_core = None
    self.level = None
    self.mandatory = None
    self.maint_mode_proc = None
    self.placement_state = None
    self.start_up_path = None
    self.memory_limit = None
    self.ready = None
    self.available = None
    # Singleton container child.
    self.proc_cpu_time = SystemProcess.NodeTable.Node.Name.ProcessNameRunInfos.ProcessNameRunInfo.ProcCpuTime()
    self.proc_cpu_time.parent = self
    self._children_name_map["proc_cpu_time"] = "proc-cpu-time"
    # YANG list child.
    self.registered_item = YList(self)
    # Keyed list entry: the segment path embeds the proc-name key value.
    self._segment_path = lambda: "process-name-run-info" + "[proc-name='" + str(self.proc_name) + "']"
    # Must be last: freezes the entity against unknown attribute names.
    self._is_frozen = True
def __setattr__(self, name, value):
    """Validate and apply attribute writes via the entity setattr helper."""
    leaf_names = [
        'proc_name', 'job_id_xr', 'process_id', 'process_name', 'executable',
        'active_path', 'instance_id', 'args', 'version_id', 'respawn',
        'respawn_count', 'last_started', 'process_state', 'last_exit_status',
        'last_exit_reason', 'package_state', 'started_on_config',
        'feature_name', 'tag', 'group', 'core', 'max_core', 'level',
        'mandatory', 'maint_mode_proc', 'placement_state', 'start_up_path',
        'memory_limit', 'ready', 'available',
    ]
    self._perform_setattr(
        SystemProcess.NodeTable.Node.Name.ProcessNameRunInfos.ProcessNameRunInfo,
        leaf_names, name, value)
class ProcCpuTime(_Entity_):
    """
    Proces cpu time
    .. attribute:: user
    User time
    **type**\: str
    **config**\: False
    .. attribute:: system
    Kernel time
    **type**\: str
    **config**\: False
    .. attribute:: total
    Total time
    **type**\: str
    **config**\: False
    """
    # YANG module prefix and revision this class was generated from.
    _prefix = 'sysmgr-oper'
    _revision = '2015-11-09'
    def __init__(self):
        # Py2/Py3-compatible super() call.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(SystemProcess.NodeTable.Node.Name.ProcessNameRunInfos.ProcessNameRunInfo.ProcCpuTime, self).__init__()
        self.yang_name = "proc-cpu-time"
        self.yang_parent_name = "process-name-run-info"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        # Python leaf name -> (YLeaf descriptor, accepted python types).
        self._leafs = OrderedDict([
            ('user', (YLeaf(YType.str, 'user'), ['str'])),
            ('system', (YLeaf(YType.str, 'system'), ['str'])),
            ('total', (YLeaf(YType.str, 'total'), ['str'])),
        ])
        self.user = None
        self.system = None
        self.total = None
        self._segment_path = lambda: "proc-cpu-time"
        # Must be last: freezes the entity against unknown attribute names.
        self._is_frozen = True
    def __setattr__(self, name, value):
        # Route writes through the frozen-entity validation helper.
        self._perform_setattr(SystemProcess.NodeTable.Node.Name.ProcessNameRunInfos.ProcessNameRunInfo.ProcCpuTime, ['user', 'system', 'total'], name, value)
    @staticmethod
    def _meta_info():
        # Lazy import keeps the large generated _meta module off the import path
        # until meta information is actually requested.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_sysmgr_oper as meta
        return meta._meta_table['SystemProcess.NodeTable.Node.Name.ProcessNameRunInfos.ProcessNameRunInfo.ProcCpuTime']['meta_info']
class RegisteredItem(_Entity_):
    """
    Registered Items
    .. attribute:: tuple
    Tuple
    **type**\: str
    **config**\: False
    """
    # YANG module prefix and revision this class was generated from.
    _prefix = 'sysmgr-oper'
    _revision = '2015-11-09'
    def __init__(self):
        # Py2/Py3-compatible super() call.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(SystemProcess.NodeTable.Node.Name.ProcessNameRunInfos.ProcessNameRunInfo.RegisteredItem, self).__init__()
        self.yang_name = "registered-item"
        self.yang_parent_name = "process-name-run-info"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        # Single leaf; 'tuple' mirrors the YANG leaf name (shadows the builtin).
        self._leafs = OrderedDict([
            ('tuple', (YLeaf(YType.str, 'tuple'), ['str'])),
        ])
        self.tuple = None
        self._segment_path = lambda: "registered-item"
        # Must be last: freezes the entity against unknown attribute names.
        self._is_frozen = True
    def __setattr__(self, name, value):
        # Route writes through the frozen-entity validation helper.
        self._perform_setattr(SystemProcess.NodeTable.Node.Name.ProcessNameRunInfos.ProcessNameRunInfo.RegisteredItem, ['tuple'], name, value)
    @staticmethod
    def _meta_info():
        # Lazy import keeps the large generated _meta module off the import path.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_sysmgr_oper as meta
        return meta._meta_table['SystemProcess.NodeTable.Node.Name.ProcessNameRunInfos.ProcessNameRunInfo.RegisteredItem']['meta_info']
@staticmethod
def _meta_info():
    """Return the generated meta-info entry for ProcessNameRunInfo."""
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_sysmgr_oper as meta
    entry = meta._meta_table['SystemProcess.NodeTable.Node.Name.ProcessNameRunInfos.ProcessNameRunInfo']
    return entry['meta_info']
@staticmethod
def _meta_info():
    """Return the generated meta-info entry for ProcessNameRunInfos."""
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_sysmgr_oper as meta
    entry = meta._meta_table['SystemProcess.NodeTable.Node.Name.ProcessNameRunInfos']
    return entry['meta_info']
class ProcessNameInfos(_Entity_):
    """
    Process <WORD> information
    .. attribute:: process_name_info
    Process <WORD> information
    **type**\: list of :py:class:`ProcessNameInfo <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysmgr_oper.SystemProcess.NodeTable.Node.Name.ProcessNameInfos.ProcessNameInfo>`
    **config**\: False
    """
    # YANG module prefix and revision this class was generated from.
    _prefix = 'sysmgr-oper'
    _revision = '2015-11-09'
    def __init__(self):
        # Py2/Py3-compatible super() call.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(SystemProcess.NodeTable.Node.Name.ProcessNameInfos, self).__init__()
        self.yang_name = "process-name-infos"
        self.yang_parent_name = "name"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        # YANG child node name -> (python attribute name, python class).
        self._child_classes = OrderedDict([("process-name-info", ("process_name_info", SystemProcess.NodeTable.Node.Name.ProcessNameInfos.ProcessNameInfo))])
        self._leafs = OrderedDict()
        # YANG list child.
        self.process_name_info = YList(self)
        self._segment_path = lambda: "process-name-infos"
        # Must be last: freezes the entity against unknown attribute names.
        self._is_frozen = True
    def __setattr__(self, name, value):
        # Route writes through the frozen-entity validation helper.
        self._perform_setattr(SystemProcess.NodeTable.Node.Name.ProcessNameInfos, [], name, value)
    class ProcessNameInfo(_Entity_):
        """
        Process <WORD> information
        .. attribute:: proc_name (key)
        Process Name
        **type**\: str
        **pattern:** [\\w\\\-\\.\:,\_@#%$\\+=\\\|;]+
        **config**\: False
        .. attribute:: proc_cpu_time
        Proces cpu time
        **type**\: :py:class:`ProcCpuTime <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysmgr_oper.SystemProcess.NodeTable.Node.Name.ProcessNameInfos.ProcessNameInfo.ProcCpuTime>`
        **config**\: False
        .. attribute:: job_id_xr
        Job ID
        **type**\: int
        **range:** 0..4294967295
        **config**\: False
        .. attribute:: process_id
        PID
        **type**\: int
        **range:** 0..4294967295
        **config**\: False
        .. attribute:: process_name
        Process name
        **type**\: str
        **config**\: False
        .. attribute:: executable
        Executable name or path
        **type**\: str
        **config**\: False
        .. attribute:: active_path
        Active Path
        **type**\: str
        **config**\: False
        .. attribute:: instance_id
        Instance ID
        **type**\: int
        **range:** \-2147483648..2147483647
        **config**\: False
        .. attribute:: args
        Args
        **type**\: str
        **config**\: False
        .. attribute:: version_id
        Version ID
        **type**\: str
        **config**\: False
        .. attribute:: respawn
        Respawn on/off
        **type**\: str
        **config**\: False
        .. attribute:: respawn_count
        Respawn Count
        **type**\: int
        **range:** \-2147483648..2147483647
        **config**\: False
        .. attribute:: last_started
        Last Started
        **type**\: str
        **config**\: False
        .. attribute:: process_state
        Process State
        **type**\: str
        **config**\: False
        .. attribute:: last_exit_status
        Last Exit status
        **type**\: int
        **range:** \-2147483648..2147483647
        **config**\: False
        .. attribute:: last_exit_reason
        Last Exit due to
        **type**\: str
        **config**\: False
        .. attribute:: package_state
        Package State
        **type**\: str
        **config**\: False
        .. attribute:: started_on_config
        Started on Config
        **type**\: str
        **config**\: False
        .. attribute:: feature_name
        Feature Name
        **type**\: str
        **config**\: False
        .. attribute:: tag
        Tag
        **type**\: str
        **config**\: False
        .. attribute:: group
        Process Group
        **type**\: str
        **config**\: False
        .. attribute:: core
        Core
        **type**\: str
        **config**\: False
        .. attribute:: max_core
        Max core
        **type**\: int
        **range:** \-2147483648..2147483647
        **config**\: False
        .. attribute:: level
        Level
        **type**\: str
        **config**\: False
        .. attribute:: mandatory
        Is mandatory?
        **type**\: bool
        **config**\: False
        .. attribute:: maint_mode_proc
        Is admin mode process?
        **type**\: bool
        **config**\: False
        .. attribute:: placement_state
        Placement State
        **type**\: str
        **config**\: False
        .. attribute:: start_up_path
        Startup Path
        **type**\: str
        **config**\: False
        .. attribute:: memory_limit
        Memory Limit
        **type**\: int
        **range:** 0..4294967295
        **config**\: False
        .. attribute:: ready
        Elapsed Ready
        **type**\: str
        **config**\: False
        .. attribute:: available
        Elapsed Available
        **type**\: str
        **config**\: False
        .. attribute:: registered_item
        Registered Items
        **type**\: list of :py:class:`RegisteredItem <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysmgr_oper.SystemProcess.NodeTable.Node.Name.ProcessNameInfos.ProcessNameInfo.RegisteredItem>`
        **config**\: False
        """
        # YANG module prefix and revision this class was generated from.
        _prefix = 'sysmgr-oper'
        _revision = '2015-11-09'
        def __init__(self):
            # Order matters below: _leafs must exist before leaf assignments,
            # and _is_frozen = True must remain the last statement.
            if sys.version_info > (3,):
                super().__init__()
            else:
                # Python 2 requires the explicit class reference in super().
                super(SystemProcess.NodeTable.Node.Name.ProcessNameInfos.ProcessNameInfo, self).__init__()
            self.yang_name = "process-name-info"
            self.yang_parent_name = "process-name-infos"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = ['proc_name']
            # YANG child node name -> (python attribute name, python class).
            self._child_classes = OrderedDict([("proc-cpu-time", ("proc_cpu_time", SystemProcess.NodeTable.Node.Name.ProcessNameInfos.ProcessNameInfo.ProcCpuTime)), ("registered-item", ("registered_item", SystemProcess.NodeTable.Node.Name.ProcessNameInfos.ProcessNameInfo.RegisteredItem))])
            # Python leaf name -> (YLeaf descriptor, accepted python types).
            self._leafs = OrderedDict([
                ('proc_name', (YLeaf(YType.str, 'proc-name'), ['str'])),
                ('job_id_xr', (YLeaf(YType.uint32, 'job-id-xr'), ['int'])),
                ('process_id', (YLeaf(YType.uint32, 'process-id'), ['int'])),
                ('process_name', (YLeaf(YType.str, 'process-name'), ['str'])),
                ('executable', (YLeaf(YType.str, 'executable'), ['str'])),
                ('active_path', (YLeaf(YType.str, 'active-path'), ['str'])),
                ('instance_id', (YLeaf(YType.int32, 'instance-id'), ['int'])),
                ('args', (YLeaf(YType.str, 'args'), ['str'])),
                ('version_id', (YLeaf(YType.str, 'version-id'), ['str'])),
                ('respawn', (YLeaf(YType.str, 'respawn'), ['str'])),
                ('respawn_count', (YLeaf(YType.int32, 'respawn-count'), ['int'])),
                ('last_started', (YLeaf(YType.str, 'last-started'), ['str'])),
                ('process_state', (YLeaf(YType.str, 'process-state'), ['str'])),
                ('last_exit_status', (YLeaf(YType.int32, 'last-exit-status'), ['int'])),
                ('last_exit_reason', (YLeaf(YType.str, 'last-exit-reason'), ['str'])),
                ('package_state', (YLeaf(YType.str, 'package-state'), ['str'])),
                ('started_on_config', (YLeaf(YType.str, 'started-on-config'), ['str'])),
                ('feature_name', (YLeaf(YType.str, 'feature-name'), ['str'])),
                ('tag', (YLeaf(YType.str, 'tag'), ['str'])),
                ('group', (YLeaf(YType.str, 'group'), ['str'])),
                ('core', (YLeaf(YType.str, 'core'), ['str'])),
                ('max_core', (YLeaf(YType.int32, 'max-core'), ['int'])),
                ('level', (YLeaf(YType.str, 'level'), ['str'])),
                ('mandatory', (YLeaf(YType.boolean, 'mandatory'), ['bool'])),
                ('maint_mode_proc', (YLeaf(YType.boolean, 'maint-mode-proc'), ['bool'])),
                ('placement_state', (YLeaf(YType.str, 'placement-state'), ['str'])),
                ('start_up_path', (YLeaf(YType.str, 'start-up-path'), ['str'])),
                ('memory_limit', (YLeaf(YType.uint32, 'memory-limit'), ['int'])),
                ('ready', (YLeaf(YType.str, 'ready'), ['str'])),
                ('available', (YLeaf(YType.str, 'available'), ['str'])),
            ])
            # Leaf values default to None (unset) until read/assigned.
            self.proc_name = None
            self.job_id_xr = None
            self.process_id = None
            self.process_name = None
            self.executable = None
            self.active_path = None
            self.instance_id = None
            self.args = None
            self.version_id = None
            self.respawn = None
            self.respawn_count = None
            self.last_started = None
            self.process_state = None
            self.last_exit_status = None
            self.last_exit_reason = None
            self.package_state = None
            self.started_on_config = None
            self.feature_name = None
            self.tag = None
            self.group = None
            self.core = None
            self.max_core = None
            self.level = None
            self.mandatory = None
            self.maint_mode_proc = None
            self.placement_state = None
            self.start_up_path = None
            self.memory_limit = None
            self.ready = None
            self.available = None
            # Singleton container child.
            self.proc_cpu_time = SystemProcess.NodeTable.Node.Name.ProcessNameInfos.ProcessNameInfo.ProcCpuTime()
            self.proc_cpu_time.parent = self
            self._children_name_map["proc_cpu_time"] = "proc-cpu-time"
            # YANG list child.
            self.registered_item = YList(self)
            # Keyed list entry: the segment path embeds the proc-name key value.
            self._segment_path = lambda: "process-name-info" + "[proc-name='" + str(self.proc_name) + "']"
            # Must be last: freezes the entity against unknown attribute names.
            self._is_frozen = True
        def __setattr__(self, name, value):
            # Route writes through the frozen-entity validation helper.
            self._perform_setattr(SystemProcess.NodeTable.Node.Name.ProcessNameInfos.ProcessNameInfo, ['proc_name', 'job_id_xr', 'process_id', 'process_name', 'executable', 'active_path', 'instance_id', 'args', 'version_id', 'respawn', 'respawn_count', 'last_started', 'process_state', 'last_exit_status', 'last_exit_reason', 'package_state', 'started_on_config', 'feature_name', 'tag', 'group', 'core', 'max_core', 'level', 'mandatory', 'maint_mode_proc', 'placement_state', 'start_up_path', 'memory_limit', 'ready', 'available'], name, value)
        class ProcCpuTime(_Entity_):
            """
            Proces cpu time
            .. attribute:: user
            User time
            **type**\: str
            **config**\: False
            .. attribute:: system
            Kernel time
            **type**\: str
            **config**\: False
            .. attribute:: total
            Total time
            **type**\: str
            **config**\: False
            """
            # YANG module prefix and revision this class was generated from.
            _prefix = 'sysmgr-oper'
            _revision = '2015-11-09'
            def __init__(self):
                # Py2/Py3-compatible super() call.
                if sys.version_info > (3,):
                    super().__init__()
                else:
                    super(SystemProcess.NodeTable.Node.Name.ProcessNameInfos.ProcessNameInfo.ProcCpuTime, self).__init__()
                self.yang_name = "proc-cpu-time"
                self.yang_parent_name = "process-name-info"
                self.is_top_level_class = False
                self.has_list_ancestor = True
                self.ylist_key_names = []
                self._child_classes = OrderedDict([])
                # Python leaf name -> (YLeaf descriptor, accepted python types).
                self._leafs = OrderedDict([
                    ('user', (YLeaf(YType.str, 'user'), ['str'])),
                    ('system', (YLeaf(YType.str, 'system'), ['str'])),
                    ('total', (YLeaf(YType.str, 'total'), ['str'])),
                ])
                self.user = None
                self.system = None
                self.total = None
                self._segment_path = lambda: "proc-cpu-time"
                # Must be last: freezes the entity against unknown attribute names.
                self._is_frozen = True
            def __setattr__(self, name, value):
                # Route writes through the frozen-entity validation helper.
                self._perform_setattr(SystemProcess.NodeTable.Node.Name.ProcessNameInfos.ProcessNameInfo.ProcCpuTime, ['user', 'system', 'total'], name, value)
            @staticmethod
            def _meta_info():
                # Lazy import keeps the large generated _meta module off the import path.
                from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_sysmgr_oper as meta
                return meta._meta_table['SystemProcess.NodeTable.Node.Name.ProcessNameInfos.ProcessNameInfo.ProcCpuTime']['meta_info']
        class RegisteredItem(_Entity_):
            """
            Registered Items
            .. attribute:: tuple
            Tuple
            **type**\: str
            **config**\: False
            """
            # YANG module prefix and revision this class was generated from.
            _prefix = 'sysmgr-oper'
            _revision = '2015-11-09'
            def __init__(self):
                # Py2/Py3-compatible super() call.
                if sys.version_info > (3,):
                    super().__init__()
                else:
                    super(SystemProcess.NodeTable.Node.Name.ProcessNameInfos.ProcessNameInfo.RegisteredItem, self).__init__()
                self.yang_name = "registered-item"
                self.yang_parent_name = "process-name-info"
                self.is_top_level_class = False
                self.has_list_ancestor = True
                self.ylist_key_names = []
                self._child_classes = OrderedDict([])
                # Single leaf; 'tuple' mirrors the YANG leaf name (shadows the builtin).
                self._leafs = OrderedDict([
                    ('tuple', (YLeaf(YType.str, 'tuple'), ['str'])),
                ])
                self.tuple = None
                self._segment_path = lambda: "registered-item"
                # Must be last: freezes the entity against unknown attribute names.
                self._is_frozen = True
            def __setattr__(self, name, value):
                # Route writes through the frozen-entity validation helper.
                self._perform_setattr(SystemProcess.NodeTable.Node.Name.ProcessNameInfos.ProcessNameInfo.RegisteredItem, ['tuple'], name, value)
            @staticmethod
            def _meta_info():
                # Lazy import keeps the large generated _meta module off the import path.
                from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_sysmgr_oper as meta
                return meta._meta_table['SystemProcess.NodeTable.Node.Name.ProcessNameInfos.ProcessNameInfo.RegisteredItem']['meta_info']
        @staticmethod
        def _meta_info():
            # Lazy import keeps the large generated _meta module off the import path.
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_sysmgr_oper as meta
            return meta._meta_table['SystemProcess.NodeTable.Node.Name.ProcessNameInfos.ProcessNameInfo']['meta_info']
    @staticmethod
    def _meta_info():
        # Lazy import keeps the large generated _meta module off the import path.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_sysmgr_oper as meta
        return meta._meta_table['SystemProcess.NodeTable.Node.Name.ProcessNameInfos']['meta_info']
class ProcessNameRunDetails(_Entity_):
"""
Process <WORD> information
.. attribute:: process_name_run_detail
Process <WORD> run detail information
**type**\: list of :py:class:`ProcessNameRunDetail <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysmgr_oper.SystemProcess.NodeTable.Node.Name.ProcessNameRunDetails.ProcessNameRunDetail>`
**config**\: False
"""
_prefix = 'sysmgr-oper'
_revision = '2015-11-09'
def __init__(self):
    # Container holding a keyed list of per-process run-detail entries.
    # NOTE: _is_frozen = True must remain the last assignment (see __setattr__).
    if sys.version_info > (3,):
        super().__init__()
    else:
        # Python 2 requires the explicit class reference in super().
        super(SystemProcess.NodeTable.Node.Name.ProcessNameRunDetails, self).__init__()
    self.yang_name = "process-name-run-details"
    self.yang_parent_name = "name"
    self.is_top_level_class = False
    self.has_list_ancestor = True
    self.ylist_key_names = []
    # YANG child node name -> (python attribute name, python class).
    self._child_classes = OrderedDict([("process-name-run-detail", ("process_name_run_detail", SystemProcess.NodeTable.Node.Name.ProcessNameRunDetails.ProcessNameRunDetail))])
    self._leafs = OrderedDict()
    # YANG list child.
    self.process_name_run_detail = YList(self)
    self._segment_path = lambda: "process-name-run-details"
    # Must be last: freezes the entity against unknown attribute names.
    self._is_frozen = True
def __setattr__(self, name, value):
    """Validate and apply attribute writes via the entity setattr helper."""
    self._perform_setattr(
        SystemProcess.NodeTable.Node.Name.ProcessNameRunDetails,
        [], name, value)
class ProcessNameRunDetail(_Entity_):
"""
Process <WORD> run detail information
.. attribute:: proc_name (key)
Process Name
**type**\: str
**pattern:** [\\w\\\-\\.\:,\_@#%$\\+=\\\|;]+
**config**\: False
.. attribute:: basic_info
Process Basic Info
**type**\: :py:class:`BasicInfo <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysmgr_oper.SystemProcess.NodeTable.Node.Name.ProcessNameRunDetails.ProcessNameRunDetail.BasicInfo>`
**config**\: False
.. attribute:: detail_info
Process Detail Info
**type**\: :py:class:`DetailInfo <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysmgr_oper.SystemProcess.NodeTable.Node.Name.ProcessNameRunDetails.ProcessNameRunDetail.DetailInfo>`
**config**\: False
"""
_prefix = 'sysmgr-oper'
_revision = '2015-11-09'
def __init__(self):
    # Keyed list entry (key: proc_name) with two singleton container
    # children: basic-info and detail-info.  NOTE: _is_frozen = True
    # must remain the last assignment (see __setattr__).
    if sys.version_info > (3,):
        super().__init__()
    else:
        # Python 2 requires the explicit class reference in super().
        super(SystemProcess.NodeTable.Node.Name.ProcessNameRunDetails.ProcessNameRunDetail, self).__init__()
    self.yang_name = "process-name-run-detail"
    self.yang_parent_name = "process-name-run-details"
    self.is_top_level_class = False
    self.has_list_ancestor = True
    self.ylist_key_names = ['proc_name']
    # YANG child node name -> (python attribute name, python class).
    self._child_classes = OrderedDict([("basic-info", ("basic_info", SystemProcess.NodeTable.Node.Name.ProcessNameRunDetails.ProcessNameRunDetail.BasicInfo)), ("detail-info", ("detail_info", SystemProcess.NodeTable.Node.Name.ProcessNameRunDetails.ProcessNameRunDetail.DetailInfo))])
    # Single leaf: the list key.
    self._leafs = OrderedDict([
        ('proc_name', (YLeaf(YType.str, 'proc-name'), ['str'])),
    ])
    self.proc_name = None
    # Singleton container children.
    self.basic_info = SystemProcess.NodeTable.Node.Name.ProcessNameRunDetails.ProcessNameRunDetail.BasicInfo()
    self.basic_info.parent = self
    self._children_name_map["basic_info"] = "basic-info"
    self.detail_info = SystemProcess.NodeTable.Node.Name.ProcessNameRunDetails.ProcessNameRunDetail.DetailInfo()
    self.detail_info.parent = self
    self._children_name_map["detail_info"] = "detail-info"
    # Keyed list entry: the segment path embeds the proc-name key value.
    self._segment_path = lambda: "process-name-run-detail" + "[proc-name='" + str(self.proc_name) + "']"
    # Must be last: freezes the entity against unknown attribute names.
    self._is_frozen = True
def __setattr__(self, name, value):
    """Validate and apply attribute writes via the entity setattr helper."""
    self._perform_setattr(
        SystemProcess.NodeTable.Node.Name.ProcessNameRunDetails.ProcessNameRunDetail,
        ['proc_name'], name, value)
class BasicInfo(_Entity_):
    """
    Process Basic Info
    .. attribute:: proc_cpu_time
    Proces cpu time
    **type**\: :py:class:`ProcCpuTime <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysmgr_oper.SystemProcess.NodeTable.Node.Name.ProcessNameRunDetails.ProcessNameRunDetail.BasicInfo.ProcCpuTime>`
    **config**\: False
    .. attribute:: job_id_xr
    Job ID
    **type**\: int
    **range:** 0..4294967295
    **config**\: False
    .. attribute:: process_id
    PID
    **type**\: int
    **range:** 0..4294967295
    **config**\: False
    .. attribute:: process_name
    Process name
    **type**\: str
    **config**\: False
    .. attribute:: executable
    Executable name or path
    **type**\: str
    **config**\: False
    .. attribute:: active_path
    Active Path
    **type**\: str
    **config**\: False
    .. attribute:: instance_id
    Instance ID
    **type**\: int
    **range:** \-2147483648..2147483647
    **config**\: False
    .. attribute:: args
    Args
    **type**\: str
    **config**\: False
    .. attribute:: version_id
    Version ID
    **type**\: str
    **config**\: False
    .. attribute:: respawn
    Respawn on/off
    **type**\: str
    **config**\: False
    .. attribute:: respawn_count
    Respawn Count
    **type**\: int
    **range:** \-2147483648..2147483647
    **config**\: False
    .. attribute:: last_started
    Last Started
    **type**\: str
    **config**\: False
    .. attribute:: process_state
    Process State
    **type**\: str
    **config**\: False
    .. attribute:: last_exit_status
    Last Exit status
    **type**\: int
    **range:** \-2147483648..2147483647
    **config**\: False
    .. attribute:: last_exit_reason
    Last Exit due to
    **type**\: str
    **config**\: False
    .. attribute:: package_state
    Package State
    **type**\: str
    **config**\: False
    .. attribute:: started_on_config
    Started on Config
    **type**\: str
    **config**\: False
    .. attribute:: feature_name
    Feature Name
    **type**\: str
    **config**\: False
    .. attribute:: tag
    Tag
    **type**\: str
    **config**\: False
    .. attribute:: group
    Process Group
    **type**\: str
    **config**\: False
    .. attribute:: core
    Core
    **type**\: str
    **config**\: False
    .. attribute:: max_core
    Max core
    **type**\: int
    **range:** \-2147483648..2147483647
    **config**\: False
    .. attribute:: level
    Level
    **type**\: str
    **config**\: False
    .. attribute:: mandatory
    Is mandatory?
    **type**\: bool
    **config**\: False
    .. attribute:: maint_mode_proc
    Is admin mode process?
    **type**\: bool
    **config**\: False
    .. attribute:: placement_state
    Placement State
    **type**\: str
    **config**\: False
    .. attribute:: start_up_path
    Startup Path
    **type**\: str
    **config**\: False
    .. attribute:: memory_limit
    Memory Limit
    **type**\: int
    **range:** 0..4294967295
    **config**\: False
    .. attribute:: ready
    Elapsed Ready
    **type**\: str
    **config**\: False
    .. attribute:: available
    Elapsed Available
    **type**\: str
    **config**\: False
    .. attribute:: registered_item
    Registered Items
    **type**\: list of :py:class:`RegisteredItem <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysmgr_oper.SystemProcess.NodeTable.Node.Name.ProcessNameRunDetails.ProcessNameRunDetail.BasicInfo.RegisteredItem>`
    **config**\: False
    """
    # YANG module prefix and revision this class was generated from.
    _prefix = 'sysmgr-oper'
    _revision = '2015-11-09'
    def __init__(self):
        # Order matters below: _leafs must exist before leaf assignments,
        # and _is_frozen = True must remain the last statement.
        if sys.version_info > (3,):
            super().__init__()
        else:
            # Python 2 requires the explicit class reference in super().
            super(SystemProcess.NodeTable.Node.Name.ProcessNameRunDetails.ProcessNameRunDetail.BasicInfo, self).__init__()
        self.yang_name = "basic-info"
        self.yang_parent_name = "process-name-run-detail"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        # YANG child node name -> (python attribute name, python class).
        self._child_classes = OrderedDict([("proc-cpu-time", ("proc_cpu_time", SystemProcess.NodeTable.Node.Name.ProcessNameRunDetails.ProcessNameRunDetail.BasicInfo.ProcCpuTime)), ("registered-item", ("registered_item", SystemProcess.NodeTable.Node.Name.ProcessNameRunDetails.ProcessNameRunDetail.BasicInfo.RegisteredItem))])
        # Python leaf name -> (YLeaf descriptor, accepted python types).
        self._leafs = OrderedDict([
            ('job_id_xr', (YLeaf(YType.uint32, 'job-id-xr'), ['int'])),
            ('process_id', (YLeaf(YType.uint32, 'process-id'), ['int'])),
            ('process_name', (YLeaf(YType.str, 'process-name'), ['str'])),
            ('executable', (YLeaf(YType.str, 'executable'), ['str'])),
            ('active_path', (YLeaf(YType.str, 'active-path'), ['str'])),
            ('instance_id', (YLeaf(YType.int32, 'instance-id'), ['int'])),
            ('args', (YLeaf(YType.str, 'args'), ['str'])),
            ('version_id', (YLeaf(YType.str, 'version-id'), ['str'])),
            ('respawn', (YLeaf(YType.str, 'respawn'), ['str'])),
            ('respawn_count', (YLeaf(YType.int32, 'respawn-count'), ['int'])),
            ('last_started', (YLeaf(YType.str, 'last-started'), ['str'])),
            ('process_state', (YLeaf(YType.str, 'process-state'), ['str'])),
            ('last_exit_status', (YLeaf(YType.int32, 'last-exit-status'), ['int'])),
            ('last_exit_reason', (YLeaf(YType.str, 'last-exit-reason'), ['str'])),
            ('package_state', (YLeaf(YType.str, 'package-state'), ['str'])),
            ('started_on_config', (YLeaf(YType.str, 'started-on-config'), ['str'])),
            ('feature_name', (YLeaf(YType.str, 'feature-name'), ['str'])),
            ('tag', (YLeaf(YType.str, 'tag'), ['str'])),
            ('group', (YLeaf(YType.str, 'group'), ['str'])),
            ('core', (YLeaf(YType.str, 'core'), ['str'])),
            ('max_core', (YLeaf(YType.int32, 'max-core'), ['int'])),
            ('level', (YLeaf(YType.str, 'level'), ['str'])),
            ('mandatory', (YLeaf(YType.boolean, 'mandatory'), ['bool'])),
            ('maint_mode_proc', (YLeaf(YType.boolean, 'maint-mode-proc'), ['bool'])),
            ('placement_state', (YLeaf(YType.str, 'placement-state'), ['str'])),
            ('start_up_path', (YLeaf(YType.str, 'start-up-path'), ['str'])),
            ('memory_limit', (YLeaf(YType.uint32, 'memory-limit'), ['int'])),
            ('ready', (YLeaf(YType.str, 'ready'), ['str'])),
            ('available', (YLeaf(YType.str, 'available'), ['str'])),
        ])
        # Leaf values default to None (unset) until read/assigned.
        self.job_id_xr = None
        self.process_id = None
        self.process_name = None
        self.executable = None
        self.active_path = None
        self.instance_id = None
        self.args = None
        self.version_id = None
        self.respawn = None
        self.respawn_count = None
        self.last_started = None
        self.process_state = None
        self.last_exit_status = None
        self.last_exit_reason = None
        self.package_state = None
        self.started_on_config = None
        self.feature_name = None
        self.tag = None
        self.group = None
        self.core = None
        self.max_core = None
        self.level = None
        self.mandatory = None
        self.maint_mode_proc = None
        self.placement_state = None
        self.start_up_path = None
        self.memory_limit = None
        self.ready = None
        self.available = None
        # Singleton container child.
        self.proc_cpu_time = SystemProcess.NodeTable.Node.Name.ProcessNameRunDetails.ProcessNameRunDetail.BasicInfo.ProcCpuTime()
        self.proc_cpu_time.parent = self
        self._children_name_map["proc_cpu_time"] = "proc-cpu-time"
        # YANG list child.
        self.registered_item = YList(self)
        self._segment_path = lambda: "basic-info"
        # Must be last: freezes the entity against unknown attribute names.
        self._is_frozen = True
    def __setattr__(self, name, value):
        # Route writes through the frozen-entity validation helper.
        self._perform_setattr(SystemProcess.NodeTable.Node.Name.ProcessNameRunDetails.ProcessNameRunDetail.BasicInfo, ['job_id_xr', 'process_id', 'process_name', 'executable', 'active_path', 'instance_id', 'args', 'version_id', 'respawn', 'respawn_count', 'last_started', 'process_state', 'last_exit_status', 'last_exit_reason', 'package_state', 'started_on_config', 'feature_name', 'tag', 'group', 'core', 'max_core', 'level', 'mandatory', 'maint_mode_proc', 'placement_state', 'start_up_path', 'memory_limit', 'ready', 'available'], name, value)
    class ProcCpuTime(_Entity_):
        """
        Proces cpu time
        .. attribute:: user
        User time
        **type**\: str
        **config**\: False
        .. attribute:: system
        Kernel time
        **type**\: str
        **config**\: False
        .. attribute:: total
        Total time
        **type**\: str
        **config**\: False
        """
        # YANG module prefix and revision this class was generated from.
        _prefix = 'sysmgr-oper'
        _revision = '2015-11-09'
        def __init__(self):
            # Py2/Py3-compatible super() call.
            if sys.version_info > (3,):
                super().__init__()
            else:
                super(SystemProcess.NodeTable.Node.Name.ProcessNameRunDetails.ProcessNameRunDetail.BasicInfo.ProcCpuTime, self).__init__()
            self.yang_name = "proc-cpu-time"
            self.yang_parent_name = "basic-info"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = []
            self._child_classes = OrderedDict([])
            # Python leaf name -> (YLeaf descriptor, accepted python types).
            self._leafs = OrderedDict([
                ('user', (YLeaf(YType.str, 'user'), ['str'])),
                ('system', (YLeaf(YType.str, 'system'), ['str'])),
                ('total', (YLeaf(YType.str, 'total'), ['str'])),
            ])
            self.user = None
            self.system = None
            self.total = None
            self._segment_path = lambda: "proc-cpu-time"
            # Must be last: freezes the entity against unknown attribute names.
            self._is_frozen = True
        def __setattr__(self, name, value):
            # Route writes through the frozen-entity validation helper.
            self._perform_setattr(SystemProcess.NodeTable.Node.Name.ProcessNameRunDetails.ProcessNameRunDetail.BasicInfo.ProcCpuTime, ['user', 'system', 'total'], name, value)
        @staticmethod
        def _meta_info():
            # Lazy import keeps the large generated _meta module off the import path.
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_sysmgr_oper as meta
            return meta._meta_table['SystemProcess.NodeTable.Node.Name.ProcessNameRunDetails.ProcessNameRunDetail.BasicInfo.ProcCpuTime']['meta_info']
    class RegisteredItem(_Entity_):
        """
        Registered Items
        .. attribute:: tuple
        Tuple
        **type**\: str
        **config**\: False
        """
        # YANG module prefix and revision this class was generated from.
        _prefix = 'sysmgr-oper'
        _revision = '2015-11-09'
        def __init__(self):
            # Py2/Py3-compatible super() call.
            if sys.version_info > (3,):
                super().__init__()
            else:
                super(SystemProcess.NodeTable.Node.Name.ProcessNameRunDetails.ProcessNameRunDetail.BasicInfo.RegisteredItem, self).__init__()
            self.yang_name = "registered-item"
            self.yang_parent_name = "basic-info"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = []
            self._child_classes = OrderedDict([])
            # Single leaf; 'tuple' mirrors the YANG leaf name (shadows the builtin).
            self._leafs = OrderedDict([
                ('tuple', (YLeaf(YType.str, 'tuple'), ['str'])),
            ])
            self.tuple = None
            self._segment_path = lambda: "registered-item"
            # Must be last: freezes the entity against unknown attribute names.
            self._is_frozen = True
        def __setattr__(self, name, value):
            # Route writes through the frozen-entity validation helper.
            self._perform_setattr(SystemProcess.NodeTable.Node.Name.ProcessNameRunDetails.ProcessNameRunDetail.BasicInfo.RegisteredItem, ['tuple'], name, value)
        @staticmethod
        def _meta_info():
            # Lazy import keeps the large generated _meta module off the import path.
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_sysmgr_oper as meta
            return meta._meta_table['SystemProcess.NodeTable.Node.Name.ProcessNameRunDetails.ProcessNameRunDetail.BasicInfo.RegisteredItem']['meta_info']
    @staticmethod
    def _meta_info():
        # Lazy import keeps the large generated _meta module off the import path.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_sysmgr_oper as meta
        return meta._meta_table['SystemProcess.NodeTable.Node.Name.ProcessNameRunDetails.ProcessNameRunDetail.BasicInfo']['meta_info']
class DetailInfo(_Entity_):
    """
    Process Detail Info
    .. attribute:: running_path
    Running path
    **type**\: str
    **config**\: False
    .. attribute:: package_path
    Package path
    **type**\: str
    **config**\: False
    .. attribute:: job_id_link
    Job Id Link
    **type**\: int
    **range:** \-2147483648..2147483647
    **config**\: False
    .. attribute:: group_jid
    Group Jid
    **type**\: str
    **config**\: False
    .. attribute:: fail_count
    Fail count
    **type**\: int
    **range:** 0..4294967295
    **config**\: False
    .. attribute:: restart_needed
    Restart needed
    **type**\: bool
    **config**\: False
    .. attribute:: init_process
    Init process
    **type**\: bool
    **config**\: False
    .. attribute:: last_online
    Last Online
    **type**\: str
    **config**\: False
    .. attribute:: this_pcb
    This PCB
    **type**\: str
    **config**\: False
    .. attribute:: next_pcb
    Next PCB
    **type**\: str
    **config**\: False
    .. attribute:: envs
    Env variables
    **type**\: str
    **config**\: False
    .. attribute:: wait_for
    Wait For /dev/xxx
    **type**\: str
    **config**\: False
    .. attribute:: job_id_on_rp
    Job ID on RP
    **type**\: int
    **range:** \-2147483648..2147483647
    **config**\: False
    .. attribute:: is_standby_capable
    Is standby capable?
    **type**\: bool
    **config**\: False
    .. attribute:: disable_kill
    Disable kill?
    **type**\: bool
    **config**\: False
    .. attribute:: send_avail
    Check avail
    **type**\: bool
    **config**\: False
    .. attribute:: node_event_cli_info
    Node Event CLI info
    **type**\: int
    **range:** \-2147483648..2147483647
    **config**\: False
    .. attribute:: node_redundancy_state
    Node redundancy state
    **type**\: str
    **config**\: False
    .. attribute:: role_event_cli_info
    Role event cli info
    **type**\: int
    **range:** \-2147483648..2147483647
    **config**\: False
    .. attribute:: proc_role_state
    Proc Role State
    **type**\: str
    **config**\: False
    .. attribute:: standby_event_cli_info
    Standby Event CLI info
    **type**\: int
    **range:** \-2147483648..2147483647
    **config**\: False
    .. attribute:: cleanup_event_cli_info
    Cleanup event CLI info
    **type**\: int
    **range:** \-2147483648..2147483647
    **config**\: False
    .. attribute:: band_ready_event_cli_info
    Band Ready Event CLI Info
    **type**\: int
    **range:** \-2147483648..2147483647
    **config**\: False
    .. attribute:: lr_event_cli_info
    LR Event CLI Info
    **type**\: int
    **range:** \-2147483648..2147483647
    **config**\: False
    .. attribute:: plane_ready_event_cli_info
    Plane Ready Event CLI info
    **type**\: int
    **range:** \-2147483648..2147483647
    **config**\: False
    .. attribute:: mdr_is_done_cli_info
    MDR is done CLI Info
    **type**\: int
    **range:** \-2147483648..2147483647
    **config**\: False
    """

    # YANG module identity used by the YDK runtime.
    _prefix = 'sysmgr-oper'
    _revision = '2015-11-09'

    def __init__(self):
        # Py2/Py3-compatible base-class initialisation.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(SystemProcess.NodeTable.Node.Name.ProcessNameRunDetails.ProcessNameRunDetail.DetailInfo, self).__init__()
        self.yang_name = "detail-info"                    # YANG node name of this entity
        self.yang_parent_name = "process-name-run-detail" # enclosing YANG node
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []                         # container, not a keyed list entry
        self._child_classes = OrderedDict([])             # leaf-only node: no children
        # Leaf registration: python attribute -> (YLeaf descriptor, accepted types).
        self._leafs = OrderedDict([
            ('running_path', (YLeaf(YType.str, 'running-path'), ['str'])),
            ('package_path', (YLeaf(YType.str, 'package-path'), ['str'])),
            ('job_id_link', (YLeaf(YType.int32, 'job-id-link'), ['int'])),
            ('group_jid', (YLeaf(YType.str, 'group-jid'), ['str'])),
            ('fail_count', (YLeaf(YType.uint32, 'fail-count'), ['int'])),
            ('restart_needed', (YLeaf(YType.boolean, 'restart-needed'), ['bool'])),
            ('init_process', (YLeaf(YType.boolean, 'init-process'), ['bool'])),
            ('last_online', (YLeaf(YType.str, 'last-online'), ['str'])),
            ('this_pcb', (YLeaf(YType.str, 'this-pcb'), ['str'])),
            ('next_pcb', (YLeaf(YType.str, 'next-pcb'), ['str'])),
            ('envs', (YLeaf(YType.str, 'envs'), ['str'])),
            ('wait_for', (YLeaf(YType.str, 'wait-for'), ['str'])),
            ('job_id_on_rp', (YLeaf(YType.int32, 'job-id-on-rp'), ['int'])),
            ('is_standby_capable', (YLeaf(YType.boolean, 'is-standby-capable'), ['bool'])),
            ('disable_kill', (YLeaf(YType.boolean, 'disable-kill'), ['bool'])),
            ('send_avail', (YLeaf(YType.boolean, 'send-avail'), ['bool'])),
            ('node_event_cli_info', (YLeaf(YType.int32, 'node-event-cli-info'), ['int'])),
            ('node_redundancy_state', (YLeaf(YType.str, 'node-redundancy-state'), ['str'])),
            ('role_event_cli_info', (YLeaf(YType.int32, 'role-event-cli-info'), ['int'])),
            ('proc_role_state', (YLeaf(YType.str, 'proc-role-state'), ['str'])),
            ('standby_event_cli_info', (YLeaf(YType.int32, 'standby-event-cli-info'), ['int'])),
            ('cleanup_event_cli_info', (YLeaf(YType.int32, 'cleanup-event-cli-info'), ['int'])),
            ('band_ready_event_cli_info', (YLeaf(YType.int32, 'band-ready-event-cli-info'), ['int'])),
            ('lr_event_cli_info', (YLeaf(YType.int32, 'lr-event-cli-info'), ['int'])),
            ('plane_ready_event_cli_info', (YLeaf(YType.int32, 'plane-ready-event-cli-info'), ['int'])),
            ('mdr_is_done_cli_info', (YLeaf(YType.int32, 'mdr-is-done-cli-info'), ['int'])),
        ])
        # Leaf value holders; None means "unset" until populated from the device.
        self.running_path = None
        self.package_path = None
        self.job_id_link = None
        self.group_jid = None
        self.fail_count = None
        self.restart_needed = None
        self.init_process = None
        self.last_online = None
        self.this_pcb = None
        self.next_pcb = None
        self.envs = None
        self.wait_for = None
        self.job_id_on_rp = None
        self.is_standby_capable = None
        self.disable_kill = None
        self.send_avail = None
        self.node_event_cli_info = None
        self.node_redundancy_state = None
        self.role_event_cli_info = None
        self.proc_role_state = None
        self.standby_event_cli_info = None
        self.cleanup_event_cli_info = None
        self.band_ready_event_cli_info = None
        self.lr_event_cli_info = None
        self.plane_ready_event_cli_info = None
        self.mdr_is_done_cli_info = None
        self._segment_path = lambda: "detail-info"
        # Freeze: subsequent writes are validated by __setattr__/_perform_setattr.
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Validate attribute writes against the declared leaf names.
        self._perform_setattr(SystemProcess.NodeTable.Node.Name.ProcessNameRunDetails.ProcessNameRunDetail.DetailInfo, ['running_path', 'package_path', 'job_id_link', 'group_jid', 'fail_count', 'restart_needed', 'init_process', 'last_online', 'this_pcb', 'next_pcb', 'envs', 'wait_for', 'job_id_on_rp', 'is_standby_capable', 'disable_kill', 'send_avail', 'node_event_cli_info', 'node_redundancy_state', 'role_event_cli_info', 'proc_role_state', 'standby_event_cli_info', 'cleanup_event_cli_info', 'band_ready_event_cli_info', 'lr_event_cli_info', 'plane_ready_event_cli_info', 'mdr_is_done_cli_info'], name, value)

    @staticmethod
    def _meta_info():
        # Deferred import of the large generated meta module.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_sysmgr_oper as meta
        return meta._meta_table['SystemProcess.NodeTable.Node.Name.ProcessNameRunDetails.ProcessNameRunDetail.DetailInfo']['meta_info']
@staticmethod
def _meta_info():
    # Meta information for the enclosing ProcessNameRunDetail list entry.
    # Deferred import keeps the large generated meta module lazy-loaded.
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_sysmgr_oper as meta
    return meta._meta_table['SystemProcess.NodeTable.Node.Name.ProcessNameRunDetails.ProcessNameRunDetail']['meta_info']
@staticmethod
def _meta_info():
    # Meta information for the enclosing ProcessNameRunDetails container.
    # Deferred import keeps the large generated meta module lazy-loaded.
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_sysmgr_oper as meta
    return meta._meta_table['SystemProcess.NodeTable.Node.Name.ProcessNameRunDetails']['meta_info']
class ProcessNameRunverboses(_Entity_):
    """
    Process <WORD> information
    .. attribute:: process_name_runverbose
    Process <WORD> run verbose information
    **type**\: list of :py:class:`ProcessNameRunverbose <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysmgr_oper.SystemProcess.NodeTable.Node.Name.ProcessNameRunverboses.ProcessNameRunverbose>`
    **config**\: False
    """

    # YANG module identity used by the YDK runtime.
    _prefix = 'sysmgr-oper'
    _revision = '2015-11-09'

    def __init__(self):
        # Py2/Py3-compatible base-class initialisation.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(SystemProcess.NodeTable.Node.Name.ProcessNameRunverboses, self).__init__()
        self.yang_name = "process-name-runverboses"   # YANG node name of this entity
        self.yang_parent_name = "name"                # enclosing YANG node
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        # Child registration: yang child name -> (python attribute, child class).
        self._child_classes = OrderedDict([("process-name-runverbose", ("process_name_runverbose", SystemProcess.NodeTable.Node.Name.ProcessNameRunverboses.ProcessNameRunverbose))])
        self._leafs = OrderedDict()                   # pure container: no leaves
        self.process_name_runverbose = YList(self)    # list of per-process entries
        self._segment_path = lambda: "process-name-runverboses"
        # Freeze: subsequent writes are validated by __setattr__/_perform_setattr.
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Validate attribute writes; this node declares no writable leaves.
        self._perform_setattr(SystemProcess.NodeTable.Node.Name.ProcessNameRunverboses, [], name, value)
class ProcessNameRunverbose(_Entity_):
    """
    Process <WORD> run verbose information
    .. attribute:: proc_name (key)
    Process Name
    **type**\: str
    **pattern:** [\\w\\\-\\.\:,\_@#%$\\+=\\\|;]+
    **config**\: False
    .. attribute:: basic_info
    Process Basic Info
    **type**\: :py:class:`BasicInfo <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysmgr_oper.SystemProcess.NodeTable.Node.Name.ProcessNameRunverboses.ProcessNameRunverbose.BasicInfo>`
    **config**\: False
    .. attribute:: detail_info
    Process Detail Info
    **type**\: :py:class:`DetailInfo <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysmgr_oper.SystemProcess.NodeTable.Node.Name.ProcessNameRunverboses.ProcessNameRunverbose.DetailInfo>`
    **config**\: False
    .. attribute:: verbose_info
    Process Verbose Info
    **type**\: :py:class:`VerboseInfo <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysmgr_oper.SystemProcess.NodeTable.Node.Name.ProcessNameRunverboses.ProcessNameRunverbose.VerboseInfo>`
    **config**\: False
    """

    # YANG module identity used by the YDK runtime.
    _prefix = 'sysmgr-oper'
    _revision = '2015-11-09'

    def __init__(self):
        # Py2/Py3-compatible base-class initialisation.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(SystemProcess.NodeTable.Node.Name.ProcessNameRunverboses.ProcessNameRunverbose, self).__init__()
        self.yang_name = "process-name-runverbose"      # YANG node name of this entity
        self.yang_parent_name = "process-name-runverboses"  # enclosing YANG node
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = ['proc_name']            # list entry keyed by process name
        # Child registration: yang child name -> (python attribute, child class).
        self._child_classes = OrderedDict([("basic-info", ("basic_info", SystemProcess.NodeTable.Node.Name.ProcessNameRunverboses.ProcessNameRunverbose.BasicInfo)), ("detail-info", ("detail_info", SystemProcess.NodeTable.Node.Name.ProcessNameRunverboses.ProcessNameRunverbose.DetailInfo)), ("verbose-info", ("verbose_info", SystemProcess.NodeTable.Node.Name.ProcessNameRunverboses.ProcessNameRunverbose.VerboseInfo))])
        # Leaf registration: python attribute -> (YLeaf descriptor, accepted types).
        self._leafs = OrderedDict([
            ('proc_name', (YLeaf(YType.str, 'proc-name'), ['str'])),
        ])
        self.proc_name = None                           # key leaf; None until set
        # Eagerly instantiate the three child containers and link them back
        # to this entity so the object tree is navigable immediately.
        self.basic_info = SystemProcess.NodeTable.Node.Name.ProcessNameRunverboses.ProcessNameRunverbose.BasicInfo()
        self.basic_info.parent = self
        self._children_name_map["basic_info"] = "basic-info"
        self.detail_info = SystemProcess.NodeTable.Node.Name.ProcessNameRunverboses.ProcessNameRunverbose.DetailInfo()
        self.detail_info.parent = self
        self._children_name_map["detail_info"] = "detail-info"
        self.verbose_info = SystemProcess.NodeTable.Node.Name.ProcessNameRunverboses.ProcessNameRunverbose.VerboseInfo()
        self.verbose_info.parent = self
        self._children_name_map["verbose_info"] = "verbose-info"
        # Keyed list entries embed their key value in the segment path.
        self._segment_path = lambda: "process-name-runverbose" + "[proc-name='" + str(self.proc_name) + "']"
        # Freeze: subsequent writes are validated by __setattr__/_perform_setattr.
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Validate attribute writes against the declared leaf names.
        self._perform_setattr(SystemProcess.NodeTable.Node.Name.ProcessNameRunverboses.ProcessNameRunverbose, ['proc_name'], name, value)
class BasicInfo(_Entity_):
    """
    Process Basic Info
    .. attribute:: proc_cpu_time
    Proces cpu time
    **type**\: :py:class:`ProcCpuTime <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysmgr_oper.SystemProcess.NodeTable.Node.Name.ProcessNameRunverboses.ProcessNameRunverbose.BasicInfo.ProcCpuTime>`
    **config**\: False
    .. attribute:: job_id_xr
    Job ID
    **type**\: int
    **range:** 0..4294967295
    **config**\: False
    .. attribute:: process_id
    PID
    **type**\: int
    **range:** 0..4294967295
    **config**\: False
    .. attribute:: process_name
    Process name
    **type**\: str
    **config**\: False
    .. attribute:: executable
    Executable name or path
    **type**\: str
    **config**\: False
    .. attribute:: active_path
    Active Path
    **type**\: str
    **config**\: False
    .. attribute:: instance_id
    Instance ID
    **type**\: int
    **range:** \-2147483648..2147483647
    **config**\: False
    .. attribute:: args
    Args
    **type**\: str
    **config**\: False
    .. attribute:: version_id
    Version ID
    **type**\: str
    **config**\: False
    .. attribute:: respawn
    Respawn on/off
    **type**\: str
    **config**\: False
    .. attribute:: respawn_count
    Respawn Count
    **type**\: int
    **range:** \-2147483648..2147483647
    **config**\: False
    .. attribute:: last_started
    Last Started
    **type**\: str
    **config**\: False
    .. attribute:: process_state
    Process State
    **type**\: str
    **config**\: False
    .. attribute:: last_exit_status
    Last Exit status
    **type**\: int
    **range:** \-2147483648..2147483647
    **config**\: False
    .. attribute:: last_exit_reason
    Last Exit due to
    **type**\: str
    **config**\: False
    .. attribute:: package_state
    Package State
    **type**\: str
    **config**\: False
    .. attribute:: started_on_config
    Started on Config
    **type**\: str
    **config**\: False
    .. attribute:: feature_name
    Feature Name
    **type**\: str
    **config**\: False
    .. attribute:: tag
    Tag
    **type**\: str
    **config**\: False
    .. attribute:: group
    Process Group
    **type**\: str
    **config**\: False
    .. attribute:: core
    Core
    **type**\: str
    **config**\: False
    .. attribute:: max_core
    Max core
    **type**\: int
    **range:** \-2147483648..2147483647
    **config**\: False
    .. attribute:: level
    Level
    **type**\: str
    **config**\: False
    .. attribute:: mandatory
    Is mandatory?
    **type**\: bool
    **config**\: False
    .. attribute:: maint_mode_proc
    Is admin mode process?
    **type**\: bool
    **config**\: False
    .. attribute:: placement_state
    Placement State
    **type**\: str
    **config**\: False
    .. attribute:: start_up_path
    Startup Path
    **type**\: str
    **config**\: False
    .. attribute:: memory_limit
    Memory Limit
    **type**\: int
    **range:** 0..4294967295
    **config**\: False
    .. attribute:: ready
    Elapsed Ready
    **type**\: str
    **config**\: False
    .. attribute:: available
    Elapsed Available
    **type**\: str
    **config**\: False
    .. attribute:: registered_item
    Registered Items
    **type**\: list of :py:class:`RegisteredItem <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysmgr_oper.SystemProcess.NodeTable.Node.Name.ProcessNameRunverboses.ProcessNameRunverbose.BasicInfo.RegisteredItem>`
    **config**\: False
    """

    # YANG module identity used by the YDK runtime.
    _prefix = 'sysmgr-oper'
    _revision = '2015-11-09'

    def __init__(self):
        # Py2/Py3-compatible base-class initialisation.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(SystemProcess.NodeTable.Node.Name.ProcessNameRunverboses.ProcessNameRunverbose.BasicInfo, self).__init__()
        self.yang_name = "basic-info"                 # YANG node name of this entity
        self.yang_parent_name = "process-name-runverbose"  # enclosing YANG node
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        # Child registration: yang child name -> (python attribute, child class).
        self._child_classes = OrderedDict([("proc-cpu-time", ("proc_cpu_time", SystemProcess.NodeTable.Node.Name.ProcessNameRunverboses.ProcessNameRunverbose.BasicInfo.ProcCpuTime)), ("registered-item", ("registered_item", SystemProcess.NodeTable.Node.Name.ProcessNameRunverboses.ProcessNameRunverbose.BasicInfo.RegisteredItem))])
        # Leaf registration: python attribute -> (YLeaf descriptor, accepted types).
        self._leafs = OrderedDict([
            ('job_id_xr', (YLeaf(YType.uint32, 'job-id-xr'), ['int'])),
            ('process_id', (YLeaf(YType.uint32, 'process-id'), ['int'])),
            ('process_name', (YLeaf(YType.str, 'process-name'), ['str'])),
            ('executable', (YLeaf(YType.str, 'executable'), ['str'])),
            ('active_path', (YLeaf(YType.str, 'active-path'), ['str'])),
            ('instance_id', (YLeaf(YType.int32, 'instance-id'), ['int'])),
            ('args', (YLeaf(YType.str, 'args'), ['str'])),
            ('version_id', (YLeaf(YType.str, 'version-id'), ['str'])),
            ('respawn', (YLeaf(YType.str, 'respawn'), ['str'])),
            ('respawn_count', (YLeaf(YType.int32, 'respawn-count'), ['int'])),
            ('last_started', (YLeaf(YType.str, 'last-started'), ['str'])),
            ('process_state', (YLeaf(YType.str, 'process-state'), ['str'])),
            ('last_exit_status', (YLeaf(YType.int32, 'last-exit-status'), ['int'])),
            ('last_exit_reason', (YLeaf(YType.str, 'last-exit-reason'), ['str'])),
            ('package_state', (YLeaf(YType.str, 'package-state'), ['str'])),
            ('started_on_config', (YLeaf(YType.str, 'started-on-config'), ['str'])),
            ('feature_name', (YLeaf(YType.str, 'feature-name'), ['str'])),
            ('tag', (YLeaf(YType.str, 'tag'), ['str'])),
            ('group', (YLeaf(YType.str, 'group'), ['str'])),
            ('core', (YLeaf(YType.str, 'core'), ['str'])),
            ('max_core', (YLeaf(YType.int32, 'max-core'), ['int'])),
            ('level', (YLeaf(YType.str, 'level'), ['str'])),
            ('mandatory', (YLeaf(YType.boolean, 'mandatory'), ['bool'])),
            ('maint_mode_proc', (YLeaf(YType.boolean, 'maint-mode-proc'), ['bool'])),
            ('placement_state', (YLeaf(YType.str, 'placement-state'), ['str'])),
            ('start_up_path', (YLeaf(YType.str, 'start-up-path'), ['str'])),
            ('memory_limit', (YLeaf(YType.uint32, 'memory-limit'), ['int'])),
            ('ready', (YLeaf(YType.str, 'ready'), ['str'])),
            ('available', (YLeaf(YType.str, 'available'), ['str'])),
        ])
        # Leaf value holders; None means "unset" until populated from the device.
        self.job_id_xr = None
        self.process_id = None
        self.process_name = None
        self.executable = None
        self.active_path = None
        self.instance_id = None
        self.args = None
        self.version_id = None
        self.respawn = None
        self.respawn_count = None
        self.last_started = None
        self.process_state = None
        self.last_exit_status = None
        self.last_exit_reason = None
        self.package_state = None
        self.started_on_config = None
        self.feature_name = None
        self.tag = None
        self.group = None
        self.core = None
        self.max_core = None
        self.level = None
        self.mandatory = None
        self.maint_mode_proc = None
        self.placement_state = None
        self.start_up_path = None
        self.memory_limit = None
        self.ready = None
        self.available = None
        # Eagerly instantiate the singleton child container and link it back.
        self.proc_cpu_time = SystemProcess.NodeTable.Node.Name.ProcessNameRunverboses.ProcessNameRunverbose.BasicInfo.ProcCpuTime()
        self.proc_cpu_time.parent = self
        self._children_name_map["proc_cpu_time"] = "proc-cpu-time"
        self.registered_item = YList(self)            # list of registered items
        self._segment_path = lambda: "basic-info"
        # Freeze: subsequent writes are validated by __setattr__/_perform_setattr.
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Validate attribute writes against the declared leaf names.
        self._perform_setattr(SystemProcess.NodeTable.Node.Name.ProcessNameRunverboses.ProcessNameRunverbose.BasicInfo, ['job_id_xr', 'process_id', 'process_name', 'executable', 'active_path', 'instance_id', 'args', 'version_id', 'respawn', 'respawn_count', 'last_started', 'process_state', 'last_exit_status', 'last_exit_reason', 'package_state', 'started_on_config', 'feature_name', 'tag', 'group', 'core', 'max_core', 'level', 'mandatory', 'maint_mode_proc', 'placement_state', 'start_up_path', 'memory_limit', 'ready', 'available'], name, value)

    class ProcCpuTime(_Entity_):
        """
        Proces cpu time
        .. attribute:: user
        User time
        **type**\: str
        **config**\: False
        .. attribute:: system
        Kernel time
        **type**\: str
        **config**\: False
        .. attribute:: total
        Total time
        **type**\: str
        **config**\: False
        """

        # YANG module identity used by the YDK runtime.
        _prefix = 'sysmgr-oper'
        _revision = '2015-11-09'

        def __init__(self):
            # Py2/Py3-compatible base-class initialisation.
            if sys.version_info > (3,):
                super().__init__()
            else:
                super(SystemProcess.NodeTable.Node.Name.ProcessNameRunverboses.ProcessNameRunverbose.BasicInfo.ProcCpuTime, self).__init__()
            self.yang_name = "proc-cpu-time"          # YANG node name of this entity
            self.yang_parent_name = "basic-info"      # enclosing YANG node
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = []
            self._child_classes = OrderedDict([])     # leaf-only node: no children
            # Leaf registration: python attribute -> (YLeaf descriptor, accepted types).
            self._leafs = OrderedDict([
                ('user', (YLeaf(YType.str, 'user'), ['str'])),
                ('system', (YLeaf(YType.str, 'system'), ['str'])),
                ('total', (YLeaf(YType.str, 'total'), ['str'])),
            ])
            # Leaf value holders; None means "unset" until populated.
            self.user = None
            self.system = None
            self.total = None
            self._segment_path = lambda: "proc-cpu-time"
            # Freeze: subsequent writes are validated by __setattr__.
            self._is_frozen = True

        def __setattr__(self, name, value):
            # Validate attribute writes against the declared leaf names.
            self._perform_setattr(SystemProcess.NodeTable.Node.Name.ProcessNameRunverboses.ProcessNameRunverbose.BasicInfo.ProcCpuTime, ['user', 'system', 'total'], name, value)

        @staticmethod
        def _meta_info():
            # Deferred import of the large generated meta module.
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_sysmgr_oper as meta
            return meta._meta_table['SystemProcess.NodeTable.Node.Name.ProcessNameRunverboses.ProcessNameRunverbose.BasicInfo.ProcCpuTime']['meta_info']

    class RegisteredItem(_Entity_):
        """
        Registered Items
        .. attribute:: tuple
        Tuple
        **type**\: str
        **config**\: False
        """

        # YANG module identity used by the YDK runtime.
        _prefix = 'sysmgr-oper'
        _revision = '2015-11-09'

        def __init__(self):
            # Py2/Py3-compatible base-class initialisation.
            if sys.version_info > (3,):
                super().__init__()
            else:
                super(SystemProcess.NodeTable.Node.Name.ProcessNameRunverboses.ProcessNameRunverbose.BasicInfo.RegisteredItem, self).__init__()
            self.yang_name = "registered-item"        # YANG node name of this entity
            self.yang_parent_name = "basic-info"      # enclosing YANG node
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = []                 # not a keyed list entry
            self._child_classes = OrderedDict([])     # leaf-only node: no children
            # Leaf registration: python attribute -> (YLeaf descriptor, accepted types).
            self._leafs = OrderedDict([
                ('tuple', (YLeaf(YType.str, 'tuple'), ['str'])),
            ])
            self.tuple = None                         # NOTE: generated name shadows builtin 'tuple'
            self._segment_path = lambda: "registered-item"
            # Freeze: subsequent writes are validated by __setattr__.
            self._is_frozen = True

        def __setattr__(self, name, value):
            # Validate attribute writes against the declared leaf names.
            self._perform_setattr(SystemProcess.NodeTable.Node.Name.ProcessNameRunverboses.ProcessNameRunverbose.BasicInfo.RegisteredItem, ['tuple'], name, value)

        @staticmethod
        def _meta_info():
            # Deferred import of the large generated meta module.
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_sysmgr_oper as meta
            return meta._meta_table['SystemProcess.NodeTable.Node.Name.ProcessNameRunverboses.ProcessNameRunverbose.BasicInfo.RegisteredItem']['meta_info']

    @staticmethod
    def _meta_info():
        # Meta information for this BasicInfo container.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_sysmgr_oper as meta
        return meta._meta_table['SystemProcess.NodeTable.Node.Name.ProcessNameRunverboses.ProcessNameRunverbose.BasicInfo']['meta_info']
class DetailInfo(_Entity_):
    """
    Process Detail Info
    .. attribute:: running_path
    Running path
    **type**\: str
    **config**\: False
    .. attribute:: package_path
    Package path
    **type**\: str
    **config**\: False
    .. attribute:: job_id_link
    Job Id Link
    **type**\: int
    **range:** \-2147483648..2147483647
    **config**\: False
    .. attribute:: group_jid
    Group Jid
    **type**\: str
    **config**\: False
    .. attribute:: fail_count
    Fail count
    **type**\: int
    **range:** 0..4294967295
    **config**\: False
    .. attribute:: restart_needed
    Restart needed
    **type**\: bool
    **config**\: False
    .. attribute:: init_process
    Init process
    **type**\: bool
    **config**\: False
    .. attribute:: last_online
    Last Online
    **type**\: str
    **config**\: False
    .. attribute:: this_pcb
    This PCB
    **type**\: str
    **config**\: False
    .. attribute:: next_pcb
    Next PCB
    **type**\: str
    **config**\: False
    .. attribute:: envs
    Env variables
    **type**\: str
    **config**\: False
    .. attribute:: wait_for
    Wait For /dev/xxx
    **type**\: str
    **config**\: False
    .. attribute:: job_id_on_rp
    Job ID on RP
    **type**\: int
    **range:** \-2147483648..2147483647
    **config**\: False
    .. attribute:: is_standby_capable
    Is standby capable?
    **type**\: bool
    **config**\: False
    .. attribute:: disable_kill
    Disable kill?
    **type**\: bool
    **config**\: False
    .. attribute:: send_avail
    Check avail
    **type**\: bool
    **config**\: False
    .. attribute:: node_event_cli_info
    Node Event CLI info
    **type**\: int
    **range:** \-2147483648..2147483647
    **config**\: False
    .. attribute:: node_redundancy_state
    Node redundancy state
    **type**\: str
    **config**\: False
    .. attribute:: role_event_cli_info
    Role event cli info
    **type**\: int
    **range:** \-2147483648..2147483647
    **config**\: False
    .. attribute:: proc_role_state
    Proc Role State
    **type**\: str
    **config**\: False
    .. attribute:: standby_event_cli_info
    Standby Event CLI info
    **type**\: int
    **range:** \-2147483648..2147483647
    **config**\: False
    .. attribute:: cleanup_event_cli_info
    Cleanup event CLI info
    **type**\: int
    **range:** \-2147483648..2147483647
    **config**\: False
    .. attribute:: band_ready_event_cli_info
    Band Ready Event CLI Info
    **type**\: int
    **range:** \-2147483648..2147483647
    **config**\: False
    .. attribute:: lr_event_cli_info
    LR Event CLI Info
    **type**\: int
    **range:** \-2147483648..2147483647
    **config**\: False
    .. attribute:: plane_ready_event_cli_info
    Plane Ready Event CLI info
    **type**\: int
    **range:** \-2147483648..2147483647
    **config**\: False
    .. attribute:: mdr_is_done_cli_info
    MDR is done CLI Info
    **type**\: int
    **range:** \-2147483648..2147483647
    **config**\: False
    """

    # YANG module identity used by the YDK runtime.
    _prefix = 'sysmgr-oper'
    _revision = '2015-11-09'

    def __init__(self):
        # Py2/Py3-compatible base-class initialisation.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(SystemProcess.NodeTable.Node.Name.ProcessNameRunverboses.ProcessNameRunverbose.DetailInfo, self).__init__()
        self.yang_name = "detail-info"                    # YANG node name of this entity
        self.yang_parent_name = "process-name-runverbose" # enclosing YANG node
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []                         # container, not a keyed list entry
        self._child_classes = OrderedDict([])             # leaf-only node: no children
        # Leaf registration: python attribute -> (YLeaf descriptor, accepted types).
        self._leafs = OrderedDict([
            ('running_path', (YLeaf(YType.str, 'running-path'), ['str'])),
            ('package_path', (YLeaf(YType.str, 'package-path'), ['str'])),
            ('job_id_link', (YLeaf(YType.int32, 'job-id-link'), ['int'])),
            ('group_jid', (YLeaf(YType.str, 'group-jid'), ['str'])),
            ('fail_count', (YLeaf(YType.uint32, 'fail-count'), ['int'])),
            ('restart_needed', (YLeaf(YType.boolean, 'restart-needed'), ['bool'])),
            ('init_process', (YLeaf(YType.boolean, 'init-process'), ['bool'])),
            ('last_online', (YLeaf(YType.str, 'last-online'), ['str'])),
            ('this_pcb', (YLeaf(YType.str, 'this-pcb'), ['str'])),
            ('next_pcb', (YLeaf(YType.str, 'next-pcb'), ['str'])),
            ('envs', (YLeaf(YType.str, 'envs'), ['str'])),
            ('wait_for', (YLeaf(YType.str, 'wait-for'), ['str'])),
            ('job_id_on_rp', (YLeaf(YType.int32, 'job-id-on-rp'), ['int'])),
            ('is_standby_capable', (YLeaf(YType.boolean, 'is-standby-capable'), ['bool'])),
            ('disable_kill', (YLeaf(YType.boolean, 'disable-kill'), ['bool'])),
            ('send_avail', (YLeaf(YType.boolean, 'send-avail'), ['bool'])),
            ('node_event_cli_info', (YLeaf(YType.int32, 'node-event-cli-info'), ['int'])),
            ('node_redundancy_state', (YLeaf(YType.str, 'node-redundancy-state'), ['str'])),
            ('role_event_cli_info', (YLeaf(YType.int32, 'role-event-cli-info'), ['int'])),
            ('proc_role_state', (YLeaf(YType.str, 'proc-role-state'), ['str'])),
            ('standby_event_cli_info', (YLeaf(YType.int32, 'standby-event-cli-info'), ['int'])),
            ('cleanup_event_cli_info', (YLeaf(YType.int32, 'cleanup-event-cli-info'), ['int'])),
            ('band_ready_event_cli_info', (YLeaf(YType.int32, 'band-ready-event-cli-info'), ['int'])),
            ('lr_event_cli_info', (YLeaf(YType.int32, 'lr-event-cli-info'), ['int'])),
            ('plane_ready_event_cli_info', (YLeaf(YType.int32, 'plane-ready-event-cli-info'), ['int'])),
            ('mdr_is_done_cli_info', (YLeaf(YType.int32, 'mdr-is-done-cli-info'), ['int'])),
        ])
        # Leaf value holders; None means "unset" until populated from the device.
        self.running_path = None
        self.package_path = None
        self.job_id_link = None
        self.group_jid = None
        self.fail_count = None
        self.restart_needed = None
        self.init_process = None
        self.last_online = None
        self.this_pcb = None
        self.next_pcb = None
        self.envs = None
        self.wait_for = None
        self.job_id_on_rp = None
        self.is_standby_capable = None
        self.disable_kill = None
        self.send_avail = None
        self.node_event_cli_info = None
        self.node_redundancy_state = None
        self.role_event_cli_info = None
        self.proc_role_state = None
        self.standby_event_cli_info = None
        self.cleanup_event_cli_info = None
        self.band_ready_event_cli_info = None
        self.lr_event_cli_info = None
        self.plane_ready_event_cli_info = None
        self.mdr_is_done_cli_info = None
        self._segment_path = lambda: "detail-info"
        # Freeze: subsequent writes are validated by __setattr__/_perform_setattr.
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Validate attribute writes against the declared leaf names.
        self._perform_setattr(SystemProcess.NodeTable.Node.Name.ProcessNameRunverboses.ProcessNameRunverbose.DetailInfo, ['running_path', 'package_path', 'job_id_link', 'group_jid', 'fail_count', 'restart_needed', 'init_process', 'last_online', 'this_pcb', 'next_pcb', 'envs', 'wait_for', 'job_id_on_rp', 'is_standby_capable', 'disable_kill', 'send_avail', 'node_event_cli_info', 'node_redundancy_state', 'role_event_cli_info', 'proc_role_state', 'standby_event_cli_info', 'cleanup_event_cli_info', 'band_ready_event_cli_info', 'lr_event_cli_info', 'plane_ready_event_cli_info', 'mdr_is_done_cli_info'], name, value)

    @staticmethod
    def _meta_info():
        # Deferred import of the large generated meta module.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_sysmgr_oper as meta
        return meta._meta_table['SystemProcess.NodeTable.Node.Name.ProcessNameRunverboses.ProcessNameRunverbose.DetailInfo']['meta_info']
class VerboseInfo(_Entity_):
"""
Process Verbose Info
.. attribute:: process_group
Process Group
**type**\: str
**config**\: False
.. attribute:: respawn_allowed
Is respawn allowed?
**type**\: int
**range:** \-2147483648..2147483647
**config**\: False
.. attribute:: wait_for_exit
Wait for exit
**type**\: int
**range:** \-2147483648..2147483647
**config**\: False
.. attribute:: dynamic_tag
Dynamic Tag
**type**\: int
**range:** \-2147483648..2147483647
**config**\: False
.. attribute:: forced_stop
Forced stop
**type**\: int
**range:** \-2147483648..2147483647
**config**\: False
.. attribute:: critical_process
Critical Process
**type**\: int
**range:** \-2147483648..2147483647
**config**\: False
.. attribute:: hold
Hold
**type**\: int
**range:** \-2147483648..2147483647
**config**\: False
.. attribute:: transient
Transient
**type**\: int
**range:** \-2147483648..2147483647
**config**\: False
.. attribute:: tuple_cfgmgr
Tuple Cfgmgr
**type**\: int
**range:** \-2147483648..2147483647
**config**\: False
.. attribute:: standby_capable
Standby capable
**type**\: int
**range:** \-2147483648..2147483647
**config**\: False
.. attribute:: edm_startup
EDM startup
**type**\: int
**range:** \-2147483648..2147483647
**config**\: False
.. attribute:: placement
Placement
**type**\: int
**range:** \-2147483648..2147483647
**config**\: False
.. attribute:: skip_kill_notif
Skip Kill Notif
**type**\: int
**range:** \-2147483648..2147483647
**config**\: False
.. attribute:: init_proc
Init process
**type**\: int
**range:** \-2147483648..2147483647
**config**\: False
.. attribute:: sysdb_event
Sysdb Event
**type**\: int
**range:** \-2147483648..2147483647
**config**\: False
.. attribute:: level_started
Level Started
**type**\: int
**range:** \-2147483648..2147483647
**config**\: False
.. attribute:: proc_avail
Process available
**type**\: int
**range:** \-2147483648..2147483647
**config**\: False
.. attribute:: tuples_scanned
Tuples Scanned
**type**\: int
**range:** \-2147483648..2147483647
**config**\: False
.. attribute:: no_chkpt_start
No checkpoint start
**type**\: int
**range:** \-2147483648..2147483647
**config**\: False
.. attribute:: in_shut_down
In Shut Down
**type**\: int
**range:** \-2147483648..2147483647
**config**\: False
.. attribute:: sm_started
SM started
**type**\: int
**range:** \-2147483648..2147483647
**config**\: False
.. attribute:: ignore_on_sc
Ignore on SC
**type**\: int
**range:** \-2147483648..2147483647
**config**\: False
.. attribute:: ignore_on_easy_bake
Ignore on EasyBake
**type**\: int
**range:** \-2147483648..2147483647
**config**\: False
.. attribute:: pre_init
Pre init
**type**\: int
**range:** \-2147483648..2147483647
**config**\: False
.. attribute:: eoi_received
EOI received
**type**\: int
**range:** \-2147483648..2147483647
**config**\: False
.. attribute:: eoi_timeout
EOI Timeout
**type**\: int
**range:** \-2147483648..2147483647
**config**\: False
.. attribute:: avail_timeout
Avail Timeout
**type**\: int
**range:** \-2147483648..2147483647
**config**\: False
.. attribute:: reserved_memory
Reserved Memory
**type**\: int
**range:** \-2147483648..2147483647
**config**\: False
.. attribute:: allow_warned
Allow Warned
**type**\: int
**range:** \-2147483648..2147483647
**config**\: False
.. attribute:: arg_change
Arg Change
**type**\: int
**range:** \-2147483648..2147483647
**config**\: False
.. attribute:: restart_on_tuple
Restart on tuple
**type**\: int
**range:** \-2147483648..2147483647
**config**\: False
.. attribute:: boot_hold
Boot Hold
**type**\: int
**range:** \-2147483648..2147483647
**config**\: False
.. attribute:: reg_id
Reg Id
**type**\: int
**range:** \-2147483648..2147483647
**config**\: False
.. attribute:: memory_limit
Memory Limit
**type**\: int
**range:** \-2147483648..2147483647
**config**\: False
.. attribute:: parent_job_id
Parent Job ID
**type**\: int
**range:** \-2147483648..2147483647
**config**\: False
.. attribute:: tuple_index
Tuple Index
**type**\: int
**range:** \-2147483648..2147483647
**config**\: False
.. attribute:: dump_count
Dump Count
**type**\: int
**range:** \-2147483648..2147483647
**config**\: False
.. attribute:: respawn_interval_user
Respawn Interval User
**type**\: int
**range:** \-2147483648..2147483647
**config**\: False
.. attribute:: silent_restart_count
Silent Restart Count
**type**\: int
**range:** \-2147483648..2147483647
**config**\: False
.. attribute:: critical_tier
Critical Tier
**type**\: int
**range:** \-2147483648..2147483647
**config**\: False
.. attribute:: exit_type
Exit Type
**type**\: int
**range:** \-2147483648..2147483647
**config**\: False
.. attribute:: init_timeout
Init Timeout
**type**\: int
**range:** \-2147483648..2147483647
**config**\: False
.. attribute:: restart_by_cmd
Restart by Command
**type**\: int
**range:** \-2147483648..2147483647
**config**\: False
.. attribute:: boot_pref
Boot Pref
**type**\: int
**range:** \-2147483648..2147483647
**config**\: False
.. attribute:: mdr_mbi_proc
Mdr Mbi proc
**type**\: int
**range:** \-2147483648..2147483647
**config**\: False
.. attribute:: mdr_non_mbi_kld
Mdr Non Mbi Kld
**type**\: int
**range:** \-2147483648..2147483647
**config**\: False
.. attribute:: mdr_mbi_kld
Mdr Mbi Kld
**type**\: int
**range:** \-2147483648..2147483647
**config**\: False
.. attribute:: mdr_shut_delay
Mdr Shut Delay
**type**\: int
**range:** \-2147483648..2147483647
**config**\: False
.. attribute:: mdr_keep_thru
Mdr Keep Thru
**type**\: int
**range:** \-2147483648..2147483647
**config**\: False
.. attribute:: mdr_spoofer
Mdr spoofer
**type**\: int
**range:** \-2147483648..2147483647
**config**\: False
.. attribute:: mdr_spoofed
Mdr spoofed
**type**\: int
**range:** \-2147483648..2147483647
**config**\: False
.. attribute:: mdr_spoofed_last
Mdr spoofed last
**type**\: int
**range:** \-2147483648..2147483647
**config**\: False
.. attribute:: mdr_spoofed_ready
Mdr Spoofed Ready
**type**\: int
**range:** \-2147483648..2147483647
**config**\: False
.. attribute:: mdr_pcb_check
Mdr PCB Check
**type**\: int
**range:** \-2147483648..2147483647
**config**\: False
.. attribute:: mdr_kill_tier
Mdr Kill Tier
**type**\: int
**range:** \-2147483648..2147483647
**config**\: False
.. attribute:: mdr_kld
Mdr kld
**type**\: int
**range:** \-2147483648..2147483647
**config**\: False
.. attribute:: mdr_level
Mdr Level
**type**\: int
**range:** \-2147483648..2147483647
**config**\: False
.. attribute:: fm_restart_cnt
FM restart count
**type**\: int
**range:** \-2147483648..2147483647
**config**\: False
.. attribute:: self_managed
Self Managed
**type**\: int
**range:** \-2147483648..2147483647
**config**\: False
.. attribute:: tuple
Tuple
**type**\: list of :py:class:`Tuple <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysmgr_oper.SystemProcess.NodeTable.Node.Name.ProcessNameRunverboses.ProcessNameRunverbose.VerboseInfo.Tuple>`
**config**\: False
.. attribute:: orig_tuple
Orig Tuple
**type**\: list of :py:class:`OrigTuple <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysmgr_oper.SystemProcess.NodeTable.Node.Name.ProcessNameRunverboses.ProcessNameRunverbose.VerboseInfo.OrigTuple>`
**config**\: False
"""
_prefix = 'sysmgr-oper'
_revision = '2015-11-09'
def __init__(self):
    """Initialize the verbose-info container and register its YANG leafs."""
    # Python 2 requires the fully-qualified nested class in super();
    # Python 3 resolves it implicitly.
    if sys.version_info > (3,):
        super().__init__()
    else:
        super(SystemProcess.NodeTable.Node.Name.ProcessNameRunverboses.ProcessNameRunverbose.VerboseInfo, self).__init__()

    # YANG schema bookkeeping consumed by the ydk runtime.
    self.yang_name = "verbose-info"
    self.yang_parent_name = "process-name-runverbose"
    self.is_top_level_class = False
    self.has_list_ancestor = True
    self.ylist_key_names = []
    # Two child list containers: "tuple" and "orig-tuple".
    self._child_classes = OrderedDict([("tuple", ("tuple", SystemProcess.NodeTable.Node.Name.ProcessNameRunverboses.ProcessNameRunverbose.VerboseInfo.Tuple)), ("orig-tuple", ("orig_tuple", SystemProcess.NodeTable.Node.Name.ProcessNameRunverboses.ProcessNameRunverbose.VerboseInfo.OrigTuple))])
    # Leaf registry: python attribute name -> (YLeaf binding, accepted python types).
    self._leafs = OrderedDict([
        ('process_group', (YLeaf(YType.str, 'process-group'), ['str'])),
        ('respawn_allowed', (YLeaf(YType.int32, 'respawn-allowed'), ['int'])),
        ('wait_for_exit', (YLeaf(YType.int32, 'wait-for-exit'), ['int'])),
        ('dynamic_tag', (YLeaf(YType.int32, 'dynamic-tag'), ['int'])),
        ('forced_stop', (YLeaf(YType.int32, 'forced-stop'), ['int'])),
        ('critical_process', (YLeaf(YType.int32, 'critical-process'), ['int'])),
        ('hold', (YLeaf(YType.int32, 'hold'), ['int'])),
        ('transient', (YLeaf(YType.int32, 'transient'), ['int'])),
        ('tuple_cfgmgr', (YLeaf(YType.int32, 'tuple-cfgmgr'), ['int'])),
        ('standby_capable', (YLeaf(YType.int32, 'standby-capable'), ['int'])),
        ('edm_startup', (YLeaf(YType.int32, 'edm-startup'), ['int'])),
        ('placement', (YLeaf(YType.int32, 'placement'), ['int'])),
        ('skip_kill_notif', (YLeaf(YType.int32, 'skip-kill-notif'), ['int'])),
        ('init_proc', (YLeaf(YType.int32, 'init-proc'), ['int'])),
        ('sysdb_event', (YLeaf(YType.int32, 'sysdb-event'), ['int'])),
        ('level_started', (YLeaf(YType.int32, 'level-started'), ['int'])),
        ('proc_avail', (YLeaf(YType.int32, 'proc-avail'), ['int'])),
        ('tuples_scanned', (YLeaf(YType.int32, 'tuples-scanned'), ['int'])),
        ('no_chkpt_start', (YLeaf(YType.int32, 'no-chkpt-start'), ['int'])),
        ('in_shut_down', (YLeaf(YType.int32, 'in-shut-down'), ['int'])),
        ('sm_started', (YLeaf(YType.int32, 'sm-started'), ['int'])),
        ('ignore_on_sc', (YLeaf(YType.int32, 'ignore-on-sc'), ['int'])),
        ('ignore_on_easy_bake', (YLeaf(YType.int32, 'ignore-on-easy-bake'), ['int'])),
        ('pre_init', (YLeaf(YType.int32, 'pre-init'), ['int'])),
        ('eoi_received', (YLeaf(YType.int32, 'eoi-received'), ['int'])),
        ('eoi_timeout', (YLeaf(YType.int32, 'eoi-timeout'), ['int'])),
        ('avail_timeout', (YLeaf(YType.int32, 'avail-timeout'), ['int'])),
        ('reserved_memory', (YLeaf(YType.int32, 'reserved-memory'), ['int'])),
        ('allow_warned', (YLeaf(YType.int32, 'allow-warned'), ['int'])),
        ('arg_change', (YLeaf(YType.int32, 'arg-change'), ['int'])),
        ('restart_on_tuple', (YLeaf(YType.int32, 'restart-on-tuple'), ['int'])),
        ('boot_hold', (YLeaf(YType.int32, 'boot-hold'), ['int'])),
        ('reg_id', (YLeaf(YType.int32, 'reg-id'), ['int'])),
        ('memory_limit', (YLeaf(YType.int32, 'memory-limit'), ['int'])),
        ('parent_job_id', (YLeaf(YType.int32, 'parent-job-id'), ['int'])),
        ('tuple_index', (YLeaf(YType.int32, 'tuple-index'), ['int'])),
        ('dump_count', (YLeaf(YType.int32, 'dump-count'), ['int'])),
        ('respawn_interval_user', (YLeaf(YType.int32, 'respawn-interval-user'), ['int'])),
        ('silent_restart_count', (YLeaf(YType.int32, 'silent-restart-count'), ['int'])),
        ('critical_tier', (YLeaf(YType.int32, 'critical-tier'), ['int'])),
        ('exit_type', (YLeaf(YType.int32, 'exit-type'), ['int'])),
        ('init_timeout', (YLeaf(YType.int32, 'init-timeout'), ['int'])),
        ('restart_by_cmd', (YLeaf(YType.int32, 'restart-by-cmd'), ['int'])),
        ('boot_pref', (YLeaf(YType.int32, 'boot-pref'), ['int'])),
        ('mdr_mbi_proc', (YLeaf(YType.int32, 'mdr-mbi-proc'), ['int'])),
        ('mdr_non_mbi_kld', (YLeaf(YType.int32, 'mdr-non-mbi-kld'), ['int'])),
        ('mdr_mbi_kld', (YLeaf(YType.int32, 'mdr-mbi-kld'), ['int'])),
        ('mdr_shut_delay', (YLeaf(YType.int32, 'mdr-shut-delay'), ['int'])),
        ('mdr_keep_thru', (YLeaf(YType.int32, 'mdr-keep-thru'), ['int'])),
        ('mdr_spoofer', (YLeaf(YType.int32, 'mdr-spoofer'), ['int'])),
        ('mdr_spoofed', (YLeaf(YType.int32, 'mdr-spoofed'), ['int'])),
        ('mdr_spoofed_last', (YLeaf(YType.int32, 'mdr-spoofed-last'), ['int'])),
        ('mdr_spoofed_ready', (YLeaf(YType.int32, 'mdr-spoofed-ready'), ['int'])),
        ('mdr_pcb_check', (YLeaf(YType.int32, 'mdr-pcb-check'), ['int'])),
        ('mdr_kill_tier', (YLeaf(YType.int32, 'mdr-kill-tier'), ['int'])),
        ('mdr_kld', (YLeaf(YType.int32, 'mdr-kld'), ['int'])),
        ('mdr_level', (YLeaf(YType.int32, 'mdr-level'), ['int'])),
        ('fm_restart_cnt', (YLeaf(YType.int32, 'fm-restart-cnt'), ['int'])),
        ('self_managed', (YLeaf(YType.int32, 'self-managed'), ['int'])),
    ])
    # All leaf values start unset; the runtime fills them from operational data.
    self.process_group = None
    self.respawn_allowed = None
    self.wait_for_exit = None
    self.dynamic_tag = None
    self.forced_stop = None
    self.critical_process = None
    self.hold = None
    self.transient = None
    self.tuple_cfgmgr = None
    self.standby_capable = None
    self.edm_startup = None
    self.placement = None
    self.skip_kill_notif = None
    self.init_proc = None
    self.sysdb_event = None
    self.level_started = None
    self.proc_avail = None
    self.tuples_scanned = None
    self.no_chkpt_start = None
    self.in_shut_down = None
    self.sm_started = None
    self.ignore_on_sc = None
    self.ignore_on_easy_bake = None
    self.pre_init = None
    self.eoi_received = None
    self.eoi_timeout = None
    self.avail_timeout = None
    self.reserved_memory = None
    self.allow_warned = None
    self.arg_change = None
    self.restart_on_tuple = None
    self.boot_hold = None
    self.reg_id = None
    self.memory_limit = None
    self.parent_job_id = None
    self.tuple_index = None
    self.dump_count = None
    self.respawn_interval_user = None
    self.silent_restart_count = None
    self.critical_tier = None
    self.exit_type = None
    self.init_timeout = None
    self.restart_by_cmd = None
    self.boot_pref = None
    self.mdr_mbi_proc = None
    self.mdr_non_mbi_kld = None
    self.mdr_mbi_kld = None
    self.mdr_shut_delay = None
    self.mdr_keep_thru = None
    self.mdr_spoofer = None
    self.mdr_spoofed = None
    self.mdr_spoofed_last = None
    self.mdr_spoofed_ready = None
    self.mdr_pcb_check = None
    self.mdr_kill_tier = None
    self.mdr_kld = None
    self.mdr_level = None
    self.fm_restart_cnt = None
    self.self_managed = None
    # Child list containers.
    self.tuple = YList(self)
    self.orig_tuple = YList(self)
    self._segment_path = lambda: "verbose-info"
    # Freeze: further attribute writes go through _perform_setattr.
    self._is_frozen = True
def __setattr__(self, name, value):
    """Route attribute writes through ydk's leaf bookkeeping for this class."""
    tracked_leafs = [
        'process_group', 'respawn_allowed', 'wait_for_exit', 'dynamic_tag',
        'forced_stop', 'critical_process', 'hold', 'transient', 'tuple_cfgmgr',
        'standby_capable', 'edm_startup', 'placement', 'skip_kill_notif',
        'init_proc', 'sysdb_event', 'level_started', 'proc_avail',
        'tuples_scanned', 'no_chkpt_start', 'in_shut_down', 'sm_started',
        'ignore_on_sc', 'ignore_on_easy_bake', 'pre_init', 'eoi_received',
        'eoi_timeout', 'avail_timeout', 'reserved_memory', 'allow_warned',
        'arg_change', 'restart_on_tuple', 'boot_hold', 'reg_id', 'memory_limit',
        'parent_job_id', 'tuple_index', 'dump_count', 'respawn_interval_user',
        'silent_restart_count', 'critical_tier', 'exit_type', 'init_timeout',
        'restart_by_cmd', 'boot_pref', 'mdr_mbi_proc', 'mdr_non_mbi_kld',
        'mdr_mbi_kld', 'mdr_shut_delay', 'mdr_keep_thru', 'mdr_spoofer',
        'mdr_spoofed', 'mdr_spoofed_last', 'mdr_spoofed_ready', 'mdr_pcb_check',
        'mdr_kill_tier', 'mdr_kld', 'mdr_level', 'fm_restart_cnt',
        'self_managed',
    ]
    self._perform_setattr(
        SystemProcess.NodeTable.Node.Name.ProcessNameRunverboses.ProcessNameRunverbose.VerboseInfo,
        tracked_leafs,
        name,
        value,
    )
class Tuple(_Entity_):
    """
    A single registered tuple string.

    .. attribute:: tuple

        Tuple
        **type**\: str
        **config**\: False

    """

    _prefix = 'sysmgr-oper'
    _revision = '2015-11-09'

    def __init__(self):
        # Python 2 needs the explicit dotted class path in super().
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(SystemProcess.NodeTable.Node.Name.ProcessNameRunverboses.ProcessNameRunverbose.VerboseInfo.Tuple, self).__init__()

        self.yang_name = "tuple"
        self.yang_parent_name = "verbose-info"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict()
        # Single string leaf sharing the container's name.
        self._leafs = OrderedDict([('tuple', (YLeaf(YType.str, 'tuple'), ['str']))])
        self.tuple = None
        self._segment_path = lambda: "tuple"
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(
            SystemProcess.NodeTable.Node.Name.ProcessNameRunverboses.ProcessNameRunverbose.VerboseInfo.Tuple,
            ['tuple'], name, value)

    @staticmethod
    def _meta_info():
        # Import lazily to avoid a circular dependency with the meta module.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_sysmgr_oper as meta
        entry = meta._meta_table['SystemProcess.NodeTable.Node.Name.ProcessNameRunverboses.ProcessNameRunverbose.VerboseInfo.Tuple']
        return entry['meta_info']
class OrigTuple(_Entity_):
    """
    A single original (pre-modification) tuple string.

    .. attribute:: tuple

        Tuple
        **type**\: str
        **config**\: False

    """

    _prefix = 'sysmgr-oper'
    _revision = '2015-11-09'

    def __init__(self):
        # Python 2 needs the explicit dotted class path in super().
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(SystemProcess.NodeTable.Node.Name.ProcessNameRunverboses.ProcessNameRunverbose.VerboseInfo.OrigTuple, self).__init__()

        self.yang_name = "orig-tuple"
        self.yang_parent_name = "verbose-info"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict()
        # Single string leaf named "tuple".
        self._leafs = OrderedDict([('tuple', (YLeaf(YType.str, 'tuple'), ['str']))])
        self.tuple = None
        self._segment_path = lambda: "orig-tuple"
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(
            SystemProcess.NodeTable.Node.Name.ProcessNameRunverboses.ProcessNameRunverbose.VerboseInfo.OrigTuple,
            ['tuple'], name, value)

    @staticmethod
    def _meta_info():
        # Import lazily to avoid a circular dependency with the meta module.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_sysmgr_oper as meta
        entry = meta._meta_table['SystemProcess.NodeTable.Node.Name.ProcessNameRunverboses.ProcessNameRunverbose.VerboseInfo.OrigTuple']
        return entry['meta_info']
@staticmethod
def _meta_info():
    """Return the generated meta-info record for VerboseInfo."""
    # Lazy import avoids a circular dependency with the meta module.
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_sysmgr_oper as meta
    entry = meta._meta_table['SystemProcess.NodeTable.Node.Name.ProcessNameRunverboses.ProcessNameRunverbose.VerboseInfo']
    return entry['meta_info']
@staticmethod
def _meta_info():
    """Return the generated meta-info record for ProcessNameRunverbose."""
    # Lazy import avoids a circular dependency with the meta module.
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_sysmgr_oper as meta
    entry = meta._meta_table['SystemProcess.NodeTable.Node.Name.ProcessNameRunverboses.ProcessNameRunverbose']
    return entry['meta_info']
@staticmethod
def _meta_info():
    """Return the generated meta-info record for ProcessNameRunverboses."""
    # Lazy import avoids a circular dependency with the meta module.
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_sysmgr_oper as meta
    entry = meta._meta_table['SystemProcess.NodeTable.Node.Name.ProcessNameRunverboses']
    return entry['meta_info']
class ProcessNameDetails(_Entity_):
    """
    Process <WORD> information

    .. attribute:: process_name_detail

        Process <WORD> detail information
        **type**\: list of :py:class:`ProcessNameDetail <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysmgr_oper.SystemProcess.NodeTable.Node.Name.ProcessNameDetails.ProcessNameDetail>`
        **config**\: False

    """

    # YANG module identity for this generated binding.
    _prefix = 'sysmgr-oper'
    _revision = '2015-11-09'

    def __init__(self):
        # Python 2 requires the fully-qualified class in super().
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(SystemProcess.NodeTable.Node.Name.ProcessNameDetails, self).__init__()

        self.yang_name = "process-name-details"
        self.yang_parent_name = "name"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        # Single child list: keyed "process-name-detail" entries.
        self._child_classes = OrderedDict([("process-name-detail", ("process_name_detail", SystemProcess.NodeTable.Node.Name.ProcessNameDetails.ProcessNameDetail))])
        self._leafs = OrderedDict()

        self.process_name_detail = YList(self)
        self._segment_path = lambda: "process-name-details"
        self._is_frozen = True

    def __setattr__(self, name, value):
        # No leafs on this container; only the child list is tracked.
        self._perform_setattr(SystemProcess.NodeTable.Node.Name.ProcessNameDetails, [], name, value)
class ProcessNameDetail(_Entity_):
    """
    Process <WORD> detail information

    .. attribute:: proc_name  (key)

        Process Name
        **type**\: str
        **pattern:** [\\w\\\-\\.\:,\_@#%$\\+=\\\|;]+
        **config**\: False

    .. attribute:: basic_info

        Process Basic Info
        **type**\: :py:class:`BasicInfo <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysmgr_oper.SystemProcess.NodeTable.Node.Name.ProcessNameDetails.ProcessNameDetail.BasicInfo>`
        **config**\: False

    .. attribute:: detail_info

        Process Detail Info
        **type**\: :py:class:`DetailInfo <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysmgr_oper.SystemProcess.NodeTable.Node.Name.ProcessNameDetails.ProcessNameDetail.DetailInfo>`
        **config**\: False

    """

    _prefix = 'sysmgr-oper'
    _revision = '2015-11-09'

    def __init__(self):
        # Python 2 requires the fully-qualified class in super().
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(SystemProcess.NodeTable.Node.Name.ProcessNameDetails.ProcessNameDetail, self).__init__()

        self.yang_name = "process-name-detail"
        self.yang_parent_name = "process-name-details"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        # This list entry is keyed by the process name.
        self.ylist_key_names = ['proc_name']
        self._child_classes = OrderedDict([("basic-info", ("basic_info", SystemProcess.NodeTable.Node.Name.ProcessNameDetails.ProcessNameDetail.BasicInfo)), ("detail-info", ("detail_info", SystemProcess.NodeTable.Node.Name.ProcessNameDetails.ProcessNameDetail.DetailInfo))])
        self._leafs = OrderedDict([
            ('proc_name', (YLeaf(YType.str, 'proc-name'), ['str'])),
        ])
        self.proc_name = None

        # Singleton child containers, pre-instantiated with parent back-links.
        self.basic_info = SystemProcess.NodeTable.Node.Name.ProcessNameDetails.ProcessNameDetail.BasicInfo()
        self.basic_info.parent = self
        self._children_name_map["basic_info"] = "basic-info"

        self.detail_info = SystemProcess.NodeTable.Node.Name.ProcessNameDetails.ProcessNameDetail.DetailInfo()
        self.detail_info.parent = self
        self._children_name_map["detail_info"] = "detail-info"

        # Path segment embeds the key predicate, e.g. process-name-detail[proc-name='x'].
        self._segment_path = lambda: "process-name-detail" + "[proc-name='" + str(self.proc_name) + "']"
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(SystemProcess.NodeTable.Node.Name.ProcessNameDetails.ProcessNameDetail, ['proc_name'], name, value)
class BasicInfo(_Entity_):
    """
    Process Basic Info

    .. attribute:: proc_cpu_time

        Proces cpu time
        **type**\: :py:class:`ProcCpuTime <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysmgr_oper.SystemProcess.NodeTable.Node.Name.ProcessNameDetails.ProcessNameDetail.BasicInfo.ProcCpuTime>`
        **config**\: False

    .. attribute:: job_id_xr

        Job ID
        **type**\: int
        **range:** 0..4294967295
        **config**\: False

    .. attribute:: process_id

        PID
        **type**\: int
        **range:** 0..4294967295
        **config**\: False

    .. attribute:: process_name

        Process name
        **type**\: str
        **config**\: False

    .. attribute:: executable

        Executable name or path
        **type**\: str
        **config**\: False

    .. attribute:: active_path

        Active Path
        **type**\: str
        **config**\: False

    .. attribute:: instance_id

        Instance ID
        **type**\: int
        **range:** \-2147483648..2147483647
        **config**\: False

    .. attribute:: args

        Args
        **type**\: str
        **config**\: False

    .. attribute:: version_id

        Version ID
        **type**\: str
        **config**\: False

    .. attribute:: respawn

        Respawn on/off
        **type**\: str
        **config**\: False

    .. attribute:: respawn_count

        Respawn Count
        **type**\: int
        **range:** \-2147483648..2147483647
        **config**\: False

    .. attribute:: last_started

        Last Started
        **type**\: str
        **config**\: False

    .. attribute:: process_state

        Process State
        **type**\: str
        **config**\: False

    .. attribute:: last_exit_status

        Last Exit status
        **type**\: int
        **range:** \-2147483648..2147483647
        **config**\: False

    .. attribute:: last_exit_reason

        Last Exit due to
        **type**\: str
        **config**\: False

    .. attribute:: package_state

        Package State
        **type**\: str
        **config**\: False

    .. attribute:: started_on_config

        Started on Config
        **type**\: str
        **config**\: False

    .. attribute:: feature_name

        Feature Name
        **type**\: str
        **config**\: False

    .. attribute:: tag

        Tag
        **type**\: str
        **config**\: False

    .. attribute:: group

        Process Group
        **type**\: str
        **config**\: False

    .. attribute:: core

        Core
        **type**\: str
        **config**\: False

    .. attribute:: max_core

        Max core
        **type**\: int
        **range:** \-2147483648..2147483647
        **config**\: False

    .. attribute:: level

        Level
        **type**\: str
        **config**\: False

    .. attribute:: mandatory

        Is mandatory?
        **type**\: bool
        **config**\: False

    .. attribute:: maint_mode_proc

        Is admin mode process?
        **type**\: bool
        **config**\: False

    .. attribute:: placement_state

        Placement State
        **type**\: str
        **config**\: False

    .. attribute:: start_up_path

        Startup Path
        **type**\: str
        **config**\: False

    .. attribute:: memory_limit

        Memory Limit
        **type**\: int
        **range:** 0..4294967295
        **config**\: False

    .. attribute:: ready

        Elapsed Ready
        **type**\: str
        **config**\: False

    .. attribute:: available

        Elapsed Available
        **type**\: str
        **config**\: False

    .. attribute:: registered_item

        Registered Items
        **type**\: list of :py:class:`RegisteredItem <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysmgr_oper.SystemProcess.NodeTable.Node.Name.ProcessNameDetails.ProcessNameDetail.BasicInfo.RegisteredItem>`
        **config**\: False

    """

    _prefix = 'sysmgr-oper'
    _revision = '2015-11-09'

    def __init__(self):
        # Python 2 requires the fully-qualified class in super().
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(SystemProcess.NodeTable.Node.Name.ProcessNameDetails.ProcessNameDetail.BasicInfo, self).__init__()

        self.yang_name = "basic-info"
        self.yang_parent_name = "process-name-detail"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        # One singleton child ("proc-cpu-time") and one list child ("registered-item").
        self._child_classes = OrderedDict([("proc-cpu-time", ("proc_cpu_time", SystemProcess.NodeTable.Node.Name.ProcessNameDetails.ProcessNameDetail.BasicInfo.ProcCpuTime)), ("registered-item", ("registered_item", SystemProcess.NodeTable.Node.Name.ProcessNameDetails.ProcessNameDetail.BasicInfo.RegisteredItem))])
        # Leaf registry: python attribute name -> (YLeaf binding, accepted python types).
        self._leafs = OrderedDict([
            ('job_id_xr', (YLeaf(YType.uint32, 'job-id-xr'), ['int'])),
            ('process_id', (YLeaf(YType.uint32, 'process-id'), ['int'])),
            ('process_name', (YLeaf(YType.str, 'process-name'), ['str'])),
            ('executable', (YLeaf(YType.str, 'executable'), ['str'])),
            ('active_path', (YLeaf(YType.str, 'active-path'), ['str'])),
            ('instance_id', (YLeaf(YType.int32, 'instance-id'), ['int'])),
            ('args', (YLeaf(YType.str, 'args'), ['str'])),
            ('version_id', (YLeaf(YType.str, 'version-id'), ['str'])),
            ('respawn', (YLeaf(YType.str, 'respawn'), ['str'])),
            ('respawn_count', (YLeaf(YType.int32, 'respawn-count'), ['int'])),
            ('last_started', (YLeaf(YType.str, 'last-started'), ['str'])),
            ('process_state', (YLeaf(YType.str, 'process-state'), ['str'])),
            ('last_exit_status', (YLeaf(YType.int32, 'last-exit-status'), ['int'])),
            ('last_exit_reason', (YLeaf(YType.str, 'last-exit-reason'), ['str'])),
            ('package_state', (YLeaf(YType.str, 'package-state'), ['str'])),
            ('started_on_config', (YLeaf(YType.str, 'started-on-config'), ['str'])),
            ('feature_name', (YLeaf(YType.str, 'feature-name'), ['str'])),
            ('tag', (YLeaf(YType.str, 'tag'), ['str'])),
            ('group', (YLeaf(YType.str, 'group'), ['str'])),
            ('core', (YLeaf(YType.str, 'core'), ['str'])),
            ('max_core', (YLeaf(YType.int32, 'max-core'), ['int'])),
            ('level', (YLeaf(YType.str, 'level'), ['str'])),
            ('mandatory', (YLeaf(YType.boolean, 'mandatory'), ['bool'])),
            ('maint_mode_proc', (YLeaf(YType.boolean, 'maint-mode-proc'), ['bool'])),
            ('placement_state', (YLeaf(YType.str, 'placement-state'), ['str'])),
            ('start_up_path', (YLeaf(YType.str, 'start-up-path'), ['str'])),
            ('memory_limit', (YLeaf(YType.uint32, 'memory-limit'), ['int'])),
            ('ready', (YLeaf(YType.str, 'ready'), ['str'])),
            ('available', (YLeaf(YType.str, 'available'), ['str'])),
        ])
        # All leaf values start unset.
        self.job_id_xr = None
        self.process_id = None
        self.process_name = None
        self.executable = None
        self.active_path = None
        self.instance_id = None
        self.args = None
        self.version_id = None
        self.respawn = None
        self.respawn_count = None
        self.last_started = None
        self.process_state = None
        self.last_exit_status = None
        self.last_exit_reason = None
        self.package_state = None
        self.started_on_config = None
        self.feature_name = None
        self.tag = None
        self.group = None
        self.core = None
        self.max_core = None
        self.level = None
        self.mandatory = None
        self.maint_mode_proc = None
        self.placement_state = None
        self.start_up_path = None
        self.memory_limit = None
        self.ready = None
        self.available = None

        # Singleton child container with parent back-link.
        self.proc_cpu_time = SystemProcess.NodeTable.Node.Name.ProcessNameDetails.ProcessNameDetail.BasicInfo.ProcCpuTime()
        self.proc_cpu_time.parent = self
        self._children_name_map["proc_cpu_time"] = "proc-cpu-time"

        self.registered_item = YList(self)
        self._segment_path = lambda: "basic-info"
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(SystemProcess.NodeTable.Node.Name.ProcessNameDetails.ProcessNameDetail.BasicInfo, ['job_id_xr', 'process_id', 'process_name', 'executable', 'active_path', 'instance_id', 'args', 'version_id', 'respawn', 'respawn_count', 'last_started', 'process_state', 'last_exit_status', 'last_exit_reason', 'package_state', 'started_on_config', 'feature_name', 'tag', 'group', 'core', 'max_core', 'level', 'mandatory', 'maint_mode_proc', 'placement_state', 'start_up_path', 'memory_limit', 'ready', 'available'], name, value)
class ProcCpuTime(_Entity_):
    """
    Process CPU time split into user, kernel and total components.

    .. attribute:: user

        User time
        **type**\: str
        **config**\: False

    .. attribute:: system

        Kernel time
        **type**\: str
        **config**\: False

    .. attribute:: total

        Total time
        **type**\: str
        **config**\: False

    """

    _prefix = 'sysmgr-oper'
    _revision = '2015-11-09'

    def __init__(self):
        # Python 2 needs the explicit dotted class path in super().
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(SystemProcess.NodeTable.Node.Name.ProcessNameDetails.ProcessNameDetail.BasicInfo.ProcCpuTime, self).__init__()

        self.yang_name = "proc-cpu-time"
        self.yang_parent_name = "basic-info"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict()
        # One string leaf per CPU-time component; attribute and YANG names match.
        self._leafs = OrderedDict(
            (leaf, (YLeaf(YType.str, leaf), ['str']))
            for leaf in ('user', 'system', 'total')
        )
        self.user = None
        self.system = None
        self.total = None
        self._segment_path = lambda: "proc-cpu-time"
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(
            SystemProcess.NodeTable.Node.Name.ProcessNameDetails.ProcessNameDetail.BasicInfo.ProcCpuTime,
            ['user', 'system', 'total'], name, value)

    @staticmethod
    def _meta_info():
        # Import lazily to avoid a circular dependency with the meta module.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_sysmgr_oper as meta
        entry = meta._meta_table['SystemProcess.NodeTable.Node.Name.ProcessNameDetails.ProcessNameDetail.BasicInfo.ProcCpuTime']
        return entry['meta_info']
class RegisteredItem(_Entity_):
    """
    One item the process has registered, expressed as a tuple string.

    .. attribute:: tuple

        Tuple
        **type**\: str
        **config**\: False

    """

    _prefix = 'sysmgr-oper'
    _revision = '2015-11-09'

    def __init__(self):
        # Python 2 needs the explicit dotted class path in super().
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(SystemProcess.NodeTable.Node.Name.ProcessNameDetails.ProcessNameDetail.BasicInfo.RegisteredItem, self).__init__()

        self.yang_name = "registered-item"
        self.yang_parent_name = "basic-info"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict()
        # Single string leaf named "tuple".
        self._leafs = OrderedDict([('tuple', (YLeaf(YType.str, 'tuple'), ['str']))])
        self.tuple = None
        self._segment_path = lambda: "registered-item"
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(
            SystemProcess.NodeTable.Node.Name.ProcessNameDetails.ProcessNameDetail.BasicInfo.RegisteredItem,
            ['tuple'], name, value)

    @staticmethod
    def _meta_info():
        # Import lazily to avoid a circular dependency with the meta module.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_sysmgr_oper as meta
        entry = meta._meta_table['SystemProcess.NodeTable.Node.Name.ProcessNameDetails.ProcessNameDetail.BasicInfo.RegisteredItem']
        return entry['meta_info']
@staticmethod
def _meta_info():
    """Return the generated meta-info record for BasicInfo."""
    # Lazy import avoids a circular dependency with the meta module.
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_sysmgr_oper as meta
    entry = meta._meta_table['SystemProcess.NodeTable.Node.Name.ProcessNameDetails.ProcessNameDetail.BasicInfo']
    return entry['meta_info']
class DetailInfo(_Entity_):
    """
    Process Detail Info

    .. attribute:: running_path

        Running path
        **type**\: str
        **config**\: False

    .. attribute:: package_path

        Package path
        **type**\: str
        **config**\: False

    .. attribute:: job_id_link

        Job Id Link
        **type**\: int
        **range:** \-2147483648..2147483647
        **config**\: False

    .. attribute:: group_jid

        Group Jid
        **type**\: str
        **config**\: False

    .. attribute:: fail_count

        Fail count
        **type**\: int
        **range:** 0..4294967295
        **config**\: False

    .. attribute:: restart_needed

        Restart needed
        **type**\: bool
        **config**\: False

    .. attribute:: init_process

        Init process
        **type**\: bool
        **config**\: False

    .. attribute:: last_online

        Last Online
        **type**\: str
        **config**\: False

    .. attribute:: this_pcb

        This PCB
        **type**\: str
        **config**\: False

    .. attribute:: next_pcb

        Next PCB
        **type**\: str
        **config**\: False

    .. attribute:: envs

        Env variables
        **type**\: str
        **config**\: False

    .. attribute:: wait_for

        Wait For /dev/xxx
        **type**\: str
        **config**\: False

    .. attribute:: job_id_on_rp

        Job ID on RP
        **type**\: int
        **range:** \-2147483648..2147483647
        **config**\: False

    .. attribute:: is_standby_capable

        Is standby capable?
        **type**\: bool
        **config**\: False

    .. attribute:: disable_kill

        Disable kill?
        **type**\: bool
        **config**\: False

    .. attribute:: send_avail

        Check avail
        **type**\: bool
        **config**\: False

    .. attribute:: node_event_cli_info

        Node Event CLI info
        **type**\: int
        **range:** \-2147483648..2147483647
        **config**\: False

    .. attribute:: node_redundancy_state

        Node redundancy state
        **type**\: str
        **config**\: False

    .. attribute:: role_event_cli_info

        Role event cli info
        **type**\: int
        **range:** \-2147483648..2147483647
        **config**\: False

    .. attribute:: proc_role_state

        Proc Role State
        **type**\: str
        **config**\: False

    .. attribute:: standby_event_cli_info

        Standby Event CLI info
        **type**\: int
        **range:** \-2147483648..2147483647
        **config**\: False

    .. attribute:: cleanup_event_cli_info

        Cleanup event CLI info
        **type**\: int
        **range:** \-2147483648..2147483647
        **config**\: False

    .. attribute:: band_ready_event_cli_info

        Band Ready Event CLI Info
        **type**\: int
        **range:** \-2147483648..2147483647
        **config**\: False

    .. attribute:: lr_event_cli_info

        LR Event CLI Info
        **type**\: int
        **range:** \-2147483648..2147483647
        **config**\: False

    .. attribute:: plane_ready_event_cli_info

        Plane Ready Event CLI info
        **type**\: int
        **range:** \-2147483648..2147483647
        **config**\: False

    .. attribute:: mdr_is_done_cli_info

        MDR is done CLI Info
        **type**\: int
        **range:** \-2147483648..2147483647
        **config**\: False

    """

    _prefix = 'sysmgr-oper'
    _revision = '2015-11-09'

    def __init__(self):
        # Python 2 requires the fully-qualified class in super().
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(SystemProcess.NodeTable.Node.Name.ProcessNameDetails.ProcessNameDetail.DetailInfo, self).__init__()

        self.yang_name = "detail-info"
        self.yang_parent_name = "process-name-detail"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        # Leaf-only container: no child classes.
        self._child_classes = OrderedDict([])
        # Leaf registry: python attribute name -> (YLeaf binding, accepted python types).
        self._leafs = OrderedDict([
            ('running_path', (YLeaf(YType.str, 'running-path'), ['str'])),
            ('package_path', (YLeaf(YType.str, 'package-path'), ['str'])),
            ('job_id_link', (YLeaf(YType.int32, 'job-id-link'), ['int'])),
            ('group_jid', (YLeaf(YType.str, 'group-jid'), ['str'])),
            ('fail_count', (YLeaf(YType.uint32, 'fail-count'), ['int'])),
            ('restart_needed', (YLeaf(YType.boolean, 'restart-needed'), ['bool'])),
            ('init_process', (YLeaf(YType.boolean, 'init-process'), ['bool'])),
            ('last_online', (YLeaf(YType.str, 'last-online'), ['str'])),
            ('this_pcb', (YLeaf(YType.str, 'this-pcb'), ['str'])),
            ('next_pcb', (YLeaf(YType.str, 'next-pcb'), ['str'])),
            ('envs', (YLeaf(YType.str, 'envs'), ['str'])),
            ('wait_for', (YLeaf(YType.str, 'wait-for'), ['str'])),
            ('job_id_on_rp', (YLeaf(YType.int32, 'job-id-on-rp'), ['int'])),
            ('is_standby_capable', (YLeaf(YType.boolean, 'is-standby-capable'), ['bool'])),
            ('disable_kill', (YLeaf(YType.boolean, 'disable-kill'), ['bool'])),
            ('send_avail', (YLeaf(YType.boolean, 'send-avail'), ['bool'])),
            ('node_event_cli_info', (YLeaf(YType.int32, 'node-event-cli-info'), ['int'])),
            ('node_redundancy_state', (YLeaf(YType.str, 'node-redundancy-state'), ['str'])),
            ('role_event_cli_info', (YLeaf(YType.int32, 'role-event-cli-info'), ['int'])),
            ('proc_role_state', (YLeaf(YType.str, 'proc-role-state'), ['str'])),
            ('standby_event_cli_info', (YLeaf(YType.int32, 'standby-event-cli-info'), ['int'])),
            ('cleanup_event_cli_info', (YLeaf(YType.int32, 'cleanup-event-cli-info'), ['int'])),
            ('band_ready_event_cli_info', (YLeaf(YType.int32, 'band-ready-event-cli-info'), ['int'])),
            ('lr_event_cli_info', (YLeaf(YType.int32, 'lr-event-cli-info'), ['int'])),
            ('plane_ready_event_cli_info', (YLeaf(YType.int32, 'plane-ready-event-cli-info'), ['int'])),
            ('mdr_is_done_cli_info', (YLeaf(YType.int32, 'mdr-is-done-cli-info'), ['int'])),
        ])
        # All leaf values start unset.
        self.running_path = None
        self.package_path = None
        self.job_id_link = None
        self.group_jid = None
        self.fail_count = None
        self.restart_needed = None
        self.init_process = None
        self.last_online = None
        self.this_pcb = None
        self.next_pcb = None
        self.envs = None
        self.wait_for = None
        self.job_id_on_rp = None
        self.is_standby_capable = None
        self.disable_kill = None
        self.send_avail = None
        self.node_event_cli_info = None
        self.node_redundancy_state = None
        self.role_event_cli_info = None
        self.proc_role_state = None
        self.standby_event_cli_info = None
        self.cleanup_event_cli_info = None
        self.band_ready_event_cli_info = None
        self.lr_event_cli_info = None
        self.plane_ready_event_cli_info = None
        self.mdr_is_done_cli_info = None
        self._segment_path = lambda: "detail-info"
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(SystemProcess.NodeTable.Node.Name.ProcessNameDetails.ProcessNameDetail.DetailInfo, ['running_path', 'package_path', 'job_id_link', 'group_jid', 'fail_count', 'restart_needed', 'init_process', 'last_online', 'this_pcb', 'next_pcb', 'envs', 'wait_for', 'job_id_on_rp', 'is_standby_capable', 'disable_kill', 'send_avail', 'node_event_cli_info', 'node_redundancy_state', 'role_event_cli_info', 'proc_role_state', 'standby_event_cli_info', 'cleanup_event_cli_info', 'band_ready_event_cli_info', 'lr_event_cli_info', 'plane_ready_event_cli_info', 'mdr_is_done_cli_info'], name, value)
@staticmethod
def _meta_info():
    """Return the generated meta-info record for DetailInfo."""
    # Lazy import avoids a circular dependency with the meta module.
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_sysmgr_oper as meta
    entry = meta._meta_table['SystemProcess.NodeTable.Node.Name.ProcessNameDetails.ProcessNameDetail.DetailInfo']
    return entry['meta_info']
@staticmethod
def _meta_info():
    """Return the generated meta-info record for ProcessNameDetail."""
    # Lazy import avoids a circular dependency with the meta module.
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_sysmgr_oper as meta
    entry = meta._meta_table['SystemProcess.NodeTable.Node.Name.ProcessNameDetails.ProcessNameDetail']
    return entry['meta_info']
@staticmethod
def _meta_info():
    """Return the generated meta-info record for ProcessNameDetails."""
    # Lazy import avoids a circular dependency with the meta module.
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_sysmgr_oper as meta
    entry = meta._meta_table['SystemProcess.NodeTable.Node.Name.ProcessNameDetails']
    return entry['meta_info']
class ProcessNameVerboses(_Entity_):
    """
    Process <WORD> information

    .. attribute:: process_name_verbose

        Process <WORD> verbose information
        **type**\: list of :py:class:`ProcessNameVerbose <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysmgr_oper.SystemProcess.NodeTable.Node.Name.ProcessNameVerboses.ProcessNameVerbose>`
        **config**\: False

    """

    # YANG module identity for this generated binding.
    _prefix = 'sysmgr-oper'
    _revision = '2015-11-09'

    def __init__(self):
        # Python 2 requires the fully-qualified class in super().
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(SystemProcess.NodeTable.Node.Name.ProcessNameVerboses, self).__init__()

        self.yang_name = "process-name-verboses"
        self.yang_parent_name = "name"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        # Single child list: keyed "process-name-verbose" entries.
        self._child_classes = OrderedDict([("process-name-verbose", ("process_name_verbose", SystemProcess.NodeTable.Node.Name.ProcessNameVerboses.ProcessNameVerbose))])
        self._leafs = OrderedDict()

        self.process_name_verbose = YList(self)
        self._segment_path = lambda: "process-name-verboses"
        self._is_frozen = True

    def __setattr__(self, name, value):
        # No leafs on this container; only the child list is tracked.
        self._perform_setattr(SystemProcess.NodeTable.Node.Name.ProcessNameVerboses, [], name, value)
class ProcessNameVerbose(_Entity_):
"""
Process <WORD> verbose information
.. attribute:: proc_name (key)
Process Name
**type**\: str
**pattern:** [\\w\\\-\\.\:,\_@#%$\\+=\\\|;]+
**config**\: False
.. attribute:: basic_info
Process Basic Info
**type**\: :py:class:`BasicInfo <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysmgr_oper.SystemProcess.NodeTable.Node.Name.ProcessNameVerboses.ProcessNameVerbose.BasicInfo>`
**config**\: False
.. attribute:: detail_info
Process Detail Info
**type**\: :py:class:`DetailInfo <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysmgr_oper.SystemProcess.NodeTable.Node.Name.ProcessNameVerboses.ProcessNameVerbose.DetailInfo>`
**config**\: False
.. attribute:: verbose_info
Process Verbose Info
**type**\: :py:class:`VerboseInfo <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysmgr_oper.SystemProcess.NodeTable.Node.Name.ProcessNameVerboses.ProcessNameVerbose.VerboseInfo>`
**config**\: False
"""
_prefix = 'sysmgr-oper'
_revision = '2015-11-09'
def __init__(self):
    """Initialize a process-name-verbose list entry keyed by proc-name.

    Registers the single key leaf, wires up the three child containers
    (basic-info, detail-info, verbose-info), and builds a predicate-based
    segment path from the key value.
    """
    if sys.version_info > (3,):
        super().__init__()
    else:
        # Python 2 needs the explicit class/instance form of super().
        super(SystemProcess.NodeTable.Node.Name.ProcessNameVerboses.ProcessNameVerbose, self).__init__()

    self.yang_name = "process-name-verbose"
    self.yang_parent_name = "process-name-verboses"
    self.is_top_level_class = False
    self.has_list_ancestor = True
    self.ylist_key_names = ['proc_name']
    # YANG child name -> (python attribute, binding class).
    self._child_classes = OrderedDict([("basic-info", ("basic_info", SystemProcess.NodeTable.Node.Name.ProcessNameVerboses.ProcessNameVerbose.BasicInfo)), ("detail-info", ("detail_info", SystemProcess.NodeTable.Node.Name.ProcessNameVerboses.ProcessNameVerbose.DetailInfo)), ("verbose-info", ("verbose_info", SystemProcess.NodeTable.Node.Name.ProcessNameVerboses.ProcessNameVerbose.VerboseInfo))])
    self._leafs = OrderedDict([
        ('proc_name', (YLeaf(YType.str, 'proc-name'), ['str'])),
    ])
    self.proc_name = None

    # Child containers must be created and parented before freezing.
    self.basic_info = SystemProcess.NodeTable.Node.Name.ProcessNameVerboses.ProcessNameVerbose.BasicInfo()
    self.basic_info.parent = self
    self._children_name_map["basic_info"] = "basic-info"

    self.detail_info = SystemProcess.NodeTable.Node.Name.ProcessNameVerboses.ProcessNameVerbose.DetailInfo()
    self.detail_info.parent = self
    self._children_name_map["detail_info"] = "detail-info"

    self.verbose_info = SystemProcess.NodeTable.Node.Name.ProcessNameVerboses.ProcessNameVerbose.VerboseInfo()
    self.verbose_info.parent = self
    self._children_name_map["verbose_info"] = "verbose-info"

    # Segment path embeds the key: process-name-verbose[proc-name='<key>'].
    self._segment_path = lambda: "process-name-verbose" + "[proc-name='" + str(self.proc_name) + "']"
    # Must stay last: freezing makes __setattr__ reject unknown attributes.
    self._is_frozen = True
def __setattr__(self, name, value):
    """Route attribute writes through YDK validation; proc_name is the only leaf."""
    owner = SystemProcess.NodeTable.Node.Name.ProcessNameVerboses.ProcessNameVerbose
    self._perform_setattr(owner, ['proc_name'], name, value)
class BasicInfo(_Entity_):
"""
Process Basic Info
.. attribute:: proc_cpu_time
Process cpu time
**type**\: :py:class:`ProcCpuTime <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysmgr_oper.SystemProcess.NodeTable.Node.Name.ProcessNameVerboses.ProcessNameVerbose.BasicInfo.ProcCpuTime>`
**config**\: False
.. attribute:: job_id_xr
Job ID
**type**\: int
**range:** 0..4294967295
**config**\: False
.. attribute:: process_id
PID
**type**\: int
**range:** 0..4294967295
**config**\: False
.. attribute:: process_name
Process name
**type**\: str
**config**\: False
.. attribute:: executable
Executable name or path
**type**\: str
**config**\: False
.. attribute:: active_path
Active Path
**type**\: str
**config**\: False
.. attribute:: instance_id
Instance ID
**type**\: int
**range:** \-2147483648..2147483647
**config**\: False
.. attribute:: args
Args
**type**\: str
**config**\: False
.. attribute:: version_id
Version ID
**type**\: str
**config**\: False
.. attribute:: respawn
Respawn on/off
**type**\: str
**config**\: False
.. attribute:: respawn_count
Respawn Count
**type**\: int
**range:** \-2147483648..2147483647
**config**\: False
.. attribute:: last_started
Last Started
**type**\: str
**config**\: False
.. attribute:: process_state
Process State
**type**\: str
**config**\: False
.. attribute:: last_exit_status
Last Exit status
**type**\: int
**range:** \-2147483648..2147483647
**config**\: False
.. attribute:: last_exit_reason
Last Exit due to
**type**\: str
**config**\: False
.. attribute:: package_state
Package State
**type**\: str
**config**\: False
.. attribute:: started_on_config
Started on Config
**type**\: str
**config**\: False
.. attribute:: feature_name
Feature Name
**type**\: str
**config**\: False
.. attribute:: tag
Tag
**type**\: str
**config**\: False
.. attribute:: group
Process Group
**type**\: str
**config**\: False
.. attribute:: core
Core
**type**\: str
**config**\: False
.. attribute:: max_core
Max core
**type**\: int
**range:** \-2147483648..2147483647
**config**\: False
.. attribute:: level
Level
**type**\: str
**config**\: False
.. attribute:: mandatory
Is mandatory?
**type**\: bool
**config**\: False
.. attribute:: maint_mode_proc
Is admin mode process?
**type**\: bool
**config**\: False
.. attribute:: placement_state
Placement State
**type**\: str
**config**\: False
.. attribute:: start_up_path
Startup Path
**type**\: str
**config**\: False
.. attribute:: memory_limit
Memory Limit
**type**\: int
**range:** 0..4294967295
**config**\: False
.. attribute:: ready
Elapsed Ready
**type**\: str
**config**\: False
.. attribute:: available
Elapsed Available
**type**\: str
**config**\: False
.. attribute:: registered_item
Registered Items
**type**\: list of :py:class:`RegisteredItem <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysmgr_oper.SystemProcess.NodeTable.Node.Name.ProcessNameVerboses.ProcessNameVerbose.BasicInfo.RegisteredItem>`
**config**\: False
"""
_prefix = 'sysmgr-oper'
_revision = '2015-11-09'
def __init__(self):
    """Initialize the basic-info container: per-process summary leaves.

    Registers 29 read-only leaves (IDs, paths, respawn/exit state, flags),
    one child container (proc-cpu-time) and one child list (registered-item).
    """
    if sys.version_info > (3,):
        super().__init__()
    else:
        # Python 2 needs the explicit class/instance form of super().
        super(SystemProcess.NodeTable.Node.Name.ProcessNameVerboses.ProcessNameVerbose.BasicInfo, self).__init__()

    self.yang_name = "basic-info"
    self.yang_parent_name = "process-name-verbose"
    self.is_top_level_class = False
    self.has_list_ancestor = True
    self.ylist_key_names = []
    # YANG child name -> (python attribute, binding class).
    self._child_classes = OrderedDict([("proc-cpu-time", ("proc_cpu_time", SystemProcess.NodeTable.Node.Name.ProcessNameVerboses.ProcessNameVerbose.BasicInfo.ProcCpuTime)), ("registered-item", ("registered_item", SystemProcess.NodeTable.Node.Name.ProcessNameVerboses.ProcessNameVerbose.BasicInfo.RegisteredItem))])
    # python attribute -> (YLeaf(type, yang-name), [python types]).
    self._leafs = OrderedDict([
        ('job_id_xr', (YLeaf(YType.uint32, 'job-id-xr'), ['int'])),
        ('process_id', (YLeaf(YType.uint32, 'process-id'), ['int'])),
        ('process_name', (YLeaf(YType.str, 'process-name'), ['str'])),
        ('executable', (YLeaf(YType.str, 'executable'), ['str'])),
        ('active_path', (YLeaf(YType.str, 'active-path'), ['str'])),
        ('instance_id', (YLeaf(YType.int32, 'instance-id'), ['int'])),
        ('args', (YLeaf(YType.str, 'args'), ['str'])),
        ('version_id', (YLeaf(YType.str, 'version-id'), ['str'])),
        ('respawn', (YLeaf(YType.str, 'respawn'), ['str'])),
        ('respawn_count', (YLeaf(YType.int32, 'respawn-count'), ['int'])),
        ('last_started', (YLeaf(YType.str, 'last-started'), ['str'])),
        ('process_state', (YLeaf(YType.str, 'process-state'), ['str'])),
        ('last_exit_status', (YLeaf(YType.int32, 'last-exit-status'), ['int'])),
        ('last_exit_reason', (YLeaf(YType.str, 'last-exit-reason'), ['str'])),
        ('package_state', (YLeaf(YType.str, 'package-state'), ['str'])),
        ('started_on_config', (YLeaf(YType.str, 'started-on-config'), ['str'])),
        ('feature_name', (YLeaf(YType.str, 'feature-name'), ['str'])),
        ('tag', (YLeaf(YType.str, 'tag'), ['str'])),
        ('group', (YLeaf(YType.str, 'group'), ['str'])),
        ('core', (YLeaf(YType.str, 'core'), ['str'])),
        ('max_core', (YLeaf(YType.int32, 'max-core'), ['int'])),
        ('level', (YLeaf(YType.str, 'level'), ['str'])),
        ('mandatory', (YLeaf(YType.boolean, 'mandatory'), ['bool'])),
        ('maint_mode_proc', (YLeaf(YType.boolean, 'maint-mode-proc'), ['bool'])),
        ('placement_state', (YLeaf(YType.str, 'placement-state'), ['str'])),
        ('start_up_path', (YLeaf(YType.str, 'start-up-path'), ['str'])),
        ('memory_limit', (YLeaf(YType.uint32, 'memory-limit'), ['int'])),
        ('ready', (YLeaf(YType.str, 'ready'), ['str'])),
        ('available', (YLeaf(YType.str, 'available'), ['str'])),
    ])
    # All leaf values start unset; they are populated on read from the device.
    self.job_id_xr = None
    self.process_id = None
    self.process_name = None
    self.executable = None
    self.active_path = None
    self.instance_id = None
    self.args = None
    self.version_id = None
    self.respawn = None
    self.respawn_count = None
    self.last_started = None
    self.process_state = None
    self.last_exit_status = None
    self.last_exit_reason = None
    self.package_state = None
    self.started_on_config = None
    self.feature_name = None
    self.tag = None
    self.group = None
    self.core = None
    self.max_core = None
    self.level = None
    self.mandatory = None
    self.maint_mode_proc = None
    self.placement_state = None
    self.start_up_path = None
    self.memory_limit = None
    self.ready = None
    self.available = None

    self.proc_cpu_time = SystemProcess.NodeTable.Node.Name.ProcessNameVerboses.ProcessNameVerbose.BasicInfo.ProcCpuTime()
    self.proc_cpu_time.parent = self
    self._children_name_map["proc_cpu_time"] = "proc-cpu-time"

    self.registered_item = YList(self)
    self._segment_path = lambda: "basic-info"
    # Must stay last: freezing makes __setattr__ reject unknown attributes.
    self._is_frozen = True
def __setattr__(self, name, value):
    """Route attribute writes through YDK validation against the leaf set."""
    owner = SystemProcess.NodeTable.Node.Name.ProcessNameVerboses.ProcessNameVerbose.BasicInfo
    leaf_names = ['job_id_xr', 'process_id', 'process_name', 'executable', 'active_path', 'instance_id', 'args', 'version_id', 'respawn', 'respawn_count', 'last_started', 'process_state', 'last_exit_status', 'last_exit_reason', 'package_state', 'started_on_config', 'feature_name', 'tag', 'group', 'core', 'max_core', 'level', 'mandatory', 'maint_mode_proc', 'placement_state', 'start_up_path', 'memory_limit', 'ready', 'available']
    self._perform_setattr(owner, leaf_names, name, value)
class ProcCpuTime(_Entity_):
    """
    Process cpu time

    .. attribute:: user

        User time

        **type**\: str

        **config**\: False

    .. attribute:: system

        Kernel time

        **type**\: str

        **config**\: False

    .. attribute:: total

        Total time

        **type**\: str

        **config**\: False
    """

    _prefix = 'sysmgr-oper'
    _revision = '2015-11-09'

    def __init__(self):
        """Initialize the proc-cpu-time container (user/system/total leaves)."""
        if sys.version_info > (3,):
            super().__init__()
        else:
            # Python 2 needs the explicit class/instance form of super().
            super(SystemProcess.NodeTable.Node.Name.ProcessNameVerboses.ProcessNameVerbose.BasicInfo.ProcCpuTime, self).__init__()

        self.yang_name = "proc-cpu-time"
        self.yang_parent_name = "basic-info"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        self._leafs = OrderedDict([
            ('user', (YLeaf(YType.str, 'user'), ['str'])),
            ('system', (YLeaf(YType.str, 'system'), ['str'])),
            ('total', (YLeaf(YType.str, 'total'), ['str'])),
        ])
        # Leaf values start unset; populated on read from the device.
        self.user = None
        self.system = None
        self.total = None
        self._segment_path = lambda: "proc-cpu-time"
        # Must stay last: freezing makes __setattr__ reject unknown attributes.
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Delegate to YDK validation against the declared leaf names.
        self._perform_setattr(SystemProcess.NodeTable.Node.Name.ProcessNameVerboses.ProcessNameVerbose.BasicInfo.ProcCpuTime, ['user', 'system', 'total'], name, value)

    @staticmethod
    def _meta_info():
        # Look up this class's entry in the generated meta table.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_sysmgr_oper as meta
        return meta._meta_table['SystemProcess.NodeTable.Node.Name.ProcessNameVerboses.ProcessNameVerbose.BasicInfo.ProcCpuTime']['meta_info']
class RegisteredItem(_Entity_):
    """
    Registered Items

    .. attribute:: tuple

        Tuple

        **type**\: str

        **config**\: False
    """

    _prefix = 'sysmgr-oper'
    _revision = '2015-11-09'

    def __init__(self):
        """Initialize a registered-item list entry (single 'tuple' leaf, no key)."""
        if sys.version_info > (3,):
            super().__init__()
        else:
            # Python 2 needs the explicit class/instance form of super().
            super(SystemProcess.NodeTable.Node.Name.ProcessNameVerboses.ProcessNameVerbose.BasicInfo.RegisteredItem, self).__init__()

        self.yang_name = "registered-item"
        self.yang_parent_name = "basic-info"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        self._leafs = OrderedDict([
            ('tuple', (YLeaf(YType.str, 'tuple'), ['str'])),
        ])
        # NOTE: attribute name 'tuple' shadows the builtin; kept to match the YANG leaf.
        self.tuple = None
        self._segment_path = lambda: "registered-item"
        # Must stay last: freezing makes __setattr__ reject unknown attributes.
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Delegate to YDK validation against the declared leaf names.
        self._perform_setattr(SystemProcess.NodeTable.Node.Name.ProcessNameVerboses.ProcessNameVerbose.BasicInfo.RegisteredItem, ['tuple'], name, value)

    @staticmethod
    def _meta_info():
        # Look up this class's entry in the generated meta table.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_sysmgr_oper as meta
        return meta._meta_table['SystemProcess.NodeTable.Node.Name.ProcessNameVerboses.ProcessNameVerbose.BasicInfo.RegisteredItem']['meta_info']
@staticmethod
def _meta_info():
    """Return the generated meta-table entry for BasicInfo."""
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_sysmgr_oper as meta
    entry = meta._meta_table['SystemProcess.NodeTable.Node.Name.ProcessNameVerboses.ProcessNameVerbose.BasicInfo']
    return entry['meta_info']
class DetailInfo(_Entity_):
"""
Process Detail Info
.. attribute:: running_path
Running path
**type**\: str
**config**\: False
.. attribute:: package_path
Package path
**type**\: str
**config**\: False
.. attribute:: job_id_link
Job Id Link
**type**\: int
**range:** \-2147483648..2147483647
**config**\: False
.. attribute:: group_jid
Group Jid
**type**\: str
**config**\: False
.. attribute:: fail_count
Fail count
**type**\: int
**range:** 0..4294967295
**config**\: False
.. attribute:: restart_needed
Restart needed
**type**\: bool
**config**\: False
.. attribute:: init_process
Init process
**type**\: bool
**config**\: False
.. attribute:: last_online
Last Online
**type**\: str
**config**\: False
.. attribute:: this_pcb
This PCB
**type**\: str
**config**\: False
.. attribute:: next_pcb
Next PCB
**type**\: str
**config**\: False
.. attribute:: envs
Env variables
**type**\: str
**config**\: False
.. attribute:: wait_for
Wait For /dev/xxx
**type**\: str
**config**\: False
.. attribute:: job_id_on_rp
Job ID on RP
**type**\: int
**range:** \-2147483648..2147483647
**config**\: False
.. attribute:: is_standby_capable
Is standby capable?
**type**\: bool
**config**\: False
.. attribute:: disable_kill
Disable kill?
**type**\: bool
**config**\: False
.. attribute:: send_avail
Check avail
**type**\: bool
**config**\: False
.. attribute:: node_event_cli_info
Node Event CLI info
**type**\: int
**range:** \-2147483648..2147483647
**config**\: False
.. attribute:: node_redundancy_state
Node redundancy state
**type**\: str
**config**\: False
.. attribute:: role_event_cli_info
Role event cli info
**type**\: int
**range:** \-2147483648..2147483647
**config**\: False
.. attribute:: proc_role_state
Proc Role State
**type**\: str
**config**\: False
.. attribute:: standby_event_cli_info
Standby Event CLI info
**type**\: int
**range:** \-2147483648..2147483647
**config**\: False
.. attribute:: cleanup_event_cli_info
Cleanup event CLI info
**type**\: int
**range:** \-2147483648..2147483647
**config**\: False
.. attribute:: band_ready_event_cli_info
Band Ready Event CLI Info
**type**\: int
**range:** \-2147483648..2147483647
**config**\: False
.. attribute:: lr_event_cli_info
LR Event CLI Info
**type**\: int
**range:** \-2147483648..2147483647
**config**\: False
.. attribute:: plane_ready_event_cli_info
Plane Ready Event CLI info
**type**\: int
**range:** \-2147483648..2147483647
**config**\: False
.. attribute:: mdr_is_done_cli_info
MDR is done CLI Info
**type**\: int
**range:** \-2147483648..2147483647
**config**\: False
"""
_prefix = 'sysmgr-oper'
_revision = '2015-11-09'
def __init__(self):
    """Initialize the detail-info container: extended per-process state.

    Registers 26 read-only leaves (paths, job links, PCB pointers, event
    CLI info fields) and no child classes.
    """
    if sys.version_info > (3,):
        super().__init__()
    else:
        # Python 2 needs the explicit class/instance form of super().
        super(SystemProcess.NodeTable.Node.Name.ProcessNameVerboses.ProcessNameVerbose.DetailInfo, self).__init__()

    self.yang_name = "detail-info"
    self.yang_parent_name = "process-name-verbose"
    self.is_top_level_class = False
    self.has_list_ancestor = True
    self.ylist_key_names = []
    self._child_classes = OrderedDict([])
    # python attribute -> (YLeaf(type, yang-name), [python types]).
    self._leafs = OrderedDict([
        ('running_path', (YLeaf(YType.str, 'running-path'), ['str'])),
        ('package_path', (YLeaf(YType.str, 'package-path'), ['str'])),
        ('job_id_link', (YLeaf(YType.int32, 'job-id-link'), ['int'])),
        ('group_jid', (YLeaf(YType.str, 'group-jid'), ['str'])),
        ('fail_count', (YLeaf(YType.uint32, 'fail-count'), ['int'])),
        ('restart_needed', (YLeaf(YType.boolean, 'restart-needed'), ['bool'])),
        ('init_process', (YLeaf(YType.boolean, 'init-process'), ['bool'])),
        ('last_online', (YLeaf(YType.str, 'last-online'), ['str'])),
        ('this_pcb', (YLeaf(YType.str, 'this-pcb'), ['str'])),
        ('next_pcb', (YLeaf(YType.str, 'next-pcb'), ['str'])),
        ('envs', (YLeaf(YType.str, 'envs'), ['str'])),
        ('wait_for', (YLeaf(YType.str, 'wait-for'), ['str'])),
        ('job_id_on_rp', (YLeaf(YType.int32, 'job-id-on-rp'), ['int'])),
        ('is_standby_capable', (YLeaf(YType.boolean, 'is-standby-capable'), ['bool'])),
        ('disable_kill', (YLeaf(YType.boolean, 'disable-kill'), ['bool'])),
        ('send_avail', (YLeaf(YType.boolean, 'send-avail'), ['bool'])),
        ('node_event_cli_info', (YLeaf(YType.int32, 'node-event-cli-info'), ['int'])),
        ('node_redundancy_state', (YLeaf(YType.str, 'node-redundancy-state'), ['str'])),
        ('role_event_cli_info', (YLeaf(YType.int32, 'role-event-cli-info'), ['int'])),
        ('proc_role_state', (YLeaf(YType.str, 'proc-role-state'), ['str'])),
        ('standby_event_cli_info', (YLeaf(YType.int32, 'standby-event-cli-info'), ['int'])),
        ('cleanup_event_cli_info', (YLeaf(YType.int32, 'cleanup-event-cli-info'), ['int'])),
        ('band_ready_event_cli_info', (YLeaf(YType.int32, 'band-ready-event-cli-info'), ['int'])),
        ('lr_event_cli_info', (YLeaf(YType.int32, 'lr-event-cli-info'), ['int'])),
        ('plane_ready_event_cli_info', (YLeaf(YType.int32, 'plane-ready-event-cli-info'), ['int'])),
        ('mdr_is_done_cli_info', (YLeaf(YType.int32, 'mdr-is-done-cli-info'), ['int'])),
    ])
    # All leaf values start unset; populated on read from the device.
    self.running_path = None
    self.package_path = None
    self.job_id_link = None
    self.group_jid = None
    self.fail_count = None
    self.restart_needed = None
    self.init_process = None
    self.last_online = None
    self.this_pcb = None
    self.next_pcb = None
    self.envs = None
    self.wait_for = None
    self.job_id_on_rp = None
    self.is_standby_capable = None
    self.disable_kill = None
    self.send_avail = None
    self.node_event_cli_info = None
    self.node_redundancy_state = None
    self.role_event_cli_info = None
    self.proc_role_state = None
    self.standby_event_cli_info = None
    self.cleanup_event_cli_info = None
    self.band_ready_event_cli_info = None
    self.lr_event_cli_info = None
    self.plane_ready_event_cli_info = None
    self.mdr_is_done_cli_info = None
    self._segment_path = lambda: "detail-info"
    # Must stay last: freezing makes __setattr__ reject unknown attributes.
    self._is_frozen = True
def __setattr__(self, name, value):
    """Route attribute writes through YDK validation against the leaf set."""
    owner = SystemProcess.NodeTable.Node.Name.ProcessNameVerboses.ProcessNameVerbose.DetailInfo
    leaf_names = ['running_path', 'package_path', 'job_id_link', 'group_jid', 'fail_count', 'restart_needed', 'init_process', 'last_online', 'this_pcb', 'next_pcb', 'envs', 'wait_for', 'job_id_on_rp', 'is_standby_capable', 'disable_kill', 'send_avail', 'node_event_cli_info', 'node_redundancy_state', 'role_event_cli_info', 'proc_role_state', 'standby_event_cli_info', 'cleanup_event_cli_info', 'band_ready_event_cli_info', 'lr_event_cli_info', 'plane_ready_event_cli_info', 'mdr_is_done_cli_info']
    self._perform_setattr(owner, leaf_names, name, value)
@staticmethod
def _meta_info():
    """Return the generated meta-table entry for DetailInfo."""
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_sysmgr_oper as meta
    entry = meta._meta_table['SystemProcess.NodeTable.Node.Name.ProcessNameVerboses.ProcessNameVerbose.DetailInfo']
    return entry['meta_info']
class VerboseInfo(_Entity_):
"""
Process Verbose Info
.. attribute:: process_group
Process Group
**type**\: str
**config**\: False
.. attribute:: respawn_allowed
Is respawn allowed?
**type**\: int
**range:** \-2147483648..2147483647
**config**\: False
.. attribute:: wait_for_exit
Wait for exit
**type**\: int
**range:** \-2147483648..2147483647
**config**\: False
.. attribute:: dynamic_tag
Dynamic Tag
**type**\: int
**range:** \-2147483648..2147483647
**config**\: False
.. attribute:: forced_stop
Forced stop
**type**\: int
**range:** \-2147483648..2147483647
**config**\: False
.. attribute:: critical_process
Critical Process
**type**\: int
**range:** \-2147483648..2147483647
**config**\: False
.. attribute:: hold
Hold
**type**\: int
**range:** \-2147483648..2147483647
**config**\: False
.. attribute:: transient
Transient
**type**\: int
**range:** \-2147483648..2147483647
**config**\: False
.. attribute:: tuple_cfgmgr
Tuple Cfgmgr
**type**\: int
**range:** \-2147483648..2147483647
**config**\: False
.. attribute:: standby_capable
Standby capable
**type**\: int
**range:** \-2147483648..2147483647
**config**\: False
.. attribute:: edm_startup
EDM startup
**type**\: int
**range:** \-2147483648..2147483647
**config**\: False
.. attribute:: placement
Placement
**type**\: int
**range:** \-2147483648..2147483647
**config**\: False
.. attribute:: skip_kill_notif
Skip Kill Notif
**type**\: int
**range:** \-2147483648..2147483647
**config**\: False
.. attribute:: init_proc
Init process
**type**\: int
**range:** \-2147483648..2147483647
**config**\: False
.. attribute:: sysdb_event
Sysdb Event
**type**\: int
**range:** \-2147483648..2147483647
**config**\: False
.. attribute:: level_started
Level Started
**type**\: int
**range:** \-2147483648..2147483647
**config**\: False
.. attribute:: proc_avail
Process available
**type**\: int
**range:** \-2147483648..2147483647
**config**\: False
.. attribute:: tuples_scanned
Tuples Scanned
**type**\: int
**range:** \-2147483648..2147483647
**config**\: False
.. attribute:: no_chkpt_start
No checkpoint start
**type**\: int
**range:** \-2147483648..2147483647
**config**\: False
.. attribute:: in_shut_down
In Shut Down
**type**\: int
**range:** \-2147483648..2147483647
**config**\: False
.. attribute:: sm_started
SM started
**type**\: int
**range:** \-2147483648..2147483647
**config**\: False
.. attribute:: ignore_on_sc
Ignore on SC
**type**\: int
**range:** \-2147483648..2147483647
**config**\: False
.. attribute:: ignore_on_easy_bake
Ignore on EasyBake
**type**\: int
**range:** \-2147483648..2147483647
**config**\: False
.. attribute:: pre_init
Pre init
**type**\: int
**range:** \-2147483648..2147483647
**config**\: False
.. attribute:: eoi_received
EOI received
**type**\: int
**range:** \-2147483648..2147483647
**config**\: False
.. attribute:: eoi_timeout
EOI Timeout
**type**\: int
**range:** \-2147483648..2147483647
**config**\: False
.. attribute:: avail_timeout
Avail Timeout
**type**\: int
**range:** \-2147483648..2147483647
**config**\: False
.. attribute:: reserved_memory
Reserved Memory
**type**\: int
**range:** \-2147483648..2147483647
**config**\: False
.. attribute:: allow_warned
Allow Warned
**type**\: int
**range:** \-2147483648..2147483647
**config**\: False
.. attribute:: arg_change
Arg Change
**type**\: int
**range:** \-2147483648..2147483647
**config**\: False
.. attribute:: restart_on_tuple
Restart on tuple
**type**\: int
**range:** \-2147483648..2147483647
**config**\: False
.. attribute:: boot_hold
Boot Hold
**type**\: int
**range:** \-2147483648..2147483647
**config**\: False
.. attribute:: reg_id
Reg Id
**type**\: int
**range:** \-2147483648..2147483647
**config**\: False
.. attribute:: memory_limit
Memory Limit
**type**\: int
**range:** \-2147483648..2147483647
**config**\: False
.. attribute:: parent_job_id
Parent Job ID
**type**\: int
**range:** \-2147483648..2147483647
**config**\: False
.. attribute:: tuple_index
Tuple Index
**type**\: int
**range:** \-2147483648..2147483647
**config**\: False
.. attribute:: dump_count
Dump Count
**type**\: int
**range:** \-2147483648..2147483647
**config**\: False
.. attribute:: respawn_interval_user
Respawn Interval User
**type**\: int
**range:** \-2147483648..2147483647
**config**\: False
.. attribute:: silent_restart_count
Silent Restart Count
**type**\: int
**range:** \-2147483648..2147483647
**config**\: False
.. attribute:: critical_tier
Critical Tier
**type**\: int
**range:** \-2147483648..2147483647
**config**\: False
.. attribute:: exit_type
Exit Type
**type**\: int
**range:** \-2147483648..2147483647
**config**\: False
.. attribute:: init_timeout
Init Timeout
**type**\: int
**range:** \-2147483648..2147483647
**config**\: False
.. attribute:: restart_by_cmd
Restart by Command
**type**\: int
**range:** \-2147483648..2147483647
**config**\: False
.. attribute:: boot_pref
Boot Pref
**type**\: int
**range:** \-2147483648..2147483647
**config**\: False
.. attribute:: mdr_mbi_proc
Mdr Mbi proc
**type**\: int
**range:** \-2147483648..2147483647
**config**\: False
.. attribute:: mdr_non_mbi_kld
Mdr Non Mbi Kld
**type**\: int
**range:** \-2147483648..2147483647
**config**\: False
.. attribute:: mdr_mbi_kld
Mdr Mbi Kld
**type**\: int
**range:** \-2147483648..2147483647
**config**\: False
.. attribute:: mdr_shut_delay
Mdr Shut Delay
**type**\: int
**range:** \-2147483648..2147483647
**config**\: False
.. attribute:: mdr_keep_thru
Mdr Keep Thru
**type**\: int
**range:** \-2147483648..2147483647
**config**\: False
.. attribute:: mdr_spoofer
Mdr spoofer
**type**\: int
**range:** \-2147483648..2147483647
**config**\: False
.. attribute:: mdr_spoofed
Mdr spoofed
**type**\: int
**range:** \-2147483648..2147483647
**config**\: False
.. attribute:: mdr_spoofed_last
Mdr spoofed last
**type**\: int
**range:** \-2147483648..2147483647
**config**\: False
.. attribute:: mdr_spoofed_ready
Mdr Spoofed Ready
**type**\: int
**range:** \-2147483648..2147483647
**config**\: False
.. attribute:: mdr_pcb_check
Mdr PCB Check
**type**\: int
**range:** \-2147483648..2147483647
**config**\: False
.. attribute:: mdr_kill_tier
Mdr Kill Tier
**type**\: int
**range:** \-2147483648..2147483647
**config**\: False
.. attribute:: mdr_kld
Mdr kld
**type**\: int
**range:** \-2147483648..2147483647
**config**\: False
.. attribute:: mdr_level
Mdr Level
**type**\: int
**range:** \-2147483648..2147483647
**config**\: False
.. attribute:: fm_restart_cnt
FM restart count
**type**\: int
**range:** \-2147483648..2147483647
**config**\: False
.. attribute:: self_managed
Self Managed
**type**\: int
**range:** \-2147483648..2147483647
**config**\: False
.. attribute:: tuple
Tuple
**type**\: list of :py:class:`Tuple <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysmgr_oper.SystemProcess.NodeTable.Node.Name.ProcessNameVerboses.ProcessNameVerbose.VerboseInfo.Tuple>`
**config**\: False
.. attribute:: orig_tuple
Orig Tuple
**type**\: list of :py:class:`OrigTuple <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysmgr_oper.SystemProcess.NodeTable.Node.Name.ProcessNameVerboses.ProcessNameVerbose.VerboseInfo.OrigTuple>`
**config**\: False
"""
_prefix = 'sysmgr-oper'
_revision = '2015-11-09'
def __init__(self):
    """Initialize the verbose-info container: low-level sysmgr flags.

    Registers 59 read-only leaves (mostly int32 flag/counter fields) plus
    two child lists, tuple and orig-tuple.
    """
    if sys.version_info > (3,):
        super().__init__()
    else:
        # Python 2 needs the explicit class/instance form of super().
        super(SystemProcess.NodeTable.Node.Name.ProcessNameVerboses.ProcessNameVerbose.VerboseInfo, self).__init__()

    self.yang_name = "verbose-info"
    self.yang_parent_name = "process-name-verbose"
    self.is_top_level_class = False
    self.has_list_ancestor = True
    self.ylist_key_names = []
    # YANG child name -> (python attribute, binding class).
    self._child_classes = OrderedDict([("tuple", ("tuple", SystemProcess.NodeTable.Node.Name.ProcessNameVerboses.ProcessNameVerbose.VerboseInfo.Tuple)), ("orig-tuple", ("orig_tuple", SystemProcess.NodeTable.Node.Name.ProcessNameVerboses.ProcessNameVerbose.VerboseInfo.OrigTuple))])
    # python attribute -> (YLeaf(type, yang-name), [python types]).
    self._leafs = OrderedDict([
        ('process_group', (YLeaf(YType.str, 'process-group'), ['str'])),
        ('respawn_allowed', (YLeaf(YType.int32, 'respawn-allowed'), ['int'])),
        ('wait_for_exit', (YLeaf(YType.int32, 'wait-for-exit'), ['int'])),
        ('dynamic_tag', (YLeaf(YType.int32, 'dynamic-tag'), ['int'])),
        ('forced_stop', (YLeaf(YType.int32, 'forced-stop'), ['int'])),
        ('critical_process', (YLeaf(YType.int32, 'critical-process'), ['int'])),
        ('hold', (YLeaf(YType.int32, 'hold'), ['int'])),
        ('transient', (YLeaf(YType.int32, 'transient'), ['int'])),
        ('tuple_cfgmgr', (YLeaf(YType.int32, 'tuple-cfgmgr'), ['int'])),
        ('standby_capable', (YLeaf(YType.int32, 'standby-capable'), ['int'])),
        ('edm_startup', (YLeaf(YType.int32, 'edm-startup'), ['int'])),
        ('placement', (YLeaf(YType.int32, 'placement'), ['int'])),
        ('skip_kill_notif', (YLeaf(YType.int32, 'skip-kill-notif'), ['int'])),
        ('init_proc', (YLeaf(YType.int32, 'init-proc'), ['int'])),
        ('sysdb_event', (YLeaf(YType.int32, 'sysdb-event'), ['int'])),
        ('level_started', (YLeaf(YType.int32, 'level-started'), ['int'])),
        ('proc_avail', (YLeaf(YType.int32, 'proc-avail'), ['int'])),
        ('tuples_scanned', (YLeaf(YType.int32, 'tuples-scanned'), ['int'])),
        ('no_chkpt_start', (YLeaf(YType.int32, 'no-chkpt-start'), ['int'])),
        ('in_shut_down', (YLeaf(YType.int32, 'in-shut-down'), ['int'])),
        ('sm_started', (YLeaf(YType.int32, 'sm-started'), ['int'])),
        ('ignore_on_sc', (YLeaf(YType.int32, 'ignore-on-sc'), ['int'])),
        ('ignore_on_easy_bake', (YLeaf(YType.int32, 'ignore-on-easy-bake'), ['int'])),
        ('pre_init', (YLeaf(YType.int32, 'pre-init'), ['int'])),
        ('eoi_received', (YLeaf(YType.int32, 'eoi-received'), ['int'])),
        ('eoi_timeout', (YLeaf(YType.int32, 'eoi-timeout'), ['int'])),
        ('avail_timeout', (YLeaf(YType.int32, 'avail-timeout'), ['int'])),
        ('reserved_memory', (YLeaf(YType.int32, 'reserved-memory'), ['int'])),
        ('allow_warned', (YLeaf(YType.int32, 'allow-warned'), ['int'])),
        ('arg_change', (YLeaf(YType.int32, 'arg-change'), ['int'])),
        ('restart_on_tuple', (YLeaf(YType.int32, 'restart-on-tuple'), ['int'])),
        ('boot_hold', (YLeaf(YType.int32, 'boot-hold'), ['int'])),
        ('reg_id', (YLeaf(YType.int32, 'reg-id'), ['int'])),
        ('memory_limit', (YLeaf(YType.int32, 'memory-limit'), ['int'])),
        ('parent_job_id', (YLeaf(YType.int32, 'parent-job-id'), ['int'])),
        ('tuple_index', (YLeaf(YType.int32, 'tuple-index'), ['int'])),
        ('dump_count', (YLeaf(YType.int32, 'dump-count'), ['int'])),
        ('respawn_interval_user', (YLeaf(YType.int32, 'respawn-interval-user'), ['int'])),
        ('silent_restart_count', (YLeaf(YType.int32, 'silent-restart-count'), ['int'])),
        ('critical_tier', (YLeaf(YType.int32, 'critical-tier'), ['int'])),
        ('exit_type', (YLeaf(YType.int32, 'exit-type'), ['int'])),
        ('init_timeout', (YLeaf(YType.int32, 'init-timeout'), ['int'])),
        ('restart_by_cmd', (YLeaf(YType.int32, 'restart-by-cmd'), ['int'])),
        ('boot_pref', (YLeaf(YType.int32, 'boot-pref'), ['int'])),
        ('mdr_mbi_proc', (YLeaf(YType.int32, 'mdr-mbi-proc'), ['int'])),
        ('mdr_non_mbi_kld', (YLeaf(YType.int32, 'mdr-non-mbi-kld'), ['int'])),
        ('mdr_mbi_kld', (YLeaf(YType.int32, 'mdr-mbi-kld'), ['int'])),
        ('mdr_shut_delay', (YLeaf(YType.int32, 'mdr-shut-delay'), ['int'])),
        ('mdr_keep_thru', (YLeaf(YType.int32, 'mdr-keep-thru'), ['int'])),
        ('mdr_spoofer', (YLeaf(YType.int32, 'mdr-spoofer'), ['int'])),
        ('mdr_spoofed', (YLeaf(YType.int32, 'mdr-spoofed'), ['int'])),
        ('mdr_spoofed_last', (YLeaf(YType.int32, 'mdr-spoofed-last'), ['int'])),
        ('mdr_spoofed_ready', (YLeaf(YType.int32, 'mdr-spoofed-ready'), ['int'])),
        ('mdr_pcb_check', (YLeaf(YType.int32, 'mdr-pcb-check'), ['int'])),
        ('mdr_kill_tier', (YLeaf(YType.int32, 'mdr-kill-tier'), ['int'])),
        ('mdr_kld', (YLeaf(YType.int32, 'mdr-kld'), ['int'])),
        ('mdr_level', (YLeaf(YType.int32, 'mdr-level'), ['int'])),
        ('fm_restart_cnt', (YLeaf(YType.int32, 'fm-restart-cnt'), ['int'])),
        ('self_managed', (YLeaf(YType.int32, 'self-managed'), ['int'])),
    ])
    # All leaf values start unset; populated on read from the device.
    self.process_group = None
    self.respawn_allowed = None
    self.wait_for_exit = None
    self.dynamic_tag = None
    self.forced_stop = None
    self.critical_process = None
    self.hold = None
    self.transient = None
    self.tuple_cfgmgr = None
    self.standby_capable = None
    self.edm_startup = None
    self.placement = None
    self.skip_kill_notif = None
    self.init_proc = None
    self.sysdb_event = None
    self.level_started = None
    self.proc_avail = None
    self.tuples_scanned = None
    self.no_chkpt_start = None
    self.in_shut_down = None
    self.sm_started = None
    self.ignore_on_sc = None
    self.ignore_on_easy_bake = None
    self.pre_init = None
    self.eoi_received = None
    self.eoi_timeout = None
    self.avail_timeout = None
    self.reserved_memory = None
    self.allow_warned = None
    self.arg_change = None
    self.restart_on_tuple = None
    self.boot_hold = None
    self.reg_id = None
    self.memory_limit = None
    self.parent_job_id = None
    self.tuple_index = None
    self.dump_count = None
    self.respawn_interval_user = None
    self.silent_restart_count = None
    self.critical_tier = None
    self.exit_type = None
    self.init_timeout = None
    self.restart_by_cmd = None
    self.boot_pref = None
    self.mdr_mbi_proc = None
    self.mdr_non_mbi_kld = None
    self.mdr_mbi_kld = None
    self.mdr_shut_delay = None
    self.mdr_keep_thru = None
    self.mdr_spoofer = None
    self.mdr_spoofed = None
    self.mdr_spoofed_last = None
    self.mdr_spoofed_ready = None
    self.mdr_pcb_check = None
    self.mdr_kill_tier = None
    self.mdr_kld = None
    self.mdr_level = None
    self.fm_restart_cnt = None
    self.self_managed = None

    # Child lists for the tuple and orig-tuple YANG lists.
    self.tuple = YList(self)
    self.orig_tuple = YList(self)
    self._segment_path = lambda: "verbose-info"
    # Must stay last: freezing makes __setattr__ reject unknown attributes.
    self._is_frozen = True
def __setattr__(self, name, value):
    """Route attribute writes through YDK validation against the leaf set."""
    owner = SystemProcess.NodeTable.Node.Name.ProcessNameVerboses.ProcessNameVerbose.VerboseInfo
    leaf_names = ['process_group', 'respawn_allowed', 'wait_for_exit', 'dynamic_tag', 'forced_stop', 'critical_process', 'hold', 'transient', 'tuple_cfgmgr', 'standby_capable', 'edm_startup', 'placement', 'skip_kill_notif', 'init_proc', 'sysdb_event', 'level_started', 'proc_avail', 'tuples_scanned', 'no_chkpt_start', 'in_shut_down', 'sm_started', 'ignore_on_sc', 'ignore_on_easy_bake', 'pre_init', 'eoi_received', 'eoi_timeout', 'avail_timeout', 'reserved_memory', 'allow_warned', 'arg_change', 'restart_on_tuple', 'boot_hold', 'reg_id', 'memory_limit', 'parent_job_id', 'tuple_index', 'dump_count', 'respawn_interval_user', 'silent_restart_count', 'critical_tier', 'exit_type', 'init_timeout', 'restart_by_cmd', 'boot_pref', 'mdr_mbi_proc', 'mdr_non_mbi_kld', 'mdr_mbi_kld', 'mdr_shut_delay', 'mdr_keep_thru', 'mdr_spoofer', 'mdr_spoofed', 'mdr_spoofed_last', 'mdr_spoofed_ready', 'mdr_pcb_check', 'mdr_kill_tier', 'mdr_kld', 'mdr_level', 'fm_restart_cnt', 'self_managed']
    self._perform_setattr(owner, leaf_names, name, value)
class Tuple(_Entity_):
    """
    Tuple

    .. attribute:: tuple

        Tuple
        **type**\: str

        **config**\: False

    """

    _prefix = 'sysmgr-oper'
    _revision = '2015-11-09'

    def __init__(self):
        # Auto-generated binding for the YANG list entry "tuple" under
        # "verbose-info".  Metadata (_child_classes, _leafs) is registered
        # before the data attribute because the overridden __setattr__
        # routes every assignment through _perform_setattr.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(SystemProcess.NodeTable.Node.Name.ProcessNameVerboses.ProcessNameVerbose.VerboseInfo.Tuple, self).__init__()

        self.yang_name = "tuple"
        self.yang_parent_name = "verbose-info"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        self._leafs = OrderedDict([
            ('tuple', (YLeaf(YType.str, 'tuple'), ['str'])),
        ])
        self.tuple = None
        self._segment_path = lambda: "tuple"
        # Must be assigned last; presumably freezes the attribute set —
        # TODO confirm semantics in the _Entity_ base class.
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(SystemProcess.NodeTable.Node.Name.ProcessNameVerboses.ProcessNameVerbose.VerboseInfo.Tuple, ['tuple'], name, value)

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_sysmgr_oper as meta
        return meta._meta_table['SystemProcess.NodeTable.Node.Name.ProcessNameVerboses.ProcessNameVerbose.VerboseInfo.Tuple']['meta_info']
class OrigTuple(_Entity_):
    """
    Orig Tuple

    .. attribute:: tuple

        Tuple
        **type**\: str

        **config**\: False

    """

    _prefix = 'sysmgr-oper'
    _revision = '2015-11-09'

    def __init__(self):
        # Auto-generated binding for the YANG list entry "orig-tuple"
        # under "verbose-info"; structurally identical to Tuple except
        # for its yang_name / segment path.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(SystemProcess.NodeTable.Node.Name.ProcessNameVerboses.ProcessNameVerbose.VerboseInfo.OrigTuple, self).__init__()

        self.yang_name = "orig-tuple"
        self.yang_parent_name = "verbose-info"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        self._leafs = OrderedDict([
            ('tuple', (YLeaf(YType.str, 'tuple'), ['str'])),
        ])
        self.tuple = None
        self._segment_path = lambda: "orig-tuple"
        # Must be assigned last; presumably freezes the attribute set —
        # TODO confirm semantics in the _Entity_ base class.
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(SystemProcess.NodeTable.Node.Name.ProcessNameVerboses.ProcessNameVerbose.VerboseInfo.OrigTuple, ['tuple'], name, value)

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_sysmgr_oper as meta
        return meta._meta_table['SystemProcess.NodeTable.Node.Name.ProcessNameVerboses.ProcessNameVerbose.VerboseInfo.OrigTuple']['meta_info']
@staticmethod
def _meta_info():
    """Return the generated meta-info record for VerboseInfo."""
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_sysmgr_oper as meta
    entry = meta._meta_table['SystemProcess.NodeTable.Node.Name.ProcessNameVerboses.ProcessNameVerbose.VerboseInfo']
    return entry['meta_info']
@staticmethod
def _meta_info():
    """Return the generated meta-info record for ProcessNameVerbose."""
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_sysmgr_oper as meta
    entry = meta._meta_table['SystemProcess.NodeTable.Node.Name.ProcessNameVerboses.ProcessNameVerbose']
    return entry['meta_info']
@staticmethod
def _meta_info():
    """Return the generated meta-info record for ProcessNameVerboses."""
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_sysmgr_oper as meta
    entry = meta._meta_table['SystemProcess.NodeTable.Node.Name.ProcessNameVerboses']
    return entry['meta_info']
@staticmethod
def _meta_info():
    """Return the generated meta-info record for Name."""
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_sysmgr_oper as meta
    entry = meta._meta_table['SystemProcess.NodeTable.Node.Name']
    return entry['meta_info']
class Jids(_Entity_):
    """
    Process job id information

    .. attribute:: jid

        Process <jid> information
        **type**\: list of :py:class:`Jid <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysmgr_oper.SystemProcess.NodeTable.Node.Jids.Jid>`

        **config**\: False

    """

    _prefix = 'sysmgr-oper'
    _revision = '2015-11-09'

    def __init__(self):
        # Auto-generated binding for the "jids" container under "node".
        # Metadata is registered before data attributes because the
        # overridden __setattr__ validates every assignment.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(SystemProcess.NodeTable.Node.Jids, self).__init__()

        self.yang_name = "jids"
        self.yang_parent_name = "node"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([("jid", ("jid", SystemProcess.NodeTable.Node.Jids.Jid))])
        self._leafs = OrderedDict()

        self.jid = YList(self)
        self._segment_path = lambda: "jids"
        # Must be assigned last; presumably freezes the attribute set.
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(SystemProcess.NodeTable.Node.Jids, [], name, value)


    class Jid(_Entity_):
        """
        Process <jid> information

        .. attribute:: job_id  (key)

            Job ID
            **type**\: int

            **range:** 0..4294967295

            **config**\: False

        .. attribute:: proc_cpu_time

            Process cpu time
            **type**\: :py:class:`ProcCpuTime <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysmgr_oper.SystemProcess.NodeTable.Node.Jids.Jid.ProcCpuTime>`

            **config**\: False

        .. attribute:: job_id_xr

            Job ID
            **type**\: int

            **range:** 0..4294967295

            **config**\: False

        .. attribute:: process_id

            PID
            **type**\: int

            **range:** 0..4294967295

            **config**\: False

        .. attribute:: process_name

            Process name
            **type**\: str

            **config**\: False

        .. attribute:: executable

            Executable name or path
            **type**\: str

            **config**\: False

        .. attribute:: active_path

            Active Path
            **type**\: str

            **config**\: False

        .. attribute:: instance_id

            Instance ID
            **type**\: int

            **range:** \-2147483648..2147483647

            **config**\: False

        .. attribute:: args

            Args
            **type**\: str

            **config**\: False

        .. attribute:: version_id

            Version ID
            **type**\: str

            **config**\: False

        .. attribute:: respawn

            Respawn on/off
            **type**\: str

            **config**\: False

        .. attribute:: respawn_count

            Respawn Count
            **type**\: int

            **range:** \-2147483648..2147483647

            **config**\: False

        .. attribute:: last_started

            Last Started
            **type**\: str

            **config**\: False

        .. attribute:: process_state

            Process State
            **type**\: str

            **config**\: False

        .. attribute:: last_exit_status

            Last Exit status
            **type**\: int

            **range:** \-2147483648..2147483647

            **config**\: False

        .. attribute:: last_exit_reason

            Last Exit due to
            **type**\: str

            **config**\: False

        .. attribute:: package_state

            Package State
            **type**\: str

            **config**\: False

        .. attribute:: started_on_config

            Started on Config
            **type**\: str

            **config**\: False

        .. attribute:: feature_name

            Feature Name
            **type**\: str

            **config**\: False

        .. attribute:: tag

            Tag
            **type**\: str

            **config**\: False

        .. attribute:: group

            Process Group
            **type**\: str

            **config**\: False

        .. attribute:: core

            Core
            **type**\: str

            **config**\: False

        .. attribute:: max_core

            Max core
            **type**\: int

            **range:** \-2147483648..2147483647

            **config**\: False

        .. attribute:: level

            Level
            **type**\: str

            **config**\: False

        .. attribute:: mandatory

            Is mandatory?
            **type**\: bool

            **config**\: False

        .. attribute:: maint_mode_proc

            Is admin mode process?
            **type**\: bool

            **config**\: False

        .. attribute:: placement_state

            Placement State
            **type**\: str

            **config**\: False

        .. attribute:: start_up_path

            Startup Path
            **type**\: str

            **config**\: False

        .. attribute:: memory_limit

            Memory Limit
            **type**\: int

            **range:** 0..4294967295

            **config**\: False

        .. attribute:: ready

            Elapsed Ready
            **type**\: str

            **config**\: False

        .. attribute:: available

            Elapsed Available
            **type**\: str

            **config**\: False

        .. attribute:: registered_item

            Registered Items
            **type**\: list of :py:class:`RegisteredItem <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysmgr_oper.SystemProcess.NodeTable.Node.Jids.Jid.RegisteredItem>`

            **config**\: False

        """

        _prefix = 'sysmgr-oper'
        _revision = '2015-11-09'

        def __init__(self):
            # List entry keyed by job_id (see ylist_key_names and the
            # key predicate in _segment_path below).
            if sys.version_info > (3,):
                super().__init__()
            else:
                super(SystemProcess.NodeTable.Node.Jids.Jid, self).__init__()

            self.yang_name = "jid"
            self.yang_parent_name = "jids"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = ['job_id']
            self._child_classes = OrderedDict([("proc-cpu-time", ("proc_cpu_time", SystemProcess.NodeTable.Node.Jids.Jid.ProcCpuTime)), ("registered-item", ("registered_item", SystemProcess.NodeTable.Node.Jids.Jid.RegisteredItem))])
            self._leafs = OrderedDict([
                ('job_id', (YLeaf(YType.uint32, 'job-id'), ['int'])),
                ('job_id_xr', (YLeaf(YType.uint32, 'job-id-xr'), ['int'])),
                ('process_id', (YLeaf(YType.uint32, 'process-id'), ['int'])),
                ('process_name', (YLeaf(YType.str, 'process-name'), ['str'])),
                ('executable', (YLeaf(YType.str, 'executable'), ['str'])),
                ('active_path', (YLeaf(YType.str, 'active-path'), ['str'])),
                ('instance_id', (YLeaf(YType.int32, 'instance-id'), ['int'])),
                ('args', (YLeaf(YType.str, 'args'), ['str'])),
                ('version_id', (YLeaf(YType.str, 'version-id'), ['str'])),
                ('respawn', (YLeaf(YType.str, 'respawn'), ['str'])),
                ('respawn_count', (YLeaf(YType.int32, 'respawn-count'), ['int'])),
                ('last_started', (YLeaf(YType.str, 'last-started'), ['str'])),
                ('process_state', (YLeaf(YType.str, 'process-state'), ['str'])),
                ('last_exit_status', (YLeaf(YType.int32, 'last-exit-status'), ['int'])),
                ('last_exit_reason', (YLeaf(YType.str, 'last-exit-reason'), ['str'])),
                ('package_state', (YLeaf(YType.str, 'package-state'), ['str'])),
                ('started_on_config', (YLeaf(YType.str, 'started-on-config'), ['str'])),
                ('feature_name', (YLeaf(YType.str, 'feature-name'), ['str'])),
                ('tag', (YLeaf(YType.str, 'tag'), ['str'])),
                ('group', (YLeaf(YType.str, 'group'), ['str'])),
                ('core', (YLeaf(YType.str, 'core'), ['str'])),
                ('max_core', (YLeaf(YType.int32, 'max-core'), ['int'])),
                ('level', (YLeaf(YType.str, 'level'), ['str'])),
                ('mandatory', (YLeaf(YType.boolean, 'mandatory'), ['bool'])),
                ('maint_mode_proc', (YLeaf(YType.boolean, 'maint-mode-proc'), ['bool'])),
                ('placement_state', (YLeaf(YType.str, 'placement-state'), ['str'])),
                ('start_up_path', (YLeaf(YType.str, 'start-up-path'), ['str'])),
                ('memory_limit', (YLeaf(YType.uint32, 'memory-limit'), ['int'])),
                ('ready', (YLeaf(YType.str, 'ready'), ['str'])),
                ('available', (YLeaf(YType.str, 'available'), ['str'])),
            ])
            self.job_id = None
            self.job_id_xr = None
            self.process_id = None
            self.process_name = None
            self.executable = None
            self.active_path = None
            self.instance_id = None
            self.args = None
            self.version_id = None
            self.respawn = None
            self.respawn_count = None
            self.last_started = None
            self.process_state = None
            self.last_exit_status = None
            self.last_exit_reason = None
            self.package_state = None
            self.started_on_config = None
            self.feature_name = None
            self.tag = None
            self.group = None
            self.core = None
            self.max_core = None
            self.level = None
            self.mandatory = None
            self.maint_mode_proc = None
            self.placement_state = None
            self.start_up_path = None
            self.memory_limit = None
            self.ready = None
            self.available = None

            self.proc_cpu_time = SystemProcess.NodeTable.Node.Jids.Jid.ProcCpuTime()
            self.proc_cpu_time.parent = self
            self._children_name_map["proc_cpu_time"] = "proc-cpu-time"

            self.registered_item = YList(self)
            # Segment path embeds the list key, e.g. jid[job-id='123'].
            self._segment_path = lambda: "jid" + "[job-id='" + str(self.job_id) + "']"
            self._is_frozen = True

        def __setattr__(self, name, value):
            self._perform_setattr(SystemProcess.NodeTable.Node.Jids.Jid, ['job_id', 'job_id_xr', 'process_id', 'process_name', 'executable', 'active_path', 'instance_id', 'args', 'version_id', 'respawn', 'respawn_count', 'last_started', 'process_state', 'last_exit_status', 'last_exit_reason', 'package_state', 'started_on_config', 'feature_name', 'tag', 'group', 'core', 'max_core', 'level', 'mandatory', 'maint_mode_proc', 'placement_state', 'start_up_path', 'memory_limit', 'ready', 'available'], name, value)


        class ProcCpuTime(_Entity_):
            """
            Process cpu time

            .. attribute:: user

                User time
                **type**\: str

                **config**\: False

            .. attribute:: system

                Kernel time
                **type**\: str

                **config**\: False

            .. attribute:: total

                Total time
                **type**\: str

                **config**\: False

            """

            _prefix = 'sysmgr-oper'
            _revision = '2015-11-09'

            def __init__(self):
                if sys.version_info > (3,):
                    super().__init__()
                else:
                    super(SystemProcess.NodeTable.Node.Jids.Jid.ProcCpuTime, self).__init__()

                self.yang_name = "proc-cpu-time"
                self.yang_parent_name = "jid"
                self.is_top_level_class = False
                self.has_list_ancestor = True
                self.ylist_key_names = []
                self._child_classes = OrderedDict([])
                self._leafs = OrderedDict([
                    ('user', (YLeaf(YType.str, 'user'), ['str'])),
                    ('system', (YLeaf(YType.str, 'system'), ['str'])),
                    ('total', (YLeaf(YType.str, 'total'), ['str'])),
                ])
                self.user = None
                self.system = None
                self.total = None
                self._segment_path = lambda: "proc-cpu-time"
                self._is_frozen = True

            def __setattr__(self, name, value):
                self._perform_setattr(SystemProcess.NodeTable.Node.Jids.Jid.ProcCpuTime, ['user', 'system', 'total'], name, value)

            @staticmethod
            def _meta_info():
                from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_sysmgr_oper as meta
                return meta._meta_table['SystemProcess.NodeTable.Node.Jids.Jid.ProcCpuTime']['meta_info']


        class RegisteredItem(_Entity_):
            """
            Registered Items

            .. attribute:: tuple

                Tuple
                **type**\: str

                **config**\: False

            """

            _prefix = 'sysmgr-oper'
            _revision = '2015-11-09'

            def __init__(self):
                if sys.version_info > (3,):
                    super().__init__()
                else:
                    super(SystemProcess.NodeTable.Node.Jids.Jid.RegisteredItem, self).__init__()

                self.yang_name = "registered-item"
                self.yang_parent_name = "jid"
                self.is_top_level_class = False
                self.has_list_ancestor = True
                self.ylist_key_names = []
                self._child_classes = OrderedDict([])
                self._leafs = OrderedDict([
                    ('tuple', (YLeaf(YType.str, 'tuple'), ['str'])),
                ])
                self.tuple = None
                self._segment_path = lambda: "registered-item"
                self._is_frozen = True

            def __setattr__(self, name, value):
                self._perform_setattr(SystemProcess.NodeTable.Node.Jids.Jid.RegisteredItem, ['tuple'], name, value)

            @staticmethod
            def _meta_info():
                from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_sysmgr_oper as meta
                return meta._meta_table['SystemProcess.NodeTable.Node.Jids.Jid.RegisteredItem']['meta_info']

        @staticmethod
        def _meta_info():
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_sysmgr_oper as meta
            return meta._meta_table['SystemProcess.NodeTable.Node.Jids.Jid']['meta_info']

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_sysmgr_oper as meta
        return meta._meta_table['SystemProcess.NodeTable.Node.Jids']['meta_info']
class Dynamic(_Entity_):
    """
    Process Dynamic information

    .. attribute:: process_count

        Number of processes
        **type**\: int

        **range:** 0..4294967295

        **config**\: False

    .. attribute:: process

        Array of processes
        **type**\: list of :py:class:`Process <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysmgr_oper.SystemProcess.NodeTable.Node.Dynamic.Process>`

        **config**\: False

    """

    _prefix = 'sysmgr-oper'
    _revision = '2015-11-09'

    def __init__(self):
        # Auto-generated binding for the "dynamic" container under "node".
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(SystemProcess.NodeTable.Node.Dynamic, self).__init__()

        self.yang_name = "dynamic"
        self.yang_parent_name = "node"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([("process", ("process", SystemProcess.NodeTable.Node.Dynamic.Process))])
        self._leafs = OrderedDict([
            ('process_count', (YLeaf(YType.uint32, 'process-count'), ['int'])),
        ])
        self.process_count = None

        self.process = YList(self)
        self._segment_path = lambda: "dynamic"
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(SystemProcess.NodeTable.Node.Dynamic, ['process_count'], name, value)


    class Process(_Entity_):
        """
        Array of processes

        .. attribute:: name

            Process name
            **type**\: str

            **config**\: False

        .. attribute:: instance_id

            Instance ID
            **type**\: int

            **range:** 0..4294967295

            **config**\: False

        .. attribute:: args

            Arguments
            **type**\: str

            **config**\: False

        .. attribute:: jid

            Job ID
            **type**\: int

            **range:** 0..4294967295

            **config**\: False

        .. attribute:: state

            Process state
            **type**\: :py:class:`ProcessState <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysmgr_oper.ProcessState>`

            **config**\: False

        .. attribute:: last_started

            Date and time of process last started
            **type**\: str

            **config**\: False

        .. attribute:: respawn_count

            Respawn count
            **type**\: int

            **range:** 0..255

            **config**\: False

        .. attribute:: placement_state

            Placement state
            **type**\: :py:class:`PlacementState <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysmgr_oper.PlacementState>`

            **config**\: False

        .. attribute:: is_mandatory

            Is process mandatory?
            **type**\: bool

            **config**\: False

        .. attribute:: is_maintenance

            Is maintenance mode?
            **type**\: bool

            **config**\: False

        """

        _prefix = 'sysmgr-oper'
        _revision = '2015-11-09'

        def __init__(self):
            if sys.version_info > (3,):
                super().__init__()
            else:
                super(SystemProcess.NodeTable.Node.Dynamic.Process, self).__init__()

            self.yang_name = "process"
            self.yang_parent_name = "dynamic"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = []
            self._child_classes = OrderedDict([])
            self._leafs = OrderedDict([
                ('name', (YLeaf(YType.str, 'name'), ['str'])),
                ('instance_id', (YLeaf(YType.uint32, 'instance-id'), ['int'])),
                ('args', (YLeaf(YType.str, 'args'), ['str'])),
                ('jid', (YLeaf(YType.uint32, 'jid'), ['int'])),
                ('state', (YLeaf(YType.enumeration, 'state'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysmgr_oper', 'ProcessState', '')])),
                ('last_started', (YLeaf(YType.str, 'last-started'), ['str'])),
                ('respawn_count', (YLeaf(YType.uint8, 'respawn-count'), ['int'])),
                ('placement_state', (YLeaf(YType.enumeration, 'placement-state'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysmgr_oper', 'PlacementState', '')])),
                ('is_mandatory', (YLeaf(YType.boolean, 'is-mandatory'), ['bool'])),
                ('is_maintenance', (YLeaf(YType.boolean, 'is-maintenance'), ['bool'])),
            ])
            self.name = None
            self.instance_id = None
            self.args = None
            self.jid = None
            self.state = None
            self.last_started = None
            self.respawn_count = None
            self.placement_state = None
            self.is_mandatory = None
            self.is_maintenance = None
            self._segment_path = lambda: "process"
            self._is_frozen = True

        def __setattr__(self, name, value):
            self._perform_setattr(SystemProcess.NodeTable.Node.Dynamic.Process, ['name', 'instance_id', 'args', 'jid', 'state', 'last_started', 'respawn_count', 'placement_state', 'is_mandatory', 'is_maintenance'], name, value)

        @staticmethod
        def _meta_info():
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_sysmgr_oper as meta
            return meta._meta_table['SystemProcess.NodeTable.Node.Dynamic.Process']['meta_info']

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_sysmgr_oper as meta
        return meta._meta_table['SystemProcess.NodeTable.Node.Dynamic']['meta_info']
class BootStalled(_Entity_):
    """
    Process Boot Stalled information

    .. attribute:: spawn_status

        Spawn status of the processes
        **type**\: str

        **config**\: False

    .. attribute:: boot_hold

        Boot hold information of the processes
        **type**\: list of :py:class:`BootHold <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysmgr_oper.SystemProcess.NodeTable.Node.BootStalled.BootHold>`

        **config**\: False

    """

    _prefix = 'sysmgr-oper'
    _revision = '2015-11-09'

    def __init__(self):
        # Auto-generated binding for the "boot-stalled" container
        # under "node".
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(SystemProcess.NodeTable.Node.BootStalled, self).__init__()

        self.yang_name = "boot-stalled"
        self.yang_parent_name = "node"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([("boot-hold", ("boot_hold", SystemProcess.NodeTable.Node.BootStalled.BootHold))])
        self._leafs = OrderedDict([
            ('spawn_status', (YLeaf(YType.str, 'spawn-status'), ['str'])),
        ])
        self.spawn_status = None

        self.boot_hold = YList(self)
        self._segment_path = lambda: "boot-stalled"
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(SystemProcess.NodeTable.Node.BootStalled, ['spawn_status'], name, value)


    class BootHold(_Entity_):
        """
        Boot hold information of the processes

        .. attribute:: boot_held_by_name

            Process name
            **type**\: str

            **config**\: False

        .. attribute:: instance_id

            Instance Id
            **type**\: int

            **range:** 0..4294967295

            **config**\: False

        .. attribute:: jid

            Job ID
            **type**\: int

            **range:** 0..4294967295

            **config**\: False

        """

        _prefix = 'sysmgr-oper'
        _revision = '2015-11-09'

        def __init__(self):
            if sys.version_info > (3,):
                super().__init__()
            else:
                super(SystemProcess.NodeTable.Node.BootStalled.BootHold, self).__init__()

            self.yang_name = "boot-hold"
            self.yang_parent_name = "boot-stalled"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = []
            self._child_classes = OrderedDict([])
            self._leafs = OrderedDict([
                ('boot_held_by_name', (YLeaf(YType.str, 'boot-held-by-name'), ['str'])),
                ('instance_id', (YLeaf(YType.uint32, 'instance-id'), ['int'])),
                ('jid', (YLeaf(YType.uint32, 'jid'), ['int'])),
            ])
            self.boot_held_by_name = None
            self.instance_id = None
            self.jid = None
            self._segment_path = lambda: "boot-hold"
            self._is_frozen = True

        def __setattr__(self, name, value):
            self._perform_setattr(SystemProcess.NodeTable.Node.BootStalled.BootHold, ['boot_held_by_name', 'instance_id', 'jid'], name, value)

        @staticmethod
        def _meta_info():
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_sysmgr_oper as meta
            return meta._meta_table['SystemProcess.NodeTable.Node.BootStalled.BootHold']['meta_info']

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_sysmgr_oper as meta
        return meta._meta_table['SystemProcess.NodeTable.Node.BootStalled']['meta_info']
class Processes(_Entity_):
    """
    Process all information

    .. attribute:: process_count

        Number of processes
        **type**\: int

        **range:** 0..4294967295

        **config**\: False

    .. attribute:: process

        Array of processes
        **type**\: list of :py:class:`Process <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysmgr_oper.SystemProcess.NodeTable.Node.Processes.Process>`

        **config**\: False

    """

    _prefix = 'sysmgr-oper'
    _revision = '2015-11-09'

    def __init__(self):
        # Auto-generated binding for the "processes" container under
        # "node"; same shape as the sibling Dynamic/Startup/Mandatory
        # containers.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(SystemProcess.NodeTable.Node.Processes, self).__init__()

        self.yang_name = "processes"
        self.yang_parent_name = "node"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([("process", ("process", SystemProcess.NodeTable.Node.Processes.Process))])
        self._leafs = OrderedDict([
            ('process_count', (YLeaf(YType.uint32, 'process-count'), ['int'])),
        ])
        self.process_count = None

        self.process = YList(self)
        self._segment_path = lambda: "processes"
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(SystemProcess.NodeTable.Node.Processes, ['process_count'], name, value)


    class Process(_Entity_):
        """
        Array of processes

        .. attribute:: name

            Process name
            **type**\: str

            **config**\: False

        .. attribute:: instance_id

            Instance ID
            **type**\: int

            **range:** 0..4294967295

            **config**\: False

        .. attribute:: args

            Arguments
            **type**\: str

            **config**\: False

        .. attribute:: jid

            Job ID
            **type**\: int

            **range:** 0..4294967295

            **config**\: False

        .. attribute:: state

            Process state
            **type**\: :py:class:`ProcessState <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysmgr_oper.ProcessState>`

            **config**\: False

        .. attribute:: last_started

            Date and time of process last started
            **type**\: str

            **config**\: False

        .. attribute:: respawn_count

            Respawn count
            **type**\: int

            **range:** 0..255

            **config**\: False

        .. attribute:: placement_state

            Placement state
            **type**\: :py:class:`PlacementState <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysmgr_oper.PlacementState>`

            **config**\: False

        .. attribute:: is_mandatory

            Is process mandatory?
            **type**\: bool

            **config**\: False

        .. attribute:: is_maintenance

            Is maintenance mode?
            **type**\: bool

            **config**\: False

        """

        _prefix = 'sysmgr-oper'
        _revision = '2015-11-09'

        def __init__(self):
            if sys.version_info > (3,):
                super().__init__()
            else:
                super(SystemProcess.NodeTable.Node.Processes.Process, self).__init__()

            self.yang_name = "process"
            self.yang_parent_name = "processes"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = []
            self._child_classes = OrderedDict([])
            self._leafs = OrderedDict([
                ('name', (YLeaf(YType.str, 'name'), ['str'])),
                ('instance_id', (YLeaf(YType.uint32, 'instance-id'), ['int'])),
                ('args', (YLeaf(YType.str, 'args'), ['str'])),
                ('jid', (YLeaf(YType.uint32, 'jid'), ['int'])),
                ('state', (YLeaf(YType.enumeration, 'state'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysmgr_oper', 'ProcessState', '')])),
                ('last_started', (YLeaf(YType.str, 'last-started'), ['str'])),
                ('respawn_count', (YLeaf(YType.uint8, 'respawn-count'), ['int'])),
                ('placement_state', (YLeaf(YType.enumeration, 'placement-state'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysmgr_oper', 'PlacementState', '')])),
                ('is_mandatory', (YLeaf(YType.boolean, 'is-mandatory'), ['bool'])),
                ('is_maintenance', (YLeaf(YType.boolean, 'is-maintenance'), ['bool'])),
            ])
            self.name = None
            self.instance_id = None
            self.args = None
            self.jid = None
            self.state = None
            self.last_started = None
            self.respawn_count = None
            self.placement_state = None
            self.is_mandatory = None
            self.is_maintenance = None
            self._segment_path = lambda: "process"
            self._is_frozen = True

        def __setattr__(self, name, value):
            self._perform_setattr(SystemProcess.NodeTable.Node.Processes.Process, ['name', 'instance_id', 'args', 'jid', 'state', 'last_started', 'respawn_count', 'placement_state', 'is_mandatory', 'is_maintenance'], name, value)

        @staticmethod
        def _meta_info():
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_sysmgr_oper as meta
            return meta._meta_table['SystemProcess.NodeTable.Node.Processes.Process']['meta_info']

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_sysmgr_oper as meta
        return meta._meta_table['SystemProcess.NodeTable.Node.Processes']['meta_info']
class Startup(_Entity_):
    """
    Process Startup information

    .. attribute:: process_count

        Number of processes
        **type**\: int

        **range:** 0..4294967295

        **config**\: False

    .. attribute:: process

        Array of processes
        **type**\: list of :py:class:`Process <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysmgr_oper.SystemProcess.NodeTable.Node.Startup.Process>`

        **config**\: False

    """

    _prefix = 'sysmgr-oper'
    _revision = '2015-11-09'

    def __init__(self):
        # Auto-generated binding for the "startup" container under
        # "node"; same shape as the sibling Dynamic/Processes/Mandatory
        # containers.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(SystemProcess.NodeTable.Node.Startup, self).__init__()

        self.yang_name = "startup"
        self.yang_parent_name = "node"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([("process", ("process", SystemProcess.NodeTable.Node.Startup.Process))])
        self._leafs = OrderedDict([
            ('process_count', (YLeaf(YType.uint32, 'process-count'), ['int'])),
        ])
        self.process_count = None

        self.process = YList(self)
        self._segment_path = lambda: "startup"
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(SystemProcess.NodeTable.Node.Startup, ['process_count'], name, value)


    class Process(_Entity_):
        """
        Array of processes

        .. attribute:: name

            Process name
            **type**\: str

            **config**\: False

        .. attribute:: instance_id

            Instance ID
            **type**\: int

            **range:** 0..4294967295

            **config**\: False

        .. attribute:: args

            Arguments
            **type**\: str

            **config**\: False

        .. attribute:: jid

            Job ID
            **type**\: int

            **range:** 0..4294967295

            **config**\: False

        .. attribute:: state

            Process state
            **type**\: :py:class:`ProcessState <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysmgr_oper.ProcessState>`

            **config**\: False

        .. attribute:: last_started

            Date and time of process last started
            **type**\: str

            **config**\: False

        .. attribute:: respawn_count

            Respawn count
            **type**\: int

            **range:** 0..255

            **config**\: False

        .. attribute:: placement_state

            Placement state
            **type**\: :py:class:`PlacementState <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysmgr_oper.PlacementState>`

            **config**\: False

        .. attribute:: is_mandatory

            Is process mandatory?
            **type**\: bool

            **config**\: False

        .. attribute:: is_maintenance

            Is maintenance mode?
            **type**\: bool

            **config**\: False

        """

        _prefix = 'sysmgr-oper'
        _revision = '2015-11-09'

        def __init__(self):
            if sys.version_info > (3,):
                super().__init__()
            else:
                super(SystemProcess.NodeTable.Node.Startup.Process, self).__init__()

            self.yang_name = "process"
            self.yang_parent_name = "startup"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = []
            self._child_classes = OrderedDict([])
            self._leafs = OrderedDict([
                ('name', (YLeaf(YType.str, 'name'), ['str'])),
                ('instance_id', (YLeaf(YType.uint32, 'instance-id'), ['int'])),
                ('args', (YLeaf(YType.str, 'args'), ['str'])),
                ('jid', (YLeaf(YType.uint32, 'jid'), ['int'])),
                ('state', (YLeaf(YType.enumeration, 'state'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysmgr_oper', 'ProcessState', '')])),
                ('last_started', (YLeaf(YType.str, 'last-started'), ['str'])),
                ('respawn_count', (YLeaf(YType.uint8, 'respawn-count'), ['int'])),
                ('placement_state', (YLeaf(YType.enumeration, 'placement-state'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysmgr_oper', 'PlacementState', '')])),
                ('is_mandatory', (YLeaf(YType.boolean, 'is-mandatory'), ['bool'])),
                ('is_maintenance', (YLeaf(YType.boolean, 'is-maintenance'), ['bool'])),
            ])
            self.name = None
            self.instance_id = None
            self.args = None
            self.jid = None
            self.state = None
            self.last_started = None
            self.respawn_count = None
            self.placement_state = None
            self.is_mandatory = None
            self.is_maintenance = None
            self._segment_path = lambda: "process"
            self._is_frozen = True

        def __setattr__(self, name, value):
            self._perform_setattr(SystemProcess.NodeTable.Node.Startup.Process, ['name', 'instance_id', 'args', 'jid', 'state', 'last_started', 'respawn_count', 'placement_state', 'is_mandatory', 'is_maintenance'], name, value)

        @staticmethod
        def _meta_info():
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_sysmgr_oper as meta
            return meta._meta_table['SystemProcess.NodeTable.Node.Startup.Process']['meta_info']

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_sysmgr_oper as meta
        return meta._meta_table['SystemProcess.NodeTable.Node.Startup']['meta_info']
class Mandatory(_Entity_):
"""
Mandatory Process information
.. attribute:: process_count
Number of processes
**type**\: int
**range:** 0..4294967295
**config**\: False
.. attribute:: process
Array of processes
**type**\: list of :py:class:`Process <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysmgr_oper.SystemProcess.NodeTable.Node.Mandatory.Process>`
**config**\: False
"""
_prefix = 'sysmgr-oper'
_revision = '2015-11-09'
def __init__(self):
if sys.version_info > (3,):
super().__init__()
else:
super(SystemProcess.NodeTable.Node.Mandatory, self).__init__()
self.yang_name = "mandatory"
self.yang_parent_name = "node"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([("process", ("process", SystemProcess.NodeTable.Node.Mandatory.Process))])
self._leafs = OrderedDict([
('process_count', (YLeaf(YType.uint32, 'process-count'), ['int'])),
])
self.process_count = None
self.process = YList(self)
self._segment_path = lambda: "mandatory"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(SystemProcess.NodeTable.Node.Mandatory, ['process_count'], name, value)
# Generated YDK binding for one entry of the read-only 'process' YANG list
# under the 'mandatory' container.
class Process(_Entity_):
    """
    Array of processes
    .. attribute:: name
    Process name
    **type**\: str
    **config**\: False
    .. attribute:: instance_id
    Instance ID
    **type**\: int
    **range:** 0..4294967295
    **config**\: False
    .. attribute:: args
    Arguments
    **type**\: str
    **config**\: False
    .. attribute:: jid
    Job ID
    **type**\: int
    **range:** 0..4294967295
    **config**\: False
    .. attribute:: state
    Process state
    **type**\: :py:class:`ProcessState <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysmgr_oper.ProcessState>`
    **config**\: False
    .. attribute:: last_started
    Date and time of process last started
    **type**\: str
    **config**\: False
    .. attribute:: respawn_count
    Respawn count
    **type**\: int
    **range:** 0..255
    **config**\: False
    .. attribute:: placement_state
    Placement state
    **type**\: :py:class:`PlacementState <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysmgr_oper.PlacementState>`
    **config**\: False
    .. attribute:: is_mandatory
    Is process mandatory?
    **type**\: bool
    **config**\: False
    .. attribute:: is_maintenance
    Is maintenance mode?
    **type**\: bool
    **config**\: False
    """
    _prefix = 'sysmgr-oper'
    _revision = '2015-11-09'

    def __init__(self):
        # Py2/Py3-compatible superclass initialization (generated shim).
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(SystemProcess.NodeTable.Node.Mandatory.Process, self).__init__()
        self.yang_name = "process"
        self.yang_parent_name = "mandatory"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        # Leaf node; no nested containers or lists.
        self._child_classes = OrderedDict([])
        self._leafs = OrderedDict([
            ('name', (YLeaf(YType.str, 'name'), ['str'])),
            ('instance_id', (YLeaf(YType.uint32, 'instance-id'), ['int'])),
            ('args', (YLeaf(YType.str, 'args'), ['str'])),
            ('jid', (YLeaf(YType.uint32, 'jid'), ['int'])),
            ('state', (YLeaf(YType.enumeration, 'state'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysmgr_oper', 'ProcessState', '')])),
            ('last_started', (YLeaf(YType.str, 'last-started'), ['str'])),
            ('respawn_count', (YLeaf(YType.uint8, 'respawn-count'), ['int'])),
            ('placement_state', (YLeaf(YType.enumeration, 'placement-state'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysmgr_oper', 'PlacementState', '')])),
            ('is_mandatory', (YLeaf(YType.boolean, 'is-mandatory'), ['bool'])),
            ('is_maintenance', (YLeaf(YType.boolean, 'is-maintenance'), ['bool'])),
        ])
        self.name = None
        self.instance_id = None
        self.args = None
        self.jid = None
        self.state = None
        self.last_started = None
        self.respawn_count = None
        self.placement_state = None
        self.is_mandatory = None
        self.is_maintenance = None
        self._segment_path = lambda: "process"
        # Must be assigned last: freezes the attribute set.
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Route all attribute writes through YDK leaf validation.
        self._perform_setattr(SystemProcess.NodeTable.Node.Mandatory.Process, ['name', 'instance_id', 'args', 'jid', 'state', 'last_started', 'respawn_count', 'placement_state', 'is_mandatory', 'is_maintenance'], name, value)

    @staticmethod
    def _meta_info():
        # Lazy import: avoids loading the large meta table at module import time.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_sysmgr_oper as meta
        return meta._meta_table['SystemProcess.NodeTable.Node.Mandatory.Process']['meta_info']
@staticmethod
def _meta_info():
    # Lazy import: avoids loading the large meta table at module import time.
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_sysmgr_oper as meta
    return meta._meta_table['SystemProcess.NodeTable.Node.Mandatory']['meta_info']
# Generated YDK binding for the read-only 'abort' YANG container
# (process abort history for a node).
class Abort(_Entity_):
    """
    Process Abort information
    .. attribute:: process_abort_count
    Number of Aborted Processes
    **type**\: int
    **range:** 0..4294967295
    **config**\: False
    .. attribute:: process
    Array of aborted processes
    **type**\: list of :py:class:`Process <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysmgr_oper.SystemProcess.NodeTable.Node.Abort.Process>`
    **config**\: False
    """
    _prefix = 'sysmgr-oper'
    _revision = '2015-11-09'

    def __init__(self):
        # Py2/Py3-compatible superclass initialization (generated shim).
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(SystemProcess.NodeTable.Node.Abort, self).__init__()
        self.yang_name = "abort"
        self.yang_parent_name = "node"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        # Maps YANG child name -> (python attribute name, binding class).
        self._child_classes = OrderedDict([("process", ("process", SystemProcess.NodeTable.Node.Abort.Process))])
        self._leafs = OrderedDict([
            ('process_abort_count', (YLeaf(YType.uint32, 'process-abort-count'), ['int'])),
        ])
        self.process_abort_count = None
        # YList of Abort.Process entries (YANG list 'process').
        self.process = YList(self)
        self._segment_path = lambda: "abort"
        # Must be assigned last: freezes the attribute set.
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Route all attribute writes through YDK leaf validation.
        self._perform_setattr(SystemProcess.NodeTable.Node.Abort, ['process_abort_count'], name, value)

    class Process(_Entity_):
        """
        Array of aborted processes
        .. attribute:: name
        Process name
        **type**\: str
        **config**\: False
        .. attribute:: timebuf
        Date and time of process abort
        **type**\: str
        **config**\: False
        .. attribute:: job_id
        Job ID
        **type**\: int
        **range:** 0..4294967295
        **config**\: False
        .. attribute:: is_respawned
        Respawn information
        **type**\: str
        **config**\: False
        """
        _prefix = 'sysmgr-oper'
        _revision = '2015-11-09'

        def __init__(self):
            # Py2/Py3-compatible superclass initialization (generated shim).
            if sys.version_info > (3,):
                super().__init__()
            else:
                super(SystemProcess.NodeTable.Node.Abort.Process, self).__init__()
            self.yang_name = "process"
            self.yang_parent_name = "abort"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = []
            self._child_classes = OrderedDict([])
            self._leafs = OrderedDict([
                ('name', (YLeaf(YType.str, 'name'), ['str'])),
                ('timebuf', (YLeaf(YType.str, 'timebuf'), ['str'])),
                ('job_id', (YLeaf(YType.uint32, 'job-id'), ['int'])),
                ('is_respawned', (YLeaf(YType.str, 'is-respawned'), ['str'])),
            ])
            self.name = None
            self.timebuf = None
            self.job_id = None
            self.is_respawned = None
            self._segment_path = lambda: "process"
            # Must be assigned last: freezes the attribute set.
            self._is_frozen = True

        def __setattr__(self, name, value):
            # Route all attribute writes through YDK leaf validation.
            self._perform_setattr(SystemProcess.NodeTable.Node.Abort.Process, ['name', 'timebuf', 'job_id', 'is_respawned'], name, value)

        @staticmethod
        def _meta_info():
            # Lazy import: avoids loading the large meta table at module import time.
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_sysmgr_oper as meta
            return meta._meta_table['SystemProcess.NodeTable.Node.Abort.Process']['meta_info']

    @staticmethod
    def _meta_info():
        # Lazy import: avoids loading the large meta table at module import time.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_sysmgr_oper as meta
        return meta._meta_table['SystemProcess.NodeTable.Node.Abort']['meta_info']
# Generated YDK binding for the read-only 'failover' YANG container
# (process failover timing/statistics for a node).
class Failover(_Entity_):
    """
    Process Failover information
    .. attribute:: failover_log
    Failover log message
    **type**\: str
    **config**\: False
    .. attribute:: critical_failover_elapsed_time
    Critical Failover Elapsed Time
    **type**\: str
    **config**\: False
    .. attribute:: last_process_started
    Last process started
    **type**\: str
    **config**\: False
    .. attribute:: primary_failover_elapsed_time
    Primary failover elapsed time
    **type**\: str
    **config**\: False
    .. attribute:: last_primary_proc_started
    Last primary process started
    **type**\: str
    **config**\: False
    .. attribute:: standby_band_statistic
    Standby Band statistics
    **type**\: list of :py:class:`StandbyBandStatistic <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysmgr_oper.SystemProcess.NodeTable.Node.Failover.StandbyBandStatistic>`
    **config**\: False
    .. attribute:: active_band_statistic
    Active Band statistics
    **type**\: list of :py:class:`ActiveBandStatistic <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysmgr_oper.SystemProcess.NodeTable.Node.Failover.ActiveBandStatistic>`
    **config**\: False
    .. attribute:: active_ts_boot_proc
    List of booted process as per avail time
    **type**\: list of :py:class:`ActiveTsBootProc <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysmgr_oper.SystemProcess.NodeTable.Node.Failover.ActiveTsBootProc>`
    **config**\: False
    .. attribute:: start_ts_boot_proc
    List of booted processes per start time
    **type**\: list of :py:class:`StartTsBootProc <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysmgr_oper.SystemProcess.NodeTable.Node.Failover.StartTsBootProc>`
    **config**\: False
    .. attribute:: primary_band_statistic
    Primary Band statistics
    **type**\: list of :py:class:`PrimaryBandStatistic <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysmgr_oper.SystemProcess.NodeTable.Node.Failover.PrimaryBandStatistic>`
    **config**\: False
    .. attribute:: primary_ts_boot_proc
    List of booted processes per primary time
    **type**\: list of :py:class:`PrimaryTsBootProc <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysmgr_oper.SystemProcess.NodeTable.Node.Failover.PrimaryTsBootProc>`
    **config**\: False
    .. attribute:: primary_start_ts_boot_proc
    List of booted process per primary start time
    **type**\: list of :py:class:`PrimaryStartTsBootProc <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysmgr_oper.SystemProcess.NodeTable.Node.Failover.PrimaryStartTsBootProc>`
    **config**\: False
    """
    _prefix = 'sysmgr-oper'
    _revision = '2015-11-09'

    def __init__(self):
        # Py2/Py3-compatible superclass initialization (generated shim).
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(SystemProcess.NodeTable.Node.Failover, self).__init__()
        self.yang_name = "failover"
        self.yang_parent_name = "node"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        # Maps YANG child name -> (python attribute name, binding class).
        self._child_classes = OrderedDict([("standby-band-statistic", ("standby_band_statistic", SystemProcess.NodeTable.Node.Failover.StandbyBandStatistic)), ("active-band-statistic", ("active_band_statistic", SystemProcess.NodeTable.Node.Failover.ActiveBandStatistic)), ("active-ts-boot-proc", ("active_ts_boot_proc", SystemProcess.NodeTable.Node.Failover.ActiveTsBootProc)), ("start-ts-boot-proc", ("start_ts_boot_proc", SystemProcess.NodeTable.Node.Failover.StartTsBootProc)), ("primary-band-statistic", ("primary_band_statistic", SystemProcess.NodeTable.Node.Failover.PrimaryBandStatistic)), ("primary-ts-boot-proc", ("primary_ts_boot_proc", SystemProcess.NodeTable.Node.Failover.PrimaryTsBootProc)), ("primary-start-ts-boot-proc", ("primary_start_ts_boot_proc", SystemProcess.NodeTable.Node.Failover.PrimaryStartTsBootProc))])
        self._leafs = OrderedDict([
            ('failover_log', (YLeaf(YType.str, 'failover-log'), ['str'])),
            ('critical_failover_elapsed_time', (YLeaf(YType.str, 'critical-failover-elapsed-time'), ['str'])),
            ('last_process_started', (YLeaf(YType.str, 'last-process-started'), ['str'])),
            ('primary_failover_elapsed_time', (YLeaf(YType.str, 'primary-failover-elapsed-time'), ['str'])),
            ('last_primary_proc_started', (YLeaf(YType.str, 'last-primary-proc-started'), ['str'])),
        ])
        self.failover_log = None
        self.critical_failover_elapsed_time = None
        self.last_process_started = None
        self.primary_failover_elapsed_time = None
        self.last_primary_proc_started = None
        # One YList per YANG child list declared in _child_classes above.
        self.standby_band_statistic = YList(self)
        self.active_band_statistic = YList(self)
        self.active_ts_boot_proc = YList(self)
        self.start_ts_boot_proc = YList(self)
        self.primary_band_statistic = YList(self)
        self.primary_ts_boot_proc = YList(self)
        self.primary_start_ts_boot_proc = YList(self)
        self._segment_path = lambda: "failover"
        # Must be assigned last: freezes the attribute set.
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Route all attribute writes through YDK leaf validation.
        self._perform_setattr(SystemProcess.NodeTable.Node.Failover, ['failover_log', 'critical_failover_elapsed_time', 'last_process_started', 'primary_failover_elapsed_time', 'last_primary_proc_started'], name, value)

    class StandbyBandStatistic(_Entity_):
        """
        Standby Band statistics
        .. attribute:: level
        Level
        **type**\: str
        **config**\: False
        .. attribute:: band_name
        Band Name
        **type**\: str
        **config**\: False
        .. attribute:: band_finish_time
        Band finish time
        **type**\: str
        **config**\: False
        .. attribute:: band_time
        Band time
        **type**\: str
        **config**\: False
        .. attribute:: finish_time
        Finish Time
        **type**\: str
        **config**\: False
        .. attribute:: idle_percentage
        Idle Percentage
        **type**\: str
        **config**\: False
        **units**\: percentage
        .. attribute:: jid
        Jid
        **type**\: int
        **range:** 0..4294967295
        **config**\: False
        .. attribute:: ready_time
        Ready Time
        **type**\: str
        **config**\: False
        .. attribute:: last_process
        Last Process Name
        **type**\: str
        **config**\: False
        """
        _prefix = 'sysmgr-oper'
        _revision = '2015-11-09'

        def __init__(self):
            # Py2/Py3-compatible superclass initialization (generated shim).
            if sys.version_info > (3,):
                super().__init__()
            else:
                super(SystemProcess.NodeTable.Node.Failover.StandbyBandStatistic, self).__init__()
            self.yang_name = "standby-band-statistic"
            self.yang_parent_name = "failover"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = []
            self._child_classes = OrderedDict([])
            self._leafs = OrderedDict([
                ('level', (YLeaf(YType.str, 'level'), ['str'])),
                ('band_name', (YLeaf(YType.str, 'band-name'), ['str'])),
                ('band_finish_time', (YLeaf(YType.str, 'band-finish-time'), ['str'])),
                ('band_time', (YLeaf(YType.str, 'band-time'), ['str'])),
                ('finish_time', (YLeaf(YType.str, 'finish-time'), ['str'])),
                ('idle_percentage', (YLeaf(YType.str, 'idle-percentage'), ['str'])),
                ('jid', (YLeaf(YType.uint32, 'jid'), ['int'])),
                ('ready_time', (YLeaf(YType.str, 'ready-time'), ['str'])),
                ('last_process', (YLeaf(YType.str, 'last-process'), ['str'])),
            ])
            self.level = None
            self.band_name = None
            self.band_finish_time = None
            self.band_time = None
            self.finish_time = None
            self.idle_percentage = None
            self.jid = None
            self.ready_time = None
            self.last_process = None
            self._segment_path = lambda: "standby-band-statistic"
            # Must be assigned last: freezes the attribute set.
            self._is_frozen = True

        def __setattr__(self, name, value):
            self._perform_setattr(SystemProcess.NodeTable.Node.Failover.StandbyBandStatistic, ['level', 'band_name', 'band_finish_time', 'band_time', 'finish_time', 'idle_percentage', 'jid', 'ready_time', 'last_process'], name, value)

        @staticmethod
        def _meta_info():
            # Lazy import: avoids loading the large meta table at module import time.
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_sysmgr_oper as meta
            return meta._meta_table['SystemProcess.NodeTable.Node.Failover.StandbyBandStatistic']['meta_info']

    class ActiveBandStatistic(_Entity_):
        """
        Active Band statistics
        .. attribute:: level
        Level
        **type**\: str
        **config**\: False
        .. attribute:: band_name
        Band Name
        **type**\: str
        **config**\: False
        .. attribute:: band_finish_time
        Band finish time
        **type**\: str
        **config**\: False
        .. attribute:: band_time
        Band time
        **type**\: str
        **config**\: False
        .. attribute:: finish_time
        Finish Time
        **type**\: str
        **config**\: False
        .. attribute:: idle_percentage
        Idle Percentage
        **type**\: str
        **config**\: False
        **units**\: percentage
        .. attribute:: jid
        Jid
        **type**\: int
        **range:** 0..4294967295
        **config**\: False
        .. attribute:: ready_time
        Ready Time
        **type**\: str
        **config**\: False
        .. attribute:: last_process
        Last Process Name
        **type**\: str
        **config**\: False
        """
        _prefix = 'sysmgr-oper'
        _revision = '2015-11-09'

        def __init__(self):
            # Py2/Py3-compatible superclass initialization (generated shim).
            if sys.version_info > (3,):
                super().__init__()
            else:
                super(SystemProcess.NodeTable.Node.Failover.ActiveBandStatistic, self).__init__()
            self.yang_name = "active-band-statistic"
            self.yang_parent_name = "failover"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = []
            self._child_classes = OrderedDict([])
            self._leafs = OrderedDict([
                ('level', (YLeaf(YType.str, 'level'), ['str'])),
                ('band_name', (YLeaf(YType.str, 'band-name'), ['str'])),
                ('band_finish_time', (YLeaf(YType.str, 'band-finish-time'), ['str'])),
                ('band_time', (YLeaf(YType.str, 'band-time'), ['str'])),
                ('finish_time', (YLeaf(YType.str, 'finish-time'), ['str'])),
                ('idle_percentage', (YLeaf(YType.str, 'idle-percentage'), ['str'])),
                ('jid', (YLeaf(YType.uint32, 'jid'), ['int'])),
                ('ready_time', (YLeaf(YType.str, 'ready-time'), ['str'])),
                ('last_process', (YLeaf(YType.str, 'last-process'), ['str'])),
            ])
            self.level = None
            self.band_name = None
            self.band_finish_time = None
            self.band_time = None
            self.finish_time = None
            self.idle_percentage = None
            self.jid = None
            self.ready_time = None
            self.last_process = None
            self._segment_path = lambda: "active-band-statistic"
            # Must be assigned last: freezes the attribute set.
            self._is_frozen = True

        def __setattr__(self, name, value):
            self._perform_setattr(SystemProcess.NodeTable.Node.Failover.ActiveBandStatistic, ['level', 'band_name', 'band_finish_time', 'band_time', 'finish_time', 'idle_percentage', 'jid', 'ready_time', 'last_process'], name, value)

        @staticmethod
        def _meta_info():
            # Lazy import: avoids loading the large meta table at module import time.
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_sysmgr_oper as meta
            return meta._meta_table['SystemProcess.NodeTable.Node.Failover.ActiveBandStatistic']['meta_info']

    class ActiveTsBootProc(_Entity_):
        """
        List of booted process as per avail time
        .. attribute:: active_time_stamp
        Active Time Stamp
        **type**\: str
        **config**\: False
        .. attribute:: go_active
        Go Active time stamp
        **type**\: str
        **config**\: False
        .. attribute:: level
        Level
        **type**\: str
        **config**\: False
        .. attribute:: band_name
        Band Name
        **type**\: str
        **config**\: False
        .. attribute:: job_id
        Job Id
        **type**\: int
        **range:** \-2147483648..2147483647
        **config**\: False
        .. attribute:: instance_id
        Instance Id
        **type**\: int
        **range:** \-2147483648..2147483647
        **config**\: False
        .. attribute:: avail_time_stamp
        Avail Time Stamp
        **type**\: str
        **config**\: False
        .. attribute:: avail
        Time since Avail
        **type**\: str
        **config**\: False
        .. attribute:: is_avail_timeout
        Is Avail timeout
        **type**\: bool
        **config**\: False
        .. attribute:: process_name
        Process Name
        **type**\: str
        **config**\: False
        """
        _prefix = 'sysmgr-oper'
        _revision = '2015-11-09'

        def __init__(self):
            # Py2/Py3-compatible superclass initialization (generated shim).
            if sys.version_info > (3,):
                super().__init__()
            else:
                super(SystemProcess.NodeTable.Node.Failover.ActiveTsBootProc, self).__init__()
            self.yang_name = "active-ts-boot-proc"
            self.yang_parent_name = "failover"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = []
            self._child_classes = OrderedDict([])
            self._leafs = OrderedDict([
                ('active_time_stamp', (YLeaf(YType.str, 'active-time-stamp'), ['str'])),
                ('go_active', (YLeaf(YType.str, 'go-active'), ['str'])),
                ('level', (YLeaf(YType.str, 'level'), ['str'])),
                ('band_name', (YLeaf(YType.str, 'band-name'), ['str'])),
                ('job_id', (YLeaf(YType.int32, 'job-id'), ['int'])),
                ('instance_id', (YLeaf(YType.int32, 'instance-id'), ['int'])),
                ('avail_time_stamp', (YLeaf(YType.str, 'avail-time-stamp'), ['str'])),
                ('avail', (YLeaf(YType.str, 'avail'), ['str'])),
                ('is_avail_timeout', (YLeaf(YType.boolean, 'is-avail-timeout'), ['bool'])),
                ('process_name', (YLeaf(YType.str, 'process-name'), ['str'])),
            ])
            self.active_time_stamp = None
            self.go_active = None
            self.level = None
            self.band_name = None
            self.job_id = None
            self.instance_id = None
            self.avail_time_stamp = None
            self.avail = None
            self.is_avail_timeout = None
            self.process_name = None
            self._segment_path = lambda: "active-ts-boot-proc"
            # Must be assigned last: freezes the attribute set.
            self._is_frozen = True

        def __setattr__(self, name, value):
            self._perform_setattr(SystemProcess.NodeTable.Node.Failover.ActiveTsBootProc, ['active_time_stamp', 'go_active', 'level', 'band_name', 'job_id', 'instance_id', 'avail_time_stamp', 'avail', 'is_avail_timeout', 'process_name'], name, value)

        @staticmethod
        def _meta_info():
            # Lazy import: avoids loading the large meta table at module import time.
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_sysmgr_oper as meta
            return meta._meta_table['SystemProcess.NodeTable.Node.Failover.ActiveTsBootProc']['meta_info']

    class StartTsBootProc(_Entity_):
        """
        List of booted processes per start time
        .. attribute:: start_time_stamp
        Start Time Stamp
        **type**\: str
        **config**\: False
        .. attribute:: started
        Time since started
        **type**\: str
        **config**\: False
        .. attribute:: level
        Level
        **type**\: str
        **config**\: False
        .. attribute:: jid
        Job Id
        **type**\: int
        **range:** \-2147483648..2147483647
        **config**\: False
        .. attribute:: instance_id
        Instance Id
        **type**\: int
        **range:** \-2147483648..2147483647
        **config**\: False
        .. attribute:: ready_time_stamp
        Ready Time Stamp
        **type**\: str
        **config**\: False
        .. attribute:: ready
        Time since Ready
        **type**\: str
        **config**\: False
        .. attribute:: is_eoi_timeout
        Is EOI timeout
        **type**\: bool
        **config**\: False
        .. attribute:: process_name
        Process Name
        **type**\: str
        **config**\: False
        """
        _prefix = 'sysmgr-oper'
        _revision = '2015-11-09'

        def __init__(self):
            # Py2/Py3-compatible superclass initialization (generated shim).
            if sys.version_info > (3,):
                super().__init__()
            else:
                super(SystemProcess.NodeTable.Node.Failover.StartTsBootProc, self).__init__()
            self.yang_name = "start-ts-boot-proc"
            self.yang_parent_name = "failover"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = []
            self._child_classes = OrderedDict([])
            self._leafs = OrderedDict([
                ('start_time_stamp', (YLeaf(YType.str, 'start-time-stamp'), ['str'])),
                ('started', (YLeaf(YType.str, 'started'), ['str'])),
                ('level', (YLeaf(YType.str, 'level'), ['str'])),
                ('jid', (YLeaf(YType.int32, 'jid'), ['int'])),
                ('instance_id', (YLeaf(YType.int32, 'instance-id'), ['int'])),
                ('ready_time_stamp', (YLeaf(YType.str, 'ready-time-stamp'), ['str'])),
                ('ready', (YLeaf(YType.str, 'ready'), ['str'])),
                ('is_eoi_timeout', (YLeaf(YType.boolean, 'is-eoi-timeout'), ['bool'])),
                ('process_name', (YLeaf(YType.str, 'process-name'), ['str'])),
            ])
            self.start_time_stamp = None
            self.started = None
            self.level = None
            self.jid = None
            self.instance_id = None
            self.ready_time_stamp = None
            self.ready = None
            self.is_eoi_timeout = None
            self.process_name = None
            self._segment_path = lambda: "start-ts-boot-proc"
            # Must be assigned last: freezes the attribute set.
            self._is_frozen = True

        def __setattr__(self, name, value):
            self._perform_setattr(SystemProcess.NodeTable.Node.Failover.StartTsBootProc, ['start_time_stamp', 'started', 'level', 'jid', 'instance_id', 'ready_time_stamp', 'ready', 'is_eoi_timeout', 'process_name'], name, value)

        @staticmethod
        def _meta_info():
            # Lazy import: avoids loading the large meta table at module import time.
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_sysmgr_oper as meta
            return meta._meta_table['SystemProcess.NodeTable.Node.Failover.StartTsBootProc']['meta_info']

    class PrimaryBandStatistic(_Entity_):
        """
        Primary Band statistics
        .. attribute:: level
        Level
        **type**\: str
        **config**\: False
        .. attribute:: band_name
        Band Name
        **type**\: str
        **config**\: False
        .. attribute:: band_finish_time
        Band finish time
        **type**\: str
        **config**\: False
        .. attribute:: band_time
        Band time
        **type**\: str
        **config**\: False
        .. attribute:: finish_time
        Finish Time
        **type**\: str
        **config**\: False
        .. attribute:: idle_percentage
        Idle Percentage
        **type**\: str
        **config**\: False
        **units**\: percentage
        .. attribute:: jid
        Jid
        **type**\: int
        **range:** 0..4294967295
        **config**\: False
        .. attribute:: ready_time
        Ready Time
        **type**\: str
        **config**\: False
        .. attribute:: last_process
        Last Process Name
        **type**\: str
        **config**\: False
        """
        _prefix = 'sysmgr-oper'
        _revision = '2015-11-09'

        def __init__(self):
            # Py2/Py3-compatible superclass initialization (generated shim).
            if sys.version_info > (3,):
                super().__init__()
            else:
                super(SystemProcess.NodeTable.Node.Failover.PrimaryBandStatistic, self).__init__()
            self.yang_name = "primary-band-statistic"
            self.yang_parent_name = "failover"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = []
            self._child_classes = OrderedDict([])
            self._leafs = OrderedDict([
                ('level', (YLeaf(YType.str, 'level'), ['str'])),
                ('band_name', (YLeaf(YType.str, 'band-name'), ['str'])),
                ('band_finish_time', (YLeaf(YType.str, 'band-finish-time'), ['str'])),
                ('band_time', (YLeaf(YType.str, 'band-time'), ['str'])),
                ('finish_time', (YLeaf(YType.str, 'finish-time'), ['str'])),
                ('idle_percentage', (YLeaf(YType.str, 'idle-percentage'), ['str'])),
                ('jid', (YLeaf(YType.uint32, 'jid'), ['int'])),
                ('ready_time', (YLeaf(YType.str, 'ready-time'), ['str'])),
                ('last_process', (YLeaf(YType.str, 'last-process'), ['str'])),
            ])
            self.level = None
            self.band_name = None
            self.band_finish_time = None
            self.band_time = None
            self.finish_time = None
            self.idle_percentage = None
            self.jid = None
            self.ready_time = None
            self.last_process = None
            self._segment_path = lambda: "primary-band-statistic"
            # Must be assigned last: freezes the attribute set.
            self._is_frozen = True

        def __setattr__(self, name, value):
            self._perform_setattr(SystemProcess.NodeTable.Node.Failover.PrimaryBandStatistic, ['level', 'band_name', 'band_finish_time', 'band_time', 'finish_time', 'idle_percentage', 'jid', 'ready_time', 'last_process'], name, value)

        @staticmethod
        def _meta_info():
            # Lazy import: avoids loading the large meta table at module import time.
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_sysmgr_oper as meta
            return meta._meta_table['SystemProcess.NodeTable.Node.Failover.PrimaryBandStatistic']['meta_info']

    class PrimaryTsBootProc(_Entity_):
        """
        List of booted processes per primary time
        .. attribute:: prim_time_stamp
        Primary Time Stamp
        **type**\: str
        **config**\: False
        .. attribute:: go_primary
        Go primary time stamp
        **type**\: str
        **config**\: False
        .. attribute:: level
        Level
        **type**\: str
        **config**\: False
        .. attribute:: band_name
        Band Name
        **type**\: str
        **config**\: False
        .. attribute:: jid
        Job Id
        **type**\: int
        **range:** \-2147483648..2147483647
        **config**\: False
        .. attribute:: instance_id
        Instance Id
        **type**\: int
        **range:** \-2147483648..2147483647
        **config**\: False
        .. attribute:: avail_time_stamp
        Avail Time Stamp
        **type**\: str
        **config**\: False
        .. attribute:: avail
        Time since Avail
        **type**\: str
        **config**\: False
        .. attribute:: is_avail_timeout
        Is EOI timeout
        **type**\: bool
        **config**\: False
        .. attribute:: process_name
        Process Name
        **type**\: str
        **config**\: False
        """
        _prefix = 'sysmgr-oper'
        _revision = '2015-11-09'

        def __init__(self):
            # Py2/Py3-compatible superclass initialization (generated shim).
            if sys.version_info > (3,):
                super().__init__()
            else:
                super(SystemProcess.NodeTable.Node.Failover.PrimaryTsBootProc, self).__init__()
            self.yang_name = "primary-ts-boot-proc"
            self.yang_parent_name = "failover"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = []
            self._child_classes = OrderedDict([])
            self._leafs = OrderedDict([
                ('prim_time_stamp', (YLeaf(YType.str, 'prim-time-stamp'), ['str'])),
                ('go_primary', (YLeaf(YType.str, 'go-primary'), ['str'])),
                ('level', (YLeaf(YType.str, 'level'), ['str'])),
                ('band_name', (YLeaf(YType.str, 'band-name'), ['str'])),
                ('jid', (YLeaf(YType.int32, 'jid'), ['int'])),
                ('instance_id', (YLeaf(YType.int32, 'instance-id'), ['int'])),
                ('avail_time_stamp', (YLeaf(YType.str, 'avail-time-stamp'), ['str'])),
                ('avail', (YLeaf(YType.str, 'avail'), ['str'])),
                ('is_avail_timeout', (YLeaf(YType.boolean, 'is-avail-timeout'), ['bool'])),
                ('process_name', (YLeaf(YType.str, 'process-name'), ['str'])),
            ])
            self.prim_time_stamp = None
            self.go_primary = None
            self.level = None
            self.band_name = None
            self.jid = None
            self.instance_id = None
            self.avail_time_stamp = None
            self.avail = None
            self.is_avail_timeout = None
            self.process_name = None
            self._segment_path = lambda: "primary-ts-boot-proc"
            # Must be assigned last: freezes the attribute set.
            self._is_frozen = True

        def __setattr__(self, name, value):
            self._perform_setattr(SystemProcess.NodeTable.Node.Failover.PrimaryTsBootProc, ['prim_time_stamp', 'go_primary', 'level', 'band_name', 'jid', 'instance_id', 'avail_time_stamp', 'avail', 'is_avail_timeout', 'process_name'], name, value)

        @staticmethod
        def _meta_info():
            # Lazy import: avoids loading the large meta table at module import time.
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_sysmgr_oper as meta
            return meta._meta_table['SystemProcess.NodeTable.Node.Failover.PrimaryTsBootProc']['meta_info']

    class PrimaryStartTsBootProc(_Entity_):
        """
        List of booted process per primary start time
        .. attribute:: start_time_stamp
        Start Time Stamp
        **type**\: str
        **config**\: False
        .. attribute:: started
        Time since started
        **type**\: str
        **config**\: False
        .. attribute:: level
        Level
        **type**\: str
        **config**\: False
        .. attribute:: jid
        Job Id
        **type**\: int
        **range:** \-2147483648..2147483647
        **config**\: False
        .. attribute:: instance_id
        Instance Id
        **type**\: int
        **range:** \-2147483648..2147483647
        **config**\: False
        .. attribute:: ready_time_stamp
        Ready Time Stamp
        **type**\: str
        **config**\: False
        .. attribute:: ready
        Time since Ready
        **type**\: str
        **config**\: False
        .. attribute:: is_eoi_timeout
        Is EOI timeout
        **type**\: bool
        **config**\: False
        .. attribute:: process_name
        Process Name
        **type**\: str
        **config**\: False
        """
        _prefix = 'sysmgr-oper'
        _revision = '2015-11-09'

        def __init__(self):
            # Py2/Py3-compatible superclass initialization (generated shim).
            if sys.version_info > (3,):
                super().__init__()
            else:
                super(SystemProcess.NodeTable.Node.Failover.PrimaryStartTsBootProc, self).__init__()
            self.yang_name = "primary-start-ts-boot-proc"
            self.yang_parent_name = "failover"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = []
            self._child_classes = OrderedDict([])
            self._leafs = OrderedDict([
                ('start_time_stamp', (YLeaf(YType.str, 'start-time-stamp'), ['str'])),
                ('started', (YLeaf(YType.str, 'started'), ['str'])),
                ('level', (YLeaf(YType.str, 'level'), ['str'])),
                ('jid', (YLeaf(YType.int32, 'jid'), ['int'])),
                ('instance_id', (YLeaf(YType.int32, 'instance-id'), ['int'])),
                ('ready_time_stamp', (YLeaf(YType.str, 'ready-time-stamp'), ['str'])),
                ('ready', (YLeaf(YType.str, 'ready'), ['str'])),
                ('is_eoi_timeout', (YLeaf(YType.boolean, 'is-eoi-timeout'), ['bool'])),
                ('process_name', (YLeaf(YType.str, 'process-name'), ['str'])),
            ])
            self.start_time_stamp = None
            self.started = None
            self.level = None
            self.jid = None
            self.instance_id = None
            self.ready_time_stamp = None
            self.ready = None
            self.is_eoi_timeout = None
            self.process_name = None
            self._segment_path = lambda: "primary-start-ts-boot-proc"
            # Must be assigned last: freezes the attribute set.
            self._is_frozen = True

        def __setattr__(self, name, value):
            self._perform_setattr(SystemProcess.NodeTable.Node.Failover.PrimaryStartTsBootProc, ['start_time_stamp', 'started', 'level', 'jid', 'instance_id', 'ready_time_stamp', 'ready', 'is_eoi_timeout', 'process_name'], name, value)

        @staticmethod
        def _meta_info():
            # Lazy import: avoids loading the large meta table at module import time.
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_sysmgr_oper as meta
            return meta._meta_table['SystemProcess.NodeTable.Node.Failover.PrimaryStartTsBootProc']['meta_info']

    @staticmethod
    def _meta_info():
        # Lazy import: avoids loading the large meta table at module import time.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_sysmgr_oper as meta
        return meta._meta_table['SystemProcess.NodeTable.Node.Failover']['meta_info']
class Boot(_Entity_):
"""
Process Boot information
.. attribute:: last_process_started
Last process started
**type**\: str
**config**\: False
.. attribute:: standby_band_statistic
Standby Band statistics
**type**\: list of :py:class:`StandbyBandStatistic <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysmgr_oper.SystemProcess.NodeTable.Node.Boot.StandbyBandStatistic>`
**config**\: False
.. attribute:: active_band_statistic
Active Band statistics
**type**\: list of :py:class:`ActiveBandStatistic <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysmgr_oper.SystemProcess.NodeTable.Node.Boot.ActiveBandStatistic>`
**config**\: False
.. attribute:: booted_process
List of booted processes
**type**\: list of :py:class:`BootedProcess <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysmgr_oper.SystemProcess.NodeTable.Node.Boot.BootedProcess>`
**config**\: False
"""
_prefix = 'sysmgr-oper'
_revision = '2015-11-09'
def __init__(self):
    """Initialize the generated binding for the 'boot' YANG container.

    Auto-generated by ydk-gen: registers child classes, leaf metadata and
    the segment path used to build this node's data path at runtime.
    """
    # Py2/Py3-compatible superclass initialization (generated shim).
    if sys.version_info > (3,):
        super().__init__()
    else:
        super(SystemProcess.NodeTable.Node.Boot, self).__init__()
    self.yang_name = "boot"
    self.yang_parent_name = "node"
    self.is_top_level_class = False
    self.has_list_ancestor = True
    self.ylist_key_names = []
    # Maps YANG child name -> (python attribute name, binding class).
    self._child_classes = OrderedDict([("standby-band-statistic", ("standby_band_statistic", SystemProcess.NodeTable.Node.Boot.StandbyBandStatistic)), ("active-band-statistic", ("active_band_statistic", SystemProcess.NodeTable.Node.Boot.ActiveBandStatistic)), ("booted-process", ("booted_process", SystemProcess.NodeTable.Node.Boot.BootedProcess))])
    self._leafs = OrderedDict([
        ('last_process_started', (YLeaf(YType.str, 'last-process-started'), ['str'])),
    ])
    self.last_process_started = None
    # One YList per YANG child list declared in _child_classes above.
    self.standby_band_statistic = YList(self)
    self.active_band_statistic = YList(self)
    self.booted_process = YList(self)
    self._segment_path = lambda: "boot"
    # Must be assigned last: freezes the attribute set; subsequent writes
    # are validated by __setattr__/_perform_setattr.
    self._is_frozen = True
def __setattr__(self, name, value):
    # Route attribute writes through YDK validation; 'last_process_started'
    # is the only settable leaf on this container.
    self._perform_setattr(SystemProcess.NodeTable.Node.Boot, ['last_process_started'], name, value)
# Generated YDK binding for one entry of the read-only
# 'standby-band-statistic' YANG list under the 'boot' container.
class StandbyBandStatistic(_Entity_):
    """
    Standby Band statistics
    .. attribute:: level
    Level
    **type**\: str
    **config**\: False
    .. attribute:: band_name
    Band Name
    **type**\: str
    **config**\: False
    .. attribute:: band_finish_time
    Band finish time
    **type**\: str
    **config**\: False
    .. attribute:: band_time
    Band time
    **type**\: str
    **config**\: False
    .. attribute:: finish_time
    Finish Time
    **type**\: str
    **config**\: False
    .. attribute:: idle_percentage
    Idle Percentage
    **type**\: str
    **config**\: False
    **units**\: percentage
    .. attribute:: jid
    Jid
    **type**\: int
    **range:** 0..4294967295
    **config**\: False
    .. attribute:: ready_time
    Ready Time
    **type**\: str
    **config**\: False
    .. attribute:: last_process
    Last Process Name
    **type**\: str
    **config**\: False
    """
    _prefix = 'sysmgr-oper'
    _revision = '2015-11-09'

    def __init__(self):
        # Py2/Py3-compatible superclass initialization (generated shim).
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(SystemProcess.NodeTable.Node.Boot.StandbyBandStatistic, self).__init__()
        self.yang_name = "standby-band-statistic"
        self.yang_parent_name = "boot"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        # Leaf node; no nested containers or lists.
        self._child_classes = OrderedDict([])
        self._leafs = OrderedDict([
            ('level', (YLeaf(YType.str, 'level'), ['str'])),
            ('band_name', (YLeaf(YType.str, 'band-name'), ['str'])),
            ('band_finish_time', (YLeaf(YType.str, 'band-finish-time'), ['str'])),
            ('band_time', (YLeaf(YType.str, 'band-time'), ['str'])),
            ('finish_time', (YLeaf(YType.str, 'finish-time'), ['str'])),
            ('idle_percentage', (YLeaf(YType.str, 'idle-percentage'), ['str'])),
            ('jid', (YLeaf(YType.uint32, 'jid'), ['int'])),
            ('ready_time', (YLeaf(YType.str, 'ready-time'), ['str'])),
            ('last_process', (YLeaf(YType.str, 'last-process'), ['str'])),
        ])
        self.level = None
        self.band_name = None
        self.band_finish_time = None
        self.band_time = None
        self.finish_time = None
        self.idle_percentage = None
        self.jid = None
        self.ready_time = None
        self.last_process = None
        self._segment_path = lambda: "standby-band-statistic"
        # Must be assigned last: freezes the attribute set.
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Route all attribute writes through YDK leaf validation.
        self._perform_setattr(SystemProcess.NodeTable.Node.Boot.StandbyBandStatistic, ['level', 'band_name', 'band_finish_time', 'band_time', 'finish_time', 'idle_percentage', 'jid', 'ready_time', 'last_process'], name, value)

    @staticmethod
    def _meta_info():
        # Lazy import: avoids loading the large meta table at module import time.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_sysmgr_oper as meta
        return meta._meta_table['SystemProcess.NodeTable.Node.Boot.StandbyBandStatistic']['meta_info']
class ActiveBandStatistic(_Entity_):
    """
    Active Band statistics

    .. attribute:: level
        Level
        **type**\: str
        **config**\: False
    .. attribute:: band_name
        Band Name
        **type**\: str
        **config**\: False
    .. attribute:: band_finish_time
        Band finish time
        **type**\: str
        **config**\: False
    .. attribute:: band_time
        Band time
        **type**\: str
        **config**\: False
    .. attribute:: finish_time
        Finish Time
        **type**\: str
        **config**\: False
    .. attribute:: idle_percentage
        Idle Percentage
        **type**\: str
        **config**\: False
        **units**\: percentage
    .. attribute:: jid
        Jid
        **type**\: int
        **range:** 0..4294967295
        **config**\: False
    .. attribute:: ready_time
        Ready Time
        **type**\: str
        **config**\: False
    .. attribute:: last_process
        Last Process Name
        **type**\: str
        **config**\: False
    """

    _prefix = 'sysmgr-oper'
    _revision = '2015-11-09'

    def __init__(self):
        # Generated code supports both Python 2 and 3 super() forms.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(SystemProcess.NodeTable.Node.Boot.ActiveBandStatistic, self).__init__()

        # YANG bookkeeping consumed by the ydk runtime.
        self.yang_name = "active-band-statistic"
        self.yang_parent_name = "boot"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        # Leaf map: python attribute name -> (YLeaf wrapper, accepted python type names).
        self._leafs = OrderedDict([
            ('level', (YLeaf(YType.str, 'level'), ['str'])),
            ('band_name', (YLeaf(YType.str, 'band-name'), ['str'])),
            ('band_finish_time', (YLeaf(YType.str, 'band-finish-time'), ['str'])),
            ('band_time', (YLeaf(YType.str, 'band-time'), ['str'])),
            ('finish_time', (YLeaf(YType.str, 'finish-time'), ['str'])),
            ('idle_percentage', (YLeaf(YType.str, 'idle-percentage'), ['str'])),
            ('jid', (YLeaf(YType.uint32, 'jid'), ['int'])),
            ('ready_time', (YLeaf(YType.str, 'ready-time'), ['str'])),
            ('last_process', (YLeaf(YType.str, 'last-process'), ['str'])),
        ])
        # Leaf values start unset until populated from the device.
        self.level = None
        self.band_name = None
        self.band_finish_time = None
        self.band_time = None
        self.finish_time = None
        self.idle_percentage = None
        self.jid = None
        self.ready_time = None
        self.last_process = None
        self._segment_path = lambda: "active-band-statistic"
        # Once frozen, attribute writes are validated in __setattr__.
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Route all attribute writes through ydk's validating setter.
        self._perform_setattr(SystemProcess.NodeTable.Node.Boot.ActiveBandStatistic, ['level', 'band_name', 'band_finish_time', 'band_time', 'finish_time', 'idle_percentage', 'jid', 'ready_time', 'last_process'], name, value)

    @staticmethod
    def _meta_info():
        # Meta table is imported lazily to avoid a circular import at module load.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_sysmgr_oper as meta
        return meta._meta_table['SystemProcess.NodeTable.Node.Boot.ActiveBandStatistic']['meta_info']
class BootedProcess(_Entity_):
    """
    List of booted processes

    .. attribute:: start_time_stamp
        Start Time Stamp
        **type**\: str
        **config**\: False
    .. attribute:: started
        Time since started
        **type**\: str
        **config**\: False
    .. attribute:: level
        Level
        **type**\: str
        **config**\: False
    .. attribute:: jid
        Job Id
        **type**\: int
        **range:** \-2147483648..2147483647
        **config**\: False
    .. attribute:: instance_id
        Instance Id
        **type**\: int
        **range:** \-2147483648..2147483647
        **config**\: False
    .. attribute:: ready_time_stamp
        Ready Time Stamp
        **type**\: str
        **config**\: False
    .. attribute:: ready
        Time since Ready
        **type**\: str
        **config**\: False
    .. attribute:: is_eoi_timeout
        Is EOI timeout
        **type**\: bool
        **config**\: False
    .. attribute:: process_name
        Process Name
        **type**\: str
        **config**\: False
    """

    _prefix = 'sysmgr-oper'
    _revision = '2015-11-09'

    def __init__(self):
        # Generated code supports both Python 2 and 3 super() forms.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(SystemProcess.NodeTable.Node.Boot.BootedProcess, self).__init__()

        # YANG bookkeeping consumed by the ydk runtime.
        self.yang_name = "booted-process"
        self.yang_parent_name = "boot"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        # Leaf map: python attribute name -> (YLeaf wrapper, accepted python type names).
        self._leafs = OrderedDict([
            ('start_time_stamp', (YLeaf(YType.str, 'start-time-stamp'), ['str'])),
            ('started', (YLeaf(YType.str, 'started'), ['str'])),
            ('level', (YLeaf(YType.str, 'level'), ['str'])),
            ('jid', (YLeaf(YType.int32, 'jid'), ['int'])),
            ('instance_id', (YLeaf(YType.int32, 'instance-id'), ['int'])),
            ('ready_time_stamp', (YLeaf(YType.str, 'ready-time-stamp'), ['str'])),
            ('ready', (YLeaf(YType.str, 'ready'), ['str'])),
            ('is_eoi_timeout', (YLeaf(YType.boolean, 'is-eoi-timeout'), ['bool'])),
            ('process_name', (YLeaf(YType.str, 'process-name'), ['str'])),
        ])
        # Leaf values start unset until populated from the device.
        self.start_time_stamp = None
        self.started = None
        self.level = None
        self.jid = None
        self.instance_id = None
        self.ready_time_stamp = None
        self.ready = None
        self.is_eoi_timeout = None
        self.process_name = None
        self._segment_path = lambda: "booted-process"
        # Once frozen, attribute writes are validated in __setattr__.
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Route all attribute writes through ydk's validating setter.
        self._perform_setattr(SystemProcess.NodeTable.Node.Boot.BootedProcess, ['start_time_stamp', 'started', 'level', 'jid', 'instance_id', 'ready_time_stamp', 'ready', 'is_eoi_timeout', 'process_name'], name, value)

    @staticmethod
    def _meta_info():
        # Meta table is imported lazily to avoid a circular import at module load.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_sysmgr_oper as meta
        return meta._meta_table['SystemProcess.NodeTable.Node.Boot.BootedProcess']['meta_info']
@staticmethod
def _meta_info():
    """Return the ydk meta-information entry for the Boot container."""
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_sysmgr_oper as meta
    table = meta._meta_table
    return table['SystemProcess.NodeTable.Node.Boot']['meta_info']
class Logs(_Entity_):
    """
    Process Log information

    .. attribute:: log
        Process log
        **type**\: str
        **config**\: False
    """

    _prefix = 'sysmgr-oper'
    _revision = '2015-11-09'

    def __init__(self):
        # Generated code supports both Python 2 and 3 super() forms.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(SystemProcess.NodeTable.Node.Logs, self).__init__()

        # YANG bookkeeping consumed by the ydk runtime.
        self.yang_name = "logs"
        self.yang_parent_name = "node"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        # Single string leaf carrying the process log text.
        self._leafs = OrderedDict([
            ('log', (YLeaf(YType.str, 'log'), ['str'])),
        ])
        self.log = None  # unset until populated from the device
        self._segment_path = lambda: "logs"
        # Once frozen, attribute writes are validated in __setattr__.
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Route all attribute writes through ydk's validating setter.
        self._perform_setattr(SystemProcess.NodeTable.Node.Logs, ['log'], name, value)

    @staticmethod
    def _meta_info():
        # Meta table is imported lazily to avoid a circular import at module load.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_sysmgr_oper as meta
        return meta._meta_table['SystemProcess.NodeTable.Node.Logs']['meta_info']
class Searchpath(_Entity_):
    """
    Process Searchpath information

    .. attribute:: path
        process searchpath
        **type**\: str
        **config**\: False
    """

    _prefix = 'sysmgr-oper'
    _revision = '2015-11-09'

    def __init__(self):
        # Generated code supports both Python 2 and 3 super() forms.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(SystemProcess.NodeTable.Node.Searchpath, self).__init__()

        # YANG bookkeeping consumed by the ydk runtime.
        self.yang_name = "searchpath"
        self.yang_parent_name = "node"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        # Single string leaf carrying the search path.
        self._leafs = OrderedDict([
            ('path', (YLeaf(YType.str, 'path'), ['str'])),
        ])
        self.path = None  # unset until populated from the device
        self._segment_path = lambda: "searchpath"
        # Once frozen, attribute writes are validated in __setattr__.
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Route all attribute writes through ydk's validating setter.
        self._perform_setattr(SystemProcess.NodeTable.Node.Searchpath, ['path'], name, value)

    @staticmethod
    def _meta_info():
        # Meta table is imported lazily to avoid a circular import at module load.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_sysmgr_oper as meta
        return meta._meta_table['SystemProcess.NodeTable.Node.Searchpath']['meta_info']
@staticmethod
def _meta_info():
    """Return the ydk meta-information entry for the Node list entry."""
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_sysmgr_oper as meta
    table = meta._meta_table
    return table['SystemProcess.NodeTable.Node']['meta_info']
@staticmethod
def _meta_info():
    """Return the ydk meta-information entry for the NodeTable container."""
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_sysmgr_oper as meta
    table = meta._meta_table
    return table['SystemProcess.NodeTable']['meta_info']
def clone_ptr(self):
    """Cache a fresh top-level SystemProcess entity and return it."""
    top = SystemProcess()
    self._top_entity = top
    return top
@staticmethod
def _meta_info():
    """Return the ydk meta-information entry for SystemProcess."""
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_sysmgr_oper as meta
    table = meta._meta_table
    return table['SystemProcess']['meta_info']
| 47.103157
| 1,118
| 0.353712
| 28,831
| 452,049
| 5.282647
| 0.013284
| 0.049762
| 0.080366
| 0.036755
| 0.954236
| 0.934079
| 0.919772
| 0.906995
| 0.896037
| 0.882551
| 0
| 0.027822
| 0.554508
| 452,049
| 9,596
| 1,119
| 47.108066
| 0.728464
| 0.196291
| 0
| 0.831421
| 0
| 0
| 0.152421
| 0.036226
| 0
| 0
| 0
| 0
| 0
| 1
| 0.06979
| false
| 0
| 0.026131
| 0
| 0.149458
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
2f9e5f7e2313e91ae8a2a5571c60fa780e5adf06
| 63
|
py
|
Python
|
ComRISB/pyglib/pyglib/basic/__init__.py
|
comscope/comsuite
|
d51c43cad0d15dc3b4d1f45e7df777cdddaa9d6c
|
[
"BSD-3-Clause"
] | 18
|
2019-06-15T18:08:21.000Z
|
2022-01-30T05:01:29.000Z
|
ComRISB/pyglib/pyglib/gutz/__init__.py
|
comscope/Comsuite
|
b80ca9f34c519757d337487c489fb655f7598cc2
|
[
"BSD-3-Clause"
] | null | null | null |
ComRISB/pyglib/pyglib/gutz/__init__.py
|
comscope/Comsuite
|
b80ca9f34c519757d337487c489fb655f7598cc2
|
[
"BSD-3-Clause"
] | 11
|
2019-06-05T02:57:55.000Z
|
2021-12-29T02:54:25.000Z
|
from pyglib.basic import units
from pyglib.basic import prints
| 21
| 31
| 0.84127
| 10
| 63
| 5.3
| 0.6
| 0.377358
| 0.566038
| 0.792453
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.126984
| 63
| 2
| 32
| 31.5
| 0.963636
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0.5
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 1
|
0
| 9
|
c0fb27defccd006877f50258acb35aad3e506780
| 9,587
|
py
|
Python
|
models/models_nn.py
|
AaltoPML/informative_prior
|
64661650d2d03050689e841f084b97b0a1e3b1da
|
[
"MIT"
] | null | null | null |
models/models_nn.py
|
AaltoPML/informative_prior
|
64661650d2d03050689e841f084b97b0a1e3b1da
|
[
"MIT"
] | null | null | null |
models/models_nn.py
|
AaltoPML/informative_prior
|
64661650d2d03050689e841f084b97b0a1e3b1da
|
[
"MIT"
] | 1
|
2021-09-07T11:21:17.000Z
|
2021-09-07T11:21:17.000Z
|
from models.layers import *
class Hier_Meanfield_NN(nn.Module):
    """Hierarchical mean-field Gaussian BNN.

    Three mean-field Bayesian layers with ReLU activations plus a learned
    observation-noise scale ``sigma_n`` whose log has a Gaussian variational
    posterior (mean ``mu_logsigma``, std softplus(``rho_logsigma``)) and an
    inverse-gamma prior with hyper-parameters (alpha_s, beta_s).
    """

    def __init__(self, num_feature, num_hidden_nodes, alpha_s=0.5, beta_s=0.5, alpha_t=0.5, beta_t=0.5):
        super(Hier_Meanfield_NN, self).__init__()
        self.num_feature = num_feature
        # Prior hyper-parameters for the noise-scale term.
        self.alpha = alpha_s
        self.beta = beta_s
        # Glorot-style range for the initial log-noise mean.
        scale = 1. * np.sqrt(6. / (num_feature + 1))
        self.mu_logsigma = nn.Parameter(torch.Tensor(1).uniform_(-scale, scale))
        self.rho_logsigma = nn.Parameter(torch.Tensor(1).uniform_(-4, -2))
        self.relu = nn.ReLU()
        self.Layer1 = Hier_Meanfield_Layer(num_feature, num_hidden_nodes[0], alpha_t, beta_t)
        self.Layer2 = Hier_Meanfield_Layer(num_hidden_nodes[0], num_hidden_nodes[1], alpha_t, beta_t)
        self.Layer3 = Hier_Meanfield_Layer(num_hidden_nodes[1], 1, alpha_t, beta_t)

    def forward(self, x):
        """Return (output, total KL, noise scale).

        Train and eval mode differ only in how sigma_n is obtained
        (reparameterized sample vs. posterior mean), so the layer pass is
        shared instead of being duplicated in both branches as before.
        """
        # softplus(rho) gives a positive std for log sigma_n.
        var_sigma = torch.log(1 + torch.exp(self.rho_logsigma))
        if self.training:
            epsilon_sigma = torch.randn(1)
            sigma_n = torch.exp(self.mu_logsigma + var_sigma * epsilon_sigma)
        else:
            sigma_n = torch.exp(self.mu_logsigma)
        x1, kl1 = self.Layer1(x, sigma_n, self.training)
        x1 = self.relu(x1)
        x2, kl2 = self.Layer2(x1, sigma_n, self.training)
        x2 = self.relu(x2)
        x3, kl3 = self.Layer3(x2, sigma_n, self.training)
        KL_entropy_sigma = -self._entropy_inverse_gamma(self.mu_logsigma, var_sigma)
        KL_prior_sigma = self._expected_prior_inverse_gamma(self.mu_logsigma, var_sigma)
        KL_scale = KL_entropy_sigma - KL_prior_sigma
        return x3, kl1 + kl2 + kl3 + KL_scale, sigma_n

    def _entropy_inverse_gamma(self, mu, sigma):
        # Entropy term for the noise-scale posterior (Gaussian-in-log form).
        return torch.sum(mu + torch.log(sigma) + 0.5 * np.log(2 * np.pi) + 0.5)

    def _expected_prior_inverse_gamma(self, mu, sigma):
        # Expected inverse-gamma log-prior under the variational posterior.
        return torch.sum(self.alpha * np.log(self.beta) - (self.alpha + 1) * mu - self.beta * torch.exp(
            -mu + 0.5 * sigma ** 2) - torch.lgamma(torch.tensor(self.alpha)))
class Info_Hier_Meanfield_NN(nn.Module):
    """Informative hierarchical mean-field Gaussian BNN.

    Like the plain hierarchical model, but the first layer is an
    informative variant parameterized by (k_min, k_max, tau).
    """

    def __init__(self, num_feature, num_hidden_nodes, k_min, k_max, tau=1., alpha_s=0.5, beta_s=0.5, alpha_t=0.5, beta_t=0.5):
        super(Info_Hier_Meanfield_NN, self).__init__()
        self.num_feature = num_feature
        # Prior hyper-parameters for the noise-scale term.
        self.alpha = alpha_s
        self.beta = beta_s
        # Glorot-style range for the initial log-noise mean.
        scale = 1. * np.sqrt(6. / (num_feature + 1))
        self.mu_logsigma = nn.Parameter(torch.Tensor(1).uniform_(-scale, scale))
        self.rho_logsigma = nn.Parameter(torch.Tensor(1).uniform_(-4, -2))
        self.relu = nn.ReLU()
        self.Layer1 = Info_Hier_Meanfield_Layer(num_feature, num_hidden_nodes[0], k_min, k_max, tau, alpha_t, beta_t)
        self.Layer2 = Hier_Meanfield_Layer(num_hidden_nodes[0], num_hidden_nodes[1], alpha_t, beta_t)
        self.Layer3 = Hier_Meanfield_Layer(num_hidden_nodes[1], 1, alpha_t, beta_t)

    def forward(self, x):
        """Return (output, total KL, noise scale).

        sigma_n is a reparameterized sample in training mode and the
        posterior mean in eval mode; the layer pass is shared rather than
        duplicated in both branches as before.
        """
        # softplus(rho) gives a positive std for log sigma_n.
        var_sigma = torch.log(1 + torch.exp(self.rho_logsigma))
        if self.training:
            epsilon_sigma = torch.randn(1)
            sigma_n = torch.exp(self.mu_logsigma + var_sigma * epsilon_sigma)
        else:
            sigma_n = torch.exp(self.mu_logsigma)
        x1, kl1 = self.Layer1(x, sigma_n, self.training)
        x1 = self.relu(x1)
        x2, kl2 = self.Layer2(x1, sigma_n, self.training)
        x2 = self.relu(x2)
        x3, kl3 = self.Layer3(x2, sigma_n, self.training)
        KL_entropy_sigma = -self._entropy_inverse_gamma(self.mu_logsigma, var_sigma)
        KL_prior_sigma = self._expected_prior_inverse_gamma(self.mu_logsigma, var_sigma)
        KL_scale = KL_entropy_sigma - KL_prior_sigma
        return x3, kl1 + kl2 + kl3 + KL_scale, sigma_n

    def _entropy_inverse_gamma(self, mu, sigma):
        # Entropy term for the noise-scale posterior (Gaussian-in-log form).
        return torch.sum(mu + torch.log(sigma) + 0.5 * np.log(2 * np.pi) + 0.5)

    def _expected_prior_inverse_gamma(self, mu, sigma):
        # Expected inverse-gamma log-prior under the variational posterior.
        return torch.sum(self.alpha * np.log(self.beta) - (self.alpha + 1) * mu - self.beta * torch.exp(
            -mu + 0.5 * sigma ** 2) - torch.lgamma(torch.tensor(self.alpha)))
class Hier_Meanfield_PVE_NN(nn.Module):
    """Hierarchical mean-field Gaussian BNN (PVE variant).

    Identical to Hier_Meanfield_NN except the first two layers are always
    called with a fixed scale of 1.; only the output layer receives the
    learned noise scale sigma_n.
    """

    def __init__(self, num_feature, num_hidden_nodes, alpha_s=0.5, beta_s=0.5, alpha_t=0.5, beta_t=0.5):
        super(Hier_Meanfield_PVE_NN, self).__init__()
        self.num_feature = num_feature
        # Prior hyper-parameters for the noise-scale term.
        self.alpha = alpha_s
        self.beta = beta_s
        # Glorot-style range for the initial log-noise mean.
        scale = 1. * np.sqrt(6. / (num_feature + 1))
        self.mu_logsigma = nn.Parameter(torch.Tensor(1).uniform_(-scale, scale))
        self.rho_logsigma = nn.Parameter(torch.Tensor(1).uniform_(-4, -2))
        self.relu = nn.ReLU()
        self.Layer1 = Hier_Meanfield_Layer(num_feature, num_hidden_nodes[0], alpha_t, beta_t)
        self.Layer2 = Hier_Meanfield_Layer(num_hidden_nodes[0], num_hidden_nodes[1], alpha_t, beta_t)
        self.Layer3 = Hier_Meanfield_Layer(num_hidden_nodes[1], 1, alpha_t, beta_t)

    def forward(self, x):
        """Return (output, total KL, noise scale).

        sigma_n is sampled in training mode, the posterior mean otherwise;
        the layer pass is shared instead of duplicated in both branches.
        Layers 1-2 use a fixed scale of 1. as in the original.
        """
        # softplus(rho) gives a positive std for log sigma_n.
        var_sigma = torch.log(1 + torch.exp(self.rho_logsigma))
        if self.training:
            epsilon_sigma = torch.randn(1)
            sigma_n = torch.exp(self.mu_logsigma + var_sigma * epsilon_sigma)
        else:
            sigma_n = torch.exp(self.mu_logsigma)
        x1, kl1 = self.Layer1(x, 1., self.training)
        x1 = self.relu(x1)
        x2, kl2 = self.Layer2(x1, 1., self.training)
        x2 = self.relu(x2)
        x3, kl3 = self.Layer3(x2, sigma_n, self.training)
        KL_entropy_sigma = -self._entropy_inverse_gamma(self.mu_logsigma, var_sigma)
        KL_prior_sigma = self._expected_prior_inverse_gamma(self.mu_logsigma, var_sigma)
        KL_scale = KL_entropy_sigma - KL_prior_sigma
        return x3, kl1 + kl2 + kl3 + KL_scale, sigma_n

    def _entropy_inverse_gamma(self, mu, sigma):
        # Entropy term for the noise-scale posterior (Gaussian-in-log form).
        return torch.sum(mu + torch.log(sigma) + 0.5 * np.log(2 * np.pi) + 0.5)

    def _expected_prior_inverse_gamma(self, mu, sigma):
        # Expected inverse-gamma log-prior under the variational posterior.
        return torch.sum(self.alpha * np.log(self.beta) - (self.alpha + 1) * mu - self.beta * torch.exp(
            -mu + 0.5 * sigma ** 2) - torch.lgamma(torch.tensor(self.alpha)))
class Info_Hier_Meanfield_PVE_NN(nn.Module):
    """Informative hierarchical mean-field Gaussian BNN (PVE variant).

    First layer is the informative variant (k_min, k_max, tau); layers 1-2
    are always called with a fixed scale of 1., and only the output layer
    receives the learned noise scale sigma_n.
    """

    def __init__(self, num_feature, num_hidden_nodes, k_min, k_max, tau=1., alpha_s=0.5, beta_s=0.5, alpha_t=0.5, beta_t=0.5):
        super(Info_Hier_Meanfield_PVE_NN, self).__init__()
        self.num_feature = num_feature
        # Prior hyper-parameters for the noise-scale term.
        self.alpha = alpha_s
        self.beta = beta_s
        # Glorot-style range for the initial log-noise mean.
        scale = 1. * np.sqrt(6. / (num_feature + 1))
        self.mu_logsigma = nn.Parameter(torch.Tensor(1).uniform_(-scale, scale))
        self.rho_logsigma = nn.Parameter(torch.Tensor(1).uniform_(-4, -2))
        self.relu = nn.ReLU()
        self.Layer1 = Info_Hier_Meanfield_Layer(num_feature, num_hidden_nodes[0], k_min, k_max, tau, alpha_t, beta_t)
        self.Layer2 = Hier_Meanfield_Layer(num_hidden_nodes[0], num_hidden_nodes[1], alpha_t, beta_t)
        self.Layer3 = Hier_Meanfield_Layer(num_hidden_nodes[1], 1, alpha_t, beta_t)

    def forward(self, x):
        """Return (output, total KL, noise scale).

        sigma_n is sampled in training mode, the posterior mean otherwise;
        the layer pass is shared instead of duplicated in both branches.
        Layers 1-2 use a fixed scale of 1. as in the original.
        """
        # softplus(rho) gives a positive std for log sigma_n.
        var_sigma = torch.log(1 + torch.exp(self.rho_logsigma))
        if self.training:
            epsilon_sigma = torch.randn(1)
            sigma_n = torch.exp(self.mu_logsigma + var_sigma * epsilon_sigma)
        else:
            sigma_n = torch.exp(self.mu_logsigma)
        x1, kl1 = self.Layer1(x, 1., self.training)
        x1 = self.relu(x1)
        x2, kl2 = self.Layer2(x1, 1., self.training)
        x2 = self.relu(x2)
        x3, kl3 = self.Layer3(x2, sigma_n, self.training)
        KL_entropy_sigma = -self._entropy_inverse_gamma(self.mu_logsigma, var_sigma)
        KL_prior_sigma = self._expected_prior_inverse_gamma(self.mu_logsigma, var_sigma)
        KL_scale = KL_entropy_sigma - KL_prior_sigma
        return x3, kl1 + kl2 + kl3 + KL_scale, sigma_n

    def _entropy_inverse_gamma(self, mu, sigma):
        # Entropy term for the noise-scale posterior (Gaussian-in-log form).
        return torch.sum(mu + torch.log(sigma) + 0.5 * np.log(2 * np.pi) + 0.5)

    def _expected_prior_inverse_gamma(self, mu, sigma):
        # Expected inverse-gamma log-prior under the variational posterior.
        return torch.sum(self.alpha * np.log(self.beta) - (self.alpha + 1) * mu - self.beta * torch.exp(
            -mu + 0.5 * sigma ** 2) - torch.lgamma(torch.tensor(self.alpha)))
| 47.696517
| 127
| 0.617503
| 1,410
| 9,587
| 3.92766
| 0.056738
| 0.010112
| 0.05056
| 0.052004
| 0.995125
| 0.992777
| 0.989346
| 0.989346
| 0.989346
| 0.989346
| 0
| 0.039707
| 0.259205
| 9,587
| 201
| 128
| 47.696517
| 0.740073
| 0.016585
| 0
| 0.942675
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.101911
| false
| 0
| 0.006369
| 0.050955
| 0.210191
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
238ed2e3e9f8f50a2382f3ad6ffe3cb69f5aa4f2
| 24,205
|
py
|
Python
|
binance/endpoints/spot_trade.py
|
vinitjames/pybinance
|
20c006fe391be27b7f948306a94b0309dd2814ea
|
[
"MIT"
] | null | null | null |
binance/endpoints/spot_trade.py
|
vinitjames/pybinance
|
20c006fe391be27b7f948306a94b0309dd2814ea
|
[
"MIT"
] | null | null | null |
binance/endpoints/spot_trade.py
|
vinitjames/pybinance
|
20c006fe391be27b7f948306a94b0309dd2814ea
|
[
"MIT"
] | null | null | null |
from abc import ABCMeta, abstractmethod
from typing import Union, Callable
from binance.utils import format_time
from binance.exceptions import SpotTradingError
import time
class SpotAccountTradeEndpoints(metaclass=ABCMeta):
@property
@abstractmethod
def request_handler(self):
pass
@property
@abstractmethod
def API_VERSION(self):
pass
@property
@abstractmethod
def ORDER_SIDE(self):
pass
@property
@abstractmethod
def ORDER_TYPE(self):
pass
@property
@abstractmethod
def ORDER_STATUS(self):
pass
@property
@abstractmethod
def TIME_IN_FORCE(self):
pass
@property
@abstractmethod
def ORDER_RESPONSE_TYPE(self):
pass
@abstractmethod
def _create_api_uri(self, path: str, version: str) -> str:
pass
def create_order(self,
symbol: str,
side: str,
type: str,
timeInForce: str = None,
quantity: float = None,
quoteOrderQty: float = None,
price: float = None,
newClientOrderId: str = None,
stopPrice: float = None,
icebergQty: float = None,
newOrderRespType: str = None,
recvWindow: int = None) -> dict:
params = locals()
del params['self']
if(params['icebergQty'] is not None):
params['timeInForce'] = self.TIME_IN_FORCE.GTC
params = {k: v for k, v in params.items() if v is not None}
uri = self._create_api_uri('order',
version=self.API_VERSION.PRIVATE)
return self.request_handler.post(uri, signed=True, **params)
def create_test_order(self,
symbol: str,
side: str,
type: str,
timeInForce: str = None,
quantity: float = None,
quoteOrderQty: float = None,
price: float = None,
newClientOrderId: str = None,
stopPrice: float = None,
icebergQty: float = None,
newOrderRespType: str = None,
recvWindow: int = None) -> dict:
params = locals()
del params['self']
params = {k: v for k, v in params.items() if v is not None}
uri = self._create_api_uri('order/test',
version=self.API_VERSION.PRIVATE)
return self.request_handler.post(uri, signed=True, **params)
def cancel_order(self,
symbol: str,
orderId: int = None,
origClientOrderId: str = None,
newClientOrderId: str = None,
recvWindow: int = None) -> dict:
params = locals()
del params['self']
if(params['orderId'] == None) and (params['origClientOrderId'] == None):
raise SpotTradingError(
'Atleast on of orderId or origClientOrderId not passed',
'for cancelling order')
params = {k: v for k, v in params.items() if v is not None}
uri = self._create_api_uri('order',
version=self.API_VERSION.PRIVATE)
return self.request_handler.delete(uri, signed=True, **params)
def cancel_all_orders(self,
symbol: str,
recvWindow: int = None) -> dict:
params = locals()
del params['self']
params = {k: v for k, v in params.items() if v is not None}
uri = self._create_api_uri('openOrders',
version=self.API_VERSION.PRIVATE)
return self.request_handler.delete(uri, signed=True, **params)
def create_oco_order(self,
symbol: str,
side: str,
quantity: float,
price: float,
stopPrice: float,
listClientOrderId: str = None,
limitClientOrderId: str = None,
limitIcebergQty: float = None,
stopClientOrderId: str = None,
stopLimitPrice: float = None,
stopIcebergQty: float = None,
stopLimitTimeInForce: str = None,
newOrderRespType: str = None,
recvWindow: int = None) -> dict:
params = locals()
del params['self']
params = {k: v for k, v in params.items() if v is not None}
uri = self._create_api_uri('order/oco',
version=self.API_VERSION.PRIVATE)
return self.request_handler.post(uri, signed=True, **params)
def oco_buy_order(self,
symbol: str,
quantity: int,
price: float,
stopPrice: float,
listClientOrderId: str = None,
limitClientOrderId: str = None,
limitIcebergQty: float = None,
stopClientOrderId: str = None,
stopLimitPrice: float = None,
stopIcebergQty: float = None,
stopLimitTimeInForce: str = None,
newOrderRespType: str = None,
recvWindow: int = None) -> dict:
params = locals()
del params['self']
params['side'] = self.ORDER_SIDE.BUY
return self.create_oco_order(**params)
def oco_sell_order(self,
symbol: str,
quantity: int,
price: float,
stopPrice: float,
listClientOrderId: str = None,
limitClientOrderId: str = None,
limitIcebergQty: float = None,
stopClientOrderId: str = None,
stopLimitPrice: float = None,
stopIcebergQty: float = None,
stopLimitTimeInForce: str = None,
newOrderRespType: str = None,
recvWindow: int = None) -> dict:
params = locals()
del params['self']
params['side'] = self.ORDER_SIDE.SELL
return self.create_oco_order(**params)
def limit_buy_order(self,
symbol: str,
price: float,
quantity: int,
timeInForce: str,
quoteOrderQty: int = None,
newClientOrderId: str = None,
icebergQty: float = None,
newOrderRespType: str = None,
recvWindow: int = None) -> dict:
params = locals()
del params['self']
params['side'] = self.ORDER_SIDE.BUY
params['type'] = self.ORDER_TYPE.LIMIT
return self.create_order(**params)
def limit_sell_order(self,
symbol: str,
price: float,
quantity: int,
timeInForce: str,
quoteOrderQty: int = None,
newClientOrderId: str = None,
icebergQty: float = None,
newOrderRespType: str = None,
recvWindow: int = None) -> dict:
params = locals()
del params['self']
params['side'] = self.ORDER_SIDE.SELL
params['type'] = self.ORDER_TYPE.LIMIT
return self.create_order(**params)
def limit_maker_buy_order(self,
symbol: str,
price: float,
quantity: int,
timeInForce: str = None,
quoteOrderQty: int = None,
newClientOrderId: str = None,
icebergQty: float = None,
newOrderRespType: str = None,
recvWindow: int = None) -> dict:
params = locals()
del params['self']
params['side'] = self.ORDER_SIDE.BUY
params['type'] = self.ORDER_TYPE.LIMIT_MAKER
return self.create_order(**params)
def limit_maker_sell_order(self,
symbol: str,
price: float,
quantity: int,
timeInForce: str = None,
quoteOrderQty: int = None,
newClientOrderId: str = None,
icebergQty: float = None,
newOrderRespType: str = None,
recvWindow: int = None) -> dict:
params = locals()
del params['self']
params['side'] = self.ORDER_SIDE.SELL
params['type'] = self.ORDER_TYPE.LIMIT_MAKER
return self.create_order(**params)
def market_buy_order(self,
symbol: str,
quantity: int = None,
quoteOrderQty: int = None,
timeInForce: str = None,
newClientOrderId: str = None,
newOrderRespType: str = None,
recvWindow: int = None) -> dict:
params = locals()
del params['self']
if(params['quantity'] is None) and (params['quoteOrderQty'] is None):
raise SpotTradingError(
'Atleast one of qantity or quoteOrderQty not specified')
params['side'] = self.ORDER_SIDE.BUY
params['type'] = self.ORDER_TYPE.MARKET
return self.create_order(**params)
def market_sell_order(self,
symbol: str,
quantity: int = None,
quoteOrderQty: int = None,
timeInForce: str = None,
newClientOrderId: str = None,
newOrderRespType: str = None,
recvWindow: int = None) -> dict:
params = locals()
del params['self']
if(params['quantity'] is None) and (params['quoteOrderQty'] is None):
raise SpotTradingError(
'Atleast one of qantity or quoteOrderQty not specified')
params['side'] = self.ORDER_SIDE.SELL
params['type'] = self.ORDER_TYPE.MARKET
return self.create_order(**params)
def stoploss_buy_order(self,
symbol: str,
quantity: int,
stopPrice: str,
timeInForce: str = None,
quoteOrderQty: int = None,
newClientOrderId: str = None,
newOrderRespType: str = None,
recvWindow: int = None) -> dict:
params = locals()
del params['self']
params['side'] = self.ORDER_SIDE.BUY
params['type'] = self.ORDER_TYPE.STOP_LOSS
return self.create_order(**params)
def stoploss_sell_order(self,
symbol: str,
quantity: int,
stopPrice: str,
timeInForce: str = None,
quoteOrderQty: int = None,
newClientOrderId: str = None,
newOrderRespType: str = None,
recvWindow: int = None) -> dict:
params = locals()
del params['self']
params['side'] = self.ORDER_SIDE.SELL
params['type'] = self.ORDER_TYPE.STOP_LOSS
return self.create_order(**params)
def stoploss_limit_buy_order(self,
symbol: str,
price: str,
quantity: int,
timeInForce: str,
stopPrice: str,
quoteOrderQty: int = None,
newClientOrderId: str = None,
icebergQty: float = None,
newOrderRespType: str = None,
recvWindow: int = None) -> dict:
params = locals()
del params['self']
params['side'] = self.ORDER_SIDE.BUY
params['type'] = self.ORDER_TYPE.STOP_LOSS_LIMIT
return self.create_order(**params)
def stoploss_limit_sell_order(self,
symbol: str,
price: str,
quantity: int,
timeInForce: str,
stopPrice: str,
quoteOrderQty: int = None,
newClientOrderId: str = None,
icebergQty: float = None,
newOrderRespType: str = None,
recvWindow: int = None) -> dict:
params = locals()
del params['self']
params['side'] = self.ORDER_SIDE.SELL
params['type'] = self.ORDER_TYPE.STOP_LOSS_LIMIT
return self.create_order(**params)
def takeprofit_buy_order(self,
symbol: str,
quantity: int,
stopPrice: str,
timeInForce: str = None,
quoteOrderQty: int = None,
newClientOrderId: str = None,
newOrderRespType: str = None,
recvWindow: int = None) -> dict:
params = locals()
del params['self']
params['side'] = self.ORDER_SIDE.BUY
params['type'] = self.ORDER_TYPE.TAKE_PROFIT
return self.create_order(**params)
def takeprofit_sell_order(self,
symbol: str,
quantity: int,
stopPrice: str,
timeInForce: str = None,
quoteOrderQty: int = None,
newClientOrderId: str = None,
newOrderRespType: str = None,
recvWindow: int = None) -> dict:
params = locals()
del params['self']
params['side'] = self.ORDER_SIDE.SELL
params['type'] = self.ORDER_TYPE.TAKE_PROFIT
return self.create_order(**params)
def takeprofit_limit_buy_order(self,
symbol: str,
quantity: int,
stopPrice: str,
timeInForce: str,
icebergQty: float = None,
quoteOrderQty: int = None,
newClientOrderId: str = None,
newOrderRespType: str = None,
recvWindow: int = None) -> dict:
params = locals()
del params['self']
params['side'] = self.ORDER_SIDE.BUY
params['type'] = self.ORDER_TYPE.TAKE_PROFIT_LIMIT
return self.create_order(**params)
def takeprofit_limit_sell_order(self,
symbol: str,
quantity: int,
stopPrice: str,
timeInForce: str,
icebergQty: float = None,
quoteOrderQty: int = None,
newClientOrderId: str = None,
newOrderRespType: str = None,
recvWindow: int = None) -> dict:
params = locals()
del params['self']
params['side'] = self.ORDER_SIDE.SELL
params['type'] = self.ORDER_TYPE.TAKE_PROFIT_LIMIT
return self.create_order(**params)
def get_order(self,
symbol: str,
orderId: int = None,
origClientOrderId: str = None,
recvWindow: int = None) -> dict:
params = locals()
del params['self']
if(params['orderId'] is None) and (params['origClientOrderId'] is None):
raise SpotTradingError('Atleast on of orderId or origClientOrderId not passed',
'for querying order')
params = {k: v for k, v in params.items() if v is not None}
uri = self._create_api_uri('order',
version=self.API_VERSION.PRIVATE)
return self.request_handler.get(uri, signed=True, **params)
def get_open_orders(self,
symbol: str = None,
recvWindow: int = None) -> dict:
params = locals()
del params['self']
params = {k: v for k, v in params.items() if v is not None}
uri = self._create_api_uri('openOrders',
version=self.API_VERSION.PRIVATE)
return self.request_handler.get(uri, signed=True, **params)
def _get_all_orders(self,
symbol: str,
orderId: int = None,
startTime: int = None,
endTime: int = None,
limit: int = None,
recvWindow: int = None) -> dict:
params = locals()
del params['self']
params = {k: v for k, v in params.items() if v is not None}
uri = self._create_api_uri('allOrders',
version=self.API_VERSION.PRIVATE)
return self.request_handler.get(uri, signed=True, **params)
def get_all_orders(self,
symbol: str,
orderId: int = None,
startTime: Union[int, str] = 0,
endTime: Union[int, str] = None) -> dict:
params = locals()
del params['self']
params = {k: v for k, v in params.items() if v is not None}
return self._get_historical_data(self._get_all_orders, **params)
def get_oco_order(self,
symbol: str,
orderListId: int = None,
origClientOrderId: str = None,
recvWindow: int = None) -> dict:
params = locals()
del params['self']
if(params['orderListId'] is None) and (params['origClientOrderId'] is None):
raise SpotTradingError('Atleast on of orderListId or origClientOrderId not passed',
'for querying oco order')
params = {k: v for k, v in params.items() if v is not None}
uri = self._create_api_uri('orderList',
version=self.API_VERSION.PRIVATE)
return self.request_handler.get(uri, signed=True, **params)
def get_open_oco_orders(self,
recvWindow: int = None) -> dict:
params = locals()
del params['self']
params = {k: v for k, v in params.items() if v is not None}
uri = self._create_api_uri('openOrderList',
version=self.API_VERSION.PRIVATE)
return self.request_handler.get(uri, signed=True, **params)
def get_all_oco_orders(self,
formId: int = None,
startTime: Union[int, str] = None,
endTime: Union[int, str] = None,
limit: int = None,
recvWindow: int = None) -> dict:
params = locals()
del params['self']
if(params['formId'] is not None) and (
(params['startTime'] is not None) or (params['endTime'] is not None)):
raise SpotTradingError("All OCO orders called with both formId and startTime/endTime ")
if params['startTime'] is not None:
params['startTime'] = format_time(params['startTime'])
if params['endTime'] is not None:
params['endTime'] = format_time(params['endTime'])
params = {k: v for k, v in params.items() if v is not None}
uri = self._create_api_uri('allOrderList',
version=self.API_VERSION.PRIVATE)
return self.request_handler.get(uri, signed=True, **params)
def get_account_info(self,
                     recvWindow: int = None) -> dict:
    """Fetch current account information (signed request)."""
    request_args = {} if recvWindow is None else {'recvWindow': recvWindow}
    uri = self._create_api_uri('account',
                               version=self.API_VERSION.PRIVATE)
    return self.request_handler.get(uri, signed=True, **request_args)
def get_trade_list(self,
                   symbol: str,
                   formId: int = None,
                   startTime: Union[int, str] = 0,
                   endTime: Union[int, str] = None) -> dict:
    """Fetch the full trade history for *symbol*.

    Filters out None parameters and delegates the paging to
    ``_get_historical_data`` with ``_get_trade_list`` as the fetcher.
    """
    request_args = {'symbol': symbol,
                    'formId': formId,
                    'startTime': startTime,
                    'endTime': endTime}
    request_args = {name: value for name, value in request_args.items()
                    if value is not None}
    return self._get_historical_data(self._get_trade_list, **request_args)
def _get_trade_list(self,
                    symbol: str,
                    startTime: int = None,
                    endTime: int = None,
                    formId: int = None,
                    limit: int = None,
                    recvWindow: int = None) -> dict:
    """Issue one signed request against the myTrades endpoint.

    Intended to be driven by ``_get_historical_data``; returns the raw
    response of a single page.
    """
    params = locals()
    del params['self']
    params = {k: v for k, v in params.items() if v is not None}
    if 'formId' in params:
        # NOTE(review): the API documents this parameter as 'fromId';
        # 'formId' looks like a typo. Keep the argument name, fix the wire
        # key — confirm against the API reference.
        params['fromId'] = params.pop('formId')
    uri = self._create_api_uri('myTrades',
                               version=self.API_VERSION.PRIVATE)
    # Removed leftover debug print(params).
    return self.request_handler.get(uri, signed=True, **params)
def _get_historical_data(self,
                         func: Callable,
                         symbol,
                         startTime: Union[int, str] = 0,
                         endTime: Union[int, str] = None,
                         **kwargs) -> list:
    """Page through *func* for *symbol* and return all rows as one list.

    startTime is clamped to the earliest record available for the symbol;
    pages of 500 are fetched until a short page signals the end. The
    return annotation was corrected from dict to list (a list is what is
    built and returned).

    Raises ValueError when startTime is after endTime.
    """
    # Probe for the earliest record; an empty result means the symbol has
    # no history at all (previously this raised IndexError on [0]).
    initial = func(symbol,
                   startTime=0,
                   limit=1)
    if not initial:
        return []
    earliest_timestamp = initial[0]['time']
    startTime = max(earliest_timestamp, format_time(startTime))
    if endTime is not None:
        endTime = format_time(endTime)
        if startTime > endTime:
            raise ValueError('startTime entered is greater than endTime')
    data = []
    limit = 500
    api_call_count = 0
    while True:
        fetched_data = func(symbol,
                            startTime=startTime,
                            endTime=endTime,
                            limit=limit,
                            **kwargs)
        api_call_count += 1
        data.extend(fetched_data)
        # A short page means the range is exhausted.
        if len(fetched_data) < limit:
            break
        # Resume just past the newest record fetched so far.
        startTime = fetched_data[-1]['time'] + 1
        if api_call_count == 3:
            time.sleep(0.5)  # sleep to prevent overload of api calls
            api_call_count = 0
    return data
if __name__ == '__main__':
pass
| 40.341667
| 99
| 0.460401
| 2,129
| 24,205
| 5.116017
| 0.072334
| 0.043059
| 0.039846
| 0.056923
| 0.871282
| 0.84686
| 0.820051
| 0.806188
| 0.795263
| 0.795263
| 0
| 0.001292
| 0.456476
| 24,205
| 599
| 100
| 40.409015
| 0.826619
| 0.00157
| 0
| 0.792706
| 0
| 0
| 0.041962
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.076775
| false
| 0.023033
| 0.009597
| 0
| 0.149712
| 0.001919
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
9e486d6a4c1c39ab3d266d58a3d2c0c19535fdbd
| 164
|
py
|
Python
|
stepper.py
|
Gyfis/kohonen-nn-implementation-II
|
9e60f33ab4166e095f56ab891e17444ac976e824
|
[
"MIT"
] | 1
|
2016-03-25T15:25:47.000Z
|
2016-03-25T15:25:47.000Z
|
stepper.py
|
Gyfis/kohonen-nn-implementation-II
|
9e60f33ab4166e095f56ab891e17444ac976e824
|
[
"MIT"
] | null | null | null |
stepper.py
|
Gyfis/kohonen-nn-implementation-II
|
9e60f33ab4166e095f56ab891e17444ac976e824
|
[
"MIT"
] | null | null | null |
def default_step(init_val=3, step_every=1, p=-1):
    """Build a step-decay schedule: f(n) = init_val * ((n // step_every) + 1) ** p."""
    def step_fraction(n):
        multiplier = (n // step_every) + 1
        return init_val * multiplier ** p
    return step_fraction
| 32.8
| 54
| 0.658537
| 26
| 164
| 3.884615
| 0.5
| 0.138614
| 0.19802
| 0.217822
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.030769
| 0.207317
| 164
| 4
| 55
| 41
| 0.746154
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0
| 0
| 0.25
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
9e52c0511d60d1847fd4f6f98ac06b11d3cbadb0
| 28
|
py
|
Python
|
example/code_score_python/src/main/python/success/__init__.py
|
frederic-baucher/challenge-examples
|
630139272935e86f58ed3c04c899fc21d07b5ee0
|
[
"MIT"
] | null | null | null |
example/code_score_python/src/main/python/success/__init__.py
|
frederic-baucher/challenge-examples
|
630139272935e86f58ed3c04c899fc21d07b5ee0
|
[
"MIT"
] | 4
|
2019-12-15T15:16:41.000Z
|
2022-02-16T01:10:04.000Z
|
example/code_score_python/src/main/python/success/__init__.py
|
frederic-baucher/challenge-examples
|
630139272935e86f58ed3c04c899fc21d07b5ee0
|
[
"MIT"
] | 7
|
2019-12-12T15:21:49.000Z
|
2021-03-27T14:10:57.000Z
|
def score():
    """Return the fixed success score of 100."""
    result = 100
    return result
| 9.333333
| 14
| 0.607143
| 4
| 28
| 4.25
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.15
| 0.285714
| 28
| 2
| 15
| 14
| 0.7
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| true
| 0
| 0
| 0.5
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
9ea74ee0fd1eb2778ba26f57c53592b2a50aabe0
| 5,663
|
py
|
Python
|
backend/api/migrations/0001_initial.py
|
MimbleWimble-Grin/grin-testnet-deposit-withdraw
|
7943f654b0a6d79d9a31c9719366e9df55d6816c
|
[
"MIT"
] | 6
|
2021-03-11T21:02:21.000Z
|
2022-02-06T20:53:20.000Z
|
backend/api/migrations/0001_initial.py
|
pkariz/grin-testnet-exchange
|
b5c7a5b6322f60348e3b3db563183e2d6d2da234
|
[
"MIT"
] | 1
|
2021-03-12T12:10:19.000Z
|
2021-03-12T12:20:32.000Z
|
backend/api/migrations/0001_initial.py
|
MimbleWimble-Grin/grin-testnet-deposit-withdraw
|
7943f654b0a6d79d9a31c9719366e9df55d6816c
|
[
"MIT"
] | 3
|
2021-03-12T16:42:03.000Z
|
2021-04-19T07:11:33.000Z
|
# Generated by Django 3.1.5 on 2021-03-07 15:03
from django.conf import settings
import django.core.validators
from django.db import migrations, models
import django.db.models.deletion
import django.utils.timezone
import model_utils.fields
class Migration(migrations.Migration):
    """Auto-generated initial migration for the api app.

    Creates the Balance, Currency, Withdrawal and Deposit tables, then
    wires Balance to Currency and to the swappable auth user model.
    """
    initial = True
    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]
    operations = [
        # Per-user, per-currency balance; FKs to currency/user added below.
        migrations.CreateModel(
            name='Balance',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('created', model_utils.fields.AutoCreatedField(default=django.utils.timezone.now, editable=False, verbose_name='created')),
                ('modified', model_utils.fields.AutoLastModifiedField(default=django.utils.timezone.now, editable=False, verbose_name='modified')),
                # 9 decimal places, upper bound 2**64 (18446744073709551616).
                ('amount', models.DecimalField(decimal_places=9, default=0.0, max_digits=30, validators=[django.core.validators.MinValueValidator(0), django.core.validators.MaxValueValidator(18446744073709551616)])),
                ('locked_amount', models.DecimalField(decimal_places=9, default=0.0, max_digits=30, validators=[django.core.validators.MinValueValidator(0), django.core.validators.MaxValueValidator(18446744073709551616)])),
            ],
            options={
                'abstract': False,
            },
        ),
        migrations.CreateModel(
            name='Currency',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('created', model_utils.fields.AutoCreatedField(default=django.utils.timezone.now, editable=False, verbose_name='created')),
                ('modified', model_utils.fields.AutoLastModifiedField(default=django.utils.timezone.now, editable=False, verbose_name='modified')),
                ('name', models.CharField(max_length=255, unique=True)),
                ('symbol', models.SlugField(max_length=255, unique=True)),
            ],
            options={
                'verbose_name_plural': 'currencies',
            },
        ),
        # Withdrawal and Deposit share the same column layout; only the
        # related_name on the Balance FK differs.
        migrations.CreateModel(
            name='Withdrawal',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('created', model_utils.fields.AutoCreatedField(default=django.utils.timezone.now, editable=False, verbose_name='created')),
                ('modified', model_utils.fields.AutoLastModifiedField(default=django.utils.timezone.now, editable=False, verbose_name='modified')),
                ('amount', models.DecimalField(decimal_places=9, default=0.0, max_digits=30, validators=[django.core.validators.MinValueValidator(0), django.core.validators.MaxValueValidator(18446744073709551616)])),
                ('status', models.CharField(choices=[('awaiting transaction signature', 'awaiting transaction signature'), ('awaiting confirmation', 'awaiting confirmation'), ('finished', 'finished'), ('canceled', 'canceled')], max_length=255)),
                ('confirmations', models.IntegerField(default=0, validators=[django.core.validators.MinValueValidator(0)])),
                ('tx_slate_id', models.CharField(max_length=255, unique=True)),
                ('kernel_excess', models.CharField(blank=True, max_length=255, null=True, unique=True)),
                ('balance', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='withdrawals', to='api.balance')),
            ],
            options={
                'ordering': ['created'],
            },
        ),
        migrations.CreateModel(
            name='Deposit',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('created', model_utils.fields.AutoCreatedField(default=django.utils.timezone.now, editable=False, verbose_name='created')),
                ('modified', model_utils.fields.AutoLastModifiedField(default=django.utils.timezone.now, editable=False, verbose_name='modified')),
                ('amount', models.DecimalField(decimal_places=9, default=0.0, max_digits=30, validators=[django.core.validators.MinValueValidator(0), django.core.validators.MaxValueValidator(18446744073709551616)])),
                ('status', models.CharField(choices=[('awaiting transaction signature', 'awaiting transaction signature'), ('awaiting confirmation', 'awaiting confirmation'), ('finished', 'finished'), ('canceled', 'canceled')], max_length=255)),
                ('confirmations', models.IntegerField(default=0, validators=[django.core.validators.MinValueValidator(0)])),
                ('tx_slate_id', models.CharField(max_length=255, unique=True)),
                ('kernel_excess', models.CharField(blank=True, max_length=255, null=True, unique=True)),
                ('balance', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='deposits', to='api.balance')),
            ],
            options={
                'ordering': ['created'],
            },
        ),
        # FKs added after creation because Currency is defined after Balance.
        migrations.AddField(
            model_name='balance',
            name='currency',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='balances', to='api.currency'),
        ),
        migrations.AddField(
            model_name='balance',
            name='user',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='balances', to=settings.AUTH_USER_MODEL),
        ),
    ]
| 62.230769
| 245
| 0.651068
| 564
| 5,663
| 6.414894
| 0.189716
| 0.039525
| 0.053068
| 0.05749
| 0.838861
| 0.832781
| 0.814538
| 0.779989
| 0.779989
| 0.779989
| 0
| 0.032842
| 0.209606
| 5,663
| 90
| 246
| 62.922222
| 0.775469
| 0.007946
| 0
| 0.614458
| 1
| 0
| 0.131588
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.072289
| 0
| 0.120482
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
7b9a61c282353d529ebacfaee8458921181c15ae
| 17,221
|
py
|
Python
|
platform/core/tests/test_auditor/test_auditor_tensorboard.py
|
hackerwins/polyaxon
|
ff56a098283ca872abfbaae6ba8abba479ffa394
|
[
"Apache-2.0"
] | null | null | null |
platform/core/tests/test_auditor/test_auditor_tensorboard.py
|
hackerwins/polyaxon
|
ff56a098283ca872abfbaae6ba8abba479ffa394
|
[
"Apache-2.0"
] | null | null | null |
platform/core/tests/test_auditor/test_auditor_tensorboard.py
|
hackerwins/polyaxon
|
ff56a098283ca872abfbaae6ba8abba479ffa394
|
[
"Apache-2.0"
] | null | null | null |
# pylint:disable=ungrouped-imports
from unittest.mock import patch
import pytest
import auditor
from events.registry import tensorboard as tensorboard_events
from factories.factory_plugins import TensorboardJobFactory
from factories.factory_projects import ProjectFactory
from tests.test_auditor.utils import AuditorBaseTest
@pytest.mark.auditor_mark
class AuditorTensorboardTest(AuditorBaseTest):
    """Testing subscribed events"""
    EVENTS = tensorboard_events.EVENTS

    def setUp(self):
        self.tensorboard = TensorboardJobFactory(project=ProjectFactory())
        super().setUp()
        self.tested_events = {
            tensorboard_events.TENSORBOARD_STARTED,
            tensorboard_events.TENSORBOARD_STARTED_TRIGGERED,
            tensorboard_events.TENSORBOARD_STOPPED,
            tensorboard_events.TENSORBOARD_STOPPED_TRIGGERED,
            tensorboard_events.TENSORBOARD_CLEANED_TRIGGERED,
            tensorboard_events.TENSORBOARD_VIEWED,
            tensorboard_events.TENSORBOARD_UNBOOKMARKED,
            tensorboard_events.TENSORBOARD_BOOKMARKED,
            tensorboard_events.TENSORBOARD_NEW_STATUS,
            tensorboard_events.TENSORBOARD_FAILED,
            tensorboard_events.TENSORBOARD_SUCCEEDED,
            tensorboard_events.TENSORBOARD_STATUSES_VIEWED,
            tensorboard_events.TENSORBOARD_UPDATED,
            tensorboard_events.TENSORBOARD_DELETED,
            tensorboard_events.TENSORBOARD_DELETED_TRIGGERED,
            tensorboard_events.TENSORBOARD_ARCHIVED,
            tensorboard_events.TENSORBOARD_RESTORED,
        }

    def _record_and_check(self, event_type,
                          activitylogs_record, tracker_record,
                          notifier_record, executor_record,
                          tracker=0, activitylogs=0, notifier=0, executor=0,
                          **record_kwargs):
        """Record *event_type* via auditor and assert each service's call count."""
        auditor.record(event_type=event_type, **record_kwargs)
        assert tracker_record.call_count == tracker
        assert activitylogs_record.call_count == activitylogs
        assert notifier_record.call_count == notifier
        assert executor_record.call_count == executor

    @patch('executor.executor_service.ExecutorService.record_event')
    @patch('notifier.service.NotifierService.record_event')
    @patch('tracker.service.TrackerService.record_event')
    @patch('activitylogs.service.ActivityLogService.record_event')
    def test_tensorboard_started(self, activitylogs_record, tracker_record,
                                 notifier_record, executor_record):
        self._record_and_check(tensorboard_events.TENSORBOARD_STARTED,
                               activitylogs_record, tracker_record,
                               notifier_record, executor_record,
                               tracker=1, notifier=1, executor=1,
                               instance=self.tensorboard, target='project')

    @patch('executor.executor_service.ExecutorService.record_event')
    @patch('notifier.service.NotifierService.record_event')
    @patch('tracker.service.TrackerService.record_event')
    @patch('activitylogs.service.ActivityLogService.record_event')
    def test_tensorboard_started_triggered(self, activitylogs_record, tracker_record,
                                           notifier_record, executor_record):
        self._record_and_check(tensorboard_events.TENSORBOARD_STARTED_TRIGGERED,
                               activitylogs_record, tracker_record,
                               notifier_record, executor_record,
                               tracker=1, activitylogs=1,
                               instance=self.tensorboard, actor_id=1,
                               actor_name='foo', target='project')

    @patch('executor.executor_service.ExecutorService.record_event')
    @patch('notifier.service.NotifierService.record_event')
    @patch('tracker.service.TrackerService.record_event')
    @patch('activitylogs.service.ActivityLogService.record_event')
    def test_tensorboard_stopped(self, activitylogs_record, tracker_record,
                                 notifier_record, executor_record):
        self._record_and_check(tensorboard_events.TENSORBOARD_STOPPED,
                               activitylogs_record, tracker_record,
                               notifier_record, executor_record,
                               tracker=1, notifier=1, executor=1,
                               instance=self.tensorboard, target='project')

    @patch('executor.executor_service.ExecutorService.record_event')
    @patch('notifier.service.NotifierService.record_event')
    @patch('tracker.service.TrackerService.record_event')
    @patch('activitylogs.service.ActivityLogService.record_event')
    def test_tensorboard_stopped_triggered(self, activitylogs_record, tracker_record,
                                           notifier_record, executor_record):
        self._record_and_check(tensorboard_events.TENSORBOARD_STOPPED_TRIGGERED,
                               activitylogs_record, tracker_record,
                               notifier_record, executor_record,
                               tracker=1, activitylogs=1,
                               instance=self.tensorboard, actor_id=1,
                               actor_name='foo', target='project')

    @patch('executor.executor_service.ExecutorService.record_event')
    @patch('notifier.service.NotifierService.record_event')
    @patch('tracker.service.TrackerService.record_event')
    @patch('activitylogs.service.ActivityLogService.record_event')
    def test_tensorboard_cleaned_triggered(self, activitylogs_record, tracker_record,
                                           notifier_record, executor_record):
        self._record_and_check(tensorboard_events.TENSORBOARD_CLEANED_TRIGGERED,
                               activitylogs_record, tracker_record,
                               notifier_record, executor_record,
                               executor=1,
                               instance=self.tensorboard, actor_id=1,
                               actor_name='foo', target='project')

    @patch('executor.executor_service.ExecutorService.record_event')
    @patch('notifier.service.NotifierService.record_event')
    @patch('tracker.service.TrackerService.record_event')
    @patch('activitylogs.service.ActivityLogService.record_event')
    def test_tensorboard_viewed(self, activitylogs_record, tracker_record,
                                notifier_record, executor_record):
        self._record_and_check(tensorboard_events.TENSORBOARD_VIEWED,
                               activitylogs_record, tracker_record,
                               notifier_record, executor_record,
                               tracker=1, activitylogs=1,
                               instance=self.tensorboard, actor_id=1,
                               actor_name='foo', target='project')

    @patch('executor.executor_service.ExecutorService.record_event')
    @patch('notifier.service.NotifierService.record_event')
    @patch('tracker.service.TrackerService.record_event')
    @patch('activitylogs.service.ActivityLogService.record_event')
    def test_tensorboard_unbookmarked(self, activitylogs_record, tracker_record,
                                      notifier_record, executor_record):
        self._record_and_check(tensorboard_events.TENSORBOARD_UNBOOKMARKED,
                               activitylogs_record, tracker_record,
                               notifier_record, executor_record,
                               tracker=1, activitylogs=1,
                               instance=self.tensorboard, actor_id=1,
                               actor_name='foo', target='project')

    @patch('executor.executor_service.ExecutorService.record_event')
    @patch('notifier.service.NotifierService.record_event')
    @patch('tracker.service.TrackerService.record_event')
    @patch('activitylogs.service.ActivityLogService.record_event')
    def test_tensorboard_bookmarked(self, activitylogs_record, tracker_record,
                                    notifier_record, executor_record):
        self._record_and_check(tensorboard_events.TENSORBOARD_BOOKMARKED,
                               activitylogs_record, tracker_record,
                               notifier_record, executor_record,
                               tracker=1, activitylogs=1,
                               instance=self.tensorboard, actor_id=1,
                               actor_name='foo', target='project')

    @patch('executor.executor_service.ExecutorService.record_event')
    @patch('notifier.service.NotifierService.record_event')
    @patch('tracker.service.TrackerService.record_event')
    @patch('activitylogs.service.ActivityLogService.record_event')
    def test_tensorboard_new_status(self, activitylogs_record, tracker_record,
                                    notifier_record, executor_record):
        self._record_and_check(tensorboard_events.TENSORBOARD_NEW_STATUS,
                               activitylogs_record, tracker_record,
                               notifier_record, executor_record,
                               tracker=1, executor=1,
                               instance=self.tensorboard, target='project')

    @patch('executor.executor_service.ExecutorService.record_event')
    @patch('notifier.service.NotifierService.record_event')
    @patch('tracker.service.TrackerService.record_event')
    @patch('activitylogs.service.ActivityLogService.record_event')
    def test_tensorboard_failed(self, activitylogs_record, tracker_record,
                                notifier_record, executor_record):
        self._record_and_check(tensorboard_events.TENSORBOARD_FAILED,
                               activitylogs_record, tracker_record,
                               notifier_record, executor_record,
                               tracker=1, notifier=1, executor=1,
                               instance=self.tensorboard, target='project')

    @patch('executor.executor_service.ExecutorService.record_event')
    @patch('notifier.service.NotifierService.record_event')
    @patch('tracker.service.TrackerService.record_event')
    @patch('activitylogs.service.ActivityLogService.record_event')
    def test_tensorboard_succeeded(self, activitylogs_record, tracker_record,
                                   notifier_record, executor_record):
        self._record_and_check(tensorboard_events.TENSORBOARD_SUCCEEDED,
                               activitylogs_record, tracker_record,
                               notifier_record, executor_record,
                               tracker=1, notifier=1, executor=1,
                               instance=self.tensorboard, target='project')

    @patch('executor.executor_service.ExecutorService.record_event')
    @patch('notifier.service.NotifierService.record_event')
    @patch('tracker.service.TrackerService.record_event')
    @patch('activitylogs.service.ActivityLogService.record_event')
    def test_tensorboard_statuses_viewed(self, activitylogs_record, tracker_record,
                                         notifier_record, executor_record):
        self._record_and_check(tensorboard_events.TENSORBOARD_STATUSES_VIEWED,
                               activitylogs_record, tracker_record,
                               notifier_record, executor_record,
                               tracker=1, activitylogs=1,
                               instance=self.tensorboard, target='project',
                               actor_name='foo', actor_id=1)

    @patch('executor.executor_service.ExecutorService.record_event')
    @patch('notifier.service.NotifierService.record_event')
    @patch('tracker.service.TrackerService.record_event')
    @patch('activitylogs.service.ActivityLogService.record_event')
    def test_tensorboard_updated(self, activitylogs_record, tracker_record,
                                 notifier_record, executor_record):
        self._record_and_check(tensorboard_events.TENSORBOARD_UPDATED,
                               activitylogs_record, tracker_record,
                               notifier_record, executor_record,
                               tracker=1, activitylogs=1,
                               instance=self.tensorboard,
                               actor_name='foo', actor_id=1)

    @patch('executor.executor_service.ExecutorService.record_event')
    @patch('notifier.service.NotifierService.record_event')
    @patch('tracker.service.TrackerService.record_event')
    @patch('activitylogs.service.ActivityLogService.record_event')
    def test_tensorboard_deleted(self, activitylogs_record, tracker_record,
                                 notifier_record, executor_record):
        self._record_and_check(tensorboard_events.TENSORBOARD_DELETED,
                               activitylogs_record, tracker_record,
                               notifier_record, executor_record,
                               tracker=1,
                               instance=self.tensorboard)

    @patch('executor.executor_service.ExecutorService.record_event')
    @patch('notifier.service.NotifierService.record_event')
    @patch('tracker.service.TrackerService.record_event')
    @patch('activitylogs.service.ActivityLogService.record_event')
    def test_tensorboard_triggered_deleted(self, activitylogs_record, tracker_record,
                                           notifier_record, executor_record):
        self._record_and_check(tensorboard_events.TENSORBOARD_DELETED_TRIGGERED,
                               activitylogs_record, tracker_record,
                               notifier_record, executor_record,
                               tracker=1, activitylogs=1,
                               instance=self.tensorboard, actor_name='foo',
                               target='project', actor_id=1)

    @patch('executor.executor_service.ExecutorService.record_event')
    @patch('notifier.service.NotifierService.record_event')
    @patch('tracker.service.TrackerService.record_event')
    @patch('activitylogs.service.ActivityLogService.record_event')
    def test_tensorboard_job_archived(self, activitylogs_record, tracker_record,
                                      notifier_record, executor_record):
        self._record_and_check(tensorboard_events.TENSORBOARD_ARCHIVED,
                               activitylogs_record, tracker_record,
                               notifier_record, executor_record,
                               tracker=1, activitylogs=1,
                               instance=self.tensorboard,
                               actor_name='foo', actor_id=1)

    @patch('executor.executor_service.ExecutorService.record_event')
    @patch('notifier.service.NotifierService.record_event')
    @patch('tracker.service.TrackerService.record_event')
    @patch('activitylogs.service.ActivityLogService.record_event')
    def test_tensorboard_job_restored(self, activitylogs_record, tracker_record,
                                      notifier_record, executor_record):
        self._record_and_check(tensorboard_events.TENSORBOARD_RESTORED,
                               activitylogs_record, tracker_record,
                               notifier_record, executor_record,
                               tracker=1, activitylogs=1,
                               instance=self.tensorboard,
                               actor_name='foo', actor_id=1)


# NOTE(review): presumably removed so the base class is not collected twice
# by pytest — confirm.
del AuditorBaseTest
| 46.669377
| 83
| 0.605017
| 1,490
| 17,221
| 6.685906
| 0.051678
| 0.093857
| 0.102389
| 0.05782
| 0.890785
| 0.856254
| 0.85274
| 0.85274
| 0.85274
| 0.85274
| 0
| 0.0068
| 0.325417
| 17,221
| 368
| 84
| 46.796196
| 0.850736
| 0.003426
| 0
| 0.786378
| 0
| 0
| 0.199464
| 0.192236
| 0
| 0
| 0
| 0
| 0.210526
| 1
| 0.055728
| false
| 0
| 0.021672
| 0
| 0.083591
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
c8f36696fea1f160a4f84375b3f9130301149dca
| 47,043
|
py
|
Python
|
SBaaS_LIMS/lims_experiment_query.py
|
dmccloskey/SBaaS_LIMS
|
5dfd73689674953345d523178a67b8dda10e6d47
|
[
"MIT"
] | null | null | null |
SBaaS_LIMS/lims_experiment_query.py
|
dmccloskey/SBaaS_LIMS
|
5dfd73689674953345d523178a67b8dda10e6d47
|
[
"MIT"
] | null | null | null |
SBaaS_LIMS/lims_experiment_query.py
|
dmccloskey/SBaaS_LIMS
|
5dfd73689674953345d523178a67b8dda10e6d47
|
[
"MIT"
] | null | null | null |
from SBaaS_base.sbaas_base import sbaas_base
from .lims_experiment_postgresql_models import *
from .lims_experimentor_postgresql_models import *
from .lims_sample_postgresql_models import *
from .lims_extractionMethod_postgresql_models import *
from .lims_acquisitionMethod_postgresql_models import *
from SBaaS_base.sbaas_base_query_update import sbaas_base_query_update
from SBaaS_base.sbaas_base_query_drop import sbaas_base_query_drop
from SBaaS_base.sbaas_base_query_initialize import sbaas_base_query_initialize
from SBaaS_base.sbaas_base_query_insert import sbaas_base_query_insert
from SBaaS_base.sbaas_base_query_select import sbaas_base_query_select
from SBaaS_base.sbaas_base_query_delete import sbaas_base_query_delete
from SBaaS_base.sbaas_template_query import sbaas_template_query
class lims_experiment_query(sbaas_template_query):
def initialize_supportedTables(self):
'''Set the supported tables dict for
'''
tables_supported = {"experiment_types":experiment_types,
"experiment":experiment,
};
self.set_supportedTables(tables_supported);
    def get_CVSAndCVSUnitsAndODAndDilAndDilUnits_sampleNameShort(self,experiment_id_I,sample_name_short_I,exp_type_I=4):
        '''Query culture volume sampled (+ units), OD600 and reconstitution
        (dilution) volume (+ units) for one short sample name.

        NOTE: intended to be used within a for loop; indexes [0] on the
        result, so an empty result raises IndexError (not caught by the
        SQLAlchemyError handler below).
        '''
        try:
            # Join experiment -> sample -> sample_description ->
            # sample_physiologicalParameters via like() comparisons on the
            # shared name/id columns.
            physiologicalParameters = self.session.query(sample_physiologicalParameters.culture_volume_sampled,
                    sample_physiologicalParameters.culture_volume_sampled_units,
                    sample_physiologicalParameters.od600,
                    sample_description.reconstitution_volume,
                    sample_description.reconstitution_volume_units).filter(
                    experiment.id.like(experiment_id_I),
                    experiment.exp_type_id == exp_type_I,
                    experiment.sample_name.like(sample.sample_name),
                    sample.sample_id.like(sample_description.sample_id),
                    sample_description.sample_name_short.like(sample_name_short_I),
                    sample_description.sample_id.like(sample_physiologicalParameters.sample_id)).all();
            # Unpack the first (assumed only) row positionally in the order
            # the columns were selected above.
            cvs_O = physiologicalParameters[0][0];
            cvs_units_O = physiologicalParameters[0][1];
            od600_O = physiologicalParameters[0][2];
            dil_O = physiologicalParameters[0][3];
            dil_units_O = physiologicalParameters[0][4];
            return cvs_O, cvs_units_O, od600_O, dil_O, dil_units_O;
        except SQLAlchemyError as e:
            print(e);
# query description from sample_description
def get_description_experimentIDAndSampleID_sampleDescription(self,experiment_id_I,sample_id_I):
'''Query description by sample id from sample_description'''
try:
data = self.session.query(sample_description).filter(
experiment.id.like(experiment_id_I),
experiment.sample_name.like(sample.sample_name),
sample.sample_id.like(sample_id_I),
sample.sample_id.like(sample_description.sample_id)).first();
desc = {};
if data:
desc['sample_id']=data.sample_id;
desc['sample_name_short']=data.sample_name_short;
desc['sample_name_abbreviation']=data.sample_name_abbreviation;
desc['sample_date']=data.sample_date;
desc['time_point']=data.time_point;
desc['sample_condition']=data.sample_condition;
desc['extraction_method_id']=data.extraction_method_id;
desc['biological_material']=data.biological_material;
desc['sample_desc']=data.sample_desc;
desc['sample_replicate']=data.sample_replicate;
desc['is_added']=data.is_added;
desc['is_added_units']=data.is_added_units;
desc['reconstitution_volume']=data.reconstitution_volume;
desc['reconstitution_volume_units']=data.reconstitution_volume_units;
desc['istechnical']=data.istechnical;
desc['sample_replicate_biological']=data.sample_replicate_biological;
desc['notes']=data.notes;
return desc;
except SQLAlchemyError as e:
print(e);
def drop_lims_experimentTypes(self):
try:
experiment_types.__table__.drop(self.engine,True);
except SQLAlchemyError as e:
print(e);
def reset_lims_experimentTypes(self):
try:
reset = self.session.query(experiment_types).delete(synchronize_session=False);
self.session.commit();
except SQLAlchemyError as e:
print(e);
def initialize_lims_experimentTypes(self):
try:
experiment_types.__table__.create(self.engine,True);
except SQLAlchemyError as e:
print(e);
def drop_lims_experiment(self):
try:
experiment.__table__.drop(self.engine,True);
except SQLAlchemyError as e:
print(e);
def reset_lims_experiment(self,experiment_id_I=None):
try:
if experiment_id_I:
reset = self.session.query(experiment).filter(experiment.id.like(experiment_id_I)).delete(synchronize_session=False);
else:
reset = self.session.query(experiment).delete(synchronize_session=False);
self.session.commit();
except SQLAlchemyError as e:
print(e);
def initialize_lims_experiment(self):
try:
experiment.__table__.create(self.engine,True);
except SQLAlchemyError as e:
print(e);
    def add_experiment(self, data_I):
        '''Add rows of experiment.

        Each row is added and committed individually; IntegrityError and
        other SQLAlchemyErrors are printed and rolled back so the remaining
        rows are still attempted.
        '''
        if data_I:
            for d in data_I:
                try:
                    # The whole dict/row is passed to the model constructor;
                    # the individual columns below were deliberately left as
                    # a reference to the expected keys.
                    data_add = experiment(d
                                #d['exp_type_id'],
                                #d['id'],
                                #d['sample_name'],
                                #d['experimentor_id'],
                                #d['extraction_method_id'],
                                #d['acquisition_method_id'],
                                #d['quantitation_method_id'],
                                #d['internal_standard_id']
                                );
                    self.session.add(data_add);
                    self.session.commit();
                except IntegrityError as e:
                    print(e);
                    self.session.rollback();
                except SQLAlchemyError as e:
                    print(e);
                    self.session.rollback();
        # Final commit runs unconditionally (a no-op when nothing is pending).
        self.session.commit();
    def update_experiment(self,data_I):
        '''Update rows of experiment.

        Rows are matched on (sample_name, id); the identifying columns
        themselves are deliberately not updated (left commented out below).
        All updates are committed in one transaction after the loop.
        '''
        if data_I:
            for d in data_I:
                try:
                    data_update = self.session.query(experiment).filter(
                        experiment.sample_name==d['sample_name'],
                        experiment.id==d['id']).update(
                        {#'wid ':d['wid '],
                        'exp_type_id':d['exp_type_id'],
                        #'id':d['id'],
                        #'sample_name':d['sample_name'],
                        'experimentor_id':d['experimentor_id'],
                        'extraction_method_id':d['extraction_method_id'],
                        'acquisition_method_id':d['acquisition_method_id'],
                        'quantitation_method_id':d['quantitation_method_id'],
                        'internal_standard_id':d['internal_standard_id']},
                        synchronize_session=False);
                except SQLAlchemyError as e:
                    print(e);
        # Single commit for all updates in the batch.
        self.session.commit();
    def get_nMaxBioReps_sampleDescription(self,experiment_id_I):
        '''Query the maximum number of biological replicates corresponding to a given experiment'''
        try:
            # Distinct replicate numbers (technical replicates excluded),
            # ordered descending so the first row holds the maximum.
            bioReps = self.session.query(sample_description.sample_replicate).filter(
                experiment.id.like(experiment_id_I),
                experiment.sample_name.like(sample.sample_name),
                sample.sample_id.like(sample_description.sample_id),
                sample_description.istechnical != True).group_by(
                sample_description.sample_replicate).order_by(
                sample_description.sample_replicate.desc()).all();
            maxBioReps_O = 0;
            if bioReps:
                # bioReps[0] is a one-column row tuple; max() extracts its value.
                maxBioReps_O = max(bioReps[0]);
            else:
                # NOTE(review): exit(-1) terminates the whole process on
                # missing data — confirm this is intended for library code.
                print('no biological replicates found for experiment ' + experiment_id_I);
                exit(-1);
            return maxBioReps_O;
        except SQLAlchemyError as e:
            print(e);
    def get_batchFileInfo_experimentID(self,experiment_id_I,sample_type_I):
        '''Query data from experiment and sample for batch file.

        Returns a list of dicts (id, sample_name, sample_type,
        acquisition_method_id, sample_dilution, sample_replicate) grouped
        over all selected columns and ordered for batch-file generation.
        '''
        try:
            data = self.session.query(experiment.id,
                    sample.sample_name,
                    experiment.acquisition_method_id,
                    sample.sample_dilution,
                    sample.sample_type,
                    sample_description.sample_replicate,
                    sample_description.sample_desc,
                    sample_description.sample_name_abbreviation).filter(
                    experiment.id.like(experiment_id_I),
                    experiment.sample_name.like(sample.sample_name),
                    sample.sample_type.like(sample_type_I),
                    sample.sample_id.like(sample_description.sample_id)).group_by(
                    experiment.id,
                    sample.sample_name,
                    experiment.acquisition_method_id,
                    sample.sample_dilution,
                    sample.sample_type,
                    sample_description.sample_replicate,
                    sample_description.sample_desc,
                    sample_description.sample_name_abbreviation).order_by(
                    experiment.id.asc(),
                    sample.sample_dilution.desc(),
                    sample_description.sample_name_abbreviation.asc(),
                    #sample.sample_name.asc(),
                    sample_description.sample_replicate.asc(),
                    sample_description.sample_desc.desc()).all();
            # Alternative ordering kept for reference:
            #.order_by(
            #        experiment.id.asc(),
            #        sample.sample_dilution.desc(),
            #        sample_description.sample_replicate.asc(),
            #        sample_description.sample_desc.desc(),
            #        sample.sample_name.asc()).all();
            data_O = [];
            if data:
                # Only a subset of the selected columns is surfaced to the caller.
                for d in data:
                    data_tmp = {};
                    data_tmp['id']=d.id;
                    data_tmp['sample_name']=d.sample_name;
                    data_tmp['sample_type']=d.sample_type;
                    data_tmp['acquisition_method_id']=d.acquisition_method_id;
                    data_tmp['sample_dilution']=d.sample_dilution;
                    data_tmp['sample_replicate']=d.sample_replicate;
                    data_O.append(data_tmp);
            else:
                print('no data found for experiment ' + experiment_id_I + ' and sample_type' + sample_type_I);
            return data_O;
        except SQLAlchemyError as e:
            print(e);
    def get_batchFileInfo_experimentIDAndExpType(self,experiment_id_I,sample_type_I,exp_type_I):
        '''Query data from experiment and sample for batch file

        Same as get_batchFileInfo_experimentID, with an additional equality
        filter on experiment.exp_type_id.

        Args:
            experiment_id_I: experiment id (LIKE pattern)
            sample_type_I: sample type (LIKE pattern)
            exp_type_I: experiment type id (exact match)

        Returns:
            list of dicts with keys 'id', 'sample_name', 'sample_type',
            'acquisition_method_id', 'sample_dilution', 'sample_replicate';
            empty list (with a printed message) when nothing matches.
        '''
        try:
            data = self.session.query(experiment.id,
                sample.sample_name,
                experiment.acquisition_method_id,
                sample.sample_dilution,
                sample.sample_type,
                sample_description.sample_replicate,
                sample_description.sample_desc,
                sample_description.sample_name_abbreviation).filter(
                experiment.id.like(experiment_id_I),
                experiment.exp_type_id==exp_type_I,
                experiment.sample_name.like(sample.sample_name),
                sample.sample_type.like(sample_type_I),
                sample.sample_id.like(sample_description.sample_id)).group_by(
                experiment.id,
                sample.sample_name,
                experiment.acquisition_method_id,
                sample.sample_dilution,
                sample.sample_type,
                sample_description.sample_replicate,
                sample_description.sample_desc,
                sample_description.sample_name_abbreviation).order_by(
                experiment.id.asc(),
                sample.sample_dilution.desc(),
                sample_description.sample_name_abbreviation.asc(),
                #sample.sample_name.asc(),
                sample_description.sample_replicate.asc(),
                sample_description.sample_desc.desc()).all();
            #.order_by(
            #    experiment.id.asc(),
            #    sample.sample_dilution.desc(),
            #    sample_description.sample_replicate.asc(),
            #    sample_description.sample_desc.desc(),
            #    sample.sample_name.asc()).all();
            data_O = [];
            if data:
                for d in data:
                    data_tmp = {};
                    data_tmp['id']=d.id;
                    data_tmp['sample_name']=d.sample_name;
                    data_tmp['sample_type']=d.sample_type;
                    data_tmp['acquisition_method_id']=d.acquisition_method_id;
                    data_tmp['sample_dilution']=d.sample_dilution;
                    data_tmp['sample_replicate']=d.sample_replicate;
                    data_O.append(data_tmp);
            else:
                print('no data found for experiment ' + experiment_id_I + ' and sample_type' + sample_type_I);
            return data_O;
        except SQLAlchemyError as e:
            print(e);
def delete_sample_experimentIDAndSampleName_experiment(self,dataListDelete_I):
'''Delete specific samples from an experiment by their sample name from experiment'''
deletes = [];
for d in dataListDelete_I:
try:
delete = self.session.query(experiment).filter(
experiment.id.like(d['experiment_id']),
experiment.sample_name.like(d['sample_name'])).delete(
synchronize_session=False);
if delete == 0:
print('row not found')
print(d);
deletes.append(delete);
except SQLAlchemyError as e:
print(e);
self.session.commit();
def delete_sample_sampleName_sample(self,dataListDelete_I):
'''Delete specific samples from an experiment by their sample name'''
deletes = [];
for d in dataListDelete_I:
try:
delete = self.session.query(sample).filter(
sample.sample_name.like(d['sample_name'])).delete(
synchronize_session=False);
if delete == 0:
print('row not found')
print(d);
deletes.append(delete);
except SQLAlchemyError as e:
print(e);
self.session.commit();
def delete_sample_sampleID_sampleDescription(self,dataListDelete_I):
'''Delete specific samples from an experiment by their sample ID from sample_description'''
deletes = [];
for d in dataListDelete_I:
try:
delete = self.session.query(sample_description).filter(
sample_description.sample_id.like(d['sample_id'])).delete(
synchronize_session=False);
if delete == 0:
print('row not found')
print(d);
deletes.append(delete);
except SQLAlchemyError as e:
print(e);
self.session.commit();
def delete_sample_sampleID_sampleStorage(self,dataListDelete_I):
'''Delete specific samples from an experiment by their sample ID from sample_storage'''
deletes = [];
for d in dataListDelete_I:
try:
delete = self.session.query(sample_storage).filter(
sample_storage.sample_id.like(d['sample_id'])).delete(
synchronize_session=False);
if delete == 0:
print('row not found')
print(d);
deletes.append(delete);
except SQLAlchemyError as e:
print(e);
self.session.commit();
def delete_sample_sampleID_samplePhysiologicalParameters(self,dataListDelete_I):
'''Delete specific samples from an experiment by their sample ID from sample_physiologicalparameters'''
deletes = [];
for d in dataListDelete_I:
try:
delete = self.session.query(sample_physiologicalParameters).filter(
sample_physiologicalParameters.sample_id.like(d['sample_id'])).delete(
synchronize_session=False);
if delete == 0:
print('row not found')
print(d);
deletes.append(delete);
except SQLAlchemyError as e:
print(e);
self.session.commit();
def delete_sample_experimentID_experiment(self,dataListDelete_I):
'''Delete samples from an experiment from experiment'''
deletes = [];
for d in dataListDelete_I:
try:
delete = self.session.query(experiment).filter(
experiment.id.like(d['experiment_id'])).delete(
synchronize_session=False);
if delete == 0:
print('row not found')
print(d);
deletes.append(delete);
except SQLAlchemyError as e:
print(e);
self.session.commit();
def delete_sample_experimentID_sample(self,dataListDelete_I):
'''Delete an experiment from sample'''
deletes = [];
for d in dataListDelete_I:
try:
delete = self.session.query(sample).filter(
experiment.id.like(d['experiment_id']),
experiment.sample_name.like(sample.sample_name)).delete(
synchronize_session=False);
if delete == 0:
print('row not found')
print(d);
deletes.append(delete);
except SQLAlchemyError as e:
print(e);
self.session.commit();
def get_nMaxBioReps_experimentIDAndSampleName_sampleDescription(self,experiment_id_I,sample_name_I):
'''Query the maximum number of biological replicates corresponding to a given experiment'''
try:
bioReps = self.session.query(sample_description.sample_replicate).filter(
experiment.id.like(experiment_id_I),
experiment.sample_name.like(sample_name_I),
experiment.sample_name.like(sample.sample_name),
sample.sample_id.like(sample_description.sample_id),
sample_description.istechnical != True).group_by(
sample_description.sample_replicate).order_by(
sample_description.sample_replicate.desc()).all();
maxBioReps_O = 0;
if bioReps:
maxBioReps_O = max(bioReps[0]);
else:
print('no biological replicates found for experiment ' + experiment_id_I);
exit(-1);
return maxBioReps_O;
except SQLAlchemyError as e:
print(e);
def get_nMaxBioReps_experimentIDAndSampleID_sampleDescription(self,experiment_id_I,sample_id_I):
'''Query the maximum number of biological replicates corresponding to a given experiment'''
try:
bioReps = self.session.query(sample_description.sample_replicate).filter(
experiment.id.like(experiment_id_I),
experiment.sample_name.like(sample.sample_name),
sample.sample_id.like(sample_id_I),
sample.sample_id.like(sample_description.sample_id),
sample_description.sample_desc.like('Broth'),
sample_description.istechnical != True).group_by(
sample_description.sample_replicate).order_by(
sample_description.sample_replicate.desc()).all();
maxBioReps_O = 0;
if bioReps:
maxBioReps_O = max(bioReps[0]);
else:
print('no biological replicates found for experiment ' + experiment_id_I);
exit(-1);
return maxBioReps_O;
except SQLAlchemyError as e:
print(e);
def get_nMaxBioReps_experimentIDAndSampleNameAbbreviation_sampleDescription(self,experiment_id_I,sample_name_abbreviation_I):
'''Query the maximum number of biological replicates corresponding to a given experiment'''
try:
bioReps = self.session.query(sample_description.sample_replicate).filter(
experiment.id.like(experiment_id_I),
experiment.sample_name.like(sample.sample_name),
sample.sample_id.like(sample_description.sample_id),
sample_description.sample_name_abbreviation.like(sample_name_abbreviation_I),
sample_description.sample_desc.like('Broth')
#sample_description.istechnical != True
).group_by(
sample_description.sample_replicate).order_by(
sample_description.sample_replicate.desc()).all();
maxBioReps_O = 0;
if bioReps:
maxBioReps_O = max(bioReps[0]);
else:
print('no biological replicates found for experiment ' + experiment_id_I);
exit(-1);
return maxBioReps_O;
except SQLAlchemyError as e:
print(e);
def get_nMaxBioReps_experimentIDAndSampleNameAbbreviationAndExpType_sampleDescription(self,experiment_id_I,sample_name_abbreviation_I,exp_type_I):
'''Query the maximum number of biological replicates corresponding to a given experiment'''
try:
bioReps = self.session.query(sample_description.sample_replicate).filter(
experiment.id.like(experiment_id_I),
experiment.exp_type_id==exp_type_I,
experiment.sample_name.like(sample.sample_name),
sample.sample_id.like(sample_description.sample_id),
sample_description.sample_name_abbreviation.like(sample_name_abbreviation_I),
sample_description.sample_desc.like('Broth')
#sample_description.istechnical != True
).group_by(
sample_description.sample_replicate).order_by(
sample_description.sample_replicate.desc()).all();
maxBioReps_O = 0;
if bioReps:
maxBioReps_O = max(bioReps[0]);
else:
print('no biological replicates found for experiment ' + experiment_id_I);
exit(-1);
return maxBioReps_O;
except SQLAlchemyError as e:
print(e);
def get_sampleIDs_experimentID_experiment(self,experiment_id_I):
'''Querry sample IDs that are used from the experiment'''
try:
sample_names = self.session.query(sample.sample_id).filter(
experiment.id.like(experiment_id_I),
experiment.sample_name.like(sample.sample_name)).group_by(
sample.sample_id).order_by(
sample.sample_id.asc()).all();
sample_names_O = [];
for sn in sample_names: sample_names_O.append(sn.sample_id);
return sample_names_O;
except SQLAlchemyError as e:
print(e);
def get_sampleIDsAndSampleNames_experimentID_experiment(self,experiment_id_I):
'''Querry sample IDs and sample namesthat are used from the experiment'''
try:
sample_names = self.session.query(sample.sample_id,sample.sample_name).filter(
experiment.id.like(experiment_id_I),
experiment.sample_name.like(sample.sample_name)).group_by(
sample.sample_id,sample.sample_name).order_by(
sample.sample_name.asc(),
sample.sample_id.asc()).all();
sample_names_O = [];
sample_ids_O = [];
for sn in sample_names:
sample_names_O.append(sn.sample_name);
sample_ids_O.append(sn.sample_id);
return sample_ids_O, sample_names_O;
except SQLAlchemyError as e:
print(e);
def get_sampleIDsAndSampleNames_experimentIDAndSampleName_experiment(self,experiment_id_I,sample_name_I):
'''Querry sample IDs and sample namesthat are used from the experiment'''
try:
sample_names = self.session.query(sample.sample_id,sample.sample_name).filter(
experiment.id.like(experiment_id_I),
experiment.sample_name.like(sample_name_I),
experiment.sample_name.like(sample.sample_name)).group_by(
sample.sample_id,sample.sample_name).order_by(
sample.sample_name.asc(),
sample.sample_id.asc()).all();
sample_names_O = [];
sample_ids_O = [];
for sn in sample_names:
sample_names_O.append(sn.sample_name);
sample_ids_O.append(sn.sample_id);
return sample_ids_O, sample_names_O;
except SQLAlchemyError as e:
print(e);
def get_sampleNameAbbreviation_experimentIDAndSampleID(self,experiment_id_I,sample_id_I):
'''Querry sample name abbreviation from the experiment'''
try:
sample_name_abbreviations = self.session.query(sample_description.sample_name_abbreviation).filter(
sample.sample_id.like(sample_id_I),
experiment.id.like(experiment_id_I),
experiment.sample_name.like(sample.sample_name),
sample.sample_id.like(sample_description.sample_id)).group_by(
sample_description.sample_name_abbreviation).order_by(
sample_description.sample_name_abbreviation.asc()).all();
sample_name_abbreviations_O = None;
sample_name_abbreviations_O = sample_name_abbreviations[0][0];
return sample_name_abbreviations_O;
except SQLAlchemyError as e:
print(e);
def get_sampleNameAbbreviation_experimentIDAndSampleName(self,experiment_id_I,sample_name_I):
'''Querry sample name abbreviation from the experiment'''
try:
sample_name_abbreviations = self.session.query(sample_description.sample_name_abbreviation).filter(
sample.sample_name.like(sample_name_I),
experiment.id.like(experiment_id_I),
experiment.sample_name.like(sample.sample_name),
sample.sample_id.like(sample_description.sample_id)).group_by(
sample_description.sample_name_abbreviation).order_by(
sample_description.sample_name_abbreviation.asc()).all();
sample_name_abbreviations_O = None;
sample_name_abbreviations_O = sample_name_abbreviations[0][0];
return sample_name_abbreviations_O;
except SQLAlchemyError as e:
print(e);
def get_sampleLabelAndBoxAndPos_experimentIDAndExperimentTypeID_sampleStorage(self,experiment_id_I,exp_type_id_I):
'''Querry sample name abbreviation from the experiment'''
try:
data = self.session.query(sample_storage.sample_id,
sample_storage.sample_label,
sample_storage.box,
sample_storage.pos).filter(
experiment.exp_type_id == exp_type_id_I,
experiment.id.like(experiment_id_I),
experiment.sample_name.like(sample.sample_name),
sample.sample_id.like(sample_storage.sample_id)).group_by(
sample_storage.sample_id,
sample_storage.sample_label,
sample_storage.box,
sample_storage.pos).order_by(
sample_storage.sample_id.asc()).all();
sampleStorage_O = [];
if data:
for d in data:
sampleStorage_O.append({'sample_id':d.sample_id,
'sample_label':d.sample_label,
'box':d.box,
'pos':d.pos});
return sampleStorage_O;
except SQLAlchemyError as e:
print(e);
# query physiological parameters and sample mass conversion
def get_CVSAndCVSUnitsAndODAndDilAndDilUnits_sampleNameShort(self,experiment_id_I,sample_name_short_I,exp_type_I=4):
'''Querry culture volume sampled, culture volume sampled units, and OD600 from sample name
NOTE: intended to be used within a for loop'''
try:
physiologicalParameters = self.session.query(sample_physiologicalParameters.culture_volume_sampled,
sample_physiologicalParameters.culture_volume_sampled_units,
sample_physiologicalParameters.od600,
sample_description.reconstitution_volume,
sample_description.reconstitution_volume_units).filter(
experiment.id.like(experiment_id_I),
experiment.exp_type_id == exp_type_I,
experiment.sample_name.like(sample.sample_name),
sample.sample_id.like(sample_description.sample_id),
sample_description.sample_name_short.like(sample_name_short_I),
sample_description.sample_id.like(sample_physiologicalParameters.sample_id)).all();
cvs_O = physiologicalParameters[0][0];
cvs_units_O = physiologicalParameters[0][1];
od600_O = physiologicalParameters[0][2];
dil_O = physiologicalParameters[0][3];
dil_units_O = physiologicalParameters[0][4];
return cvs_O, cvs_units_O, od600_O, dil_O, dil_units_O;
except SQLAlchemyError as e:
print(e);
# query sample ids from sample_physiologicalParameters
def get_sampleIDs_experimentIDNoOD600_samplePhysiologicalParameters(self,experiment_id_I):
'''Querry sample ids (i.e. unknowns) that are used from
the experiment for all experiment types
that do not have an OD600 but do have a time'''
try:
sample_names = self.session.query(sample_physiologicalParameters.sample_id).filter(
experiment.id.like(experiment_id_I),
experiment.sample_name.like(sample.sample_name),
sample.sample_id.like(sample_physiologicalParameters.sample_id),
#sample.sample_id.like(sample_description.sample_id),
sample_physiologicalParameters.od600 == None).group_by(
sample_physiologicalParameters.sample_id).order_by(
sample_physiologicalParameters.sample_id.asc()).all();
sample_names_O = [];
for sn in sample_names: sample_names_O.append(sn.sample_id);
return sample_names_O;
except SQLAlchemyError as e:
print(e);
def get_sampleIDs_experimentIDAndSampleDescriptionNoOD600_samplePhysiologicalParameters(self,experiment_id_I,sample_description_I):
'''Querry sample ids (i.e. unknowns) that are used from
the experiment for all experiment types
that do not have an OD600 but do have a time'''
try:
sample_names = self.session.query(sample_physiologicalParameters.sample_id).filter(
experiment.id.like(experiment_id_I),
experiment.sample_name.like(sample.sample_name),
sample.sample_id.like(sample_physiologicalParameters.sample_id),
sample.sample_id.like(sample_description.sample_id),
sample_description.sample_desc.like(sample_description_I),
sample_physiologicalParameters.od600 == None).group_by(
sample_physiologicalParameters.sample_id).order_by(
sample_physiologicalParameters.sample_id.asc()).all();
sample_names_O = [];
for sn in sample_names: sample_names_O.append(sn.sample_id);
return sample_names_O;
except SQLAlchemyError as e:
print(e);
def get_sampleIDs_experimentIDWithOD600NoCultureDensity_samplePhysiologicalParameters(self,experiment_id_I):
'''Querry sample ids (i.e. unknowns) that are used from
the experiment for all experiment types
that do have an OD600 but do not have a culture density'''
try:
sample_names = self.session.query(sample_physiologicalParameters.sample_id).filter(
experiment.id.like(experiment_id_I),
experiment.sample_name.like(sample.sample_name),
sample.sample_id.like(sample_physiologicalParameters.sample_id),
sample_physiologicalParameters.od600 != None,
sample_physiologicalParameters.culture_density == None).group_by(
sample_physiologicalParameters.sample_id).order_by(
sample_physiologicalParameters.sample_id.asc()).all();
sample_names_O = [];
for sn in sample_names: sample_names_O.append(sn.sample_id);
return sample_names_O;
except SQLAlchemyError as e:
print(e);
# query physiologicalParameters from sample_physiologicalParameters
def get_physiologicalParameters_experimentIDAndSampleID_samplePhysiologicalParameters(self,experiment_id_I,sample_id_I):
'''Query physiologicalParameters by sample id from sample_physiologicalparameters'''
try:
data = self.session.query(sample_physiologicalParameters).filter(
experiment.id.like(experiment_id_I),
experiment.sample_name.like(sample.sample_name),
sample.sample_id.like(sample_id_I),
sample.sample_id.like(sample_physiologicalParameters.sample_id)).first();
pp = {};
if data:
pp['sample_id']=data.sample_id;
pp['growth_condition_short']=data.growth_condition_short;
pp['growth_condition_long']=data.growth_condition_long;
pp['media_short']=data.media_short;
pp['media_long']=data.media_long;
pp['isoxic']=data.isoxic;
pp['temperature']=data.temperature;
pp['supplementation']=data.supplementation;
pp['od600']=data.od600;
pp['vcd']=data.vcd;
pp['culture_density']=data.culture_density;
pp['culture_volume_sampled']=data.culture_volume_sampled;
pp['cells']=data.cells;
pp['dcw']=data.dcw;
pp['wcw']=data.wcw;
pp['vcd_units']=data.vcd_units;
pp['culture_density_units']=data.culture_density_units;
pp['culture_volume_sampled_units']=data.culture_volume_sampled_units;
pp['dcw_units']=data.dcw_units;
pp['wcw_units']=data.wcw_units;
return pp;
except SQLAlchemyError as e:
print(e);
def get_rows_experimentID_samplePhysiologicalParameters(self,experiment_id_I):
'''Query rows by experiment_id from sample_physiologicalParameters'''
try:
data = self.session.query(sample_physiologicalParameters,
experiment.id).filter(
experiment.id.like(experiment_id_I),
experiment.sample_name.like(sample.sample_name),
sample.sample_id.like(sample_physiologicalParameters.sample_id)).all();
data_O = [];
if data:
for d in data:
tmp = d.sample_physiologicalParameters.__repr__dict__();
tmp['experiment_id']=d.id;
data_O.append(tmp);
return data_O;
except SQLAlchemyError as e:
print(e);
# query OD600 values from sample_physiologicalParameters
def get_OD600s_experimentIDAndSampleID_samplePhysiologicalParameters(self,experiment_id_I,sample_id_I):
'''query OD600 values from biological broth replicates'''
#1 query sample_name_abbreviation and exp_typ_id by experiment_id and sample_id
try:
sample_names = self.session.query(sample_description.sample_name_abbreviation,
experiment.exp_type_id).filter(
experiment.id.like(experiment_id_I),
experiment.sample_name.like(sample.sample_name),
sample.sample_id.like(sample_description.sample_id),
sample.sample_id.like(sample_id_I)).first();
sample_name_abbreviation_O = None;
exp_type_id_O = None;
if sample_names:
sample_name_abbreviation_O = sample_names.sample_name_abbreviation;
exp_type_id_O = sample_names.exp_type_id
except SQLAlchemyError as e:
print(e);
#2 query OD600 by sample_name_abbreviation, exp_type_id, sample_description, istechnical
try:
od600 = self.session.query(sample_physiologicalParameters.od600).filter(
sample_description.sample_name_abbreviation.like(sample_name_abbreviation_O),
sample_description.sample_description.like('Broth'),
sample_description.istechnical.is_(False),
sample_physiologicalParameters.od600 != None,
sample.sample_id.like(sample_description.sample_id),
sample_physiologicalParameters.sample_id.like(sample_description.sample_id),
experiment.id.like(experiment_id_I),
experiment.sample_name.like(sample.sample_name),
experiment.exp_type_id == exp_type_id_O).group_by(
sample_physiologicalParameters.od600).all();
od600_O = [];
if od600:
for od in od600:
od600_O.append(od.od600);
return od600_O;
except SQLAlchemyError as e:
print(e);
# query OD600 and DCW values from sample_physiologicalParameters
def get_OD600AndCultureDensity_experimentIDAndSampleNameShort_samplePhysiologicalParameters(self,experiment_id_I,exp_type_id_I,sample_name_short_I):
'''query OD600 and culture density values sorted by time'''
try:
od600 = self.session.query(sample_physiologicalParameters.od600,
sample_physiologicalParameters.culture_density,
sample_description.sample_date).filter(
experiment.id.like(experiment_id_I),
sample_description.sample_name_short.like(sample_name_short_I),
sample.sample_id.like(sample_description.sample_id),
sample_physiologicalParameters.sample_id.like(sample.sample_id),
experiment.sample_name.like(sample.sample_name),
experiment.exp_type_id == exp_type_id_I).group_by(
sample_physiologicalParameters.od600,
sample_physiologicalParameters.culture_density,
sample_description.sample_date).order_by(
sample_description.sample_date.asc()).all();
od600_O = [];
culture_density_O = [];
if od600:
for od in od600:
od600_O.append(od.od600);
culture_density_O.append(od.culture_density);
return od600_O,culture_density_O;
except SQLAlchemyError as e:
print(e);
# query OD600 and DCW values from sample_physiologicalParameters
def get_OD600AndCultureDensity_experimentIDAndSampleID_samplePhysiologicalParameters(self,experiment_id_I,exp_type_id_I,sample_id_I):
'''query OD600 and culture density values sorted by time'''
try:
od600 = self.session.query(sample_physiologicalParameters.od600,
sample_physiologicalParameters.culture_density,
sample_description.sample_date).filter(
experiment.id.like(experiment_id_I),
sample_description.sample_id.like(sample_id_I),
sample.sample_id.like(sample_id_I),
sample_physiologicalParameters.sample_id.like(sample_id_I),
experiment.sample_name.like(sample.sample_name),
experiment.exp_type_id == exp_type_id_I).first();
od600_O = None;
culture_density_O = None;
if od600:
od600_O=od600.od600;
culture_density_O=od600.culture_density;
return od600_O,culture_density_O;
except SQLAlchemyError as e:
print(e);
# query sample_date from sample_description
def get_sampleDate_experimentIDAndSampleID_sampleDescription(self,experiment_id_I,sample_id_I):
'''Query sample_date by sample id'''
try:
data = self.session.query(sample_description.sample_date).filter(
experiment.id.like(experiment_id_I),
experiment.sample_name.like(sample.sample_name),
sample.sample_id.like(sample_id_I),
sample.sample_id.like(sample_description.sample_id)).first();
sample_date_O = None;
if data:
sample_date_O=data.sample_date;
return sample_date_O;
except SQLAlchemyError as e:
print(e);
# query description from sample_description
def get_description_experimentIDAndSampleID_sampleDescription(self,experiment_id_I,sample_id_I):
'''Query description by sample id from sample_description'''
try:
data = self.session.query(sample_description).filter(
experiment.id.like(experiment_id_I),
experiment.sample_name.like(sample.sample_name),
sample.sample_id.like(sample_id_I),
sample.sample_id.like(sample_description.sample_id)).first();
desc = {};
if data:
desc['sample_id']=data.sample_id;
desc['sample_name_short']=data.sample_name_short;
desc['sample_name_abbreviation']=data.sample_name_abbreviation;
desc['sample_date']=data.sample_date;
desc['time_point']=data.time_point;
desc['sample_condition']=data.sample_condition;
desc['extraction_method_id']=data.extraction_method_id;
desc['biological_material']=data.biological_material;
desc['sample_desc']=data.sample_desc;
desc['sample_replicate']=data.sample_replicate;
desc['is_added']=data.is_added;
desc['is_added_units']=data.is_added_units;
desc['reconstitution_volume']=data.reconstitution_volume;
desc['reconstitution_volume_units']=data.reconstitution_volume_units;
desc['istechnical']=data.istechnical;
desc['sample_replicate_biological']=data.sample_replicate_biological;
desc['notes']=data.notes;
return desc;
except SQLAlchemyError as e:
print(e);
def get_rows_experimentID_sampleDescription(self,experiment_id_I):
'''Query description by experiment_id from sample_description'''
try:
data = self.session.query(sample_description,
experiment.id).filter(
experiment.id.like(experiment_id_I),
experiment.sample_name.like(sample.sample_name),
sample.sample_id.like(sample_description.sample_id)).all();
data_O = [];
if data:
for d in data:
tmp = d.sample_description.__repr__dict__();
tmp['experiment_id']=d.id;
data_O.append(tmp);
return data_O;
except SQLAlchemyError as e:
print(e);
# query storage from sample_storage
def get_rows_experimentID_sampleStorage(self,experiment_id_I):
'''Query rows by experiment_id from sample_storage'''
try:
data = self.session.query(sample_storage,
experiment.id).filter(
experiment.id.like(experiment_id_I),
experiment.sample_name.like(sample.sample_name),
sample.sample_id.like(sample_storage.sample_id)).all();
data_O = [];
if data:
for d in data:
tmp = d.sample_storage.__repr__dict__();
tmp['experiment_id']=d.id;
data_O.append(tmp);
return data_O;
except SQLAlchemyError as e:
print(e);
# query storage from sample
def get_rows_experimentID_sample(self,experiment_id_I):
'''Query rows by experiment_id from sample'''
try:
data = self.session.query(sample,
experiment.id).filter(
experiment.id.like(experiment_id_I),
experiment.sample_name.like(sample.sample_name)).all();
data_O = [];
if data:
for d in data:
tmp = d.sample.__repr__dict__();
tmp['experiment_id']=d.id;
data_O.append(tmp);
return data_O;
except SQLAlchemyError as e:
print(e);
| 51.356987
| 152
| 0.597602
| 4,832
| 47,043
| 5.505381
| 0.043253
| 0.060146
| 0.073491
| 0.015563
| 0.885309
| 0.845726
| 0.817119
| 0.798887
| 0.785843
| 0.772423
| 0
| 0.006911
| 0.32026
| 47,043
| 915
| 153
| 51.413115
| 0.824968
| 0.032736
| 0
| 0.77625
| 0
| 0
| 0.041833
| 0.010364
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.01625
| null | null | 0.08375
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
cdcf8b05b1acf6beb20b69b2b97d1446ce95c76b
| 9,533
|
py
|
Python
|
tests/test_mongo_controller_dict_without_fields.py
|
lucafaggianelli/layabase
|
90733c6b9efd56051dfce5c3d89bd4e657ce7b3f
|
[
"MIT"
] | 3
|
2019-12-02T23:29:44.000Z
|
2019-12-31T00:55:01.000Z
|
tests/test_mongo_controller_dict_without_fields.py
|
lucafaggianelli/layabase
|
90733c6b9efd56051dfce5c3d89bd4e657ce7b3f
|
[
"MIT"
] | 29
|
2019-12-02T16:12:45.000Z
|
2022-02-17T16:01:55.000Z
|
tests/test_mongo_controller_dict_without_fields.py
|
lucafaggianelli/layabase
|
90733c6b9efd56051dfce5c3d89bd4e657ce7b3f
|
[
"MIT"
] | 3
|
2020-01-02T10:58:47.000Z
|
2022-02-17T10:55:18.000Z
|
import enum
import pytest
import layabase
import layabase.mongo
class EnumTest(enum.Enum):
    # Two-member enum used to exercise enum (de)serialization in dict columns.
    Value1 = 1
    Value2 = 2
@pytest.fixture
def controller():
    # Collection with a string primary key and a dict column whose nested
    # fields ("first_key": EnumTest, "second_key": int) are produced
    # per-document by get_fields.
    class TestCollection:
        __collection_name__ = "test"
        key = layabase.mongo.Column(str, is_primary_key=True)
        dict_col = layabase.mongo.DictColumn(
            get_fields=lambda document: {
                "first_key": layabase.mongo.Column(EnumTest, is_nullable=False),
                "second_key": layabase.mongo.Column(int, is_nullable=False),
            }
        )
    controller = layabase.CRUDController(TestCollection)
    # Back the controller with an in-memory mongomock database.
    layabase.load("mongomock", [controller])
    return controller
def test_post_dict_is_valid(controller):
    # Posting a document with a nested dict column echoes the stored document.
    posted = controller.post(
        {"key": "my_key", "dict_col": {"first_key": "Value1", "second_key": 3}}
    )
    assert posted == {
        "dict_col": {"first_key": "Value1", "second_key": 3},
        "key": "my_key",
    }
def test_get_with_dot_notation_is_valid(controller):
    # Insert with an enum member; the stored form is the member name.
    posted = controller.post(
        {"key": "my_key", "dict_col": {"first_key": EnumTest.Value1, "second_key": 3}}
    )
    assert posted == {
        "dict_col": {"first_key": "Value1", "second_key": 3},
        "key": "my_key",
    }
    # Dot-notation filtering accepts an enum value.
    fetched = controller.get({"dict_col.first_key": EnumTest.Value1})
    assert fetched == [
        {"dict_col": {"first_key": "Value1", "second_key": 3}, "key": "my_key"}
    ]
def test_get_with_dot_notation_as_list_is_valid(controller):
    controller.post(
        {"key": "my_key", "dict_col": {"first_key": EnumTest.Value1, "second_key": 3}}
    )
    # A list of enum values acts as an "any of" dot-notation filter.
    fetched = controller.get({"dict_col.first_key": [EnumTest.Value1]})
    assert fetched == [
        {"dict_col": {"first_key": "Value1", "second_key": 3}, "key": "my_key"}
    ]
def test_get_with_multiple_results_dot_notation_as_list_is_valid(controller):
    controller.post_many(
        [
            {
                "key": "my_key",
                "dict_col": {"first_key": EnumTest.Value1, "second_key": 3},
            },
            {
                "key": "my_key2",
                "dict_col": {"first_key": EnumTest.Value2, "second_key": 4},
            },
        ]
    )
    # A list filter matching both documents returns both.
    fetched = controller.get({"dict_col.first_key": [EnumTest.Value1, EnumTest.Value2]})
    assert fetched == [
        {"dict_col": {"first_key": "Value1", "second_key": 3}, "key": "my_key"},
        {"dict_col": {"first_key": "Value2", "second_key": 4}, "key": "my_key2"},
    ]
def test_update_with_dot_notation_is_valid(controller):
    posted = controller.post(
        {"key": "my_key", "dict_col": {"first_key": "Value1", "second_key": 3}}
    )
    assert posted == {
        "dict_col": {"first_key": "Value1", "second_key": 3},
        "key": "my_key",
    }
    # put returns (previous document, updated document).
    before_after = controller.put({"key": "my_key", "dict_col.second_key": 4})
    assert before_after == (
        {"dict_col": {"first_key": "Value1", "second_key": 3}, "key": "my_key"},
        {"dict_col": {"first_key": "Value1", "second_key": 4}, "key": "my_key"},
    )
def test_update_with_dot_notation_invalid_value_is_invalid(controller):
    posted = controller.post(
        {"key": "my_key", "dict_col": {"first_key": "Value1", "second_key": 3}}
    )
    assert posted == {
        "dict_col": {"first_key": "Value1", "second_key": 3},
        "key": "my_key",
    }
    # A non-int value for an int sub-field is rejected with field-level errors.
    with pytest.raises(layabase.ValidationFailed) as exception_info:
        controller.put({"key": "my_key", "dict_col.second_key": "invalid integer"})
    assert exception_info.value.errors == {"dict_col.second_key": ["Not a valid int."]}
    assert exception_info.value.received_data == {
        "key": "my_key",
        "dict_col.second_key": "invalid integer",
    }
def test_delete_with_dot_notation_invalid_value_is_invalid(controller):
    posted = controller.post(
        {"key": "my_key", "dict_col": {"first_key": "Value1", "second_key": 3}}
    )
    assert posted == {
        "dict_col": {"first_key": "Value1", "second_key": 3},
        "key": "my_key",
    }
    # Delete filters are validated the same way as writes.
    with pytest.raises(layabase.ValidationFailed) as exception_info:
        controller.delete({"dict_col.second_key": "invalid integer"})
    assert exception_info.value.errors == {"dict_col.second_key": ["Not a valid int."]}
    assert exception_info.value.received_data == {
        "dict_col.second_key": "invalid integer"
    }
def test_delete_with_dot_notation_valid_value_is_valid(controller):
    posted = controller.post(
        {"key": "my_key", "dict_col": {"first_key": "Value1", "second_key": 3}}
    )
    assert posted == {
        "dict_col": {"first_key": "Value1", "second_key": 3},
        "key": "my_key",
    }
    # Deleting by a nested int field removes exactly one document.
    assert controller.delete({"dict_col.second_key": 3}) == 1
def test_delete_with_dot_notation_enum_value_is_valid(controller):
    posted = controller.post(
        {"key": "my_key", "dict_col": {"first_key": "Value1", "second_key": 3}}
    )
    assert posted == {
        "dict_col": {"first_key": "Value1", "second_key": 3},
        "key": "my_key",
    }
    # Deleting by a nested enum field accepts the enum member itself.
    assert controller.delete({"dict_col.first_key": EnumTest.Value1}) == 1
def test_post_with_dot_notation_invalid_value_is_invalid(controller):
    # POST with dot-notation fields must validate the nested types and
    # report errors keyed by the dotted path.
    with pytest.raises(layabase.ValidationFailed) as exception_info:
        controller.post(
            {
                "key": "my_key",
                "dict_col.first_key": "Value1",
                "dict_col.second_key": "invalid integer",
            }
        )
    assert {"dict_col.second_key": ["Not a valid int."]} == exception_info.value.errors
    assert {
        "key": "my_key",
        "dict_col.first_key": "Value1",
        "dict_col.second_key": "invalid integer",
    } == exception_info.value.received_data
def test_post_with_dot_notation_valid_value_is_valid(controller):
    # Dot-notation POST fields are expanded back into the nested dict.
    assert {
        "key": "my_key",
        "dict_col": {"first_key": "Value2", "second_key": 1},
    } == controller.post(
        {"key": "my_key", "dict_col.first_key": "Value2", "dict_col.second_key": 1}
    )
def test_get_with_unmatching_dot_notation_is_empty(controller):
    # A dot-notation GET filter that matches nothing returns an empty list.
    assert {
        "dict_col": {"first_key": "Value1", "second_key": 3},
        "key": "my_key",
    } == controller.post(
        {"key": "my_key", "dict_col": {"first_key": "Value1", "second_key": 3}}
    )
    assert [] == controller.get({"dict_col.first_key": "Value2"})
def test_get_with_unknown_dot_notation_returns_everything(controller):
    # A dot-notation filter on an unknown nested field is ignored, so GET
    # behaves like an unfiltered query.
    assert {
        "dict_col": {"first_key": "Value1", "second_key": 3},
        "key": "my_key",
    } == controller.post(
        {"key": "my_key", "dict_col": {"first_key": "Value1", "second_key": 3}}
    )
    assert [
        {"dict_col": {"first_key": "Value1", "second_key": 3}, "key": "my_key"}
    ] == controller.get({"dict_col.unknown": "Value1"})
def test_delete_with_dot_notation_is_valid(controller):
    # Deleting by a nested string field removes the document entirely.
    assert {
        "dict_col": {"first_key": "Value1", "second_key": 3},
        "key": "my_key",
    } == controller.post(
        {"key": "my_key", "dict_col": {"first_key": "Value1", "second_key": 3}}
    )
    assert 1 == controller.delete({"dict_col.first_key": "Value1"})
    assert [] == controller.get({})
def test_delete_with_unmatching_dot_notation_is_empty(controller):
    # A non-matching dot-notation delete removes nothing and leaves the
    # stored document intact.
    assert {
        "dict_col": {"first_key": "Value1", "second_key": 3},
        "key": "my_key",
    } == controller.post(
        {"key": "my_key", "dict_col": {"first_key": "Value1", "second_key": 3}}
    )
    assert 0 == controller.delete({"dict_col.first_key": "Value2"})
    assert [
        {"dict_col": {"first_key": "Value1", "second_key": 3}, "key": "my_key"}
    ] == controller.get({})
def test_delete_with_unknown_dot_notation_deletes_everything(controller):
    # An unknown nested field in the filter is ignored, so delete behaves
    # like an unfiltered delete and removes every document.
    assert {
        "dict_col": {"first_key": "Value1", "second_key": 3},
        "key": "my_key",
    } == controller.post(
        {"key": "my_key", "dict_col": {"first_key": "Value1", "second_key": 3}}
    )
    assert 1 == controller.delete({"dict_col.unknown": "Value2"})
    assert [] == controller.get({})
def test_put_without_primary_key_is_invalid(controller):
    # PUT must reject a payload that omits the primary key field.
    controller.post(
        {"key": "my_key", "dict_col": {"first_key": "Value1", "second_key": 3}}
    )
    with pytest.raises(layabase.ValidationFailed) as exception_info:
        controller.put({"dict_col": {"first_key": "Value2", "second_key": 4}})
    assert {"key": ["Missing data for required field."]} == exception_info.value.errors
    assert {
        "dict_col": {"first_key": "Value2", "second_key": 4}
    } == exception_info.value.received_data
def test_post_dict_with_dot_notation_is_valid(controller):
    # POST accepts the whole dict expressed as dot-notation fields.
    assert {
        "dict_col": {"first_key": "Value1", "second_key": 3},
        "key": "my_key",
    } == controller.post(
        {"key": "my_key", "dict_col.first_key": "Value1", "dict_col.second_key": 3}
    )
def test_put_dict_with_dot_notation_is_valid(controller):
    # PUT via dot notation accepts an Enum value; controller.put returns a
    # (before, after) tuple of the stored document.
    assert {
        "dict_col": {"first_key": "Value1", "second_key": 3},
        "key": "my_key",
    } == controller.post(
        {"key": "my_key", "dict_col": {"first_key": "Value1", "second_key": 3}}
    )
    assert (
        {"dict_col": {"first_key": "Value1", "second_key": 3}, "key": "my_key"},
        {"dict_col": {"first_key": "Value2", "second_key": 3}, "key": "my_key"},
    ) == controller.put({"key": "my_key", "dict_col.first_key": EnumTest.Value2})
def test_post_dict_is_invalid(controller):
    # A nested dict missing a required sub-field fails validation, with the
    # error keyed by the dotted path of the missing field.
    with pytest.raises(layabase.ValidationFailed) as exception_info:
        controller.post({"key": "my_key", "dict_col": {"first_key": "Value1"}})
    assert {
        "dict_col.second_key": ["Missing data for required field."]
    } == exception_info.value.errors
    assert {
        "key": "my_key",
        "dict_col": {"first_key": "Value1"},
    } == exception_info.value.received_data
| 35.047794
| 87
| 0.608203
| 1,171
| 9,533
| 4.596072
| 0.070026
| 0.097547
| 0.12932
| 0.16165
| 0.882386
| 0.857674
| 0.813081
| 0.80825
| 0.767373
| 0.734114
| 0
| 0.016024
| 0.214413
| 9,533
| 271
| 88
| 35.177122
| 0.702631
| 0
| 0
| 0.547085
| 0
| 0
| 0.288367
| 0
| 0
| 0
| 0
| 0
| 0.179372
| 1
| 0.09417
| false
| 0
| 0.017937
| 0
| 0.147982
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b53f4e0a57f9bdd486b2b77f8ac51dd2924c55c6
| 150
|
py
|
Python
|
colosseum/mdps/minigrid_empty/__init__.py
|
MichelangeloConserva/Colosseum
|
b0711fd9ce75520deb74cda75c148984a8e4152f
|
[
"MIT"
] | null | null | null |
colosseum/mdps/minigrid_empty/__init__.py
|
MichelangeloConserva/Colosseum
|
b0711fd9ce75520deb74cda75c148984a8e4152f
|
[
"MIT"
] | null | null | null |
colosseum/mdps/minigrid_empty/__init__.py
|
MichelangeloConserva/Colosseum
|
b0711fd9ce75520deb74cda75c148984a8e4152f
|
[
"MIT"
] | null | null | null |
from colosseum.mdps.minigrid_empty.continuous import MiniGridEmptyContinuous
from colosseum.mdps.minigrid_empty.episodic import MiniGridEmptyEpisodic
| 50
| 76
| 0.906667
| 16
| 150
| 8.375
| 0.625
| 0.19403
| 0.253731
| 0.373134
| 0.447761
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.053333
| 150
| 2
| 77
| 75
| 0.943662
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
b543779b8ff831754b8175db7e00eb6e65c0ff90
| 18,483
|
py
|
Python
|
indy_common/test/auth/test_auth_nym_with_new_auth_map.py
|
anikitinDSR/indy-node-jenkins-test
|
aaa7ff37e4426e7110f9e82f2c46808fa8f993f5
|
[
"Apache-2.0"
] | 1
|
2019-03-19T23:44:54.000Z
|
2019-03-19T23:44:54.000Z
|
indy_common/test/auth/test_auth_nym_with_new_auth_map.py
|
anikitinDSR/indy-node-jenkins-test
|
aaa7ff37e4426e7110f9e82f2c46808fa8f993f5
|
[
"Apache-2.0"
] | null | null | null |
indy_common/test/auth/test_auth_nym_with_new_auth_map.py
|
anikitinDSR/indy-node-jenkins-test
|
aaa7ff37e4426e7110f9e82f2c46808fa8f993f5
|
[
"Apache-2.0"
] | null | null | null |
import pytest
from indy_common.authorize.auth_actions import AuthActionAdd, AuthActionEdit
from plenum.common.constants import TRUSTEE, STEWARD, VERKEY
from indy_common.constants import ROLE, NYM, TRUST_ANCHOR, NETWORK_MONITOR
@pytest.fixture(scope='module', params=[True, False])
def is_owner(request):
    # Parametrizes every test in the module over both ownership states.
    return request.param
def test_make_trustee(write_request_validation, req, is_owner):
    # Adding a NYM with the TRUSTEE role must be authorized only when the
    # request comes from a trustee, regardless of ownership.
    authorized = (req.identifier == "trustee_identifier")
    assert authorized == write_request_validation(req,
                                                  [AuthActionAdd(txn_type=NYM,
                                                                 field=ROLE,
                                                                 value=TRUSTEE,
                                                                 is_owner=is_owner)])
def test_make_steward(write_request_validation, req, is_owner):
authorized = (req.identifier == "trustee_identifier")
assert authorized == write_request_validation(req,
[AuthActionAdd(txn_type=NYM,
field=ROLE,
value=STEWARD,
is_owner=is_owner)])
def test_make_trust_anchor(write_request_validation, req, is_owner):
authorized = req.identifier in ("trustee_identifier", "steward_identifier")
assert authorized == write_request_validation(req,
[AuthActionAdd(txn_type=NYM,
field=ROLE,
value=TRUST_ANCHOR,
is_owner=is_owner)])
def test_make_network_monitor(write_request_validation, req, is_owner):
authorized = req.identifier in ("trustee_identifier", "steward_identifier")
assert authorized == write_request_validation(req,
[AuthActionAdd(txn_type=NYM,
field=ROLE,
value=NETWORK_MONITOR,
is_owner=is_owner)])
# Trustee tests
def test_change_trustee_to_trustee(write_request_validation, req, is_owner):
authorized = is_owner
assert authorized == write_request_validation(req,
[AuthActionEdit(txn_type=NYM,
field=ROLE,
old_value=TRUSTEE,
new_value=TRUSTEE,
is_owner=is_owner)])
def test_change_trustee_to_steward(write_request_validation, req, is_owner):
authorized = (req.identifier == "trustee_identifier")
assert authorized == write_request_validation(req,
[AuthActionEdit(txn_type=NYM,
field=ROLE,
old_value=TRUSTEE,
new_value=STEWARD,
is_owner=is_owner)])
def test_change_trustee_to_trust_anchor(write_request_validation, req, is_owner):
authorized = (req.identifier == "trustee_identifier")
assert authorized == write_request_validation(req,
[AuthActionEdit(txn_type=NYM,
field=ROLE,
old_value=TRUSTEE,
new_value=TRUST_ANCHOR,
is_owner=is_owner)])
def test_change_trustee_to_network_monitor(write_request_validation, req, is_owner):
authorized = (req.identifier == "trustee_identifier")
assert authorized == write_request_validation(req,
[AuthActionEdit(txn_type=NYM,
field=ROLE,
old_value=TRUSTEE,
new_value=NETWORK_MONITOR,
is_owner=is_owner)])
def test_change_trustee_to_identity_owner(write_request_validation, req, is_owner):
authorized = (req.identifier == "trustee_identifier")
assert authorized == write_request_validation(req,
[AuthActionEdit(txn_type=NYM,
field=ROLE,
old_value=TRUSTEE,
new_value='',
is_owner=is_owner)])
# Steward tests
def test_change_steward_to_trustee(write_request_validation, req, is_owner):
authorized = (req.identifier == "trustee_identifier")
assert authorized == write_request_validation(req,
[AuthActionEdit(txn_type=NYM,
field=ROLE,
old_value=STEWARD,
new_value=TRUSTEE,
is_owner=is_owner)])
def test_change_steward_to_steward(write_request_validation, req, is_owner):
authorized = is_owner
assert authorized == write_request_validation(req,
[AuthActionEdit(txn_type=NYM,
field=ROLE,
old_value=STEWARD,
new_value=STEWARD,
is_owner=is_owner)])
def test_change_steward_to_trust_anchor(write_request_validation, req, is_owner):
authorized = (req.identifier == "trustee_identifier")
assert authorized == write_request_validation(req,
[AuthActionEdit(txn_type=NYM,
field=ROLE,
old_value=STEWARD,
new_value=TRUST_ANCHOR,
is_owner=is_owner)])
def test_change_steward_to_network_monitor(write_request_validation, req, is_owner):
authorized = (req.identifier == "trustee_identifier")
assert authorized == write_request_validation(req,
[AuthActionEdit(txn_type=NYM,
field=ROLE,
old_value=STEWARD,
new_value=NETWORK_MONITOR,
is_owner=is_owner)])
def test_change_steward_to_identity_owner(write_request_validation, req, is_owner):
authorized = (req.identifier == "trustee_identifier")
assert authorized == write_request_validation(req,
[AuthActionEdit(txn_type=NYM,
field=ROLE,
old_value=STEWARD,
new_value='',
is_owner=is_owner)])
# Trust Anchor tests
def test_change_trust_anchor_to_trustee(write_request_validation, req, is_owner):
authorized = (req.identifier == "trustee_identifier")
assert authorized == write_request_validation(req,
[AuthActionEdit(txn_type=NYM,
field=ROLE,
old_value=TRUST_ANCHOR,
new_value=TRUSTEE,
is_owner=is_owner)])
def test_change_trust_anchor_to_steward(write_request_validation, req, is_owner):
authorized = (req.identifier == "trustee_identifier")
assert authorized == write_request_validation(req,
[AuthActionEdit(txn_type=NYM,
field=ROLE,
old_value=TRUST_ANCHOR,
new_value=STEWARD,
is_owner=is_owner)])
def test_change_trust_anchor_to_trust_anchor(write_request_validation, req, is_owner):
authorized = is_owner
assert authorized == write_request_validation(req,
[AuthActionEdit(txn_type=NYM,
field=ROLE,
old_value=TRUST_ANCHOR,
new_value=TRUST_ANCHOR,
is_owner=is_owner)])
def test_change_trust_anchor_to_network_monitor(write_request_validation, req, is_owner):
authorized = (req.identifier == "trustee_identifier")
assert authorized == write_request_validation(req,
[AuthActionEdit(txn_type=NYM,
field=ROLE,
old_value=TRUST_ANCHOR,
new_value=NETWORK_MONITOR,
is_owner=is_owner)])
def test_change_trust_anchor_to_identity_owner(write_request_validation, req, is_owner):
authorized = (req.identifier == "trustee_identifier")
assert authorized == write_request_validation(req,
[AuthActionEdit(txn_type=NYM,
field=ROLE,
old_value=TRUST_ANCHOR,
new_value='',
is_owner=is_owner)])
# Network Monitor tests
def test_change_network_monitor_to_trustee(write_request_validation, req, is_owner):
authorized = (req.identifier == "trustee_identifier")
assert authorized == write_request_validation(req,
[AuthActionEdit(txn_type=NYM,
field=ROLE,
old_value=NETWORK_MONITOR,
new_value=TRUSTEE,
is_owner=is_owner)])
def test_change_network_monitor_to_steward(write_request_validation, req, is_owner):
authorized = (req.identifier == "trustee_identifier")
assert authorized == write_request_validation(req,
[AuthActionEdit(txn_type=NYM,
field=ROLE,
old_value=NETWORK_MONITOR,
new_value=STEWARD,
is_owner=is_owner)])
def test_change_network_monitor_to_trust_anchor(write_request_validation, req, is_owner):
authorized = req.identifier in ("trustee_identifier", "steward_identifier")
assert authorized == write_request_validation(req,
[AuthActionEdit(txn_type=NYM,
field=ROLE,
old_value=NETWORK_MONITOR,
new_value=TRUST_ANCHOR,
is_owner=is_owner)])
def test_change_network_monitor_to_network_monitor(write_request_validation, req, is_owner):
authorized = is_owner
assert authorized == write_request_validation(req,
[AuthActionEdit(txn_type=NYM,
field=ROLE,
old_value=NETWORK_MONITOR,
new_value=NETWORK_MONITOR,
is_owner=is_owner)])
def test_change_network_monitor_to_identity_owner(write_request_validation, req, is_owner):
authorized = req.identifier in ("trustee_identifier", "steward_identifier")
assert authorized == write_request_validation(req,
[AuthActionEdit(txn_type=NYM,
field=ROLE,
old_value=NETWORK_MONITOR,
new_value='',
is_owner=is_owner)])
# Identity Owner tests
def test_change_identity_owner_to_trustee(write_request_validation, req, is_owner):
authorized = (req.identifier == "trustee_identifier")
assert authorized == write_request_validation(req,
[AuthActionEdit(txn_type=NYM,
field=ROLE,
old_value='',
new_value=TRUSTEE,
is_owner=is_owner)])
def test_change_identity_owner_to_steward(write_request_validation, req, is_owner):
authorized = (req.identifier == "trustee_identifier")
assert authorized == write_request_validation(req,
[AuthActionEdit(txn_type=NYM,
field=ROLE,
old_value='',
new_value=STEWARD,
is_owner=is_owner)])
def test_change_identity_owner_to_trust_anchor(write_request_validation, req, is_owner):
authorized = req.identifier in ("trustee_identifier", "steward_identifier")
assert authorized == write_request_validation(req,
[AuthActionEdit(txn_type=NYM,
field=ROLE,
old_value='',
new_value=TRUST_ANCHOR,
is_owner=is_owner)])
def test_change_identity_owner_to_network_monitor(write_request_validation, req, is_owner):
authorized = req.identifier in ("trustee_identifier", "steward_identifier")
assert authorized == write_request_validation(req,
[AuthActionEdit(txn_type=NYM,
field=ROLE,
old_value='',
new_value=NETWORK_MONITOR,
is_owner=is_owner)])
def test_change_identity_owner_to_identity_owner(write_request_validation, req, is_owner):
authorized = is_owner
assert authorized == write_request_validation(req,
[AuthActionEdit(txn_type=NYM,
field=ROLE,
old_value='',
new_value='',
is_owner=is_owner)])
def test_change_verkey(write_request_validation, req, is_owner):
    """Editing a NYM's VERKEY is authorized only for the key owner."""
    authorized = is_owner
    # Fix: the original passed "_verkey".format(req.identifier) — the format
    # string has no placeholder, so the identifier was silently dropped and
    # old_value was always the literal "_verkey".  "{}_verkey" matches the
    # evident intent of an identifier-specific verkey.
    assert authorized == write_request_validation(
        req,
        [AuthActionEdit(txn_type=NYM,
                        field=VERKEY,
                        old_value="{}_verkey".format(req.identifier),
                        new_value='new_value',
                        is_owner=is_owner)])
| 59.240385
| 109
| 0.406373
| 1,255
| 18,483
| 5.586454
| 0.042231
| 0.096848
| 0.188276
| 0.21395
| 0.942804
| 0.937099
| 0.918129
| 0.904721
| 0.896734
| 0.881615
| 0
| 0
| 0.542444
| 18,483
| 311
| 110
| 59.430868
| 0.829017
| 0.004815
| 0
| 0.81893
| 0
| 0
| 0.030563
| 0
| 0
| 0
| 0
| 0
| 0.123457
| 1
| 0.127572
| false
| 0
| 0.016461
| 0.004115
| 0.148148
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
b58308cc4097b3c646aec362c78ca1eace0d9a94
| 68
|
py
|
Python
|
intel_pytorch_extension_py/optim/__init__.py
|
CaoE/intel-extension-for-pytorch
|
2a31cef7592207a7d08e346542218b5a79df8df9
|
[
"Apache-2.0"
] | 1
|
2020-09-29T06:55:00.000Z
|
2020-09-29T06:55:00.000Z
|
intel_pytorch_extension_py/optim/__init__.py
|
CaoE/intel-extension-for-pytorch
|
2a31cef7592207a7d08e346542218b5a79df8df9
|
[
"Apache-2.0"
] | 1
|
2021-03-30T04:54:24.000Z
|
2021-03-30T04:54:24.000Z
|
intel_pytorch_extension_py/optim/__init__.py
|
CaoE/intel-extension-for-pytorch
|
2a31cef7592207a7d08e346542218b5a79df8df9
|
[
"Apache-2.0"
] | null | null | null |
from .split_sgd import is_available
from .split_sgd import SplitSGD
| 22.666667
| 35
| 0.852941
| 11
| 68
| 5
| 0.636364
| 0.327273
| 0.436364
| 0.654545
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.117647
| 68
| 2
| 36
| 34
| 0.916667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
a93f7c8d11185ab328f8bbdfa11891002e8e7242
| 3,652
|
py
|
Python
|
allure-pytest/test/fixtures/function_scope/fixtures_simple_test.py
|
vdsbenoit/allure-python
|
7b56b031c42369dd73844105382e9ceb9a88d6cd
|
[
"Apache-2.0"
] | 1
|
2021-02-19T21:00:11.000Z
|
2021-02-19T21:00:11.000Z
|
allure-pytest/test/fixtures/function_scope/fixtures_simple_test.py
|
vdsbenoit/allure-python
|
7b56b031c42369dd73844105382e9ceb9a88d6cd
|
[
"Apache-2.0"
] | null | null | null |
allure-pytest/test/fixtures/function_scope/fixtures_simple_test.py
|
vdsbenoit/allure-python
|
7b56b031c42369dd73844105382e9ceb9a88d6cd
|
[
"Apache-2.0"
] | 1
|
2020-08-05T05:40:44.000Z
|
2020-08-05T05:40:44.000Z
|
"""
>>> allure_report = getfixture('allure_report')
>>> assert_that(allure_report,
... all_of(
... has_property('test_cases', has_length(5)),
... has_property('test_groups', has_length(0))
... )) # doctest: +SKIP
"""
import pytest
@pytest.fixture
def function_scope_simple_fixture():
    # Minimal function-scoped fixture; exists only so its setup/teardown
    # shows up in the allure report.
    pass
def test_function_scope_simple_fixture(function_scope_simple_fixture):
"""
>>> allure_report = getfixture('allure_report')
>>> assert_that(allure_report,
... has_test_case('test_function_scope_simple_fixture',
... has_container(allure_report,
... has_before('function_scope_simple_fixture')
... )
... )
... )
"""
pass
def test_reuse_function_scope_simple_fixture(function_scope_simple_fixture):
"""
>>> allure_report = getfixture('allure_report')
>>> assert_that(allure_report,
... has_test_case('test_reuse_function_scope_simple_fixture',
... has_container(allure_report,
... has_before('function_scope_simple_fixture')
... )
... )
... )
"""
pass
@pytest.fixture
def one_more_function_scope_fixture():
    # Second independent function-scoped fixture, used to verify that tests
    # with multiple fixtures get one container per fixture in the report.
    pass
def test_with_two_function_scope_fixtures(function_scope_simple_fixture, one_more_function_scope_fixture):
"""
>>> allure_report = getfixture('allure_report')
>>> assert_that(allure_report,
... has_test_case('test_with_two_function_scope_fixtures',
... has_container(allure_report,
... has_before('function_scope_simple_fixture')
... ),
... has_container(allure_report,
... has_before('one_more_function_scope_fixture')
... )
... )
... )
"""
pass
@pytest.fixture
def nested_function_scope_fixture(function_scope_simple_fixture):
    # Depends on another function-scoped fixture to exercise reporting of
    # nested fixture dependencies.
    pass
def test_nested_function_scope_fixtures(nested_function_scope_fixture):
"""
>>> allure_report = getfixture('allure_report')
>>> assert_that(allure_report,
... has_test_case('test_nested_function_scope_fixtures',
... has_container(allure_report,
... has_before('function_scope_simple_fixture')
... ),
... has_container(allure_report,
... has_before('nested_function_scope_fixture')
... )
... )
... )
"""
pass
@pytest.fixture
def function_scope_fixture_wich_depends_on_two_other(function_scope_simple_fixture, one_more_function_scope_fixture):
    # Combines two fixtures; the dependent test should report all three.
    pass
def test_with_apple_pineapple_pen_fixture(function_scope_fixture_wich_depends_on_two_other):
"""
>>> allure_report = getfixture('allure_report') # doctest: +SKIP
>>> assert_that(allure_report,
... has_test_case('test_with_apple_pineapple_pen_fixture',
... all_of(
... has_before('function_scope_simple_fixture'),
... has_before('one_more_function_scope_fixture'),
... has_before('function_scope_fixture_wich_depends_on_two_other')
... )
... )) # doctest: +SKIP
"""
pass
| 33.814815
| 117
| 0.542442
| 324
| 3,652
| 5.521605
| 0.12963
| 0.217999
| 0.159307
| 0.217999
| 0.910565
| 0.851314
| 0.803801
| 0.670207
| 0.601453
| 0.465064
| 0
| 0.000843
| 0.350493
| 3,652
| 107
| 118
| 34.130841
| 0.753373
| 0.689211
| 0
| 0.565217
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.391304
| false
| 0.391304
| 0.043478
| 0
| 0.434783
| 0
| 0
| 0
| 0
| null | 1
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
8d66fcff62bbdeb4f2a13e5b76f3cd1bcaac21e5
| 61
|
py
|
Python
|
hpc/ase_scripts/mclib_Python/__init__.py
|
McIntyre-Lab/BASE_2020
|
7bedbbb49e590af6da5bcba829b9d6e329f4d15f
|
[
"MIT"
] | 1
|
2021-09-01T07:20:21.000Z
|
2021-09-01T07:20:21.000Z
|
hpc/ase_scripts/mclib_Python/__init__.py
|
McIntyre-Lab/BASE_2020
|
7bedbbb49e590af6da5bcba829b9d6e329f4d15f
|
[
"MIT"
] | 2
|
2021-06-27T18:44:36.000Z
|
2021-06-28T21:02:58.000Z
|
hpc/ase_scripts/mclib_Python/__init__.py
|
McIntyre-Lab/BASE_2020
|
7bedbbb49e590af6da5bcba829b9d6e329f4d15f
|
[
"MIT"
] | 1
|
2021-09-01T07:20:28.000Z
|
2021-09-01T07:20:28.000Z
|
from mclib_Python import git
from mclib_Python import logger
| 20.333333
| 31
| 0.868852
| 10
| 61
| 5.1
| 0.6
| 0.352941
| 0.588235
| 0.823529
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.131148
| 61
| 2
| 32
| 30.5
| 0.962264
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
8d73214578ce3d23f15d82f66691dc201e5738a0
| 5,065
|
py
|
Python
|
projects/DensePose/run_penn.py
|
jhugestar/detectron2
|
a0328e56648b1b26aa50d0079826e3efcbc35f1c
|
[
"Apache-2.0"
] | null | null | null |
projects/DensePose/run_penn.py
|
jhugestar/detectron2
|
a0328e56648b1b26aa50d0079826e3efcbc35f1c
|
[
"Apache-2.0"
] | null | null | null |
projects/DensePose/run_penn.py
|
jhugestar/detectron2
|
a0328e56648b1b26aa50d0079826e3efcbc35f1c
|
[
"Apache-2.0"
] | null | null | null |
from apply_net import denseposeRunner
import sys
import glob
import os
def runPennAction(startIdx, endIdx):
    """Run DensePose in 'dump' mode over Penn Action sequences [startIdx, endIdx).

    For each 4-digit sequence folder under the frames root, writes a
    <seq>.pkl densepose dump unless one already exists (cheap resume).
    """
    # mocapRootDir = '/run/media/hjoo/disk/data/Penn_Action/labels'
    # Only the devfair cluster layout is supported; anywhere else we abort.
    g_bIsDevfair = False
    if os.path.exists('/private/home/hjoo'):
        g_bIsDevfair = True

    if g_bIsDevfair:
        inputDir_root = '/private/home/hjoo/data/pennaction/frames'
        img_outputDir_root = '/private/home/hjoo/data/pennaction/densepose_img'
        json_outputDir_root = '/private/home/hjoo/data/pennaction/densepose'
    else:
        assert False

    if not os.path.exists(img_outputDir_root):
        os.mkdir(img_outputDir_root)
    if not os.path.exists(json_outputDir_root):
        os.mkdir(json_outputDir_root)

    # inputFolder=$1
    # outputFolder=$2
    # #./build/examples/openpose/openpose.bin --image_dir $inputFolder --write_images $outputFolder --write_images_format jpg
    # echo ./build/examples/openpose/openpose.bin --image_dir $inputFolder --write_images ${outputFolder}_img --write_images_format jpg --write_json $outputFolder
    # seqList = sorted(glob.glob('{0}/*'.format(inputDir_root)) )

    for seqIdx in range(startIdx, endIdx):
        # Penn Action sequence folders are zero-padded to 4 digits.
        seqName = '{:04d}'.format(seqIdx)
        print(seqName)
        inputPath = os.path.join(inputDir_root,seqName)
        # if not ("outdoors_fencing_01" in seqName or "downtown_walking_00" in seqName or "outdoors_fencing_01" in seqName):
        #     continue
        outputFolder_img = os.path.join(img_outputDir_root,seqName)
        outputFolder_pkl = os.path.join(json_outputDir_root,seqName+'.pkl')
        # Skip sequences whose dump output already exists.
        if not os.path.exists(outputFolder_pkl):
            print(">>> Running:{}".format(outputFolder_img))
            params = ['dump','configs/densepose_rcnn_R_50_FPN_s1x.yaml','model_final_5f3d7f.pkl','{}/*.jpg'.format(inputPath),'--output',outputFolder_pkl,'-v']
            denseposeRunner(params)
        else:
            print(">>> Already exists:{}".format(outputFolder_img))
        # if not os.path.exists(outputFolder_img):
        #     os.mkdir(outputFolder_img)
        #     print(">>> Running:{}".format(outputFolder_img))
        #     params = ['show','configs/densepose_rcnn_R_50_FPN_s1x.yaml','model_final_5f3d7f.pkl','{}/*.jpg'.format(inputPath),'dp_contour,bbox','--output','{}/output.jpg'.format(outputFolder_img),'-v']
        #     denseposeRunner(params)
        # else:
        #     print(">>> Already exists:{}".format(outputFolder_img))
def runPennAction_img(startIdx, endIdx):
    """Run DensePose in 'show' (visualization) mode over Penn Action sequences
    [startIdx, endIdx), writing contour/bbox overlay images per sequence.

    NOTE(review): near-duplicate of runPennAction with the dump/show branches
    swapped — a shared helper taking the mode would remove the duplication.
    """
    # mocapRootDir = '/run/media/hjoo/disk/data/Penn_Action/labels'
    # Only the devfair cluster layout is supported; anywhere else we abort.
    g_bIsDevfair = False
    if os.path.exists('/private/home/hjoo'):
        g_bIsDevfair = True

    if g_bIsDevfair:
        inputDir_root = '/private/home/hjoo/data/pennaction/frames'
        img_outputDir_root = '/private/home/hjoo/data/pennaction/densepose_img'
        json_outputDir_root = '/private/home/hjoo/data/pennaction/densepose'
    else:
        assert False

    if not os.path.exists(img_outputDir_root):
        os.mkdir(img_outputDir_root)
    if not os.path.exists(json_outputDir_root):
        os.mkdir(json_outputDir_root)

    # inputFolder=$1
    # outputFolder=$2
    # #./build/examples/openpose/openpose.bin --image_dir $inputFolder --write_images $outputFolder --write_images_format jpg
    # echo ./build/examples/openpose/openpose.bin --image_dir $inputFolder --write_images ${outputFolder}_img --write_images_format jpg --write_json $outputFolder
    # seqList = sorted(glob.glob('{0}/*'.format(inputDir_root)) )

    for seqIdx in range(startIdx, endIdx):
        # Penn Action sequence folders are zero-padded to 4 digits.
        seqName = '{:04d}'.format(seqIdx)
        print(seqName)
        inputPath = os.path.join(inputDir_root,seqName)
        # if not ("outdoors_fencing_01" in seqName or "downtown_walking_00" in seqName or "outdoors_fencing_01" in seqName):
        #     continue
        outputFolder_img = os.path.join(img_outputDir_root,seqName)
        outputFolder_pkl = os.path.join(json_outputDir_root,seqName+'.pkl')
        # if not os.path.exists(outputFolder_pkl):
        #     print(">>> Running:{}".format(outputFolder_img))
        #     params = ['dump','configs/densepose_rcnn_R_50_FPN_s1x.yaml','model_final_5f3d7f.pkl','{}/*.jpg'.format(inputPath),'--output',outputFolder_pkl,'-v']
        #     denseposeRunner(params)
        # else:
        #     print(">>> Already exists:{}".format(outputFolder_img))
        # Skip sequences whose visualization folder already exists.
        if not os.path.exists(outputFolder_img):
            os.mkdir(outputFolder_img)
            print(">>> Running:{}".format(outputFolder_img))
            params = ['show','configs/densepose_rcnn_R_50_FPN_s1x.yaml','model_final_5f3d7f.pkl','{}/*.jpg'.format(inputPath),'dp_contour,bbox','--output','{}/output.jpg'.format(outputFolder_img),'-v']
            denseposeRunner(params)
        else:
            print(">>> Already exists:{}".format(outputFolder_img))
if __name__ == "__main__":
    # Dry-run driver: prints the (start, end) batches that would be processed
    # in steps of `interval`.
    # NOTE(review): the actual worker call is commented out below, so running
    # this script only logs the planned invocations — confirm whether
    # runPennAction(i, i + interval) should be re-enabled here.
    interval = 20
    for i in range(0,2250,interval):
        print('runPennAction({},{})'.format(i, i+ interval))
        # runPennAction(2,10)
| 39.88189
| 203
| 0.662784
| 601
| 5,065
| 5.349418
| 0.174709
| 0.083981
| 0.037325
| 0.027372
| 0.941835
| 0.941835
| 0.941835
| 0.941835
| 0.941835
| 0.941835
| 0
| 0.013776
| 0.197433
| 5,065
| 126
| 204
| 40.198413
| 0.777122
| 0.371964
| 0
| 0.754098
| 0
| 0
| 0.19568
| 0.123888
| 0
| 0
| 0
| 0
| 0.032787
| 1
| 0.032787
| false
| 0
| 0.065574
| 0
| 0.098361
| 0.114754
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
a5fabd61fd8062868a6dcc2020f13979e1a9dbf9
| 6,683
|
py
|
Python
|
tests/unit/oil/plugins/aws/ec2/test_public_ip.py
|
kbougy/oil
|
d0eb2836f1bc551a50a6dbdc9128bee43d305e39
|
[
"MIT"
] | 3
|
2017-11-05T00:02:03.000Z
|
2017-11-12T00:20:47.000Z
|
tests/unit/oil/plugins/aws/ec2/test_public_ip.py
|
kbougy/oil
|
d0eb2836f1bc551a50a6dbdc9128bee43d305e39
|
[
"MIT"
] | 43
|
2017-10-29T21:06:00.000Z
|
2018-02-04T22:40:36.000Z
|
tests/unit/oil/plugins/aws/ec2/test_public_ip.py
|
kbougy/oil
|
d0eb2836f1bc551a50a6dbdc9128bee43d305e39
|
[
"MIT"
] | 1
|
2020-01-07T06:51:57.000Z
|
2020-01-07T06:51:57.000Z
|
import unittest
from oil.plugins.aws.ec2 import PublicIpPlugin
class PublicIpPluginTestCase(unittest.TestCase):
    """Unit tests for PublicIpPlugin.

    The plugin scans EC2 ``describe_instances`` data and emits one result per
    public IP found (or a single severity-0 result when there are none).
    Every test used to rebuild the same aws -> ec2 -> region nesting inline;
    that boilerplate is factored into private helpers below.
    """

    @staticmethod
    def _api_data(instances):
        # Wrap a describe_instances list in the nested region layout the
        # plugin expects.
        return {
            'aws': {
                'ec2': {
                    'us-east-1': {
                        'describe_instances': instances
                    }
                }
            }
        }

    def _run(self, instances):
        # Run the plugin with an empty config over the given instance list.
        plugin = PublicIpPlugin({})
        return plugin.run(self._api_data(instances))

    def _assert_result_fields(self, results):
        # Every result row must expose exactly these four fields.
        expected = ['resource', 'severity', 'message', 'region']
        self.assertCountEqual(list(results[0].keys()), expected)

    def test_can_be_initialized_and_run_with_no_config(self):
        results = self._run([{'InstanceId': 'theid'}])
        self._assert_result_fields(results)

    def test_can_be_initialized_and_run_with_empty_config(self):
        results = self._run([{'InstanceId': 'anid'}])
        self._assert_result_fields(results)

    def test_creates_results_with_correct_fields_for_multiple_instances(self):
        # Fix: the second fixture originally reused 'id1', so the test never
        # actually exercised two distinct instances.
        instances = [{'InstanceId': 'id1'}, {'InstanceId': 'id2'}]
        results = self._run(instances)
        self._assert_result_fields(results)

    def test_only_one_sensible_result_if_no_instances(self):
        results = self._run([])
        expected = [{
            'resource': 'None',
            'region': 'us-east-1',
            'severity': 0,
            'message': 'No instances found'
        }]
        self.assertCountEqual(results, expected)

    def test_with_base_public_ip(self):
        # A top-level PublicIpAddress yields one severity-1 result.
        results = self._run([
            {
                'InstanceId': 'theid',
                'PublicIpAddress': '111.111.111.111'
            }
        ])
        expected = [{
            'resource': 'theid',
            'region': 'us-east-1',
            'severity': 1,
            'message': 'Instance has public ip: 111.111.111.111'
        }]
        self.assertEqual(results, expected)

    def test_with_nested_public_ip(self):
        # Public IPs attached to network interfaces are each reported.
        results = self._run([
            {
                'InstanceId': 'theid',
                'NetworkInterfaces': [
                    {
                        'Association': {
                            'PublicIp': '111.111.111.111'
                        }
                    },
                    {
                        'Association': {
                            'PublicIp': '111.111.111.000'
                        }
                    }
                ]
            }
        ])
        message_format = 'Instance has public ip: {}'
        expected = [
            {
                'resource': 'theid',
                'region': 'us-east-1',
                'severity': 1,
                'message': message_format.format('111.111.111.111')
            },
            {
                'resource': 'theid',
                'region': 'us-east-1',
                'severity': 1,
                'message': message_format.format('111.111.111.000')
            }
        ]
        self.assertEqual(results, expected)

    def test_with_nested_public_ip_does_not_add_public_ip_twice(self):
        # The same address appears both top-level and on an interface; the
        # plugin must report it only once.
        results = self._run([
            {
                'InstanceId': 'theid',
                'PublicIpAddress': '111.111.111.111',
                'NetworkInterfaces': [
                    {
                        'Association': {
                            'PublicIp': '111.111.111.111'
                        }
                    },
                ]
            }
        ])
        message_format = 'Instance has public ip: {}'
        expected = [
            {
                'resource': 'theid',
                'region': 'us-east-1',
                'severity': 1,
                'message': message_format.format('111.111.111.111')
            }
        ]
        self.assertEqual(expected, results)
| 29.834821
| 78
| 0.359719
| 420
| 6,683
| 5.502381
| 0.195238
| 0.064907
| 0.062311
| 0.051493
| 0.800519
| 0.779316
| 0.763739
| 0.727391
| 0.701861
| 0.684119
| 0
| 0.046299
| 0.541074
| 6,683
| 223
| 79
| 29.96861
| 0.707206
| 0.004339
| 0
| 0.527638
| 0
| 0
| 0.147625
| 0
| 0
| 0
| 0
| 0
| 0.035176
| 1
| 0.035176
| false
| 0
| 0.01005
| 0
| 0.050251
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
57192770d32e3ded8ed358d68fd8b7ad1c5d9a94
| 2,772
|
py
|
Python
|
tests/int/test_deposits.py
|
evanwimpey/defi-crawler-py
|
f3ce10bbcd9e822433708efd56e7ee5d371a42b0
|
[
"Apache-2.0"
] | null | null | null |
tests/int/test_deposits.py
|
evanwimpey/defi-crawler-py
|
f3ce10bbcd9e822433708efd56e7ee5d371a42b0
|
[
"Apache-2.0"
] | null | null | null |
tests/int/test_deposits.py
|
evanwimpey/defi-crawler-py
|
f3ce10bbcd9e822433708efd56e7ee5d371a42b0
|
[
"Apache-2.0"
] | null | null | null |
from deficrawler.lending import Lending
def test_deposit_aave_2_eth():
    """Integration test: fetch Aave v2 (Ethereum) deposits for a fixed date
    range and sanity-check the fields of the first record.

    NOTE(review): network-dependent — requires the Aave subgraph to be
    reachable and the range to contain at least one deposit.
    """
    aave = Lending(protocol="Aave", chain="Ethereum", version=2)
    deposits = aave.get_data_from_date_range(
        '10/05/2021 00:00:01', '11/05/2021 00:01:00', "deposit")
    assert deposits[0]['tx_id'] != ""
    assert deposits[0]['protocol'] == "Aave"
    assert deposits[0]['chain'] == "Ethereum"
    assert deposits[0]['version'] == 2
    assert deposits[0]['user'] != ""
    assert deposits[0]['token'] != ""
    # Coerce to float for consistency with the Compound/Cream tests in this
    # file; a string amount would otherwise raise TypeError on '>' in Python 3.
    assert float(deposits[0]['amount']) > 0
    assert deposits[0]['timestamp'] > 0
def test_deposit_aave_2_polygon():
    """Integration test: fetch Aave v2 (Polygon) deposits for a fixed date
    range and sanity-check the fields of the first record.

    NOTE(review): network-dependent — requires the Aave subgraph to be
    reachable and the range to contain at least one deposit.
    """
    aave = Lending(protocol="Aave", chain="Polygon", version=2)
    deposits = aave.get_data_from_date_range(
        '11/05/2021 00:00:01', '11/05/2021 00:01:10', "deposit")
    assert deposits[0]['tx_id'] != ""
    assert deposits[0]['protocol'] == "Aave"
    assert deposits[0]['chain'] == "Polygon"
    assert deposits[0]['version'] == 2
    assert deposits[0]['user'] != ""
    assert deposits[0]['token'] != ""
    # Coerce to float for consistency with the Compound/Cream tests in this
    # file; a string amount would otherwise raise TypeError on '>' in Python 3.
    assert float(deposits[0]['amount']) > 0
    assert deposits[0]['timestamp'] > 0
def test_deposit_compound_2_eth():
    """Compound v2 on Ethereum: the first deposit in the range has sane fields."""
    lending = Lending(protocol="Compound", chain="Ethereum", version=2)
    deposits = lending.get_data_from_date_range(
        '11/05/2021 00:00:01', '11/05/2021 00:01:10', "deposit")
    first = deposits[0]
    assert first['tx_id'] != ""
    assert first['protocol'] == "Compound"
    assert first['chain'] == "Ethereum"
    assert first['version'] == 2
    assert first['user'] != ""
    assert first['token'] != ""
    assert float(first['amount']) > 0
    assert first['timestamp'] > 0
def test_deposit_cream_2_eth():
    """Cream v2 on Ethereum: the first deposit in the range has sane fields."""
    lending = Lending(protocol="Cream", chain="Ethereum", version=2)
    deposits = lending.get_data_from_date_range(
        '11/05/2021 00:00:01', '12/05/2021 11:54:10', "deposit")
    first = deposits[0]
    assert first['tx_id'] != ""
    assert first['protocol'] == "Cream"
    assert first['chain'] == "Ethereum"
    assert first['version'] == 2
    assert first['user'] != ""
    assert first['token'] != ""
    assert float(first['amount']) > 0
    assert first['timestamp'] > 0
def test_deposit_cream_2_bsc():
    """Cream v2 on BSC: the first deposit in the range has sane fields."""
    lending = Lending(protocol="Cream", chain="bsc", version=2)
    deposits = lending.get_data_from_date_range(
        '08/05/2021 00:00:01', '12/05/2021 11:54:10', "deposit")
    first = deposits[0]
    assert first['tx_id'] != ""
    assert first['protocol'] == "Cream"
    assert first['chain'] == "bsc"
    assert first['version'] == 2
    assert first['user'] != ""
    assert first['token'] != ""
    assert float(first['amount']) > 0
    assert first['timestamp'] > 0
| 36.473684
| 72
| 0.612193
| 368
| 2,772
| 4.48913
| 0.111413
| 0.217918
| 0.335956
| 0.03632
| 0.917676
| 0.80569
| 0.80569
| 0.80569
| 0.80569
| 0.73184
| 0
| 0.0884
| 0.16342
| 2,772
| 75
| 73
| 36.96
| 0.623976
| 0
| 0
| 0.704918
| 0
| 0
| 0.212843
| 0
| 0
| 0
| 0
| 0
| 0.655738
| 1
| 0.081967
| false
| 0
| 0.016393
| 0
| 0.098361
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
573e1058c1a8033ecde9dd6f9a420113887bee68
| 10,933
|
py
|
Python
|
tests/uw_v2_catalog/test_uw_catalog.py
|
unfoldingWord-dev/d43-catalog
|
6c36f59b9b326e0ead45739c09631ef1e57c4932
|
[
"MIT"
] | 1
|
2017-05-18T22:18:31.000Z
|
2017-05-18T22:18:31.000Z
|
tests/uw_v2_catalog/test_uw_catalog.py
|
unfoldingWord-dev/d43-catalog
|
6c36f59b9b326e0ead45739c09631ef1e57c4932
|
[
"MIT"
] | 54
|
2016-11-07T03:07:03.000Z
|
2021-04-14T21:24:04.000Z
|
tests/uw_v2_catalog/test_uw_catalog.py
|
unfoldingWord-dev/d43-catalog
|
6c36f59b9b326e0ead45739c09631ef1e57c4932
|
[
"MIT"
] | 7
|
2016-10-26T18:15:14.000Z
|
2018-06-01T18:37:32.000Z
|
# coding=utf-8
from __future__ import print_function, unicode_literals
import os
from unittest import TestCase
from libraries.tools.file_utils import load_json_object
from libraries.tools.mocks import MockS3Handler, MockAPI, MockDynamodbHandler, MockSigner, MockLogger
from mock import patch
from libraries.lambda_handlers.uw_v2_catalog_handler import UwV2CatalogHandler
from libraries.tools.test_utils import assert_s3_equals_api_json, assert_s3_equals_api_text
# This is here to test importing main
@patch('libraries.lambda_handlers.handler.ErrorReporter')
class TestUwV2Catalog(TestCase):
    """Tests for UwV2CatalogHandler: status detection against various DynamoDB
    fixtures, and full v2 catalog generation against a mock v3 API.

    Each test previously duplicated the same ~13-line mock wiring; that setup
    now lives in `_make_converter`, keyed only by the DB fixture filename.
    The class-level @patch injects `mock_reporter` into every test method.
    """

    resources_dir = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'resources')

    def setUp(self):
        self.latest_catalog = load_json_object(os.path.join(TestUwV2Catalog.resources_dir, "v3_catalog.json"))
        self.v2_catalog = load_json_object(os.path.join(TestUwV2Catalog.resources_dir, "v2_catalog.json"))

    def _make_event(self):
        """Return a minimal lambda event carrying the stage variables the handler reads."""
        return {
            'stage-variables': {
                'cdn_bucket': 'cdn.door43.org',
                'cdn_url': 'https://cdn.door43.org/',
                'from_email': '',
                'to_email': ''
            }
        }

    def _make_converter(self, db_file):
        """Build a UwV2CatalogHandler wired to fresh mocks.

        :param db_file: name of the DynamoDB fixture file (relative to
            ``resources_dir``) to pre-load into the mock database.
        :return: tuple ``(converter, mockV3Api, mockS3, mockLogger)`` so tests
            can add API hosts and inspect uploads/log messages.
        """
        mockDB = MockDynamodbHandler()
        mockDB._load_db(os.path.join(TestUwV2Catalog.resources_dir, db_file))
        mockV3Api = MockAPI(os.path.join(self.resources_dir, 'v3_api'), 'https://api.door43.org/')
        mockS3 = MockS3Handler('uw_bucket')
        mockSigner = MockSigner()
        mockLogger = MockLogger()
        converter = UwV2CatalogHandler(event=self._make_event(),
                                       context=None,
                                       logger=mockLogger,
                                       s3_handler=mockS3,
                                       dynamodb_handler=mockDB,
                                       url_handler=mockV3Api.get_url,
                                       download_handler=mockV3Api.download_file,
                                       signing_handler=mockSigner)
        return converter, mockV3Api, mockS3, mockLogger

    def test_status_missing(self, mock_reporter):
        converter, _, _, _ = self._make_converter('missing_db.json')
        result = converter._get_status()
        self.assertFalse(result)

    def test_status_not_ready(self, mock_reporter):
        converter, _, _, _ = self._make_converter('not_ready_db.json')
        result = converter._get_status()
        self.assertFalse(result)

    def test_status_ready_complete(self, mock_reporter):
        converter, _, _, _ = self._make_converter('ready_complete_db.json')
        (status, source_status) = converter._get_status()
        self.assertEqual('complete', source_status['state'])
        self.assertEqual('complete', status['state'])

    def test_status_ready_inprogress(self, mock_reporter):
        converter, _, _, _ = self._make_converter('ready_inprogress_db.json')
        (status, source_status) = converter._get_status()
        self.assertEqual('complete', source_status['state'])
        self.assertEqual('in-progress', status['state'])
        self.assertEqual(1, len(status['processed']))

    def test_status_ready_new_db(self, mock_reporter):
        converter, _, _, _ = self._make_converter('ready_new_db.json')
        (status, source_status) = converter._get_status()
        self.assertEqual('complete', source_status['state'])
        self.assertEqual('in-progress', status['state'])
        self.assertEqual(0, len(status['processed']))

    def test_status_outdated_complete_db(self, mock_reporter):
        converter, _, _, _ = self._make_converter('ready_outdated_complete_db.json')
        (status, source_status) = converter._get_status()
        self.assertEqual('complete', source_status['state'])
        self.assertEqual('in-progress', status['state'])
        self.assertEqual(0, len(status['processed']))

    def test_status_outdated_inprogress_db(self, mock_reporter):
        converter, _, _, _ = self._make_converter('ready_outdated_inprogress_db.json')
        (status, source_status) = converter._get_status()
        self.assertEqual('complete', source_status['state'])
        self.assertEqual('in-progress', status['state'])
        self.assertEqual(0, len(status['processed']))

    def test_create_v2_catalog(self, mock_reporter):
        converter, mockV3Api, mockS3, mockLogger = self._make_converter('ready_new_db.json')
        # The handler holds bound methods of mockV3Api, so a host added here
        # is visible to it when run() executes.
        mockV3Api.add_host(os.path.join(self.resources_dir, 'v3_cdn'), 'https://cdn.door43.org/')
        # mockV2Api serves the expected v2 output for comparison only; it is
        # never wired into the handler.
        mockV2Api = MockAPI(os.path.join(self.resources_dir, 'v2_api'), 'https://test')
        converter.run()
        assert_s3_equals_api_json(self, mockS3, mockV2Api, 'v2/uw/catalog.json')
        assert_s3_equals_api_json(self, mockS3, mockV2Api, 'v2/uw/obs/en/obs/v4/source.json')
        assert_s3_equals_api_text(self, mockS3, mockV2Api, 'v2/uw/gen/en/udb/v7/gen.usfm')
        assert_s3_equals_api_text(self, mockS3, mockV2Api, 'v2/uw/1ch/en/ulb/v7/1ch.usfm')
        self.assertIn('v2/uw/obs/en/obs/v4/source.json.sig', mockS3._recent_uploads)
        self.assertIn('uw/txt/2/catalog.json', mockS3._recent_uploads)
        self.assertIn(
            'en_udb_1ch: media format "https://cdn.door43.org/en/udb/v9/1ch.pdf" does not match source version "7" and will be excluded.',
            mockLogger._messages)
        self.assertIn(
            'en_obs_obs: media format "https://cdn.door43.org/en/obs/v999/129kbps/en_obs_129kbps.zip" does not match source version "4" and will be excluded.',
            mockLogger._messages)
| 52.5625
| 159
| 0.574682
| 1,043
| 10,933
| 5.763183
| 0.136146
| 0.022958
| 0.034936
| 0.04159
| 0.830311
| 0.808185
| 0.787223
| 0.767094
| 0.759774
| 0.759774
| 0
| 0.022749
| 0.328547
| 10,933
| 208
| 160
| 52.5625
| 0.796077
| 0.00439
| 0
| 0.723757
| 0
| 0.01105
| 0.120004
| 0.027566
| 0
| 0
| 0
| 0
| 0.138122
| 1
| 0.055249
| false
| 0
| 0.044199
| 0.005525
| 0.116022
| 0.005525
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
5752065ec5aa99756084cf093bc860116de22421
| 52,260
|
py
|
Python
|
data_prep_scripts/gen_train_data.py
|
dlpbc/comma.ai-speed-challenge
|
fd4bd8097feb859234de9333700838fe71efb9dc
|
[
"MIT"
] | 26
|
2018-04-16T04:30:33.000Z
|
2021-06-09T17:39:52.000Z
|
data_prep_scripts/gen_train_data.py
|
dlpbc/comma.ai-speed-challenge
|
fd4bd8097feb859234de9333700838fe71efb9dc
|
[
"MIT"
] | 2
|
2019-02-11T09:43:54.000Z
|
2020-07-14T23:37:17.000Z
|
data_prep_scripts/gen_train_data.py
|
dlpbc/comma.ai-speed-challenge
|
fd4bd8097feb859234de9333700838fe71efb9dc
|
[
"MIT"
] | 10
|
2018-05-12T18:26:54.000Z
|
2020-12-27T16:55:36.000Z
|
'''
This script does 2 things
1. generates example clips (each example contains 40 70x70 frames) from the train video and stores a bunch of
these clips in 'npz' files as batches. A typical npz batch file contains 1024 examples.
2. for each example generated, a duplicate copy may be created or the example might not be saved
because of the reason stated below.
REASON: the current form of the train video has frame labels skewed towards certain speed
categories more than others (e.g. labels for speeds between 0 - 5 are more common than labels
for speeds > 25).
In order to balance this and at least provide some balance to enable mean square error learn
appropriately, we use this script to generate train batches (npz files) which contains a more balanced
training examples (i.e. balanced across speed categories). This is done by eliminating some generated
examples of a certain speed range category (to reduce its number) and then increasing/duplicating examples of
some other speed range category (to increase its number)
'''
import os
import shutil
import sys
import argparse
import numpy as np
import cv2
# Number of consecutive video frames bundled into one training example.
NUM_FRAMES_PER_EXAMPLE = 40
# presumably the frame step between the starts of successive examples —
# TODO confirm: STRIDE is not referenced in the visible part of main().
STRIDE = 1
# Number of examples stored per output 'npz' batch file.
BATCH_SIZE = 1024
def main(args):
label_path = args.label_path
video_path = args.video_path
output_dir = args.output_dir
if os.path.exists(output_dir):
shutil.rmtree(output_dir)
os.makedirs(output_dir)
# open label file
f = open(label_path, 'r')
labels = f.readlines()
f.close()
# open train split video file
cap = cv2.VideoCapture(video_path)
total_num_video_frames = cap.get(cv2.CAP_PROP_FRAME_COUNT)
# sanity check
assert len(labels) == total_num_video_frames
# statistics of each category
# this is especially relevant to categories with decimal scale factor (e.g '2.3', '1.5' etc)
# for example '2.3' means that
# for every 10 consecutive (collection of frames ready to be used for example generation)
# of the same label type (category),
# 3 of those frames collection will be used to produce 3 cropped batch_examples
# while 7 of those frame collections will be used to produce 2 cropped batch_examples
# using '2.3' scale factor as an example,
# the FIRST ELEMENT of the list stores the number of frame collections (with the same) label type
# that has been parsed. This should not exceed 10, after which we reset back to 0
# the SECOND ELEMENT of the list stores the number of 'third' batch_examples that have been generated
# so far
cat_stats = {
'0-2' : [0,0],
'3-4' : [0,0],
'5-6' : [0,0],
'7-8' : [0,0],
'9-10' : [0,0],
'11-12' : [0,0],
'13-14' : [0,0],
'15-16' : [0,0],
'17-18' : [0,0],
'19-20' : [0,0],
'21-22' : [0,0],
'23-24' : [0,0],
'25-26' : [0,0],
'27-28' : [0,0]
}
batch_examples = []
batch_labels = []
frames = []
idx = 0
batch_num = 0
gen_num_examples = 0
# loop thru frames in video
while True:
ret, frame = cap.read()
if frame is None:
break
# original size of frame 640 x 480 (width x height)
# crop out 256 x 256 in a centered region of the frame
h_center_pixel_idx = frame.shape[0] // 2
w_center_pixel_idx = frame.shape[1] // 2
h_start_idx = h_center_pixel_idx - 128
h_stop_idx = h_center_pixel_idx + 128
w_start_idx = w_center_pixel_idx - 128
w_stop_idx = w_center_pixel_idx + 128
frame = frame[h_start_idx : h_stop_idx, w_start_idx : w_stop_idx]
assert frame.shape[:2] == (256, 256)
# resize the frame to 70 x 70
frame = cv2.resize(frame, (70, 70))
# store frame
frames.append(frame)
if len(frames) == NUM_FRAMES_PER_EXAMPLE:
# an example to be generated
# read label
label = float(labels[idx])
int_label = int(label)
# determine example category based on label
if int_label >= 0 and int_label <= 2:
# scale up, factor 2.3
# generate first example
_frames = np.array(frames, copy=True)
batch_examples.append(_frames)
batch_labels.append(label)
gen_num_examples += 1
# generate second example
_frames = np.array(frames, copy=True)
batch_examples.append(_frames)
batch_labels.append(label)
gen_num_examples += 1
# sanity check
if cat_stats['0-2'][0] > 10:
print('\n\nSomething went wrong some where')
print('Category: 0-2')
print('iteration: %d' % idx)
print('Parsed example count > 10')
print('forgot to reset count somewhere')
print('Exiting...\n\n')
sys.exit()
# update stat to denote that we have parsed through another frame collection
# of this category (regardless of the outcome of probability of generating a
# 3rd example from it)
cat_stats['0-2'][0] += 1
# generate third example or not based on probability
# use probablity to randomly determine if we are going to generate a third
# example using this example.
# the '.3' in '2.3' scale factor means for a every 10 batch_examples with this same
# label value, we generate a third example
if cat_stats['0-2'][0] > 7 and cat_stats['0-2'][1] < 3:
# this means that if we have already parsed 7 frame collections
# and we have not yet generated a third example from the 7 previous frame collections
# parsed.
# we no longer have the luxury of probability, just generate a third example
# for this frame collection and the next 2 future frame collections in the same
# category.
# generate another(third) example
_frames = np.array(frames, copy=True)
batch_examples.append(_frames)
batch_labels.append(label)
gen_num_examples += 1
# update the stat to reflect the current number of third example generated
cat_stats['0-2'][1] += 1
elif cat_stats['0-2'][0] <= 6 and cat_stats['0-2'][1] < 3:
# we still have the luxury to flip a coin (random probability)
# to see if we should generate a third batch_examples using
# this frame collection, or push the responsiblity to a future frame collection
# of the same category (same label)
# flip a coin
outcome = np.random.binomial(1, 0.5)
# postive outcome, generate 'third' example
if outcome == 1:
# generate another(third) example
_frames = np.array(frames, copy=True)
batch_examples.append(_frames)
batch_labels.append(label)
gen_num_examples += 1
# update the stat to reflect the current number of third example generated
cat_stats['0-2'][1] += 1
else:
# negative outcome
# do nothing
pass
# sanity check
if cat_stats['0-2'][0] == 10 and cat_stats['0-2'][1] != 3:
print('\n\nSomething went wrong some where')
print('Category: 0-2')
print('iteration: %d' % idx)
print('Exiting...\n\n')
sys.exit()
if cat_stats['0-2'][0] == 10:
# reset stats for this category
cat_stats['0-2'][0] = 0
cat_stats['0-2'][1] = 0
# end of if label >= 0 and label <= 2:
if int_label >= 3 and int_label <= 4:
# scale up, factor 1.7
# generate first example
_frames = np.array(frames, copy=True)
batch_examples.append(_frames)
batch_labels.append(label)
gen_num_examples += 1
# sanity check
if cat_stats['3-4'][0] > 10:
print('\n\nSomething went wrong some where')
print('Category: 3-4')
print('iteration: %d' % idx)
print('Parsed example count > 10')
print('forgot to reset count somewhere')
print('Exiting...\n\n')
sys.exit()
# update stat to denote that we have parsed through another frame collection
# of this category (regardless of whether we generated a 2nd example from
# it or not)
cat_stats['3-4'][0] += 1
# generate second example or not based on probability
# use probablity to randomly determine if we are going to generate a second
# example using this frame collection.
# the '.7' in '1.7' scale factor means for a every 10 batch_examples with this same
# label value, we generate a second example
if cat_stats['3-4'][0] > 3 and cat_stats['3-4'][1] < 7:
# this means that if we have already parsed 3 frame collections
# and we have not yet generated a second example from the 3 previous
# frame collections parsed.
# we no longer have the luxury of probability, just generate a second example
# for this frame collection and the next 6 future frame collections in the same
# category.
# generate another(second) example
_frames = np.array(frames, copy=True)
batch_examples.append(_frames)
batch_labels.append(label)
gen_num_examples += 1
# update the stat to reflect the current number of second example generated
cat_stats['3-4'][1] += 1
elif cat_stats['3-4'][0] <= 2 and cat_stats['3-4'][1] < 7:
# we still have the luxury to flip a coin (random probability)
# to see if we should generate a second batch_examples using
# this frame collection, or push the responsiblity to a future frame collection
# of the same category (same label)
# flip a coin
outcome = np.random.binomial(1, 0.5)
# postive outcome, generate 'second' example
if outcome == 1:
# generate another(second) example
_frames = np.array(frames, copy=True)
batch_examples.append(_frames)
batch_labels.append(label)
gen_num_examples += 1
# update the stat to reflect the current number of second example generated
cat_stats['3-4'][1] += 1
else:
# negative outcome
# do nothing
pass
# sanity check
if cat_stats['3-4'][0] == 10 and cat_stats['3-4'][1] != 7:
print('\n\nSomething went wrong some where')
print('Category: 3-4')
print('iteration: %d' % idx)
print('Exiting...\n\n')
sys.exit()
if cat_stats['3-4'][0] == 10:
# reset stats for this category
cat_stats['3-4'][0] = 0
cat_stats['3-4'][1] = 0
# end of if label >= 3 and label <= 4:
if int_label >= 5 and int_label <= 6:
# scale up, factor 1.5
_frames = np.array(frames, copy=True)
batch_examples.append(_frames)
batch_labels.append(label)
gen_num_examples += 1
# sanity check
if cat_stats['5-6'][0] > 10:
print('\n\nSomething went wrong some where')
print('Category: 5-6')
print('iteration: %d' % idx)
print('Parsed example count > 10')
print('forgot to reset count somewhere')
print('Exiting...\n\n')
sys.exit()
# update stat to denote that we have parsed through another frame collection
# of this category (regardless of whether we generated a 2nd example from
# it or not)
cat_stats['5-6'][0] += 1
# use probablity to randomly determine if we are going to generate a second
# example using this frame collection.
# the '.5' in '1.5' scale factor means for a every 10 batch_examples with this same
# label value, we generate a second example
if cat_stats['5-6'][0] > 5 and cat_stats['5-6'][1] < 5:
# this means that if we have already parsed 5 frame collections
# and we have not yet generated a second example from the 5 previous
# frame collections parsed.
# we no longer have the luxury of probability, just generate a second example
# for this frame collection and the next 4 future frame collections in the same
# category.
# generate another(second) example
_frames = np.array(frames, copy=True)
batch_examples.append(_frames)
batch_labels.append(label)
gen_num_examples += 1
# update the stat to reflect the current number of second example generated
cat_stats['5-6'][1] += 1
elif cat_stats['5-6'][0] <= 4 and cat_stats['5-6'][1] < 5:
# we still have the luxury to flip a coin (random probability)
# to see if we should generate a second batch_examples using
# this frame collection, or push the responsiblity to a future frame collection
# of the same category (same label)
# flip a coin
outcome = np.random.binomial(1, 0.5)
# postive outcome, generate 'second' example
if outcome == 1:
# generate another(second) example
_frames = np.array(frames, copy=True)
batch_examples.append(_frames)
batch_labels.append(label)
gen_num_examples += 1
# update the stat to reflect the current number of second example generated
cat_stats['5-6'][1] += 1
else:
# negative outcome
# do nothing
pass
# sanity check
if cat_stats['5-6'][0] == 10 and cat_stats['5-6'][1] != 5:
print('\n\nSomething went wrong some where')
print('Category: 3-4')
print('iteration: %d' % idx)
print('Exiting...\n\n')
sys.exit()
if cat_stats['5-6'][0] == 10:
# reset stats for this category
cat_stats['5-6'][0] = 0
cat_stats['5-6'][1] = 0
# end of if label >= 5 and label <= 6:
if int_label >= 7 and int_label <= 8:
# no scaling. just save example
_frames = np.array(frames, copy=True)
batch_examples.append(_frames)
batch_labels.append(label)
gen_num_examples += 1
# end of if label >= 7 and label <= 8
if int_label >= 9 and int_label <= 10:
# scale up, factor 1.5
_frames = np.array(frames, copy=True)
batch_examples.append(_frames)
batch_labels.append(label)
gen_num_examples += 1
# sanity check
if cat_stats['9-10'][0] > 10:
print('\n\nSomething went wrong some where')
print('Category: 9-10')
print('iteration: %d' % idx)
print('Parsed example count > 10')
print('forgot to reset count somewhere')
print('Exiting...\n\n')
sys.exit()
# update stat to denote that we have parsed through another frame collection
# of this category (regardless of whether we generated a 2nd example from
# it or not)
cat_stats['9-10'][0] += 1
# use probablity to randomly determine if we are going to generate a second
# example using this frame collection.
# the '.5' in '1.5' scale factor means for a every 10 batch_examples with this same
# label value, we generate a second example
if cat_stats['9-10'][0] > 5 and cat_stats['9-10'][1] < 5:
# this means that if we have already parsed 5 frame collections
# and we have not yet generated a second example from the 5 previous
# frame collections parsed.
# we no longer have the luxury of probability, just generate a second example
# for this frame collection and the next 4 future frame collections in the same
# category.
# generate another(second) example
_frames = np.array(frames, copy=True)
batch_examples.append(_frames)
batch_labels.append(label)
gen_num_examples += 1
# update the stat to reflect the current number of second example generated
cat_stats['9-10'][1] += 1
elif cat_stats['9-10'][0] <= 4 and cat_stats['9-10'][1] < 5:
# we still have the luxury to flip a coin (random probability)
# to see if we should generate a second batch_examples using
# this frame collection, or push the responsiblity to a future frame collection
# of the same category (same label)
# flip a coin
outcome = np.random.binomial(1, 0.5)
# postive outcome, generate 'second' example
if outcome == 1:
# generate another(second) example
_frames = np.array(frames, copy=True)
batch_examples.append(_frames)
batch_labels.append(label)
gen_num_examples += 1
# update the stat to reflect the current number of second example generated
cat_stats['9-10'][1] += 1
else:
# negative outcome
# do nothing
pass
# sanity check
if cat_stats['9-10'][0] == 10 and cat_stats['9-10'][1] != 5:
print('\n\nSomething went wrong some where')
print('Category: 9-10')
print('iteration: %d' % idx)
print('Exiting...\n\n')
sys.exit()
if cat_stats['9-10'][0] == 10:
# reset stats for this category
cat_stats['9-10'][0] = 0
cat_stats['9-10'][1] = 0
# end of if label >= 9 and label <= 10
if int_label >= 11 and int_label <= 12:
# scale up, factor 2.5
# generate example 1
_frames = np.array(frames, copy=True)
batch_examples.append(_frames)
batch_labels.append(label)
gen_num_examples += 1
# generate example 2
_frames = np.array(frames, copy=True)
batch_examples.append(_frames)
batch_labels.append(label)
gen_num_examples += 1
# sanity check
if cat_stats['11-12'][0] > 10:
print('\n\nSomething went wrong some where')
print('Category: 11-12')
print('iteration: %d' % idx)
print('Parsed example count > 10')
print('forgot to reset count somewhere')
print('Exiting...\n\n')
sys.exit()
# update stat to denote that we have parsed through another frame collection
# of this category (regardless of the outcome of probability of generating
# a 3rd example from it
cat_stats['11-12'][0] += 1
# generate 3rd example based on probability
# use probablity to randomly determine if we are going to generate a third
# example using this frame collection.
# the '.5' in '2.5' scale factor means for a every 10 batch_examples with this same
# label value, we generate a third example
if cat_stats['11-12'][0] > 5 and cat_stats['11-12'][1] < 5:
# this means that if we have already parsed 5 frame collections
# and we have not yet generated a second example from the 5 previous
# frame collections parsed.
# we no longer have the luxury of probability, just generate a third example
# for this frame collection and the next 4 future frame collections in the same
# category.
# generate another(second) example
_frames = np.array(frames, copy=True)
batch_examples.append(_frames)
batch_labels.append(label)
gen_num_examples += 1
# update the stat to reflect the current number of third example generated
cat_stats['11-12'][1] += 1
elif cat_stats['11-12'][0] <= 4 and cat_stats['11-12'][1] < 5:
# we still have the luxury to flip a coin (random probability)
# to see if we should generate a third batch_examples using
# this frame collection, or push the responsiblity to a future frame collection
# of the same category (same label)
# flip a coin
outcome = np.random.binomial(1, 0.5)
# postive outcome, generate 'third' example
if outcome == 1:
# generate another(third) example
_frames = np.array(frames, copy=True)
batch_examples.append(_frames)
batch_labels.append(label)
gen_num_examples += 1
# update the stat to reflect the current number of third example generated
cat_stats['11-12'][1] += 1
else:
# negative outcome
# do nothing
pass
# sanity check
if cat_stats['11-12'][0] == 10 and cat_stats['11-12'][1] != 5:
print('\n\nSomething went wrong some where')
print('Category: 11-12')
print('iteration: %d' % idx)
print('Exiting...\n\n')
sys.exit()
if cat_stats['11-12'][0] == 10:
# reset stats for this category
cat_stats['11-12'][0] = 0
cat_stats['11-12'][1] = 0
# end of if label >= 11 and label <= 12
if int_label >= 13 and int_label <= 14:
# scale up, factor 2.7
# generate first example
_frames = np.array(frames, copy=True)
batch_examples.append(_frames)
batch_labels.append(label)
gen_num_examples += 1
# generate second example
_frames = np.array(frames, copy=True)
batch_examples.append(_frames)
batch_labels.append(label)
gen_num_examples += 1
# sanity check
if cat_stats['13-14'][0] > 10:
print('\n\nSomething went wrong some where')
print('Category: 13-14')
print('iteration: %d' % idx)
print('Parsed example count > 10')
print('forgot to reset count somewhere')
print('Exiting...\n\n')
sys.exit()
# update stat to denote that we have parsed through another frame collection
# of this category (regardless of the outcome of probability of generating
# a 3rd example from it)
cat_stats['13-14'][0] += 1
# generate third example or not based on probability
# use probablity to randomly determine if we are going to generate a third
# example using this frame collection.
# the '.7' in '2.7' scale factor means for a every 10 batch_examples with this same
# label value, we generate 7 third example
if cat_stats['13-14'][0] > 3 and cat_stats['13-14'][1] < 7:
# this means that if we have already parsed 3 frame collections
# and we have not yet generated a second example from the 3 previous
# frame collections parsed.
# we no longer have the luxury of probability, just generate a third example
# for this frame collection and the next 6 future frame collections in the same
# category.
# generate another(third) example
_frames = np.array(frames, copy=True)
batch_examples.append(_frames)
batch_labels.append(label)
gen_num_examples += 1
# update the stat to reflect the current number of third example generated
cat_stats['13-14'][1] += 1
elif cat_stats['13-14'][0] <= 2 and cat_stats['13-14'][1] < 7:
# we still have the luxury to flip a coin (random probability)
# to see if we should generate a second batch_examples using
# this frame collection, or push the responsiblity to a future frame collection
# of the same category (same label)
# flip a coin
outcome = np.random.binomial(1, 0.5)
# postive outcome, generate 'third' example
if outcome == 1:
# generate another(second) example
_frames = np.array(frames, copy=True)
batch_examples.append(_frames)
batch_labels.append(label)
gen_num_examples += 1
# update the stat to reflect the current number of third example generated
cat_stats['13-14'][1] += 1
else:
# negative outcome
# do nothing
pass
# sanity check
if cat_stats['13-14'][0] == 10 and cat_stats['13-14'][1] != 7:
print('\n\nSomething went wrong some where')
print('Category: 13-14')
print('iteration: %d' % idx)
print('Exiting...\n\n')
sys.exit()
if cat_stats['13-14'][0] == 10:
# reset stats for this category
cat_stats['13-14'][0] = 0
cat_stats['13-14'][1] = 0
# end of if label >= 13 and label <= 14
if int_label >= 15 and int_label <= 16:
# scale up, factor 2.2
# generate first example
_frames = np.array(frames, copy=True)
batch_examples.append(_frames)
batch_labels.append(label)
gen_num_examples += 1
# generate second example
_frames = np.array(frames, copy=True)
batch_examples.append(_frames)
batch_labels.append(label)
gen_num_examples += 1
# sanity check
if cat_stats['15-16'][0] > 10:
print('\n\nSomething went wrong some where')
print('Category: 15-16')
print('iteration: %d' % idx)
print('Parsed example count > 10')
print('forgot to reset count somewhere')
print('Exiting...\n\n')
sys.exit()
# update stat to denote that we have parsed through another frame collection
# of this category (regardless of the outcome of probability of generating
# a 3rd example from it)
cat_stats['15-16'][0] += 1
# generate third example or not based on probability
# use probablity to randomly determine if we are going to generate a third
# example using this frame collection.
# the '.2' in '2.2' scale factor means for a every 10 batch_examples with this same
# label value, we generate two third example
if cat_stats['15-16'][0] > 8 and cat_stats['15-16'][1] < 2:
# this means that if we have already parsed 2 frame collections
# and we have not yet generated a third example from the 2 previous
# frame collections parsed.
# we no longer have the luxury of probability, just generate a third example
# for this frame collection and the next 7 future frame collections in the same
# category.
# generate another(third) example
_frames = np.array(frames, copy=True)
batch_examples.append(_frames)
batch_labels.append(label)
gen_num_examples += 1
# update the stat to reflect the current number of third example generated
cat_stats['15-16'][1] += 1
elif cat_stats['15-16'][0] <= 7 and cat_stats['15-16'][1] < 2:
# we still have the luxury to flip a coin (random probability)
# to see if we should generate a third batch_examples using
# this frame collection, or push the responsiblity to a future frame collection
# of the same category (same label)
# flip a coin
outcome = np.random.binomial(1, 0.5)
# postive outcome, generate 'third' example
if outcome == 1:
# generate another(third) example
_frames = np.array(frames, copy=True)
batch_examples.append(_frames)
batch_labels.append(label)
gen_num_examples += 1
# update the stat to reflect the current number of third example generated
cat_stats['15-16'][1] += 1
else:
# negative outcome
# do nothing
pass
# sanity check
if cat_stats['15-16'][0] == 10 and cat_stats['15-16'][1] != 2:
print('\n\nSomething went wrong some where')
print('Category: 15-16')
print('iteration: %d' % idx)
print('Exiting...\n\n')
sys.exit()
if cat_stats['15-16'][0] == 10:
# reset stats for this category
cat_stats['15-16'][0] = 0
cat_stats['15-16'][1] = 0
# end of if label >= 15 and label <= 16
if int_label >= 17 and int_label <= 18:
# scale up, factor 1.9
# generate first example
_frames = np.array(frames, copy=True)
batch_examples.append(_frames)
batch_labels.append(label)
gen_num_examples += 1
# sanity check
if cat_stats['17-18'][0] > 10:
print('\n\nSomething went wrong some where')
print('Category: 17-18')
print('iteration: %d' % idx)
print('Parsed example count > 10')
print('forgot to reset count somewhere')
print('Exiting...\n\n')
sys.exit()
# update stat to denote that we have parsed through another frame collection
# of this category (regardless of the outcome of probability of generating
# a 3rd example from it)
cat_stats['17-18'][0] += 1
# generate second example or not based on probability
# use probablity to randomly determine if we are going to generate a second
# example using this frame collection.
# the '.9' in '1.9' scale factor means for a every 10 batch_examples with this same
# label value, we generate a second example
if cat_stats['17-18'][0] > 1 and cat_stats['17-18'][1] < 9:
# this means that if we have already parsed 3 frame collections
# and we have not yet generated a second example from the 3 previous
# frame collections parsed.
# we no longer have the luxury of probability, just generate a second example
# for this frame collection and the next 6 future frame collections in the same
# category.
# generate another(second) example
_frames = np.array(frames, copy=True)
batch_examples.append(_frames)
batch_labels.append(label)
gen_num_examples += 1
# update the stat to reflect the current number of second example generated
cat_stats['17-18'][1] += 1
elif cat_stats['17-18'][0] <= 1 and cat_stats['17-18'][1] < 9:
# we still have the luxury to flip a coin (random probability)
# to see if we should generate a second batch_examples using
# this frame collection, or push the responsiblity to a future frame collection
# of the same category (same label)
# flip a coin
outcome = np.random.binomial(1, 0.5)
# postive outcome, generate 'second' example
if outcome == 1:
# generate another(second) example
_frames = np.array(frames, copy=True)
batch_examples.append(_frames)
batch_labels.append(label)
gen_num_examples += 1
# update the stat to reflect the current number of second example generated
cat_stats['17-18'][1] += 1
else:
# negative outcome
# do nothing
pass
# sanity check
if cat_stats['17-18'][0] == 10 and cat_stats['17-18'][1] != 9:
print('\n\nSomething went wrong some where')
print('Category: 17-18')
print('iteration: %d' % idx)
print('Exiting...\n\n')
sys.exit()
if cat_stats['17-18'][0] == 10:
# reset stats for this category
cat_stats['17-18'][0] = 0
cat_stats['17-18'][1] = 0
# end of if label >= 17 and label <= 18
if int_label >= 19 and int_label <= 20:
# scale up, factor 2.2
# generate first example
_frames = np.array(frames, copy=True)
batch_examples.append(_frames)
batch_labels.append(label)
gen_num_examples += 1
# generate second example
_frames = np.array(frames, copy=True)
batch_examples.append(_frames)
batch_labels.append(label)
gen_num_examples += 1
# sanity check
if cat_stats['19-20'][0] > 10:
print('\n\nSomething went wrong some where')
print('Category: 19-20')
print('iteration: %d' % idx)
print('Parsed example count > 10')
print('forgot to reset count somewhere')
print('Exiting...\n\n')
sys.exit()
# update stat to denote that we have parsed through another frame collection
# of this category (regardless of the outcome of probability of generating a 3rd example from it
cat_stats['19-20'][0] += 1
# generate third example or not based on probability
# use probablity to randomly determine if we are going to generate a third
# example using this frame collection.
# the '.2' in '2.2' scale factor means for a every 10 batch_examples with this same
# label value, we generate two third example
if cat_stats['19-20'][0] > 8 and cat_stats['19-20'][1] < 2:
# this means that if we have already parsed 8 frame collections
# and we have not yet generated a third example from the 8 previous
# frame collections parsed.
# we no longer have the luxury of probability, just generate a third example
# for this frame collection and the next 2 future frame collections in the same
# category.
# generate another(third) example
_frames = np.array(frames, copy=True)
batch_examples.append(_frames)
batch_labels.append(label)
gen_num_examples += 1
# update the stat to reflect the current number of third example generated
cat_stats['19-20'][1] += 1
elif cat_stats['19-20'][0] <= 7 and cat_stats['19-20'][1] < 2:
# we still have the luxury to flip a coin (random probability)
# to see if we should generate a second batch_examples using
# this frame collection, or push the responsiblity to a future frame collection
# of the same category (same label)
# flip a coin
outcome = np.random.binomial(1, 0.5)
# postive outcome, generate 'third' example
if outcome == 1:
# generate another(second) example
_frames = np.array(frames, copy=True)
batch_examples.append(_frames)
batch_labels.append(label)
gen_num_examples += 1
# update the stat to reflect the current number of third example generated
cat_stats['19-20'][1] += 1
else:
# negative outcome
# do nothing
pass
# sanity check
if cat_stats['19-20'][0] == 10 and cat_stats['19-20'][1] != 2:
print('\n\nSomething went wrong some where')
print('Category: 19-20')
print('iteration: %d' % idx)
print('Exiting...\n\n')
sys.exit()
if cat_stats['19-20'][0] == 10:
# reset stats for this category
cat_stats['19-20'][0] = 0
cat_stats['19-20'][1] = 0
# end of if label >= 19 and label <= 20
if int_label >= 21 and int_label <= 22:
# scale down, factor 0.54
# i.e for every 100 frame collections of this category,
# only use 54 to generate batch_examples, discard the other 36
# sanity check
if cat_stats['21-22'][0] > 100:
print('\n\nSomething went wrong some where')
print('Category: 21-22')
print('iteration: %d' % idx)
print('Parsed example count > 100')
print('forgot to reset count somewhere')
print('Exiting...\n\n')
sys.exit()
# update stat to denote that we have parsed through another frame collection
# of this category (regardless of the outcome of probability of generating a 3rd example from it
cat_stats['21-22'][0] += 1
if cat_stats['21-22'][0] > 36 and cat_stats['21-22'][1] < 54:
# we no longer have the luxury of probability, just generate a second example
# for this frame collection and the next 6 future frame collections in the same
# category.
# generate example
_frames = np.array(frames, copy=True)
batch_examples.append(_frames)
batch_labels.append(label)
gen_num_examples += 1
cat_stats['21-22'][1] += 1
elif cat_stats['21-22'][0] <= 35 and cat_stats['21-22'][1] < 54:
# we still have the luxury to flip a coin (random probability)
# to see if we should generate a second batch_examples using
# this frame collection, or push the responsiblity to a future frame collection
# of the same category (same label)
# flip a coin
outcome = np.random.binomial(1, 0.5)
# postive outcome, generate example
if outcome == 1:
# generate example
_frames = np.array(frames, copy=True)
batch_examples.append(_frames)
batch_labels.append(label)
gen_num_examples += 1
cat_stats['21-22'][1] += 1
else:
# negative outcome
# do nothing
pass
# sanity check
if cat_stats['21-22'][0] == 100 and cat_stats['21-22'][1] != 54:
print('\n\nSomething went wrong some where')
print('Category: 21-22')
print('iteration: %d' % idx)
print('Exiting...\n\n')
sys.exit()
if cat_stats['21-22'][0] == 100:
# reset stats for this category
cat_stats['21-22'][0] = 0
cat_stats['21-22'][1] = 0
# end of if label >= 21 and label <= 22
if int_label >= 23 and int_label <= 24:
# no scaling. just save example
_frames = np.array(frames, copy=True)
batch_examples.append(_frames)
batch_labels.append(label)
gen_num_examples += 1
# end of if label >= 23 and label <= 24
if int_label >= 25 and int_label <= 26:
# scale up, factor 2.3
# generate first example
_frames = np.array(frames, copy=True)
batch_examples.append(_frames)
batch_labels.append(label)
gen_num_examples += 1
# generate second example
_frames = np.array(frames, copy=True)
batch_examples.append(_frames)
batch_labels.append(label)
gen_num_examples += 1
# sanity check
if cat_stats['25-26'][0] > 10:
print('\n\nSomething went wrong some where')
print('Category: 25-26')
print('iteration: %d' % idx)
print('Parsed example count > 10')
print('forgot to reset count somewhere')
print('Exiting...\n\n')
sys.exit()
# update stat to denote that we have parsed through another frame collection
# of this category (regardless of the outcome of probability of generating a 3rd example from it
cat_stats['25-26'][0] += 1
# generate third example or not based on probability
# use probablity to randomly determine if we are going to generate a third
# example using this example.
# the '.3' in '2.3' scale factor means for a every 10 batch_examples with this same
# label value, we generate a third example
if cat_stats['25-26'][0] > 7 and cat_stats['25-26'][1] < 3:
# this means that if we have already parsed 7 frame collections
# and we have not yet generated a third example from the 7 previous frame collections
# parsed.
# we no longer have the luxury of probability, just generate a third example
# for this frame collection and the next 2 future frame collections in the same
# category.
# generate another(third) example
_frames = np.array(frames, copy=True)
batch_examples.append(_frames)
batch_labels.append(label)
gen_num_examples += 1
# update stat to denote number of 3rd example generated
cat_stats['25-26'][1] += 1
elif cat_stats['25-26'][0] <= 6 and cat_stats['25-26'][1] < 3:
# we still have the luxury to flip a coin (random probability)
# to see if we should generate a third batch_examples using
# this frame collection, or push the responsiblity to a future frame collection
# of the same category (same label)
# flip a coin
outcome = np.random.binomial(1, 0.5)
# postive outcome, generate 'third' example
if outcome == 1:
# generate another(third) example
_frames = np.array(frames, copy=True)
batch_examples.append(_frames)
batch_labels.append(label)
gen_num_examples += 1
# update stat to denote the current number of 3rd example generated
cat_stats['25-26'][1] += 1
else:
# negative outcome
# do nothing
pass
# sanity check
if cat_stats['25-26'][0] == 10 and cat_stats['25-26'][1] != 3:
print('\n\nSomething went wrong some where')
print('Category: 25-26')
print('iteration: %d' % idx)
print('Exiting...\n\n')
sys.exit()
if cat_stats['25-26'][0] == 10:
# reset stats for this category
cat_stats['25-26'][0] = 0
cat_stats['25-26'][1] = 0
# end of if label >= 25 and label <= 26
if int_label >= 27 and int_label <= 28:
# scale up, factor 48
# generate 48 example for this category
for i in np.arange(48):
_frames = np.array(frames, copy=True)
batch_examples.append(_frames)
batch_labels.append(label)
gen_num_examples += 1
# end of if label >= 27 and label <= 28
if len(batch_examples) >= BATCH_SIZE: # one batch is completed
# save batch batch_examples to disk
np.savez(output_dir + 'batch_{0}.npz'.format(batch_num+1),
examples=np.asarray(batch_examples[ : BATCH_SIZE]),
labels=np.expand_dims(np.asarray(batch_labels[ : BATCH_SIZE], dtype=np.float32), 1))
# reset batch_examples and label list
batch_examples = batch_examples[BATCH_SIZE : ]
batch_labels = batch_labels[BATCH_SIZE : ]
# update batch number
batch_num += 1
# now focus on generating the next example
frames = frames[STRIDE : ]
# end of if len(frames) == NUM_FRAMES_PER_EXAMPLE
idx += 1
sys.stdout.write('\rNum of batch_examples generated: %d of %d' % gen_num_examples)
# end of while True
if len(batch_examples) > 0:
# no more frames to read
# but there are some batch_examples in the list to save
np.savez(output_dir + 'batch_{0}.npz'.format(batch_num+1),
examples=np.asarray(batch_examples),
labels=np.expand_dims(np.asarray(batch_labels, dtype=np.float32), 1))
batch_num += 1
sys.stdout.write('\rNum of batch_examples generated: %d of %d' % gen_num_examples)
# close video file
cap.release()
print('\ncompleted...')
print('number of batch_examples generated: %d' % gen_num_examples)
print('total number of batches saved to disk: %d' % batch_num)
return
if __name__ == '__main__':
    print('\ngenerating training example clips from training video split\n')
    # Three required positional arguments: input video, its label file,
    # and the directory where generated .npz batches are written.
    cli = argparse.ArgumentParser()
    cli.add_argument('video_path',
                     type=str,
                     help='path to the video from which to generate example clips')
    cli.add_argument('label_path',
                     type=str,
                     help='path to the label file for corresponding video')
    cli.add_argument('output_dir',
                     type=str,
                     help='path to store output (npz files)')
    main(cli.parse_args())
| 45.562337
| 116
| 0.499809
| 6,028
| 52,260
| 4.239217
| 0.057233
| 0.045081
| 0.024654
| 0.030484
| 0.848399
| 0.824372
| 0.81181
| 0.79017
| 0.782148
| 0.762855
| 0
| 0.046322
| 0.422503
| 52,260
| 1,146
| 117
| 45.602094
| 0.800398
| 0.342595
| 0
| 0.658929
| 1
| 0
| 0.100718
| 0
| 0
| 0
| 0
| 0
| 0.003571
| 1
| 0.001786
| false
| 0.019643
| 0.010714
| 0
| 0.014286
| 0.203571
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
f53111052a44a17a6c21dc71b34d945e42ff7003
| 71
|
py
|
Python
|
teammy/__init__.py
|
t3ammy/teammy
|
d1a70acfbfb89b0645c25ec415427ad227c1b54c
|
[
"MIT"
] | null | null | null |
teammy/__init__.py
|
t3ammy/teammy
|
d1a70acfbfb89b0645c25ec415427ad227c1b54c
|
[
"MIT"
] | null | null | null |
teammy/__init__.py
|
t3ammy/teammy
|
d1a70acfbfb89b0645c25ec415427ad227c1b54c
|
[
"MIT"
] | null | null | null |
from teammy.t3ammy import Teammy
from teammy.t3ammy import Spiderman
| 23.666667
| 36
| 0.830986
| 10
| 71
| 5.9
| 0.5
| 0.338983
| 0.542373
| 0.745763
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.032787
| 0.140845
| 71
| 2
| 37
| 35.5
| 0.934426
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
1954902129de5ce7604ff4e456ec16404d9c261e
| 170
|
py
|
Python
|
test_legal.py
|
gregdetre/unit-testing-pres
|
c28cd3a938436b18f2175bfb43d0c33820dfc7ee
|
[
"MIT"
] | null | null | null |
test_legal.py
|
gregdetre/unit-testing-pres
|
c28cd3a938436b18f2175bfb43d0c33820dfc7ee
|
[
"MIT"
] | null | null | null |
test_legal.py
|
gregdetre/unit-testing-pres
|
c28cd3a938436b18f2175bfb43d0c33820dfc7ee
|
[
"MIT"
] | null | null | null |
from isitanemail import isitanemail
# USAGE:
# $ nose2 test_legal
# $ nose2 test_legal.test_legal_basic
def test_legal_basic():
    # Happy path: a minimal well-formed address must be accepted.
    verdict = isitanemail('ab@cd.com')
    assert verdict
| 13.076923
| 37
| 0.741176
| 23
| 170
| 5.217391
| 0.565217
| 0.3
| 0.233333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.014085
| 0.164706
| 170
| 12
| 38
| 14.166667
| 0.830986
| 0.358824
| 0
| 0
| 0
| 0
| 0.088235
| 0
| 0
| 0
| 0
| 0
| 0.333333
| 1
| 0.333333
| true
| 0
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
195c498883013c22fb4bfe4592a26a22a0e6c3e2
| 4,691
|
py
|
Python
|
code/essentials.py
|
michaelsteodor/pureocr
|
5740c43c3e886ae61c38a0718d3bf98208de21f6
|
[
"MIT"
] | null | null | null |
code/essentials.py
|
michaelsteodor/pureocr
|
5740c43c3e886ae61c38a0718d3bf98208de21f6
|
[
"MIT"
] | null | null | null |
code/essentials.py
|
michaelsteodor/pureocr
|
5740c43c3e886ae61c38a0718d3bf98208de21f6
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
@authors: Mihnea S. Teodorescu & Moe Assaf, University of Groningen
"""
#### Class declaration
class Essentials():
    """Grab-bag of sorting and searching routines.

    Note: ``wham_sort``, ``bubble_sort``, ``bubble_sort_by`` and
    ``binary_search`` are written without ``self`` and are meant to be
    called through the class, e.g. ``Essentials.wham_sort(n, arr)``;
    ``wham_sort_by`` is a regular instance method.
    """

    # Sorting algorithms
    def wham_sort(length, arr):
        """Sort the first ``length`` items of ``arr`` in place, ascending.

        Strategy: repeatedly take the leading ascending run (the "prefix"),
        copy the equally long slice that follows it (the "suffix"),
        recursively sort the suffix, then merge prefix and suffix back
        into ``arr``.
        """
        # Break recursive step
        if length <= 1:
            return
        prefix = [None] * length
        suffix = [None] * length
        # Get prefix length: end of the leading ascending run
        p = 1
        while p < length:
            if arr[p - 1] > arr[p]:
                break
            p += 1
        while length - p:
            # Get prefix (the leading ascending run of arr)
            prefix[0] = arr[0]
            p = 1
            while p < length:
                if arr[p - 1] <= arr[p]:
                    prefix[p] = arr[p]
                else:
                    break
                p += 1
            # Get suffix: up to ``p`` elements immediately after the prefix
            s = 0
            while s < p:
                if p + s >= length:
                    break
                suffix[s] = arr[p + s]
                s += 1
            # Next recursive step.
            # BUG FIX: the original called ``wham_sort(s, suffix)`` unqualified,
            # which raises NameError at runtime -- names defined in a class
            # body are not in scope inside its methods, so the call must be
            # qualified with the class.
            Essentials.wham_sort(s, suffix)
            # Merge prefix and (now sorted) suffix into arr
            l = p
            r = s
            p += s
            idx = 0
            left = 0
            right = 0
            while left < l and right < r:
                if prefix[left] < suffix[right]:
                    arr[idx] = prefix[left]
                    left += 1
                else:
                    arr[idx] = suffix[right]
                    right += 1
                idx += 1
            # Drain whichever side still has elements left
            if left >= l:
                while right < r:
                    arr[idx] = suffix[right]
                    idx += 1
                    right += 1
            else:
                while left < l:
                    arr[idx] = prefix[left]
                    idx += 1
                    left += 1

    def wham_sort_by(self, length, arr, by):
        """Like :meth:`wham_sort`, but orders records by ``record[by]``."""
        # Break recursive step
        if length <= 1:
            return
        prefix = [None] * length
        suffix = [None] * length
        # Get prefix length: end of the leading run ascending in arr[i][by]
        p = 1
        while p < length:
            if arr[p - 1][by] > arr[p][by]:
                break
            p += 1
        while length - p:
            # Get prefix (leading run ascending in the ``by`` field)
            prefix[0] = arr[0]
            p = 1
            while p < length:
                if arr[p - 1][by] <= arr[p][by]:
                    prefix[p] = arr[p]
                else:
                    break
                p += 1
            # Get suffix: up to ``p`` elements immediately after the prefix
            s = 0
            while s < p:
                if p + s >= length:
                    break
                suffix[s] = arr[p + s]
                s += 1
            # Next recursive step (bound method, so ``self.`` works here)
            self.wham_sort_by(s, suffix, by)
            # Merge prefix and (now sorted) suffix into arr
            l = p
            r = s
            p += s
            idx = 0
            left = 0
            right = 0
            while left < l and right < r:
                if prefix[left][by] < suffix[right][by]:
                    arr[idx] = prefix[left]
                    left += 1
                else:
                    arr[idx] = suffix[right]
                    right += 1
                idx += 1
            # Drain whichever side still has elements left
            if left >= l:
                while right < r:
                    arr[idx] = suffix[right]
                    idx += 1
                    right += 1
            else:
                while left < l:
                    arr[idx] = prefix[left]
                    idx += 1
                    left += 1

    def bubble_sort(arr):
        """Sort ``arr`` ascending in place (classic bubble sort); returns it."""
        length = len(arr)
        for i in range(length - 1):
            for j in range(length - 1):
                if arr[j] > arr[j + 1]:
                    # Idiomatic tuple swap (was a three-line temp swap)
                    arr[j], arr[j + 1] = arr[j + 1], arr[j]
        return arr

    def bubble_sort_by(arr, by):
        """Bubble sort ``arr`` in place, ordered by ``record[by]``; returns it."""
        length = len(arr)
        for i in range(length - 1):
            for j in range(length - 1):
                if arr[j][by] > arr[j + 1][by]:
                    # Idiomatic tuple swap (was a three-line temp swap)
                    arr[j], arr[j + 1] = arr[j + 1], arr[j]
        return arr

    # Searching algorithms
    def binary_search(length, arr, value):
        """Return the index of ``value`` in sorted ``arr[:length]``, or -1.

        Maintains the invariant that the answer, if present, is at ``left``.
        """
        left = 0
        right = length
        # BUG FIX: the original had a bare ``mid`` expression here, which
        # raises NameError on every call; initialise it instead.
        mid = 0
        while left + 1 < right:
            # BUG FIX: use floor division -- ``/`` yields a float, which is
            # not a valid list index.
            mid = (left + right) // 2
            if value < arr[mid]:
                right = mid
            else:
                left = mid
        if length == 0 or arr[left] != value:
            return -1
        else:
            return left
| 27.273256
| 67
| 0.344063
| 480
| 4,691
| 3.341667
| 0.141667
| 0.014963
| 0.026185
| 0.01995
| 0.716958
| 0.716958
| 0.716958
| 0.716958
| 0.716958
| 0.716958
| 0
| 0.028015
| 0.551055
| 4,691
| 171
| 68
| 27.432749
| 0.733618
| 0.090173
| 0
| 0.804511
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.037594
| false
| 0
| 0
| 0
| 0.090226
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
5ffbe013746879d75e4960a967b5836108ff43c9
| 174
|
py
|
Python
|
boa3_test/test_sc/interop_test/stdlib/DeserializeMismatchedType.py
|
hal0x2328/neo3-boa
|
6825a3533384cb01660773050719402a9703065b
|
[
"Apache-2.0"
] | 25
|
2020-07-22T19:37:43.000Z
|
2022-03-08T03:23:55.000Z
|
boa3_test/test_sc/interop_test/stdlib/DeserializeMismatchedType.py
|
hal0x2328/neo3-boa
|
6825a3533384cb01660773050719402a9703065b
|
[
"Apache-2.0"
] | 419
|
2020-04-23T17:48:14.000Z
|
2022-03-31T13:17:45.000Z
|
boa3_test/test_sc/interop_test/stdlib/DeserializeMismatchedType.py
|
hal0x2328/neo3-boa
|
6825a3533384cb01660773050719402a9703065b
|
[
"Apache-2.0"
] | 15
|
2020-05-21T21:54:24.000Z
|
2021-11-18T06:17:24.000Z
|
from typing import Any
from boa3.builtin import public
from boa3.builtin.interop.stdlib import deserialize
@public
# Calls stdlib ``deserialize`` with an int argument; ``deserialize`` expects
# serialized bytes, so this deliberately passes a mismatched argument type
# (presumably a negative compiler-test fixture, per the file path -- confirm
# against the test suite before changing anything here).
def deserialize_arg() -> Any:
    return deserialize(1)
| 17.4
| 51
| 0.781609
| 24
| 174
| 5.625
| 0.583333
| 0.118519
| 0.222222
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.02027
| 0.149425
| 174
| 9
| 52
| 19.333333
| 0.891892
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.166667
| true
| 0
| 0.5
| 0.166667
| 0.833333
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
5ffd15b7848be5436b46991db0c1537805967b7f
| 12,604
|
py
|
Python
|
paralleldomain/decoding/sensor_frame_decoder.py
|
parallel-domain/pd-sdk
|
20e3d052a5cb612a2dd84bda7b1b5487a6a60edc
|
[
"Apache-2.0"
] | 10
|
2021-11-17T17:23:49.000Z
|
2022-03-18T09:51:23.000Z
|
paralleldomain/decoding/sensor_frame_decoder.py
|
parallel-domain/pd-sdk
|
20e3d052a5cb612a2dd84bda7b1b5487a6a60edc
|
[
"Apache-2.0"
] | 3
|
2021-12-02T17:16:20.000Z
|
2022-01-07T12:47:13.000Z
|
paralleldomain/decoding/sensor_frame_decoder.py
|
parallel-domain/pd-sdk
|
20e3d052a5cb612a2dd84bda7b1b5487a6a60edc
|
[
"Apache-2.0"
] | 2
|
2022-03-09T07:03:54.000Z
|
2022-03-23T15:53:48.000Z
|
import abc
from datetime import datetime
from typing import Any, Dict, Generic, List, Optional, Tuple, TypeVar, Union
import numpy as np
from paralleldomain.decoding.common import DecoderSettings, LazyLoadPropertyMixin, create_cache_key
from paralleldomain.model.annotation import AnnotationType
from paralleldomain.model.sensor import SensorExtrinsic, SensorIntrinsic, SensorPose
from paralleldomain.model.type_aliases import AnnotationIdentifier, FrameId, SceneName, SensorName
# Generic payload type used by annotation decoding (see get_annotations).
T = TypeVar("T")
# Frame timestamp type: either a concrete ``datetime`` or ``None`` for
# datasets without per-frame timestamps.
TDateTime = TypeVar("TDateTime", bound=Union[None, datetime])
class SensorFrameDecoder(Generic[TDateTime], LazyLoadPropertyMixin):
    """Base decoder for one sensor frame of a scene.

    Each public ``get_*`` accessor builds a unique cache key and routes the
    read through ``self.lazy_load_cache`` (provided by
    ``LazyLoadPropertyMixin`` -- presumably a shared lazy-loading cache;
    confirm in ``paralleldomain.decoding.common``), delegating the actual
    decoding to the abstract ``_decode_*`` hooks that format-specific
    subclasses implement.
    """

    def __init__(self, dataset_name: str, scene_name: SceneName, settings: DecoderSettings):
        # settings controls optional caching (e.g. ``cache_annotations`` below).
        self.settings = settings
        self.scene_name = scene_name
        self.dataset_name = dataset_name

    def get_unique_sensor_frame_id(
        self, sensor_name: SensorName, frame_id: FrameId, extra: Optional[str] = None
    ) -> str:
        """Build a cache key unique to (dataset, scene, frame, sensor, extra)."""
        return create_cache_key(
            dataset_name=self.dataset_name,
            scene_name=self.scene_name,
            frame_id=frame_id,
            sensor_name=sensor_name,
            extra=extra,
        )

    def get_extrinsic(self, sensor_name: SensorName, frame_id: FrameId) -> SensorExtrinsic:
        """Return the sensor extrinsic for this frame, cached unconditionally."""
        _unique_cache_key = self.get_unique_sensor_frame_id(
            sensor_name=sensor_name, frame_id=frame_id, extra="extrinsic"
        )
        return self.lazy_load_cache.get_item(
            key=_unique_cache_key,
            loader=lambda: self._decode_extrinsic(sensor_name=sensor_name, frame_id=frame_id),
        )

    def get_sensor_pose(self, sensor_name: SensorName, frame_id: FrameId) -> SensorPose:
        """Return the sensor pose for this frame, cached unconditionally."""
        _unique_cache_key = self.get_unique_sensor_frame_id(
            sensor_name=sensor_name, frame_id=frame_id, extra="sensor_pose"
        )
        return self.lazy_load_cache.get_item(
            key=_unique_cache_key,
            loader=lambda: self._decode_sensor_pose(sensor_name=sensor_name, frame_id=frame_id),
        )

    def get_annotations(
        self, sensor_name: SensorName, frame_id: FrameId, identifier: AnnotationIdentifier, annotation_type: T
    ) -> T:
        """Decode annotations for this frame.

        Unlike the pose/extrinsic accessors, caching here is opt-in via
        ``settings.cache_annotations``; otherwise every call decodes afresh.
        """
        if self.settings.cache_annotations:
            _unique_cache_key = self.get_unique_sensor_frame_id(
                sensor_name=sensor_name, frame_id=frame_id, extra=f"-annotations-{identifier}"
            )
            return self.lazy_load_cache.get_item(
                key=_unique_cache_key,
                loader=lambda: self._decode_annotations(
                    sensor_name=sensor_name, frame_id=frame_id, identifier=identifier, annotation_type=annotation_type
                ),
            )
        else:
            return self._decode_annotations(
                sensor_name=sensor_name, frame_id=frame_id, identifier=identifier, annotation_type=annotation_type
            )

    def get_available_annotation_types(
        self, sensor_name: SensorName, frame_id: FrameId
    ) -> Dict[AnnotationType, AnnotationIdentifier]:
        """Return the annotation types available for this frame (cached)."""
        _unique_cache_key = self.get_unique_sensor_frame_id(
            sensor_name=sensor_name, frame_id=frame_id, extra="-available_annotation_types"
        )
        return self.lazy_load_cache.get_item(
            key=_unique_cache_key,
            loader=lambda: self._decode_available_annotation_types(sensor_name=sensor_name, frame_id=frame_id),
        )

    @abc.abstractmethod
    def _decode_available_annotation_types(
        self, sensor_name: SensorName, frame_id: FrameId
    ) -> Dict[AnnotationType, AnnotationIdentifier]:
        pass

    def get_metadata(self, sensor_name: SensorName, frame_id: FrameId) -> Dict[str, Any]:
        """Return free-form frame metadata (cached)."""
        _unique_cache_key = self.get_unique_sensor_frame_id(
            sensor_name=sensor_name, frame_id=frame_id, extra="-metadata"
        )
        return self.lazy_load_cache.get_item(
            key=_unique_cache_key,
            loader=lambda: self._decode_metadata(sensor_name=sensor_name, frame_id=frame_id),
        )

    @abc.abstractmethod
    def _decode_metadata(self, sensor_name: SensorName, frame_id: FrameId) -> Dict[str, Any]:
        pass

    def get_date_time(self, sensor_name: SensorName, frame_id: FrameId) -> TDateTime:
        """Return the frame timestamp; the only accessor that never caches."""
        # if needed add caching here
        return self._decode_date_time(sensor_name=sensor_name, frame_id=frame_id)

    @abc.abstractmethod
    def _decode_date_time(self, sensor_name: SensorName, frame_id: FrameId) -> TDateTime:
        pass

    @abc.abstractmethod
    def _decode_extrinsic(self, sensor_name: SensorName, frame_id: FrameId) -> SensorExtrinsic:
        pass

    @abc.abstractmethod
    def _decode_sensor_pose(self, sensor_name: SensorName, frame_id: FrameId) -> SensorPose:
        pass

    @abc.abstractmethod
    def _decode_annotations(
        self, sensor_name: SensorName, frame_id: FrameId, identifier: AnnotationIdentifier, annotation_type: T
    ) -> T:
        pass
class CameraSensorFrameDecoder(SensorFrameDecoder[TDateTime]):
    """Sensor-frame decoder specialised for camera sensors.

    Adds intrinsic and image accessors on top of the generic
    :class:`SensorFrameDecoder`; image reads go through ``lazy_load_cache``
    only when ``settings.cache_images`` is enabled.
    """

    def get_intrinsic(self, sensor_name: SensorName, frame_id: FrameId) -> SensorIntrinsic:
        """Return the camera intrinsic for this frame (always cached)."""
        cache_key = self.get_unique_sensor_frame_id(
            sensor_name=sensor_name, frame_id=frame_id, extra="intrinsic"
        )
        loader = lambda: self._decode_intrinsic(sensor_name=sensor_name, frame_id=frame_id)
        return self.lazy_load_cache.get_item(key=cache_key, loader=loader)

    def get_image_dimensions(self, sensor_name: SensorName, frame_id: FrameId) -> Tuple[int, int, int]:
        """Return the image dimensions, cached when image caching is enabled."""
        # Uncached fast path first (guard clause instead of if/else).
        if not self.settings.cache_images:
            return self._decode_image_dimensions(sensor_name=sensor_name, frame_id=frame_id)
        cache_key = self.get_unique_sensor_frame_id(
            sensor_name=sensor_name, frame_id=frame_id, extra="image-dims"
        )
        loader = lambda: self._decode_image_dimensions(sensor_name=sensor_name, frame_id=frame_id)
        return self.lazy_load_cache.get_item(key=cache_key, loader=loader)

    def get_image_rgba(self, sensor_name: SensorName, frame_id: FrameId) -> np.ndarray:
        """Return the decoded RGBA image, cached when image caching is enabled."""
        # Uncached fast path first (guard clause instead of if/else).
        if not self.settings.cache_images:
            return self._decode_image_rgba(sensor_name=sensor_name, frame_id=frame_id)
        cache_key = self.get_unique_sensor_frame_id(
            sensor_name=sensor_name, frame_id=frame_id, extra="image_rgba"
        )
        loader = lambda: self._decode_image_rgba(sensor_name=sensor_name, frame_id=frame_id)
        return self.lazy_load_cache.get_item(key=cache_key, loader=loader)

    @abc.abstractmethod
    def _decode_intrinsic(self, sensor_name: SensorName, frame_id: FrameId) -> SensorIntrinsic:
        pass

    @abc.abstractmethod
    def _decode_image_dimensions(self, sensor_name: SensorName, frame_id: FrameId) -> Tuple[int, int, int]:
        pass

    @abc.abstractmethod
    def _decode_image_rgba(self, sensor_name: SensorName, frame_id: FrameId) -> np.ndarray:
        pass
class LidarSensorFrameDecoder(SensorFrameDecoder[TDateTime]):
    """Decoder interface for LiDAR sensor frames.

    Each public ``get_point_cloud_*`` accessor delegates to the matching
    abstract ``_decode_point_cloud_*`` hook. When
    ``settings.cache_point_clouds`` is enabled, the decoded result is
    memoized in ``lazy_load_cache`` under a key unique to the
    (sensor, frame, attribute) triple; otherwise the hook is invoked
    directly on every call.
    """

    def _get_cached_cloud_attribute(self, sensor_name: SensorName, frame_id: FrameId, extra: str, loader):
        """Return ``loader()``, memoized when point-cloud caching is enabled.

        ``extra`` disambiguates the cache key between the different point
        cloud attributes of the same sensor frame. ``loader`` is a zero-arg
        callable producing the decoded value.
        """
        if not self.settings.cache_point_clouds:
            return loader()
        _unique_cache_key = self.get_unique_sensor_frame_id(
            sensor_name=sensor_name, frame_id=frame_id, extra=extra
        )
        return self.lazy_load_cache.get_item(key=_unique_cache_key, loader=loader)

    def get_point_cloud_size(self, sensor_name: SensorName, frame_id: FrameId) -> int:
        """Return the point cloud size (point count) for the given frame."""
        return self._get_cached_cloud_attribute(
            sensor_name=sensor_name,
            frame_id=frame_id,
            extra="point_cloud_size",
            loader=lambda: self._decode_point_cloud_size(sensor_name=sensor_name, frame_id=frame_id),
        )

    def get_point_cloud_xyz(self, sensor_name: SensorName, frame_id: FrameId) -> Optional[np.ndarray]:
        """Return the point cloud XYZ coordinates, or ``None`` if unavailable."""
        return self._get_cached_cloud_attribute(
            sensor_name=sensor_name,
            frame_id=frame_id,
            extra="point_cloud_xyz",
            loader=lambda: self._decode_point_cloud_xyz(sensor_name=sensor_name, frame_id=frame_id),
        )

    def get_point_cloud_rgb(self, sensor_name: SensorName, frame_id: FrameId) -> Optional[np.ndarray]:
        """Return the per-point RGB values, or ``None`` if unavailable."""
        return self._get_cached_cloud_attribute(
            sensor_name=sensor_name,
            frame_id=frame_id,
            extra="point_cloud_rgb",
            loader=lambda: self._decode_point_cloud_rgb(sensor_name=sensor_name, frame_id=frame_id),
        )

    def get_point_cloud_intensity(self, sensor_name: SensorName, frame_id: FrameId) -> Optional[np.ndarray]:
        """Return the per-point intensity values, or ``None`` if unavailable."""
        return self._get_cached_cloud_attribute(
            sensor_name=sensor_name,
            frame_id=frame_id,
            extra="point_cloud_intensity",
            loader=lambda: self._decode_point_cloud_intensity(sensor_name=sensor_name, frame_id=frame_id),
        )

    def get_point_cloud_timestamp(self, sensor_name: SensorName, frame_id: FrameId) -> Optional[np.ndarray]:
        """Return the per-point timestamps, or ``None`` if unavailable."""
        return self._get_cached_cloud_attribute(
            sensor_name=sensor_name,
            frame_id=frame_id,
            extra="point_cloud_timestamp",
            loader=lambda: self._decode_point_cloud_timestamp(sensor_name=sensor_name, frame_id=frame_id),
        )

    def get_point_cloud_ring_index(self, sensor_name: SensorName, frame_id: FrameId) -> Optional[np.ndarray]:
        """Return the per-point ring indices, or ``None`` if unavailable."""
        return self._get_cached_cloud_attribute(
            sensor_name=sensor_name,
            frame_id=frame_id,
            extra="point_cloud_ring_index",
            loader=lambda: self._decode_point_cloud_ring_index(sensor_name=sensor_name, frame_id=frame_id),
        )

    def get_point_cloud_ray_type(self, sensor_name: SensorName, frame_id: FrameId) -> Optional[np.ndarray]:
        """Return the per-point ray types, or ``None`` if unavailable."""
        return self._get_cached_cloud_attribute(
            sensor_name=sensor_name,
            frame_id=frame_id,
            extra="point_cloud_ray_type",
            loader=lambda: self._decode_point_cloud_ray_type(sensor_name=sensor_name, frame_id=frame_id),
        )

    @abc.abstractmethod
    def _decode_point_cloud_size(self, sensor_name: SensorName, frame_id: FrameId) -> int:
        """Hook: decode the point cloud size for (sensor_name, frame_id)."""
        pass

    @abc.abstractmethod
    def _decode_point_cloud_xyz(self, sensor_name: SensorName, frame_id: FrameId) -> Optional[np.ndarray]:
        """Hook: decode the XYZ coordinates for (sensor_name, frame_id)."""
        pass

    @abc.abstractmethod
    def _decode_point_cloud_rgb(self, sensor_name: SensorName, frame_id: FrameId) -> Optional[np.ndarray]:
        """Hook: decode the RGB values for (sensor_name, frame_id)."""
        pass

    @abc.abstractmethod
    def _decode_point_cloud_intensity(self, sensor_name: SensorName, frame_id: FrameId) -> Optional[np.ndarray]:
        """Hook: decode the intensity values for (sensor_name, frame_id)."""
        pass

    @abc.abstractmethod
    def _decode_point_cloud_timestamp(self, sensor_name: SensorName, frame_id: FrameId) -> Optional[np.ndarray]:
        """Hook: decode the per-point timestamps for (sensor_name, frame_id)."""
        pass

    @abc.abstractmethod
    def _decode_point_cloud_ring_index(self, sensor_name: SensorName, frame_id: FrameId) -> Optional[np.ndarray]:
        """Hook: decode the ring indices for (sensor_name, frame_id)."""
        pass

    @abc.abstractmethod
    def _decode_point_cloud_ray_type(self, sensor_name: SensorName, frame_id: FrameId) -> Optional[np.ndarray]:
        """Hook: decode the ray types for (sensor_name, frame_id)."""
        pass
| 44.854093
| 118
| 0.692241
| 1,562
| 12,604
| 5.151088
| 0.065301
| 0.11571
| 0.05742
| 0.08352
| 0.85036
| 0.835073
| 0.824261
| 0.815809
| 0.815809
| 0.784986
| 0
| 0
| 0.228896
| 12,604
| 280
| 119
| 45.014286
| 0.827863
| 0.002063
| 0
| 0.443515
| 0
| 0
| 0.019879
| 0.009224
| 0
| 0
| 0
| 0
| 0
| 1
| 0.142259
| false
| 0.066946
| 0.033473
| 0.008368
| 0.301255
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
2719b53e0ea9fe0e5a0dd7365dd180021266ecf7
| 161
|
py
|
Python
|
icevision/models/torchvision_models/__init__.py
|
jahnavi0105/icevision
|
24a379f1417032934e41e4a31dfab8562a259344
|
[
"Apache-2.0"
] | null | null | null |
icevision/models/torchvision_models/__init__.py
|
jahnavi0105/icevision
|
24a379f1417032934e41e4a31dfab8562a259344
|
[
"Apache-2.0"
] | null | null | null |
icevision/models/torchvision_models/__init__.py
|
jahnavi0105/icevision
|
24a379f1417032934e41e4a31dfab8562a259344
|
[
"Apache-2.0"
] | null | null | null |
import icevision.models.torchvision_models.faster_rcnn
import icevision.models.torchvision_models.mask_rcnn
import icevision.models.torchvision_models.retinanet
| 40.25
| 54
| 0.906832
| 20
| 161
| 7.05
| 0.4
| 0.319149
| 0.446809
| 0.680851
| 0.865248
| 0.595745
| 0
| 0
| 0
| 0
| 0
| 0
| 0.037267
| 161
| 3
| 55
| 53.666667
| 0.909677
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 9
|
272151a9e3d455c016a50787dbadce83f60d66bc
| 4,720
|
py
|
Python
|
archive/tests.py
|
Hayashi-Yudai/scan_system
|
c7fe0326fc12335d312ac0d28da69565813c843a
|
[
"MIT"
] | null | null | null |
archive/tests.py
|
Hayashi-Yudai/scan_system
|
c7fe0326fc12335d312ac0d28da69565813c843a
|
[
"MIT"
] | 5
|
2021-12-23T10:33:19.000Z
|
2022-03-04T08:33:51.000Z
|
archive/tests.py
|
Hayashi-Yudai/scan_system
|
c7fe0326fc12335d312ac0d28da69565813c843a
|
[
"MIT"
] | null | null | null |
from django.test import TestCase, Client
import json
from core.models import TDSData
class TestArchive(TestCase):
    """End-to-end tests for the archive endpoints via the Django test client."""

    def setUp(self):
        self.client = Client()
        TDSData.objects.create(
            start_position=0,
            end_position=10,
            step=5,
            lockin_time=300,
            position_data="1,2,3",
            intensity_data="1,4,9",
            file_name="",
        )

    def test_get_archive_data(self):
        # Raw data (fft disabled) echoes the stored samples back.
        resp = self.client.post(
            "/archive/get_archive_data/", {"pk": 1, "fft": "false"}
        )
        self.assertEqual(resp.status_code, 200)
        payload = json.loads(resp.content)
        self.assertEqual(payload["x"], [1, 2, 3])
        self.assertEqual(payload["y"], [1, 4, 9])

        # With fft enabled, the result has 4096 bins with zero imaginary parts.
        resp = self.client.post(
            "/archive/get_archive_data/", {"pk": 1, "fft": "true"}
        )
        self.assertEqual(resp.status_code, 200)
        payload = json.loads(resp.content)
        self.assertEqual(len(payload["x"]), 4096)
        self.assertEqual(len(payload["y"]), 4096)
        for value in payload["y"]:
            self.assertEqual(value.imag, 0)

        # Resource not found
        resp = self.client.post(
            "/archive/get_archive_data/", {"pk": 100, "fft": "true"}
        )
        self.assertEqual(resp.status_code, 404)

        # Bad requests: every malformed payload yields 400 with the same body.
        bad_payloads = (
            {"pk": 1},
            {"pk": "bad request"},
            {},
            {"bad": "request"},
        )
        for params in bad_payloads:
            resp = self.client.post("/archive/get_archive_data/", params)
            self.assertEqual(resp.status_code, 400)
            self.assertEqual(resp.content, b"Invalid parameter(s)")

    def test_calc_fft(self):
        # fft=True: 4096 real-valued bins per requested id.
        resp = self.client.post(
            "/archive/calc-fft/",
            content_type="application/json",
            data={"ids": [1], "fft": True},
        )
        self.assertEqual(resp.status_code, 200)
        payload = json.loads(resp.content)
        self.assertEqual(len(payload["xs"][0]), 4096)
        self.assertEqual(len(payload["ys"][0]), 4096)
        for value in payload["ys"][0]:
            self.assertEqual(value.imag, 0)

        # fft=False: raw stored samples come back untouched.
        resp = self.client.post(
            "/archive/calc-fft/",
            content_type="application/json",
            data={"ids": [1], "fft": False},
        )
        self.assertEqual(resp.status_code, 200)
        payload = json.loads(resp.content)
        self.assertEqual(payload["xs"][0], [1, 2, 3])
        self.assertEqual(payload["ys"][0], [1, 4, 9])

        # Resource not found: any unknown id in the list fails the whole request.
        resp = self.client.post(
            "/archive/calc-fft/",
            content_type="application/json",
            data={"ids": [1, 100], "fft": False},
        )
        self.assertEqual(resp.status_code, 404)

        # Bad requests: each malformed JSON body is rejected with 400.
        bad_bodies = (
            {"ids": 1, "fft": False},
            {"ids": [1]},
            {"fft": True},
            {},
            {"ids": ["1"]},
            {"ids": [1], "fft": "bad"},
        )
        for body in bad_bodies:
            resp = self.client.post(
                "/archive/calc-fft/",
                content_type="application/json",
                data=body,
            )
            self.assertEqual(resp.status_code, 400)
| 33.714286
| 88
| 0.574576
| 522
| 4,720
| 5.065134
| 0.149425
| 0.170197
| 0.208775
| 0.133132
| 0.876324
| 0.876324
| 0.812784
| 0.724281
| 0.724281
| 0.706505
| 0
| 0.032961
| 0.280085
| 4,720
| 139
| 89
| 33.956835
| 0.745144
| 0.012924
| 0
| 0.504505
| 0
| 0
| 0.148045
| 0.039106
| 0
| 0
| 0
| 0
| 0.27027
| 1
| 0.027027
| false
| 0
| 0.027027
| 0
| 0.063063
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
276d93baf50906ebbf8b6416d75ff60613e38c78
| 70,979
|
py
|
Python
|
napalm_yang/models/openconfig/network_instances/network_instance/protocols/protocol/ospfv2/areas/area/lsdb/lsa_types/lsa_type/lsas/lsa/opaque_lsa/extended_prefix/tlvs/tlv/prefix_sid/state/__init__.py
|
ckishimo/napalm-yang
|
8f2bd907bd3afcde3c2f8e985192de74748baf6c
|
[
"Apache-2.0"
] | 64
|
2016-10-20T15:47:18.000Z
|
2021-11-11T11:57:32.000Z
|
napalm_yang/models/openconfig/network_instances/network_instance/protocols/protocol/ospfv2/areas/area/lsdb/lsa_types/lsa_type/lsas/lsa/opaque_lsa/extended_prefix/tlvs/tlv/prefix_sid/state/__init__.py
|
ckishimo/napalm-yang
|
8f2bd907bd3afcde3c2f8e985192de74748baf6c
|
[
"Apache-2.0"
] | 126
|
2016-10-05T10:36:14.000Z
|
2019-05-15T08:43:23.000Z
|
napalm_yang/models/openconfig/network_instances/network_instance/protocols/protocol/ospfv2/areas/area/lsdb/lsa_types/lsa_type/lsas/lsa/opaque_lsa/extended_prefix/tlvs/tlv/prefix_sid/state/__init__.py
|
ckishimo/napalm-yang
|
8f2bd907bd3afcde3c2f8e985192de74748baf6c
|
[
"Apache-2.0"
] | 63
|
2016-11-07T15:23:08.000Z
|
2021-09-22T14:41:16.000Z
|
# -*- coding: utf-8 -*-
from operator import attrgetter
from pyangbind.lib.yangtypes import RestrictedPrecisionDecimalType
from pyangbind.lib.yangtypes import RestrictedClassType
from pyangbind.lib.yangtypes import TypedListType
from pyangbind.lib.yangtypes import YANGBool
from pyangbind.lib.yangtypes import YANGListType
from pyangbind.lib.yangtypes import YANGDynClass
from pyangbind.lib.yangtypes import ReferenceType
from pyangbind.lib.base import PybindBase
from collections import OrderedDict
from decimal import Decimal
from bitarray import bitarray
import six
# PY3 support of some PY2 keywords (needs improved)
if six.PY3:
    import builtins as __builtin__

    # PY2's ``long`` type does not exist on PY3; alias it to ``int`` so the
    # generated leaf definitions below (e.g. base_type=long for uint32) work.
    long = int
elif six.PY2:
    import __builtin__
class state(PybindBase):
    """
    This class was auto-generated by the PythonClass plugin for PYANG
    from YANG module openconfig-network-instance - based on the path /network-instances/network-instance/protocols/protocol/ospfv2/areas/area/lsdb/lsa-types/lsa-type/lsas/lsa/opaque-lsa/extended-prefix/tlvs/tlv/prefix-sid/state. Each member element of
    the container is represented as a class variable - with a specific
    YANG type.

    YANG Description: State parameters relating to the Prefix SID sub-TLV of the
    extended prefix LSA
    """

    # Restrict instances to the YANG leaves plus pyangbind bookkeeping
    # attributes (no per-instance __dict__).
    __slots__ = (
        "_path_helper",
        "_extmethods",
        "__no_php",
        "__mapping_server",
        "__explicit_null",
        "__sid_value_type",
        "__sid_scope",
        "__multi_topology_identifier",
        "__algorithm",
        "__sid_value",
    )

    # Container metadata emitted by the pyangbind generator.
    _yang_name = "state"
    _pybind_generated_by = "container"
def __init__(self, *args, **kwargs):
    """Initialize every YANG leaf of this container to its default wrapper.

    Optionally accepts a single positional object whose attributes mirror
    this container's elements; matching changed attributes are copied in
    via the generated ``_set_*`` methods (``load`` kwarg is forwarded).
    """
    self._path_helper = False
    self._extmethods = False
    # Each leaf is wrapped in a YANGDynClass carrying its YANG type,
    # restriction, namespace and default metadata.
    self.__no_php = YANGDynClass(
        base=YANGBool,
        default=YANGBool("false"),
        is_leaf=True,
        yang_name="no-php",
        parent=self,
        path_helper=self._path_helper,
        extmethods=self._extmethods,
        register_paths=True,
        namespace="http://openconfig.net/yang/network-instance",
        defining_module="openconfig-network-instance",
        yang_type="boolean",
        is_config=False,
    )
    self.__mapping_server = YANGDynClass(
        base=YANGBool,
        default=YANGBool("false"),
        is_leaf=True,
        yang_name="mapping-server",
        parent=self,
        path_helper=self._path_helper,
        extmethods=self._extmethods,
        register_paths=True,
        namespace="http://openconfig.net/yang/network-instance",
        defining_module="openconfig-network-instance",
        yang_type="boolean",
        is_config=False,
    )
    self.__explicit_null = YANGDynClass(
        base=YANGBool,
        default=YANGBool("false"),
        is_leaf=True,
        yang_name="explicit-null",
        parent=self,
        path_helper=self._path_helper,
        extmethods=self._extmethods,
        register_paths=True,
        namespace="http://openconfig.net/yang/network-instance",
        defining_module="openconfig-network-instance",
        yang_type="boolean",
        is_config=False,
    )
    self.__sid_value_type = YANGDynClass(
        base=RestrictedClassType(
            base_type=six.text_type,
            restriction_type="dict_key",
            restriction_arg={"ABSOLUTE": {}, "INDEX": {}},
        ),
        is_leaf=True,
        yang_name="sid-value-type",
        parent=self,
        path_helper=self._path_helper,
        extmethods=self._extmethods,
        register_paths=True,
        namespace="http://openconfig.net/yang/network-instance",
        defining_module="openconfig-network-instance",
        yang_type="enumeration",
        is_config=False,
    )
    self.__sid_scope = YANGDynClass(
        base=RestrictedClassType(
            base_type=six.text_type,
            restriction_type="dict_key",
            restriction_arg={"LOCAL": {}, "GLOBAL": {}},
        ),
        is_leaf=True,
        yang_name="sid-scope",
        parent=self,
        path_helper=self._path_helper,
        extmethods=self._extmethods,
        register_paths=True,
        namespace="http://openconfig.net/yang/network-instance",
        defining_module="openconfig-network-instance",
        yang_type="enumeration",
        is_config=False,
    )
    self.__multi_topology_identifier = YANGDynClass(
        base=RestrictedClassType(
            base_type=int, restriction_dict={"range": ["0..255"]}, int_size=8
        ),
        is_leaf=True,
        yang_name="multi-topology-identifier",
        parent=self,
        path_helper=self._path_helper,
        extmethods=self._extmethods,
        register_paths=True,
        namespace="http://openconfig.net/yang/network-instance",
        defining_module="openconfig-network-instance",
        yang_type="uint8",
        is_config=False,
    )
    self.__algorithm = YANGDynClass(
        base=RestrictedClassType(
            base_type=int, restriction_dict={"range": ["0..255"]}, int_size=8
        ),
        is_leaf=True,
        yang_name="algorithm",
        parent=self,
        path_helper=self._path_helper,
        extmethods=self._extmethods,
        register_paths=True,
        namespace="http://openconfig.net/yang/network-instance",
        defining_module="openconfig-network-instance",
        yang_type="uint8",
        is_config=False,
    )
    self.__sid_value = YANGDynClass(
        base=RestrictedClassType(
            base_type=long,
            restriction_dict={"range": ["0..4294967295"]},
            int_size=32,
        ),
        is_leaf=True,
        yang_name="sid-value",
        parent=self,
        path_helper=self._path_helper,
        extmethods=self._extmethods,
        register_paths=True,
        namespace="http://openconfig.net/yang/network-instance",
        defining_module="openconfig-network-instance",
        yang_type="uint32",
        is_config=False,
    )

    # Copy-constructor path: a single positional argument is treated as a
    # compatible container; only elements reported as changed are copied.
    load = kwargs.pop("load", None)
    if args:
        if len(args) > 1:
            raise TypeError("cannot create a YANG container with >1 argument")
        all_attr = True
        for e in self._pyangbind_elements:
            if not hasattr(args[0], e):
                all_attr = False
                break
        if not all_attr:
            raise ValueError("Supplied object did not have the correct attributes")
        for e in self._pyangbind_elements:
            nobj = getattr(args[0], e)
            if nobj._changed() is False:
                continue
            setmethod = getattr(self, "_set_%s" % e)
            if load is None:
                setmethod(getattr(args[0], e))
            else:
                setmethod(getattr(args[0], e), load=load)
def _path(self):
    """Return this container's YANG path as a list of segments.

    Delegates to the parent's path when one is attached; otherwise falls
    back to the static, generator-known absolute path.
    """
    if hasattr(self, "_parent"):
        return self._parent._path() + [self._yang_name]
    return [
        "network-instances",
        "network-instance",
        "protocols",
        "protocol",
        "ospfv2",
        "areas",
        "area",
        "lsdb",
        "lsa-types",
        "lsa-type",
        "lsas",
        "lsa",
        "opaque-lsa",
        "extended-prefix",
        "tlvs",
        "tlv",
        "prefix-sid",
        "state",
    ]
def _get_no_php(self):
    """
    Getter method for no_php, mapped from YANG variable /network_instances/network_instance/protocols/protocol/ospfv2/areas/area/lsdb/lsa_types/lsa_type/lsas/lsa/opaque_lsa/extended_prefix/tlvs/tlv/prefix_sid/state/no_php (boolean)

    YANG Description: If this leaf is set the advertising system has indicated that the
    prefix SID must not be popped before delivering packets to it
    """
    # Returns the YANGDynClass wrapper, not a plain bool.
    return self.__no_php
def _set_no_php(self, v, load=False):
    """
    Setter method for no_php, mapped from YANG variable /network_instances/network_instance/protocols/protocol/ospfv2/areas/area/lsdb/lsa_types/lsa_type/lsas/lsa/opaque_lsa/extended_prefix/tlvs/tlv/prefix_sid/state/no_php (boolean)

    If this variable is read-only (config: false) in the
    source YANG file, then _set_no_php is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_no_php() directly.

    YANG Description: If this leaf is set the advertising system has indicated that the
    prefix SID must not be popped before delivering packets to it
    """
    # Presumably unwraps an already-wrapped leaf to its native value — TODO confirm.
    if hasattr(v, "_utype"):
        v = v._utype(v)
    try:
        # Re-wrap in the leaf's YANGDynClass; a TypeError/ValueError here means
        # v is not compatible with the YANG boolean type.
        t = YANGDynClass(
            v,
            base=YANGBool,
            default=YANGBool("false"),
            is_leaf=True,
            yang_name="no-php",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            namespace="http://openconfig.net/yang/network-instance",
            defining_module="openconfig-network-instance",
            yang_type="boolean",
            is_config=False,
        )
    except (TypeError, ValueError):
        raise ValueError(
            {
                "error-string": """no_php must be of a type compatible with boolean""",
                "defined-type": "boolean",
                "generated-type": """YANGDynClass(base=YANGBool, default=YANGBool("false"), is_leaf=True, yang_name="no-php", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='boolean', is_config=False)""",
            }
        )
    self.__no_php = t
    # Notify the change hook if one is installed.
    if hasattr(self, "_set"):
        self._set()
def _unset_no_php(self):
    """Reset the ``no_php`` leaf to its schema default (YANGBool("false"))."""
    self.__no_php = YANGDynClass(
        base=YANGBool,
        default=YANGBool("false"),
        is_leaf=True,
        yang_name="no-php",
        parent=self,
        path_helper=self._path_helper,
        extmethods=self._extmethods,
        register_paths=True,
        namespace="http://openconfig.net/yang/network-instance",
        defining_module="openconfig-network-instance",
        yang_type="boolean",
        is_config=False,
    )
def _get_mapping_server(self):
    """
    Getter method for mapping_server, mapped from YANG variable /network_instances/network_instance/protocols/protocol/ospfv2/areas/area/lsdb/lsa_types/lsa_type/lsas/lsa/opaque_lsa/extended_prefix/tlvs/tlv/prefix_sid/state/mapping_server (boolean)

    YANG Description: If this leaf is set the SID was advertised by a Segment Routing
    mapping server
    """
    # Returns the YANGDynClass wrapper, not a plain bool.
    return self.__mapping_server
def _set_mapping_server(self, v, load=False):
    """
    Setter method for mapping_server, mapped from YANG variable /network_instances/network_instance/protocols/protocol/ospfv2/areas/area/lsdb/lsa_types/lsa_type/lsas/lsa/opaque_lsa/extended_prefix/tlvs/tlv/prefix_sid/state/mapping_server (boolean)

    If this variable is read-only (config: false) in the
    source YANG file, then _set_mapping_server is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_mapping_server() directly.

    YANG Description: If this leaf is set the SID was advertised by a Segment Routing
    mapping server
    """
    # Presumably unwraps an already-wrapped leaf to its native value — TODO confirm.
    if hasattr(v, "_utype"):
        v = v._utype(v)
    try:
        # Re-wrap in the leaf's YANGDynClass; a TypeError/ValueError here means
        # v is not compatible with the YANG boolean type.
        t = YANGDynClass(
            v,
            base=YANGBool,
            default=YANGBool("false"),
            is_leaf=True,
            yang_name="mapping-server",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            namespace="http://openconfig.net/yang/network-instance",
            defining_module="openconfig-network-instance",
            yang_type="boolean",
            is_config=False,
        )
    except (TypeError, ValueError):
        raise ValueError(
            {
                "error-string": """mapping_server must be of a type compatible with boolean""",
                "defined-type": "boolean",
                "generated-type": """YANGDynClass(base=YANGBool, default=YANGBool("false"), is_leaf=True, yang_name="mapping-server", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='boolean', is_config=False)""",
            }
        )
    self.__mapping_server = t
    # Notify the change hook if one is installed.
    if hasattr(self, "_set"):
        self._set()
def _unset_mapping_server(self):
    """Reset the ``mapping_server`` leaf to its schema default (YANGBool("false"))."""
    self.__mapping_server = YANGDynClass(
        base=YANGBool,
        default=YANGBool("false"),
        is_leaf=True,
        yang_name="mapping-server",
        parent=self,
        path_helper=self._path_helper,
        extmethods=self._extmethods,
        register_paths=True,
        namespace="http://openconfig.net/yang/network-instance",
        defining_module="openconfig-network-instance",
        yang_type="boolean",
        is_config=False,
    )
def _get_explicit_null(self):
    """
    Getter method for explicit_null, mapped from YANG variable /network_instances/network_instance/protocols/protocol/ospfv2/areas/area/lsdb/lsa_types/lsa_type/lsas/lsa/opaque_lsa/extended_prefix/tlvs/tlv/prefix_sid/state/explicit_null (boolean)

    YANG Description: If this leaf is set, the advertising system has requested that the
    prefix SID value should be replaced with the explicit null label
    value
    """
    # Returns the YANGDynClass wrapper, not a plain bool.
    return self.__explicit_null
def _set_explicit_null(self, v, load=False):
    """
    Setter method for explicit_null, mapped from YANG variable /network_instances/network_instance/protocols/protocol/ospfv2/areas/area/lsdb/lsa_types/lsa_type/lsas/lsa/opaque_lsa/extended_prefix/tlvs/tlv/prefix_sid/state/explicit_null (boolean)

    If this variable is read-only (config: false) in the
    source YANG file, then _set_explicit_null is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_explicit_null() directly.

    YANG Description: If this leaf is set, the advertising system has requested that the
    prefix SID value should be replaced with the explicit null label
    value
    """
    # Presumably unwraps an already-wrapped leaf to its native value — TODO confirm.
    if hasattr(v, "_utype"):
        v = v._utype(v)
    try:
        # Re-wrap in the leaf's YANGDynClass; a TypeError/ValueError here means
        # v is not compatible with the YANG boolean type.
        t = YANGDynClass(
            v,
            base=YANGBool,
            default=YANGBool("false"),
            is_leaf=True,
            yang_name="explicit-null",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            namespace="http://openconfig.net/yang/network-instance",
            defining_module="openconfig-network-instance",
            yang_type="boolean",
            is_config=False,
        )
    except (TypeError, ValueError):
        raise ValueError(
            {
                "error-string": """explicit_null must be of a type compatible with boolean""",
                "defined-type": "boolean",
                "generated-type": """YANGDynClass(base=YANGBool, default=YANGBool("false"), is_leaf=True, yang_name="explicit-null", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='boolean', is_config=False)""",
            }
        )
    self.__explicit_null = t
    # Notify the change hook if one is installed.
    if hasattr(self, "_set"):
        self._set()
def _unset_explicit_null(self):
    """Reset the ``explicit_null`` leaf to its schema default (YANGBool("false"))."""
    self.__explicit_null = YANGDynClass(
        base=YANGBool,
        default=YANGBool("false"),
        is_leaf=True,
        yang_name="explicit-null",
        parent=self,
        path_helper=self._path_helper,
        extmethods=self._extmethods,
        register_paths=True,
        namespace="http://openconfig.net/yang/network-instance",
        defining_module="openconfig-network-instance",
        yang_type="boolean",
        is_config=False,
    )
def _get_sid_value_type(self):
    """
    Getter method for sid_value_type, mapped from YANG variable /network_instances/network_instance/protocols/protocol/ospfv2/areas/area/lsdb/lsa_types/lsa_type/lsas/lsa/opaque_lsa/extended_prefix/tlvs/tlv/prefix_sid/state/sid_value_type (enumeration)

    YANG Description: Specifies the type of the value specified within the Prefix SID
    sub-TLV - in particular, whether the value is an index or an
    absolute value. This value corresponds with the V-flag of the Prefix
    SID sub-TLV
    """
    # Returns the YANGDynClass wrapper (enumeration: ABSOLUTE or INDEX).
    return self.__sid_value_type
def _set_sid_value_type(self, v, load=False):
    """
    Setter method for sid_value_type, mapped from YANG variable /network_instances/network_instance/protocols/protocol/ospfv2/areas/area/lsdb/lsa_types/lsa_type/lsas/lsa/opaque_lsa/extended_prefix/tlvs/tlv/prefix_sid/state/sid_value_type (enumeration)

    If this variable is read-only (config: false) in the
    source YANG file, then _set_sid_value_type is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_sid_value_type() directly.

    YANG Description: Specifies the type of the value specified within the Prefix SID
    sub-TLV - in particular, whether the value is an index or an
    absolute value. This value corresponds with the V-flag of the Prefix
    SID sub-TLV
    """
    # Presumably unwraps an already-wrapped leaf to its native value — TODO confirm.
    if hasattr(v, "_utype"):
        v = v._utype(v)
    try:
        # Re-wrap in the leaf's YANGDynClass; only the enum keys ABSOLUTE and
        # INDEX are accepted, anything else raises TypeError/ValueError.
        t = YANGDynClass(
            v,
            base=RestrictedClassType(
                base_type=six.text_type,
                restriction_type="dict_key",
                restriction_arg={"ABSOLUTE": {}, "INDEX": {}},
            ),
            is_leaf=True,
            yang_name="sid-value-type",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            namespace="http://openconfig.net/yang/network-instance",
            defining_module="openconfig-network-instance",
            yang_type="enumeration",
            is_config=False,
        )
    except (TypeError, ValueError):
        raise ValueError(
            {
                "error-string": """sid_value_type must be of a type compatible with enumeration""",
                "defined-type": "openconfig-network-instance:enumeration",
                "generated-type": """YANGDynClass(base=RestrictedClassType(base_type=six.text_type, restriction_type="dict_key", restriction_arg={'ABSOLUTE': {}, 'INDEX': {}},), is_leaf=True, yang_name="sid-value-type", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='enumeration', is_config=False)""",
            }
        )
    self.__sid_value_type = t
    # Notify the change hook if one is installed.
    if hasattr(self, "_set"):
        self._set()
def _unset_sid_value_type(self):
    """Reset the ``sid_value_type`` leaf to an empty enumeration wrapper."""
    self.__sid_value_type = YANGDynClass(
        base=RestrictedClassType(
            base_type=six.text_type,
            restriction_type="dict_key",
            restriction_arg={"ABSOLUTE": {}, "INDEX": {}},
        ),
        is_leaf=True,
        yang_name="sid-value-type",
        parent=self,
        path_helper=self._path_helper,
        extmethods=self._extmethods,
        register_paths=True,
        namespace="http://openconfig.net/yang/network-instance",
        defining_module="openconfig-network-instance",
        yang_type="enumeration",
        is_config=False,
    )
def _get_sid_scope(self):
    """
    Getter method for sid_scope, mapped from YANG variable /network_instances/network_instance/protocols/protocol/ospfv2/areas/area/lsdb/lsa_types/lsa_type/lsas/lsa/opaque_lsa/extended_prefix/tlvs/tlv/prefix_sid/state/sid_scope (enumeration)

    YANG Description: Specifies the scope of the SID advertisement within the Prefix SID
    sub-TLV. The scope of the SID is independent of whether the SID
    contained is an index, or an absolute value
    """
    # Returns the YANGDynClass wrapper (enumeration: LOCAL or GLOBAL).
    return self.__sid_scope
def _set_sid_scope(self, v, load=False):
    """
    Setter method for sid_scope, mapped from YANG variable /network_instances/network_instance/protocols/protocol/ospfv2/areas/area/lsdb/lsa_types/lsa_type/lsas/lsa/opaque_lsa/extended_prefix/tlvs/tlv/prefix_sid/state/sid_scope (enumeration)

    If this variable is read-only (config: false) in the
    source YANG file, then _set_sid_scope is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_sid_scope() directly.

    YANG Description: Specifies the scope of the SID advertisement within the Prefix SID
    sub-TLV. The scope of the SID is independent of whether the SID
    contained is an index, or an absolute value
    """
    # Presumably unwraps an already-wrapped leaf to its native value — TODO confirm.
    if hasattr(v, "_utype"):
        v = v._utype(v)
    try:
        # Re-wrap in the leaf's YANGDynClass; only the enum keys LOCAL and
        # GLOBAL are accepted, anything else raises TypeError/ValueError.
        t = YANGDynClass(
            v,
            base=RestrictedClassType(
                base_type=six.text_type,
                restriction_type="dict_key",
                restriction_arg={"LOCAL": {}, "GLOBAL": {}},
            ),
            is_leaf=True,
            yang_name="sid-scope",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            namespace="http://openconfig.net/yang/network-instance",
            defining_module="openconfig-network-instance",
            yang_type="enumeration",
            is_config=False,
        )
    except (TypeError, ValueError):
        raise ValueError(
            {
                "error-string": """sid_scope must be of a type compatible with enumeration""",
                "defined-type": "openconfig-network-instance:enumeration",
                "generated-type": """YANGDynClass(base=RestrictedClassType(base_type=six.text_type, restriction_type="dict_key", restriction_arg={'LOCAL': {}, 'GLOBAL': {}},), is_leaf=True, yang_name="sid-scope", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='enumeration', is_config=False)""",
            }
        )
    self.__sid_scope = t
    # Notify the change hook if one is installed.
    if hasattr(self, "_set"):
        self._set()
def _unset_sid_scope(self):
    """Reset the ``sid_scope`` leaf to an empty enumeration wrapper."""
    self.__sid_scope = YANGDynClass(
        base=RestrictedClassType(
            base_type=six.text_type,
            restriction_type="dict_key",
            restriction_arg={"LOCAL": {}, "GLOBAL": {}},
        ),
        is_leaf=True,
        yang_name="sid-scope",
        parent=self,
        path_helper=self._path_helper,
        extmethods=self._extmethods,
        register_paths=True,
        namespace="http://openconfig.net/yang/network-instance",
        defining_module="openconfig-network-instance",
        yang_type="enumeration",
        is_config=False,
    )
def _get_multi_topology_identifier(self):
    """
    Getter method for multi_topology_identifier, mapped from YANG variable /network_instances/network_instance/protocols/protocol/ospfv2/areas/area/lsdb/lsa_types/lsa_type/lsas/lsa/opaque_lsa/extended_prefix/tlvs/tlv/prefix_sid/state/multi_topology_identifier (uint8)

    YANG Description: The identifier for the topology to which the Prefix SID relates. The
    value of this leaf is a MT-ID as defined in RFC4915
    """
    # Returns the YANGDynClass wrapper around a 0..255 integer.
    return self.__multi_topology_identifier
def _set_multi_topology_identifier(self, v, load=False):
    """
    Setter method for multi_topology_identifier, mapped from YANG variable /network_instances/network_instance/protocols/protocol/ospfv2/areas/area/lsdb/lsa_types/lsa_type/lsas/lsa/opaque_lsa/extended_prefix/tlvs/tlv/prefix_sid/state/multi_topology_identifier (uint8)

    If this variable is read-only (config: false) in the
    source YANG file, then _set_multi_topology_identifier is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_multi_topology_identifier() directly.

    YANG Description: The identifier for the topology to which the Prefix SID relates. The
    value of this leaf is a MT-ID as defined in RFC4915
    """
    # Presumably unwraps an already-wrapped leaf to its native value — TODO confirm.
    if hasattr(v, "_utype"):
        v = v._utype(v)
    try:
        # Re-wrap in the leaf's YANGDynClass; values outside uint8 range 0..255
        # raise TypeError/ValueError.
        t = YANGDynClass(
            v,
            base=RestrictedClassType(
                base_type=int, restriction_dict={"range": ["0..255"]}, int_size=8
            ),
            is_leaf=True,
            yang_name="multi-topology-identifier",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            namespace="http://openconfig.net/yang/network-instance",
            defining_module="openconfig-network-instance",
            yang_type="uint8",
            is_config=False,
        )
    except (TypeError, ValueError):
        raise ValueError(
            {
                "error-string": """multi_topology_identifier must be of a type compatible with uint8""",
                "defined-type": "uint8",
                "generated-type": """YANGDynClass(base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), is_leaf=True, yang_name="multi-topology-identifier", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='uint8', is_config=False)""",
            }
        )
    self.__multi_topology_identifier = t
    # Notify the change hook if one is installed.
    if hasattr(self, "_set"):
        self._set()
def _unset_multi_topology_identifier(self):
    """Reset the ``multi_topology_identifier`` leaf to an empty uint8 wrapper."""
    self.__multi_topology_identifier = YANGDynClass(
        base=RestrictedClassType(
            base_type=int, restriction_dict={"range": ["0..255"]}, int_size=8
        ),
        is_leaf=True,
        yang_name="multi-topology-identifier",
        parent=self,
        path_helper=self._path_helper,
        extmethods=self._extmethods,
        register_paths=True,
        namespace="http://openconfig.net/yang/network-instance",
        defining_module="openconfig-network-instance",
        yang_type="uint8",
        is_config=False,
    )
def _get_algorithm(self):
    """
    Getter method for algorithm, mapped from YANG variable /network_instances/network_instance/protocols/protocol/ospfv2/areas/area/lsdb/lsa_types/lsa_type/lsas/lsa/opaque_lsa/extended_prefix/tlvs/tlv/prefix_sid/state/algorithm (uint8)

    YANG Description: The algorithm that computes the path associated with the Prefix SID
    """
    # Returns the YANGDynClass wrapper around a 0..255 integer.
    return self.__algorithm
def _set_algorithm(self, v, load=False):
    """
    Setter method for algorithm, mapped from YANG variable /network_instances/network_instance/protocols/protocol/ospfv2/areas/area/lsdb/lsa_types/lsa_type/lsas/lsa/opaque_lsa/extended_prefix/tlvs/tlv/prefix_sid/state/algorithm (uint8)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_algorithm is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_algorithm() directly.
    YANG Description: The algorithm that computes the path associated with the Prefix SID
    """
    # Unwrap values that carry a pyangbind unified-type coercion hook.
    if hasattr(v, "_utype"):
        v = v._utype(v)
    try:
        # Coerce v into the wrapped uint8 leaf; invalid input raises below.
        t = YANGDynClass(
            v,
            base=RestrictedClassType(
                base_type=int, restriction_dict={"range": ["0..255"]}, int_size=8
            ),
            is_leaf=True,
            yang_name="algorithm",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            namespace="http://openconfig.net/yang/network-instance",
            defining_module="openconfig-network-instance",
            yang_type="uint8",
            is_config=False,
        )
    except (TypeError, ValueError):
        # Structured error: the generated-type string mirrors the exact
        # constructor call so callers can see the expected type contract.
        raise ValueError(
            {
                "error-string": """algorithm must be of a type compatible with uint8""",
                "defined-type": "uint8",
                "generated-type": """YANGDynClass(base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), is_leaf=True, yang_name="algorithm", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='uint8', is_config=False)""",
            }
        )
    self.__algorithm = t
    # Fire the container-level change hook when one is registered.
    if hasattr(self, "_set"):
        self._set()
def _unset_algorithm(self):
    """Restore algorithm to its pristine (unset) state.

    A fresh YANGDynClass wrapper replaces the current one, dropping any
    set value along with its changed-flag and registered paths.
    """
    # uint8 restriction as declared for the SR algorithm identifier.
    algo_type = RestrictedClassType(
        base_type=int, restriction_dict={"range": ["0..255"]}, int_size=8
    )
    self.__algorithm = YANGDynClass(
        base=algo_type,
        is_leaf=True,
        yang_name="algorithm",
        parent=self,
        path_helper=self._path_helper,
        extmethods=self._extmethods,
        register_paths=True,
        namespace="http://openconfig.net/yang/network-instance",
        defining_module="openconfig-network-instance",
        yang_type="uint8",
        is_config=False,
    )
def _get_sid_value(self):
    """
    Getter method for sid_value, mapped from YANG variable /network_instances/network_instance/protocols/protocol/ospfv2/areas/area/lsdb/lsa_types/lsa_type/lsas/lsa/opaque_lsa/extended_prefix/tlvs/tlv/prefix_sid/state/sid_value (uint32)
    YANG Description: The value of the Prefix SID. The meaning of this value is dependent
    upon the type of SID, and its scope. The value contained is either a
    32-bit value indicating the index of the SID, or a 24-bit label where
    the 20 right-most bits are used for encoding the label value
    """
    # Returns the YANGDynClass-wrapped uint32 leaf (name-mangled attribute).
    return self.__sid_value
def _set_sid_value(self, v, load=False):
    """
    Setter method for sid_value, mapped from YANG variable /network_instances/network_instance/protocols/protocol/ospfv2/areas/area/lsdb/lsa_types/lsa_type/lsas/lsa/opaque_lsa/extended_prefix/tlvs/tlv/prefix_sid/state/sid_value (uint32)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_sid_value is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_sid_value() directly.
    YANG Description: The value of the Prefix SID. The meaning of this value is dependent
    upon the type of SID, and its scope. The value contained is either a
    32-bit value indicating the index of the SID, or a 24-bit label where
    the 20 right-most bits are used for encoding the label value
    """
    # Unwrap values that carry a pyangbind unified-type coercion hook.
    if hasattr(v, "_utype"):
        v = v._utype(v)
    try:
        # Coerce v into the wrapped uint32 leaf.  NOTE: `long` is the
        # py2/py3 compat alias defined by this generated module, not the
        # Python 2 builtin — do not replace with `int` in isolation.
        t = YANGDynClass(
            v,
            base=RestrictedClassType(
                base_type=long,
                restriction_dict={"range": ["0..4294967295"]},
                int_size=32,
            ),
            is_leaf=True,
            yang_name="sid-value",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            namespace="http://openconfig.net/yang/network-instance",
            defining_module="openconfig-network-instance",
            yang_type="uint32",
            is_config=False,
        )
    except (TypeError, ValueError):
        # Structured error mirroring the exact generated constructor call.
        raise ValueError(
            {
                "error-string": """sid_value must be of a type compatible with uint32""",
                "defined-type": "uint32",
                "generated-type": """YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="sid-value", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='uint32', is_config=False)""",
            }
        )
    self.__sid_value = t
    # Fire the container-level change hook when one is registered.
    if hasattr(self, "_set"):
        self._set()
def _unset_sid_value(self):
    """Restore sid-value to its pristine (unset) state.

    Replaces the wrapper outright so the previous value, changed-flag and
    registered paths are all discarded.
    """
    # uint32 restriction; `long` is the module's py2/py3 compat alias.
    sid_type = RestrictedClassType(
        base_type=long,
        restriction_dict={"range": ["0..4294967295"]},
        int_size=32,
    )
    self.__sid_value = YANGDynClass(
        base=sid_type,
        is_leaf=True,
        yang_name="sid-value",
        parent=self,
        path_helper=self._path_helper,
        extmethods=self._extmethods,
        register_paths=True,
        namespace="http://openconfig.net/yang/network-instance",
        defining_module="openconfig-network-instance",
        yang_type="uint32",
        is_config=False,
    )
# Read-only public accessors: config-false leaves get getter-only
# properties, so writes must go through the private _set_* methods.
no_php = __builtin__.property(_get_no_php)
mapping_server = __builtin__.property(_get_mapping_server)
explicit_null = __builtin__.property(_get_explicit_null)
sid_value_type = __builtin__.property(_get_sid_value_type)
sid_scope = __builtin__.property(_get_sid_scope)
multi_topology_identifier = __builtin__.property(_get_multi_topology_identifier)
algorithm = __builtin__.property(_get_algorithm)
sid_value = __builtin__.property(_get_sid_value)
# Ordered registry of child elements consumed by PybindBase for iteration
# and (de)serialisation; order mirrors the YANG module and must not change.
_pyangbind_elements = OrderedDict(
    [
        ("no_php", no_php),
        ("mapping_server", mapping_server),
        ("explicit_null", explicit_null),
        ("sid_value_type", sid_value_type),
        ("sid_scope", sid_scope),
        ("multi_topology_identifier", multi_topology_identifier),
        ("algorithm", algorithm),
        ("sid_value", sid_value),
    ]
)
class state(PybindBase):
"""
This class was auto-generated by the PythonClass plugin for PYANG
from YANG module openconfig-network-instance-l2 - based on the path /network-instances/network-instance/protocols/protocol/ospfv2/areas/area/lsdb/lsa-types/lsa-type/lsas/lsa/opaque-lsa/extended-prefix/tlvs/tlv/prefix-sid/state. Each member element of
the container is represented as a class variable - with a specific
YANG type.
YANG Description: State parameters relating to the Prefix SID sub-TLV of the
extended prefix LSA
"""
__slots__ = (
"_path_helper",
"_extmethods",
"__no_php",
"__mapping_server",
"__explicit_null",
"__sid_value_type",
"__sid_scope",
"__multi_topology_identifier",
"__algorithm",
"__sid_value",
)
_yang_name = "state"
_pybind_generated_by = "container"
def __init__(self, *args, **kwargs):
self._path_helper = False
self._extmethods = False
self.__no_php = YANGDynClass(
base=YANGBool,
default=YANGBool("false"),
is_leaf=True,
yang_name="no-php",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="boolean",
is_config=False,
)
self.__mapping_server = YANGDynClass(
base=YANGBool,
default=YANGBool("false"),
is_leaf=True,
yang_name="mapping-server",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="boolean",
is_config=False,
)
self.__explicit_null = YANGDynClass(
base=YANGBool,
default=YANGBool("false"),
is_leaf=True,
yang_name="explicit-null",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="boolean",
is_config=False,
)
self.__sid_value_type = YANGDynClass(
base=RestrictedClassType(
base_type=six.text_type,
restriction_type="dict_key",
restriction_arg={"ABSOLUTE": {}, "INDEX": {}},
),
is_leaf=True,
yang_name="sid-value-type",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="enumeration",
is_config=False,
)
self.__sid_scope = YANGDynClass(
base=RestrictedClassType(
base_type=six.text_type,
restriction_type="dict_key",
restriction_arg={"LOCAL": {}, "GLOBAL": {}},
),
is_leaf=True,
yang_name="sid-scope",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="enumeration",
is_config=False,
)
self.__multi_topology_identifier = YANGDynClass(
base=RestrictedClassType(
base_type=int, restriction_dict={"range": ["0..255"]}, int_size=8
),
is_leaf=True,
yang_name="multi-topology-identifier",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="uint8",
is_config=False,
)
self.__algorithm = YANGDynClass(
base=RestrictedClassType(
base_type=int, restriction_dict={"range": ["0..255"]}, int_size=8
),
is_leaf=True,
yang_name="algorithm",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="uint8",
is_config=False,
)
self.__sid_value = YANGDynClass(
base=RestrictedClassType(
base_type=long,
restriction_dict={"range": ["0..4294967295"]},
int_size=32,
),
is_leaf=True,
yang_name="sid-value",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="uint32",
is_config=False,
)
load = kwargs.pop("load", None)
if args:
if len(args) > 1:
raise TypeError("cannot create a YANG container with >1 argument")
all_attr = True
for e in self._pyangbind_elements:
if not hasattr(args[0], e):
all_attr = False
break
if not all_attr:
raise ValueError("Supplied object did not have the correct attributes")
for e in self._pyangbind_elements:
nobj = getattr(args[0], e)
if nobj._changed() is False:
continue
setmethod = getattr(self, "_set_%s" % e)
if load is None:
setmethod(getattr(args[0], e))
else:
setmethod(getattr(args[0], e), load=load)
def _path(self):
if hasattr(self, "_parent"):
return self._parent._path() + [self._yang_name]
else:
return [
"network-instances",
"network-instance",
"protocols",
"protocol",
"ospfv2",
"areas",
"area",
"lsdb",
"lsa-types",
"lsa-type",
"lsas",
"lsa",
"opaque-lsa",
"extended-prefix",
"tlvs",
"tlv",
"prefix-sid",
"state",
]
def _get_no_php(self):
"""
Getter method for no_php, mapped from YANG variable /network_instances/network_instance/protocols/protocol/ospfv2/areas/area/lsdb/lsa_types/lsa_type/lsas/lsa/opaque_lsa/extended_prefix/tlvs/tlv/prefix_sid/state/no_php (boolean)
YANG Description: If this leaf is set the advertising system has indicated that the
prefix SID must not be popped before delivering packets to it
"""
return self.__no_php
def _set_no_php(self, v, load=False):
"""
Setter method for no_php, mapped from YANG variable /network_instances/network_instance/protocols/protocol/ospfv2/areas/area/lsdb/lsa_types/lsa_type/lsas/lsa/opaque_lsa/extended_prefix/tlvs/tlv/prefix_sid/state/no_php (boolean)
If this variable is read-only (config: false) in the
source YANG file, then _set_no_php is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_no_php() directly.
YANG Description: If this leaf is set the advertising system has indicated that the
prefix SID must not be popped before delivering packets to it
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(
v,
base=YANGBool,
default=YANGBool("false"),
is_leaf=True,
yang_name="no-php",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="boolean",
is_config=False,
)
except (TypeError, ValueError):
raise ValueError(
{
"error-string": """no_php must be of a type compatible with boolean""",
"defined-type": "boolean",
"generated-type": """YANGDynClass(base=YANGBool, default=YANGBool("false"), is_leaf=True, yang_name="no-php", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='boolean', is_config=False)""",
}
)
self.__no_php = t
if hasattr(self, "_set"):
self._set()
def _unset_no_php(self):
self.__no_php = YANGDynClass(
base=YANGBool,
default=YANGBool("false"),
is_leaf=True,
yang_name="no-php",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="boolean",
is_config=False,
)
def _get_mapping_server(self):
"""
Getter method for mapping_server, mapped from YANG variable /network_instances/network_instance/protocols/protocol/ospfv2/areas/area/lsdb/lsa_types/lsa_type/lsas/lsa/opaque_lsa/extended_prefix/tlvs/tlv/prefix_sid/state/mapping_server (boolean)
YANG Description: If this leaf is set the SID was advertised by a Segment Routing
mapping server
"""
return self.__mapping_server
def _set_mapping_server(self, v, load=False):
"""
Setter method for mapping_server, mapped from YANG variable /network_instances/network_instance/protocols/protocol/ospfv2/areas/area/lsdb/lsa_types/lsa_type/lsas/lsa/opaque_lsa/extended_prefix/tlvs/tlv/prefix_sid/state/mapping_server (boolean)
If this variable is read-only (config: false) in the
source YANG file, then _set_mapping_server is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_mapping_server() directly.
YANG Description: If this leaf is set the SID was advertised by a Segment Routing
mapping server
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(
v,
base=YANGBool,
default=YANGBool("false"),
is_leaf=True,
yang_name="mapping-server",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="boolean",
is_config=False,
)
except (TypeError, ValueError):
raise ValueError(
{
"error-string": """mapping_server must be of a type compatible with boolean""",
"defined-type": "boolean",
"generated-type": """YANGDynClass(base=YANGBool, default=YANGBool("false"), is_leaf=True, yang_name="mapping-server", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='boolean', is_config=False)""",
}
)
self.__mapping_server = t
if hasattr(self, "_set"):
self._set()
def _unset_mapping_server(self):
self.__mapping_server = YANGDynClass(
base=YANGBool,
default=YANGBool("false"),
is_leaf=True,
yang_name="mapping-server",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="boolean",
is_config=False,
)
def _get_explicit_null(self):
"""
Getter method for explicit_null, mapped from YANG variable /network_instances/network_instance/protocols/protocol/ospfv2/areas/area/lsdb/lsa_types/lsa_type/lsas/lsa/opaque_lsa/extended_prefix/tlvs/tlv/prefix_sid/state/explicit_null (boolean)
YANG Description: If this leaf is set, the advertising system has requested that the
prefix SID value should be replaced with the explicit null label
value
"""
return self.__explicit_null
def _set_explicit_null(self, v, load=False):
"""
Setter method for explicit_null, mapped from YANG variable /network_instances/network_instance/protocols/protocol/ospfv2/areas/area/lsdb/lsa_types/lsa_type/lsas/lsa/opaque_lsa/extended_prefix/tlvs/tlv/prefix_sid/state/explicit_null (boolean)
If this variable is read-only (config: false) in the
source YANG file, then _set_explicit_null is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_explicit_null() directly.
YANG Description: If this leaf is set, the advertising system has requested that the
prefix SID value should be replaced with the explicit null label
value
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(
v,
base=YANGBool,
default=YANGBool("false"),
is_leaf=True,
yang_name="explicit-null",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="boolean",
is_config=False,
)
except (TypeError, ValueError):
raise ValueError(
{
"error-string": """explicit_null must be of a type compatible with boolean""",
"defined-type": "boolean",
"generated-type": """YANGDynClass(base=YANGBool, default=YANGBool("false"), is_leaf=True, yang_name="explicit-null", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='boolean', is_config=False)""",
}
)
self.__explicit_null = t
if hasattr(self, "_set"):
self._set()
def _unset_explicit_null(self):
self.__explicit_null = YANGDynClass(
base=YANGBool,
default=YANGBool("false"),
is_leaf=True,
yang_name="explicit-null",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="boolean",
is_config=False,
)
def _get_sid_value_type(self):
"""
Getter method for sid_value_type, mapped from YANG variable /network_instances/network_instance/protocols/protocol/ospfv2/areas/area/lsdb/lsa_types/lsa_type/lsas/lsa/opaque_lsa/extended_prefix/tlvs/tlv/prefix_sid/state/sid_value_type (enumeration)
YANG Description: Specifies the type of the value specified within the Prefix SID
sub-TLV - in particular, whether the value is an index or an
absolute value. This value corresponds with the V-flag of the Prefix
SID sub-TLV
"""
return self.__sid_value_type
def _set_sid_value_type(self, v, load=False):
"""
Setter method for sid_value_type, mapped from YANG variable /network_instances/network_instance/protocols/protocol/ospfv2/areas/area/lsdb/lsa_types/lsa_type/lsas/lsa/opaque_lsa/extended_prefix/tlvs/tlv/prefix_sid/state/sid_value_type (enumeration)
If this variable is read-only (config: false) in the
source YANG file, then _set_sid_value_type is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_sid_value_type() directly.
YANG Description: Specifies the type of the value specified within the Prefix SID
sub-TLV - in particular, whether the value is an index or an
absolute value. This value corresponds with the V-flag of the Prefix
SID sub-TLV
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(
v,
base=RestrictedClassType(
base_type=six.text_type,
restriction_type="dict_key",
restriction_arg={"ABSOLUTE": {}, "INDEX": {}},
),
is_leaf=True,
yang_name="sid-value-type",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="enumeration",
is_config=False,
)
except (TypeError, ValueError):
raise ValueError(
{
"error-string": """sid_value_type must be of a type compatible with enumeration""",
"defined-type": "openconfig-network-instance:enumeration",
"generated-type": """YANGDynClass(base=RestrictedClassType(base_type=six.text_type, restriction_type="dict_key", restriction_arg={'ABSOLUTE': {}, 'INDEX': {}},), is_leaf=True, yang_name="sid-value-type", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='enumeration', is_config=False)""",
}
)
self.__sid_value_type = t
if hasattr(self, "_set"):
self._set()
def _unset_sid_value_type(self):
self.__sid_value_type = YANGDynClass(
base=RestrictedClassType(
base_type=six.text_type,
restriction_type="dict_key",
restriction_arg={"ABSOLUTE": {}, "INDEX": {}},
),
is_leaf=True,
yang_name="sid-value-type",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="enumeration",
is_config=False,
)
def _get_sid_scope(self):
"""
Getter method for sid_scope, mapped from YANG variable /network_instances/network_instance/protocols/protocol/ospfv2/areas/area/lsdb/lsa_types/lsa_type/lsas/lsa/opaque_lsa/extended_prefix/tlvs/tlv/prefix_sid/state/sid_scope (enumeration)
YANG Description: Specifies the scope of the SID advertisement within the Prefix SID
sub-TLV. The scope of the SID is independent of whether the SID
contained is an index, or an absolute value
"""
return self.__sid_scope
def _set_sid_scope(self, v, load=False):
"""
Setter method for sid_scope, mapped from YANG variable /network_instances/network_instance/protocols/protocol/ospfv2/areas/area/lsdb/lsa_types/lsa_type/lsas/lsa/opaque_lsa/extended_prefix/tlvs/tlv/prefix_sid/state/sid_scope (enumeration)
If this variable is read-only (config: false) in the
source YANG file, then _set_sid_scope is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_sid_scope() directly.
YANG Description: Specifies the scope of the SID advertisement within the Prefix SID
sub-TLV. The scope of the SID is independent of whether the SID
contained is an index, or an absolute value
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(
v,
base=RestrictedClassType(
base_type=six.text_type,
restriction_type="dict_key",
restriction_arg={"LOCAL": {}, "GLOBAL": {}},
),
is_leaf=True,
yang_name="sid-scope",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="enumeration",
is_config=False,
)
except (TypeError, ValueError):
raise ValueError(
{
"error-string": """sid_scope must be of a type compatible with enumeration""",
"defined-type": "openconfig-network-instance:enumeration",
"generated-type": """YANGDynClass(base=RestrictedClassType(base_type=six.text_type, restriction_type="dict_key", restriction_arg={'LOCAL': {}, 'GLOBAL': {}},), is_leaf=True, yang_name="sid-scope", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='enumeration', is_config=False)""",
}
)
self.__sid_scope = t
if hasattr(self, "_set"):
self._set()
def _unset_sid_scope(self):
self.__sid_scope = YANGDynClass(
base=RestrictedClassType(
base_type=six.text_type,
restriction_type="dict_key",
restriction_arg={"LOCAL": {}, "GLOBAL": {}},
),
is_leaf=True,
yang_name="sid-scope",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="enumeration",
is_config=False,
)
def _get_multi_topology_identifier(self):
"""
Getter method for multi_topology_identifier, mapped from YANG variable /network_instances/network_instance/protocols/protocol/ospfv2/areas/area/lsdb/lsa_types/lsa_type/lsas/lsa/opaque_lsa/extended_prefix/tlvs/tlv/prefix_sid/state/multi_topology_identifier (uint8)
YANG Description: The identifier for the topology to which the Prefix SID relates. The
value of this leaf is a MT-ID as defined in RFC4915
"""
return self.__multi_topology_identifier
def _set_multi_topology_identifier(self, v, load=False):
"""
Setter method for multi_topology_identifier, mapped from YANG variable /network_instances/network_instance/protocols/protocol/ospfv2/areas/area/lsdb/lsa_types/lsa_type/lsas/lsa/opaque_lsa/extended_prefix/tlvs/tlv/prefix_sid/state/multi_topology_identifier (uint8)
If this variable is read-only (config: false) in the
source YANG file, then _set_multi_topology_identifier is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_multi_topology_identifier() directly.
YANG Description: The identifier for the topology to which the Prefix SID relates. The
value of this leaf is a MT-ID as defined in RFC4915
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(
v,
base=RestrictedClassType(
base_type=int, restriction_dict={"range": ["0..255"]}, int_size=8
),
is_leaf=True,
yang_name="multi-topology-identifier",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="uint8",
is_config=False,
)
except (TypeError, ValueError):
raise ValueError(
{
"error-string": """multi_topology_identifier must be of a type compatible with uint8""",
"defined-type": "uint8",
"generated-type": """YANGDynClass(base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), is_leaf=True, yang_name="multi-topology-identifier", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='uint8', is_config=False)""",
}
)
self.__multi_topology_identifier = t
if hasattr(self, "_set"):
self._set()
def _unset_multi_topology_identifier(self):
self.__multi_topology_identifier = YANGDynClass(
base=RestrictedClassType(
base_type=int, restriction_dict={"range": ["0..255"]}, int_size=8
),
is_leaf=True,
yang_name="multi-topology-identifier",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="uint8",
is_config=False,
)
def _get_algorithm(self):
"""
Getter method for algorithm, mapped from YANG variable /network_instances/network_instance/protocols/protocol/ospfv2/areas/area/lsdb/lsa_types/lsa_type/lsas/lsa/opaque_lsa/extended_prefix/tlvs/tlv/prefix_sid/state/algorithm (uint8)
YANG Description: The algorithm that computes the path associated with the Prefix SID
"""
return self.__algorithm
def _set_algorithm(self, v, load=False):
"""
Setter method for algorithm, mapped from YANG variable /network_instances/network_instance/protocols/protocol/ospfv2/areas/area/lsdb/lsa_types/lsa_type/lsas/lsa/opaque_lsa/extended_prefix/tlvs/tlv/prefix_sid/state/algorithm (uint8)
If this variable is read-only (config: false) in the
source YANG file, then _set_algorithm is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_algorithm() directly.
YANG Description: The algorithm that computes the path associated with the Prefix SID
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(
v,
base=RestrictedClassType(
base_type=int, restriction_dict={"range": ["0..255"]}, int_size=8
),
is_leaf=True,
yang_name="algorithm",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="uint8",
is_config=False,
)
except (TypeError, ValueError):
raise ValueError(
{
"error-string": """algorithm must be of a type compatible with uint8""",
"defined-type": "uint8",
"generated-type": """YANGDynClass(base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), is_leaf=True, yang_name="algorithm", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='uint8', is_config=False)""",
}
)
self.__algorithm = t
if hasattr(self, "_set"):
self._set()
def _unset_algorithm(self):
self.__algorithm = YANGDynClass(
base=RestrictedClassType(
base_type=int, restriction_dict={"range": ["0..255"]}, int_size=8
),
is_leaf=True,
yang_name="algorithm",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="uint8",
is_config=False,
)
def _get_sid_value(self):
"""
Getter method for sid_value, mapped from YANG variable /network_instances/network_instance/protocols/protocol/ospfv2/areas/area/lsdb/lsa_types/lsa_type/lsas/lsa/opaque_lsa/extended_prefix/tlvs/tlv/prefix_sid/state/sid_value (uint32)
YANG Description: The value of the Prefix SID. The meaning of this value is dependent
upon the type of SID, and its scope. The value contained is either a
32-bit value indicating the index of the SID, or a 24-bit label where
the 20 right-most bits are used for encoding the label value
"""
return self.__sid_value
def _set_sid_value(self, v, load=False):
"""
Setter method for sid_value, mapped from YANG variable /network_instances/network_instance/protocols/protocol/ospfv2/areas/area/lsdb/lsa_types/lsa_type/lsas/lsa/opaque_lsa/extended_prefix/tlvs/tlv/prefix_sid/state/sid_value (uint32)
If this variable is read-only (config: false) in the
source YANG file, then _set_sid_value is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_sid_value() directly.
YANG Description: The value of the Prefix SID. The meaning of this value is dependent
upon the type of SID, and its scope. The value contained is either a
32-bit value indicating the index of the SID, or a 24-bit label where
the 20 right-most bits are used for encoding the label value
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(
v,
base=RestrictedClassType(
base_type=long,
restriction_dict={"range": ["0..4294967295"]},
int_size=32,
),
is_leaf=True,
yang_name="sid-value",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="uint32",
is_config=False,
)
except (TypeError, ValueError):
raise ValueError(
{
"error-string": """sid_value must be of a type compatible with uint32""",
"defined-type": "uint32",
"generated-type": """YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="sid-value", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='uint32', is_config=False)""",
}
)
self.__sid_value = t
if hasattr(self, "_set"):
self._set()
def _unset_sid_value(self):
    """Reset sid_value to a fresh, unset uint32 YANG leaf instance."""
    self.__sid_value = YANGDynClass(
        base=RestrictedClassType(
            base_type=long,
            restriction_dict={"range": ["0..4294967295"]},
            int_size=32,
        ),
        is_leaf=True,
        yang_name="sid-value",
        parent=self,
        path_helper=self._path_helper,
        extmethods=self._extmethods,
        register_paths=True,
        namespace="http://openconfig.net/yang/network-instance",
        defining_module="openconfig-network-instance",
        yang_type="uint32",
        is_config=False,
    )
# Expose the generated getters as read-only properties. No setters are bound
# because these leaves are operational state (config: false) in the YANG model.
# NOTE(review): `__builtin__` is presumably an alias for the builtins module
# set up in the (unseen) file header — confirm against the full file.
no_php = __builtin__.property(_get_no_php)
mapping_server = __builtin__.property(_get_mapping_server)
explicit_null = __builtin__.property(_get_explicit_null)
sid_value_type = __builtin__.property(_get_sid_value_type)
sid_scope = __builtin__.property(_get_sid_scope)
multi_topology_identifier = __builtin__.property(_get_multi_topology_identifier)
algorithm = __builtin__.property(_get_algorithm)
sid_value = __builtin__.property(_get_sid_value)
# Ordered registry of this container's YANG elements, in model order; used by
# pyangbind for iteration/serialisation.
_pyangbind_elements = OrderedDict(
    [
        ("no_php", no_php),
        ("mapping_server", mapping_server),
        ("explicit_null", explicit_null),
        ("sid_value_type", sid_value_type),
        ("sid_scope", sid_scope),
        ("multi_topology_identifier", multi_topology_identifier),
        ("algorithm", algorithm),
        ("sid_value", sid_value),
    ]
)
| 44.113735
| 539
| 0.612491
| 7,904
| 70,979
| 5.257338
| 0.033654
| 0.061366
| 0.043798
| 0.049237
| 0.990976
| 0.985753
| 0.985753
| 0.985753
| 0.985753
| 0.985753
| 0
| 0.00679
| 0.29041
| 70,979
| 1,608
| 540
| 44.141169
| 0.81825
| 0.26057
| 0
| 0.898693
| 0
| 0.013072
| 0.268432
| 0.098444
| 0
| 0
| 0
| 0
| 0
| 1
| 0.042484
| false
| 0
| 0.012255
| 0
| 0.09232
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
27ac3598406e52c0d4c2dc627fb01432764e8727
| 36,615
|
py
|
Python
|
sdk/python/pulumi_splunk/inputs_tcp_raw.py
|
pulumi/pulumi-splunk
|
a593a4b65e7de94d61b93676231606820193f212
|
[
"ECL-2.0",
"Apache-2.0"
] | 1
|
2020-12-23T01:26:49.000Z
|
2020-12-23T01:26:49.000Z
|
sdk/python/pulumi_splunk/inputs_tcp_raw.py
|
pulumi/pulumi-splunk
|
a593a4b65e7de94d61b93676231606820193f212
|
[
"ECL-2.0",
"Apache-2.0"
] | 36
|
2020-12-22T16:57:47.000Z
|
2022-03-25T20:12:26.000Z
|
sdk/python/pulumi_splunk/inputs_tcp_raw.py
|
pulumi/pulumi-splunk
|
a593a4b65e7de94d61b93676231606820193f212
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from . import _utilities
from . import outputs
from ._inputs import *
__all__ = ['InputsTcpRawArgs', 'InputsTcpRaw']
@pulumi.input_type
class InputsTcpRawArgs:
    """Constructor arguments for an `InputsTcpRaw` resource (all optional)."""
    def __init__(__self__, *,
                 acl: Optional[pulumi.Input['InputsTcpRawAclArgs']] = None,
                 connection_host: Optional[pulumi.Input[str]] = None,
                 disabled: Optional[pulumi.Input[bool]] = None,
                 host: Optional[pulumi.Input[str]] = None,
                 index: Optional[pulumi.Input[str]] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 queue: Optional[pulumi.Input[str]] = None,
                 raw_tcp_done_timeout: Optional[pulumi.Input[int]] = None,
                 restrict_to_host: Optional[pulumi.Input[str]] = None,
                 source: Optional[pulumi.Input[str]] = None,
                 sourcetype: Optional[pulumi.Input[str]] = None):
        """
        The set of arguments for constructing a InputsTcpRaw resource.
        :param pulumi.Input['InputsTcpRawAclArgs'] acl: The app/user context that is the namespace for the resource
        :param pulumi.Input[str] connection_host: Valid values: (ip | dns | none)
               Set the host for the remote server that is sending data.
               ip sets the host to the IP address of the remote server sending data.
               dns sets the host to the reverse DNS entry for the IP address of the remote server sending data.
               none leaves the host as specified in inputs.conf, which is typically the Splunk system hostname.
               Default value is dns.
        :param pulumi.Input[bool] disabled: Indicates if input is disabled.
        :param pulumi.Input[str] host: Host from which the indexer gets data.
        :param pulumi.Input[str] index: Index to store generated events. Defaults to default.
        :param pulumi.Input[str] name: The input port which receives raw data.
        :param pulumi.Input[str] queue: Valid values: (parsingQueue | indexQueue)
               Specifies where the input processor should deposit the events it reads. Defaults to parsingQueue.
               Set queue to parsingQueue to apply props.conf and other parsing rules to your data. For more information about props.conf and rules for timestamping and linebreaking, refer to props.conf and the online documentation at "Monitor files and directories with inputs.conf"
               Set queue to indexQueue to send your data directly into the index.
        :param pulumi.Input[int] raw_tcp_done_timeout: Specifies in seconds the timeout value for adding a Done-key. Default value is 10 seconds.
               If a connection over the port specified by name remains idle after receiving data for specified number of seconds, it adds a Done-key. This implies the last event is completely received.
        :param pulumi.Input[str] restrict_to_host: Allows for restricting this input to only accept data from the host specified here.
        :param pulumi.Input[str] source: Sets the source key/field for events from this input. Defaults to the input file path.
               Sets the source key initial value. The key is used during parsing/indexing, in particular to set the source field during indexing. It is also the source field used at search time. As a convenience, the chosen string is prepended with 'source::'.
        :param pulumi.Input[str] sourcetype: Set the source type for events from this input.
               "sourcetype=" is automatically prepended to <string>.
               Defaults to audittrail (if signedaudit=true) or fschange (if signedaudit=false).
        """
        # Record only the arguments that were explicitly provided; pulumi.set
        # stores them on the input type so unset fields keep provider defaults.
        if acl is not None:
            pulumi.set(__self__, "acl", acl)
        if connection_host is not None:
            pulumi.set(__self__, "connection_host", connection_host)
        if disabled is not None:
            pulumi.set(__self__, "disabled", disabled)
        if host is not None:
            pulumi.set(__self__, "host", host)
        if index is not None:
            pulumi.set(__self__, "index", index)
        if name is not None:
            pulumi.set(__self__, "name", name)
        if queue is not None:
            pulumi.set(__self__, "queue", queue)
        if raw_tcp_done_timeout is not None:
            pulumi.set(__self__, "raw_tcp_done_timeout", raw_tcp_done_timeout)
        if restrict_to_host is not None:
            pulumi.set(__self__, "restrict_to_host", restrict_to_host)
        if source is not None:
            pulumi.set(__self__, "source", source)
        if sourcetype is not None:
            pulumi.set(__self__, "sourcetype", sourcetype)

    # Property getter/setter pairs below simply delegate to pulumi.get/pulumi.set;
    # @pulumi.getter(name=...) maps snake_case attributes to camelCase wire names.
    @property
    @pulumi.getter
    def acl(self) -> Optional[pulumi.Input['InputsTcpRawAclArgs']]:
        """
        The app/user context that is the namespace for the resource
        """
        return pulumi.get(self, "acl")

    @acl.setter
    def acl(self, value: Optional[pulumi.Input['InputsTcpRawAclArgs']]):
        pulumi.set(self, "acl", value)

    @property
    @pulumi.getter(name="connectionHost")
    def connection_host(self) -> Optional[pulumi.Input[str]]:
        """
        Valid values: (ip | dns | none)
        Set the host for the remote server that is sending data.
        ip sets the host to the IP address of the remote server sending data.
        dns sets the host to the reverse DNS entry for the IP address of the remote server sending data.
        none leaves the host as specified in inputs.conf, which is typically the Splunk system hostname.
        Default value is dns.
        """
        return pulumi.get(self, "connection_host")

    @connection_host.setter
    def connection_host(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "connection_host", value)

    @property
    @pulumi.getter
    def disabled(self) -> Optional[pulumi.Input[bool]]:
        """
        Indicates if input is disabled.
        """
        return pulumi.get(self, "disabled")

    @disabled.setter
    def disabled(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "disabled", value)

    @property
    @pulumi.getter
    def host(self) -> Optional[pulumi.Input[str]]:
        """
        Host from which the indexer gets data.
        """
        return pulumi.get(self, "host")

    @host.setter
    def host(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "host", value)

    @property
    @pulumi.getter
    def index(self) -> Optional[pulumi.Input[str]]:
        """
        Index to store generated events. Defaults to default.
        """
        return pulumi.get(self, "index")

    @index.setter
    def index(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "index", value)

    @property
    @pulumi.getter
    def name(self) -> Optional[pulumi.Input[str]]:
        """
        The input port which receives raw data.
        """
        return pulumi.get(self, "name")

    @name.setter
    def name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "name", value)

    @property
    @pulumi.getter
    def queue(self) -> Optional[pulumi.Input[str]]:
        """
        Valid values: (parsingQueue | indexQueue)
        Specifies where the input processor should deposit the events it reads. Defaults to parsingQueue.
        Set queue to parsingQueue to apply props.conf and other parsing rules to your data. For more information about props.conf and rules for timestamping and linebreaking, refer to props.conf and the online documentation at "Monitor files and directories with inputs.conf"
        Set queue to indexQueue to send your data directly into the index.
        """
        return pulumi.get(self, "queue")

    @queue.setter
    def queue(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "queue", value)

    @property
    @pulumi.getter(name="rawTcpDoneTimeout")
    def raw_tcp_done_timeout(self) -> Optional[pulumi.Input[int]]:
        """
        Specifies in seconds the timeout value for adding a Done-key. Default value is 10 seconds.
        If a connection over the port specified by name remains idle after receiving data for specified number of seconds, it adds a Done-key. This implies the last event is completely received.
        """
        return pulumi.get(self, "raw_tcp_done_timeout")

    @raw_tcp_done_timeout.setter
    def raw_tcp_done_timeout(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "raw_tcp_done_timeout", value)

    @property
    @pulumi.getter(name="restrictToHost")
    def restrict_to_host(self) -> Optional[pulumi.Input[str]]:
        """
        Allows for restricting this input to only accept data from the host specified here.
        """
        return pulumi.get(self, "restrict_to_host")

    @restrict_to_host.setter
    def restrict_to_host(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "restrict_to_host", value)

    @property
    @pulumi.getter
    def source(self) -> Optional[pulumi.Input[str]]:
        """
        Sets the source key/field for events from this input. Defaults to the input file path.
        Sets the source key initial value. The key is used during parsing/indexing, in particular to set the source field during indexing. It is also the source field used at search time. As a convenience, the chosen string is prepended with 'source::'.
        """
        return pulumi.get(self, "source")

    @source.setter
    def source(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "source", value)

    @property
    @pulumi.getter
    def sourcetype(self) -> Optional[pulumi.Input[str]]:
        """
        Set the source type for events from this input.
        "sourcetype=" is automatically prepended to <string>.
        Defaults to audittrail (if signedaudit=true) or fschange (if signedaudit=false).
        """
        return pulumi.get(self, "sourcetype")

    @sourcetype.setter
    def sourcetype(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "sourcetype", value)
@pulumi.input_type
class _InputsTcpRawState:
    """State properties used to look up / filter existing `InputsTcpRaw` resources."""
    def __init__(__self__, *,
                 acl: Optional[pulumi.Input['InputsTcpRawAclArgs']] = None,
                 connection_host: Optional[pulumi.Input[str]] = None,
                 disabled: Optional[pulumi.Input[bool]] = None,
                 host: Optional[pulumi.Input[str]] = None,
                 index: Optional[pulumi.Input[str]] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 queue: Optional[pulumi.Input[str]] = None,
                 raw_tcp_done_timeout: Optional[pulumi.Input[int]] = None,
                 restrict_to_host: Optional[pulumi.Input[str]] = None,
                 source: Optional[pulumi.Input[str]] = None,
                 sourcetype: Optional[pulumi.Input[str]] = None):
        """
        Input properties used for looking up and filtering InputsTcpRaw resources.
        :param pulumi.Input['InputsTcpRawAclArgs'] acl: The app/user context that is the namespace for the resource
        :param pulumi.Input[str] connection_host: Valid values: (ip | dns | none)
               Set the host for the remote server that is sending data.
               ip sets the host to the IP address of the remote server sending data.
               dns sets the host to the reverse DNS entry for the IP address of the remote server sending data.
               none leaves the host as specified in inputs.conf, which is typically the Splunk system hostname.
               Default value is dns.
        :param pulumi.Input[bool] disabled: Indicates if input is disabled.
        :param pulumi.Input[str] host: Host from which the indexer gets data.
        :param pulumi.Input[str] index: Index to store generated events. Defaults to default.
        :param pulumi.Input[str] name: The input port which receives raw data.
        :param pulumi.Input[str] queue: Valid values: (parsingQueue | indexQueue)
               Specifies where the input processor should deposit the events it reads. Defaults to parsingQueue.
               Set queue to parsingQueue to apply props.conf and other parsing rules to your data. For more information about props.conf and rules for timestamping and linebreaking, refer to props.conf and the online documentation at "Monitor files and directories with inputs.conf"
               Set queue to indexQueue to send your data directly into the index.
        :param pulumi.Input[int] raw_tcp_done_timeout: Specifies in seconds the timeout value for adding a Done-key. Default value is 10 seconds.
               If a connection over the port specified by name remains idle after receiving data for specified number of seconds, it adds a Done-key. This implies the last event is completely received.
        :param pulumi.Input[str] restrict_to_host: Allows for restricting this input to only accept data from the host specified here.
        :param pulumi.Input[str] source: Sets the source key/field for events from this input. Defaults to the input file path.
               Sets the source key initial value. The key is used during parsing/indexing, in particular to set the source field during indexing. It is also the source field used at search time. As a convenience, the chosen string is prepended with 'source::'.
        :param pulumi.Input[str] sourcetype: Set the source type for events from this input.
               "sourcetype=" is automatically prepended to <string>.
               Defaults to audittrail (if signedaudit=true) or fschange (if signedaudit=false).
        """
        # Record only explicitly-provided state fields (unset fields stay absent).
        if acl is not None:
            pulumi.set(__self__, "acl", acl)
        if connection_host is not None:
            pulumi.set(__self__, "connection_host", connection_host)
        if disabled is not None:
            pulumi.set(__self__, "disabled", disabled)
        if host is not None:
            pulumi.set(__self__, "host", host)
        if index is not None:
            pulumi.set(__self__, "index", index)
        if name is not None:
            pulumi.set(__self__, "name", name)
        if queue is not None:
            pulumi.set(__self__, "queue", queue)
        if raw_tcp_done_timeout is not None:
            pulumi.set(__self__, "raw_tcp_done_timeout", raw_tcp_done_timeout)
        if restrict_to_host is not None:
            pulumi.set(__self__, "restrict_to_host", restrict_to_host)
        if source is not None:
            pulumi.set(__self__, "source", source)
        if sourcetype is not None:
            pulumi.set(__self__, "sourcetype", sourcetype)

    # Delegating accessors; @pulumi.getter(name=...) maps snake_case attributes
    # to their camelCase wire names.
    @property
    @pulumi.getter
    def acl(self) -> Optional[pulumi.Input['InputsTcpRawAclArgs']]:
        """
        The app/user context that is the namespace for the resource
        """
        return pulumi.get(self, "acl")

    @acl.setter
    def acl(self, value: Optional[pulumi.Input['InputsTcpRawAclArgs']]):
        pulumi.set(self, "acl", value)

    @property
    @pulumi.getter(name="connectionHost")
    def connection_host(self) -> Optional[pulumi.Input[str]]:
        """
        Valid values: (ip | dns | none)
        Set the host for the remote server that is sending data.
        ip sets the host to the IP address of the remote server sending data.
        dns sets the host to the reverse DNS entry for the IP address of the remote server sending data.
        none leaves the host as specified in inputs.conf, which is typically the Splunk system hostname.
        Default value is dns.
        """
        return pulumi.get(self, "connection_host")

    @connection_host.setter
    def connection_host(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "connection_host", value)

    @property
    @pulumi.getter
    def disabled(self) -> Optional[pulumi.Input[bool]]:
        """
        Indicates if input is disabled.
        """
        return pulumi.get(self, "disabled")

    @disabled.setter
    def disabled(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "disabled", value)

    @property
    @pulumi.getter
    def host(self) -> Optional[pulumi.Input[str]]:
        """
        Host from which the indexer gets data.
        """
        return pulumi.get(self, "host")

    @host.setter
    def host(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "host", value)

    @property
    @pulumi.getter
    def index(self) -> Optional[pulumi.Input[str]]:
        """
        Index to store generated events. Defaults to default.
        """
        return pulumi.get(self, "index")

    @index.setter
    def index(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "index", value)

    @property
    @pulumi.getter
    def name(self) -> Optional[pulumi.Input[str]]:
        """
        The input port which receives raw data.
        """
        return pulumi.get(self, "name")

    @name.setter
    def name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "name", value)

    @property
    @pulumi.getter
    def queue(self) -> Optional[pulumi.Input[str]]:
        """
        Valid values: (parsingQueue | indexQueue)
        Specifies where the input processor should deposit the events it reads. Defaults to parsingQueue.
        Set queue to parsingQueue to apply props.conf and other parsing rules to your data. For more information about props.conf and rules for timestamping and linebreaking, refer to props.conf and the online documentation at "Monitor files and directories with inputs.conf"
        Set queue to indexQueue to send your data directly into the index.
        """
        return pulumi.get(self, "queue")

    @queue.setter
    def queue(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "queue", value)

    @property
    @pulumi.getter(name="rawTcpDoneTimeout")
    def raw_tcp_done_timeout(self) -> Optional[pulumi.Input[int]]:
        """
        Specifies in seconds the timeout value for adding a Done-key. Default value is 10 seconds.
        If a connection over the port specified by name remains idle after receiving data for specified number of seconds, it adds a Done-key. This implies the last event is completely received.
        """
        return pulumi.get(self, "raw_tcp_done_timeout")

    @raw_tcp_done_timeout.setter
    def raw_tcp_done_timeout(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "raw_tcp_done_timeout", value)

    @property
    @pulumi.getter(name="restrictToHost")
    def restrict_to_host(self) -> Optional[pulumi.Input[str]]:
        """
        Allows for restricting this input to only accept data from the host specified here.
        """
        return pulumi.get(self, "restrict_to_host")

    @restrict_to_host.setter
    def restrict_to_host(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "restrict_to_host", value)

    @property
    @pulumi.getter
    def source(self) -> Optional[pulumi.Input[str]]:
        """
        Sets the source key/field for events from this input. Defaults to the input file path.
        Sets the source key initial value. The key is used during parsing/indexing, in particular to set the source field during indexing. It is also the source field used at search time. As a convenience, the chosen string is prepended with 'source::'.
        """
        return pulumi.get(self, "source")

    @source.setter
    def source(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "source", value)

    @property
    @pulumi.getter
    def sourcetype(self) -> Optional[pulumi.Input[str]]:
        """
        Set the source type for events from this input.
        "sourcetype=" is automatically prepended to <string>.
        Defaults to audittrail (if signedaudit=true) or fschange (if signedaudit=false).
        """
        return pulumi.get(self, "sourcetype")

    @sourcetype.setter
    def sourcetype(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "sourcetype", value)
class InputsTcpRaw(pulumi.CustomResource):
@overload
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
acl: Optional[pulumi.Input[pulumi.InputType['InputsTcpRawAclArgs']]] = None,
connection_host: Optional[pulumi.Input[str]] = None,
disabled: Optional[pulumi.Input[bool]] = None,
host: Optional[pulumi.Input[str]] = None,
index: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
queue: Optional[pulumi.Input[str]] = None,
raw_tcp_done_timeout: Optional[pulumi.Input[int]] = None,
restrict_to_host: Optional[pulumi.Input[str]] = None,
source: Optional[pulumi.Input[str]] = None,
sourcetype: Optional[pulumi.Input[str]] = None,
__props__=None):
"""
## # Resource: InputsTcpRaw
Create or update raw TCP input information for managing raw tcp inputs from forwarders.
## Example Usage
```python
import pulumi
import pulumi_splunk as splunk
tcp_raw = splunk.InputsTcpRaw("tcpRaw",
disabled=False,
index="main",
queue="indexQueue",
source="new",
sourcetype="new")
```
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[pulumi.InputType['InputsTcpRawAclArgs']] acl: The app/user context that is the namespace for the resource
:param pulumi.Input[str] connection_host: Valid values: (ip | dns | none)
Set the host for the remote server that is sending data.
ip sets the host to the IP address of the remote server sending data.
dns sets the host to the reverse DNS entry for the IP address of the remote server sending data.
none leaves the host as specified in inputs.conf, which is typically the Splunk system hostname.
Default value is dns.
:param pulumi.Input[bool] disabled: Indicates if input is disabled.
:param pulumi.Input[str] host: Host from which the indexer gets data.
:param pulumi.Input[str] index: Index to store generated events. Defaults to default.
:param pulumi.Input[str] name: The input port which receives raw data.
:param pulumi.Input[str] queue: Valid values: (parsingQueue | indexQueue)
Specifies where the input processor should deposit the events it reads. Defaults to parsingQueue.
Set queue to parsingQueue to apply props.conf and other parsing rules to your data. For more information about props.conf and rules for timestamping and linebreaking, refer to props.conf and the online documentation at "Monitor files and directories with inputs.conf"
Set queue to indexQueue to send your data directly into the index.
:param pulumi.Input[int] raw_tcp_done_timeout: Specifies in seconds the timeout value for adding a Done-key. Default value is 10 seconds.
If a connection over the port specified by name remains idle after receiving data for specified number of seconds, it adds a Done-key. This implies the last event is completely received.
:param pulumi.Input[str] restrict_to_host: Allows for restricting this input to only accept data from the host specified here.
:param pulumi.Input[str] source: Sets the source key/field for events from this input. Defaults to the input file path.
Sets the source key initial value. The key is used during parsing/indexing, in particular to set the source field during indexing. It is also the source field used at search time. As a convenience, the chosen string is prepended with 'source::'.
:param pulumi.Input[str] sourcetype: Set the source type for events from this input.
"sourcetype=" is automatically prepended to <string>.
Defaults to audittrail (if signedaudit=true) or fschange (if signedaudit=false).
"""
...
@overload
def __init__(__self__,
resource_name: str,
args: Optional[InputsTcpRawArgs] = None,
opts: Optional[pulumi.ResourceOptions] = None):
"""
## # Resource: InputsTcpRaw
Create or update raw TCP input information for managing raw tcp inputs from forwarders.
## Example Usage
```python
import pulumi
import pulumi_splunk as splunk
tcp_raw = splunk.InputsTcpRaw("tcpRaw",
disabled=False,
index="main",
queue="indexQueue",
source="new",
sourcetype="new")
```
:param str resource_name: The name of the resource.
:param InputsTcpRawArgs args: The arguments to use to populate this resource's properties.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
...
def __init__(__self__, resource_name: str, *args, **kwargs):
resource_args, opts = _utilities.get_resource_args_opts(InputsTcpRawArgs, pulumi.ResourceOptions, *args, **kwargs)
if resource_args is not None:
__self__._internal_init(resource_name, opts, **resource_args.__dict__)
else:
__self__._internal_init(resource_name, *args, **kwargs)
def _internal_init(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
acl: Optional[pulumi.Input[pulumi.InputType['InputsTcpRawAclArgs']]] = None,
connection_host: Optional[pulumi.Input[str]] = None,
disabled: Optional[pulumi.Input[bool]] = None,
host: Optional[pulumi.Input[str]] = None,
index: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
queue: Optional[pulumi.Input[str]] = None,
raw_tcp_done_timeout: Optional[pulumi.Input[int]] = None,
restrict_to_host: Optional[pulumi.Input[str]] = None,
source: Optional[pulumi.Input[str]] = None,
sourcetype: Optional[pulumi.Input[str]] = None,
__props__=None):
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = InputsTcpRawArgs.__new__(InputsTcpRawArgs)
__props__.__dict__["acl"] = acl
__props__.__dict__["connection_host"] = connection_host
__props__.__dict__["disabled"] = disabled
__props__.__dict__["host"] = host
__props__.__dict__["index"] = index
__props__.__dict__["name"] = name
__props__.__dict__["queue"] = queue
__props__.__dict__["raw_tcp_done_timeout"] = raw_tcp_done_timeout
__props__.__dict__["restrict_to_host"] = restrict_to_host
__props__.__dict__["source"] = source
__props__.__dict__["sourcetype"] = sourcetype
super(InputsTcpRaw, __self__).__init__(
'splunk:index/inputsTcpRaw:InputsTcpRaw',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None,
acl: Optional[pulumi.Input[pulumi.InputType['InputsTcpRawAclArgs']]] = None,
connection_host: Optional[pulumi.Input[str]] = None,
disabled: Optional[pulumi.Input[bool]] = None,
host: Optional[pulumi.Input[str]] = None,
index: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
queue: Optional[pulumi.Input[str]] = None,
raw_tcp_done_timeout: Optional[pulumi.Input[int]] = None,
restrict_to_host: Optional[pulumi.Input[str]] = None,
source: Optional[pulumi.Input[str]] = None,
sourcetype: Optional[pulumi.Input[str]] = None) -> 'InputsTcpRaw':
"""
Get an existing InputsTcpRaw resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[pulumi.InputType['InputsTcpRawAclArgs']] acl: The app/user context that is the namespace for the resource
:param pulumi.Input[str] connection_host: Valid values: (ip | dns | none)
Set the host for the remote server that is sending data.
ip sets the host to the IP address of the remote server sending data.
dns sets the host to the reverse DNS entry for the IP address of the remote server sending data.
none leaves the host as specified in inputs.conf, which is typically the Splunk system hostname.
Default value is dns.
:param pulumi.Input[bool] disabled: Indicates if input is disabled.
:param pulumi.Input[str] host: Host from which the indexer gets data.
:param pulumi.Input[str] index: Index to store generated events. Defaults to default.
:param pulumi.Input[str] name: The input port which receives raw data.
:param pulumi.Input[str] queue: Valid values: (parsingQueue | indexQueue)
Specifies where the input processor should deposit the events it reads. Defaults to parsingQueue.
Set queue to parsingQueue to apply props.conf and other parsing rules to your data. For more information about props.conf and rules for timestamping and linebreaking, refer to props.conf and the online documentation at "Monitor files and directories with inputs.conf"
Set queue to indexQueue to send your data directly into the index.
:param pulumi.Input[int] raw_tcp_done_timeout: Specifies in seconds the timeout value for adding a Done-key. Default value is 10 seconds.
If a connection over the port specified by name remains idle after receiving data for specified number of seconds, it adds a Done-key. This implies the last event is completely received.
:param pulumi.Input[str] restrict_to_host: Allows for restricting this input to only accept data from the host specified here.
:param pulumi.Input[str] source: Sets the source key/field for events from this input. Defaults to the input file path.
Sets the source key initial value. The key is used during parsing/indexing, in particular to set the source field during indexing. It is also the source field used at search time. As a convenience, the chosen string is prepended with 'source::'.
:param pulumi.Input[str] sourcetype: Set the source type for events from this input.
"sourcetype=" is automatically prepended to <string>.
Defaults to audittrail (if signedaudit=true) or fschange (if signedaudit=false).
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = _InputsTcpRawState.__new__(_InputsTcpRawState)
__props__.__dict__["acl"] = acl
__props__.__dict__["connection_host"] = connection_host
__props__.__dict__["disabled"] = disabled
__props__.__dict__["host"] = host
__props__.__dict__["index"] = index
__props__.__dict__["name"] = name
__props__.__dict__["queue"] = queue
__props__.__dict__["raw_tcp_done_timeout"] = raw_tcp_done_timeout
__props__.__dict__["restrict_to_host"] = restrict_to_host
__props__.__dict__["source"] = source
__props__.__dict__["sourcetype"] = sourcetype
return InputsTcpRaw(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter
def acl(self) -> pulumi.Output['outputs.InputsTcpRawAcl']:
"""
The app/user context that is the namespace for the resource
"""
return pulumi.get(self, "acl")
@property
@pulumi.getter(name="connectionHost")
def connection_host(self) -> pulumi.Output[str]:
"""
Valid values: (ip | dns | none)
Set the host for the remote server that is sending data.
ip sets the host to the IP address of the remote server sending data.
dns sets the host to the reverse DNS entry for the IP address of the remote server sending data.
none leaves the host as specified in inputs.conf, which is typically the Splunk system hostname.
Default value is dns.
"""
return pulumi.get(self, "connection_host")
@property
@pulumi.getter
def disabled(self) -> pulumi.Output[bool]:
"""
Indicates if input is disabled.
"""
return pulumi.get(self, "disabled")
@property
@pulumi.getter
def host(self) -> pulumi.Output[str]:
"""
Host from which the indexer gets data.
"""
return pulumi.get(self, "host")
@property
@pulumi.getter
def index(self) -> pulumi.Output[str]:
"""
Index to store generated events. Defaults to default.
"""
return pulumi.get(self, "index")
@property
@pulumi.getter
def name(self) -> pulumi.Output[str]:
"""
The input port which receives raw data.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter
def queue(self) -> pulumi.Output[str]:
"""
Valid values: (parsingQueue | indexQueue)
Specifies where the input processor should deposit the events it reads. Defaults to parsingQueue.
Set queue to parsingQueue to apply props.conf and other parsing rules to your data. For more information about props.conf and rules for timestamping and linebreaking, refer to props.conf and the online documentation at "Monitor files and directories with inputs.conf"
Set queue to indexQueue to send your data directly into the index.
"""
return pulumi.get(self, "queue")
    @property
    @pulumi.getter(name="rawTcpDoneTimeout")
    def raw_tcp_done_timeout(self) -> pulumi.Output[int]:
        """
        Specifies in seconds the timeout value for adding a Done-key. Default value is 10 seconds.
        If a connection over the port specified by name remains idle after receiving data for the specified number of seconds, it adds a Done-key. This implies the last event is completely received.
        """
        return pulumi.get(self, "raw_tcp_done_timeout")
    @property
    @pulumi.getter(name="restrictToHost")
    def restrict_to_host(self) -> pulumi.Output[str]:
        """
        Allows for restricting this input to only accept data from the host specified here.
        """
        return pulumi.get(self, "restrict_to_host")
    @property
    @pulumi.getter
    def source(self) -> pulumi.Output[str]:
        """
        Sets the source key/field for events from this input. Defaults to the input file path.
        Sets the source key initial value. The key is used during parsing/indexing, in particular to set the source field during indexing. It is also the source field used at search time. As a convenience, the chosen string is prepended with 'source::'.
        """
        return pulumi.get(self, "source")
    @property
    @pulumi.getter
    def sourcetype(self) -> pulumi.Output[str]:
        """
        Set the source type for events from this input.
        "sourcetype=" is automatically prepended to <string>.
        Defaults to audittrail (if signedaudit=true) or fschange (if signedaudit=false).
        NOTE(review): the signedaudit-based defaults read like they were copied
        from the audit-input docs — confirm they apply to raw TCP inputs.
        """
        return pulumi.get(self, "sourcetype")
| 49.748641
| 282
| 0.656261
| 4,653
| 36,615
| 5.035246
| 0.054588
| 0.069017
| 0.06334
| 0.067609
| 0.926672
| 0.919715
| 0.90755
| 0.904179
| 0.902002
| 0.899484
| 0
| 0.000552
| 0.257982
| 36,615
| 735
| 283
| 49.816327
| 0.861791
| 0.473686
| 0
| 0.859788
| 1
| 0
| 0.083484
| 0.003566
| 0
| 0
| 0
| 0
| 0
| 1
| 0.164021
| false
| 0.002646
| 0.018519
| 0
| 0.280423
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
27d2a9a24d16dad8cca3b2cd2716420a4a550830
| 101
|
py
|
Python
|
awful.py
|
filecoin-project/bacalhau
|
02b0fe927f3e223b0b75463f12c7574297821dec
|
[
"Apache-2.0"
] | 19
|
2022-01-07T15:35:35.000Z
|
2022-03-31T12:44:58.000Z
|
awful.py
|
filecoin-project/bacalhau
|
02b0fe927f3e223b0b75463f12c7574297821dec
|
[
"Apache-2.0"
] | 56
|
2022-01-25T19:27:59.000Z
|
2022-03-29T07:53:44.000Z
|
awful.py
|
filecoin-project/bacalhau
|
02b0fe927f3e223b0b75463f12c7574297821dec
|
[
"Apache-2.0"
] | 8
|
2022-01-12T16:09:05.000Z
|
2022-03-25T07:27:40.000Z
|
import random
import time

# Chaos-test script: ~70% of the time allocate a transient ~100 MB string,
# otherwise print "fast"; then hold the process alive for 100 seconds.
# Fix: the original called bare `sleep(100)` (NameError — only the `time`
# module was imported, `sleep` was never bound).
if random.random() > 0.3:
    _ = "0" * 1024 * 1024 * 100
else:
    print("fast")
time.sleep(100)
| 101
| 101
| 0.70297
| 18
| 101
| 3.944444
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.188889
| 0.108911
| 101
| 1
| 101
| 101
| 0.6
| 0
| 0
| 0
| 0
| 0
| 0.04902
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 1
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 1
|
0
| 7
|
fd7f9198b6fab9f66671565d926de5d3359a03ab
| 16,094
|
py
|
Python
|
torch_ecg/train/train_unet_cpsc2019/model.py
|
busyyang/torch_ecg
|
031d90a32b8a1e202364efe1e5a19a9ba1f0a726
|
[
"MIT"
] | null | null | null |
torch_ecg/train/train_unet_cpsc2019/model.py
|
busyyang/torch_ecg
|
031d90a32b8a1e202364efe1e5a19a9ba1f0a726
|
[
"MIT"
] | null | null | null |
torch_ecg/train/train_unet_cpsc2019/model.py
|
busyyang/torch_ecg
|
031d90a32b8a1e202364efe1e5a19a9ba1f0a726
|
[
"MIT"
] | null | null | null |
"""
"""
from copy import deepcopy
from functools import reduce
from typing import Union, Optional, Sequence, Tuple, List, NoReturn
import numpy as np
import pandas as pd
import torch
from torch import Tensor
from easydict import EasyDict as ED
import biosppy.signals.ecg as BSE
from torch_ecg.models.ecg_subtract_unet import ECG_SUBTRACT_UNET
from torch_ecg.models.ecg_unet import ECG_UNET
from .cfg import ModelCfg
from .utils import mask_to_intervals, _remove_spikes_naive
# Public API of this module: the two CPSC2019-adapted U-Net QRS detectors.
__all__ = [
    "ECG_SUBTRACT_UNET_CPSC2019",
    "ECG_UNET_CPSC2019",
]
class ECG_SUBTRACT_UNET_CPSC2019(ECG_SUBTRACT_UNET):
    """
    Subtraction-style U-Net QRS detector configured for the CPSC2019 challenge.
    """
    __DEBUG__ = True
    __name__ = "ECG_SUBTRACT_UNET_CPSC2019"
    def __init__(self, n_leads:int, config:Optional[ED]=None) -> NoReturn:
        """ finished, checked,
        Parameters:
        -----------
        n_leads: int,
            number of leads (number of input channels)
        config: dict, optional,
            other hyper-parameters, including kernel sizes, etc.
            ref. the corresponding config file
        """
        # start from the project defaults, then overlay caller-supplied overrides
        model_config = deepcopy(ModelCfg.subtract_unet)
        model_config.update(deepcopy(config) or {})
        super().__init__(model_config.classes, n_leads, model_config)
    @torch.no_grad()
    def inference(self, input:Union[np.ndarray,Tensor], bin_pred_thr:float=0.5, duration_thr:int=4*16, dist_thr:Union[int,Sequence[int]]=200, correction:bool=False) -> Tuple[np.ndarray, List[np.ndarray]]:
        """ finished, NOT checked,
        auxiliary function to `forward`, for CPSC2019,
        NOTE: each segment of input be better filtered using `_remove_spikes_naive`,
        and normalized to a suitable mean and std
        Parameters:
        -----------
        input: ndarray or Tensor,
            input tensor, of shape (batch_size, channels, seq_len)
        bin_pred_thr: float, default 0.5,
            the threshold for making binary predictions from scalar predictions
        duration_thr: int, default 4*16,
            minimum duration for a "true" qrs complex, units in ms
        dist_thr: int or sequence of int, default 200,
            if is sequence of int,
            (0-th element). minimum distance for two consecutive qrs complexes, units in ms;
            (1st element).(optional) maximum distance for checking missing qrs complexes, units in ms,
            e.g. [200, 1200]
            if is int, then is the case of (0-th element).
        correction: bool, default False,
            if True, correct rpeaks to local maximum in a small nbh
            of rpeaks detected by DL model using `BSE.correct_rpeaks`
        Returns:
        --------
        pred: ndarray,
            the array of scalar predictions
        rpeaks: list of ndarray,
            list of rpeak indices for each batch element
        """
        if torch.cuda.is_available():
            device = torch.device("cuda")
        else:
            device = torch.device("cpu")
        self.to(device)
        batch_size, channels, seq_len = input.shape
        if isinstance(input, np.ndarray):
            _input = torch.from_numpy(input).to(device)
        else:
            _input = input.to(device)
        pred = self.forward(_input)
        pred = self.sigmoid(pred)
        pred = pred.cpu().detach().numpy().squeeze(-1)
        # prob --> qrs mask --> qrs intervals --> rpeaks
        rpeaks = self._inference_post_process(
            pred=pred,
            bin_pred_thr=bin_pred_thr,
            duration_thr=duration_thr,
            dist_thr=dist_thr
        )
        if correction:
            # NOTE(review): `_input.detach().numpy()` raises when `_input` lives on
            # a CUDA device; a `.cpu()` before `.numpy()` looks needed — confirm.
            rpeaks = [
                BSE.correct_rpeaks(
                    signal=b_input,
                    rpeaks=b_rpeaks,
                    sampling_rate=self.config.fs,
                    tol=0.05,
                )[0] for b_input, b_rpeaks in zip(_input.detach().numpy().squeeze(1), rpeaks)
            ]
        return pred, rpeaks
    def _inference_post_process(self, pred:np.ndarray, bin_pred_thr:float=0.5, duration_thr:int=4*16, dist_thr:Union[int,Sequence[int]]=200) -> List[np.ndarray]:
        """ finished, checked,
        prob --> qrs mask --> qrs intervals --> rpeaks
        Parameters: ref. `self.inference`
        """
        batch_size, prob_arr_len = pred.shape
        input_len = prob_arr_len
        model_spacing = 1000 / self.config.fs  # units in ms
        _duration_thr = duration_thr / model_spacing
        _dist_thr = [dist_thr] if isinstance(dist_thr, int) else dist_thr
        assert len(_dist_thr) <= 2
        # mask = (pred >= bin_pred_thr).astype(int)
        rpeaks = []
        for b_idx in range(batch_size):
            b_prob = pred[b_idx,...]
            b_mask = (b_prob >= bin_pred_thr).astype(int)
            b_qrs_intervals = mask_to_intervals(b_mask, 1)
            # candidate rpeak = midpoint of each sufficiently long qrs interval
            b_rpeaks = np.array([
                (itv[0]+itv[1])//2 for itv in b_qrs_intervals if itv[1]-itv[0] >= _duration_thr
            ])
            # print(f"before post-process, b_qrs_intervals = {b_qrs_intervals}")
            # print(f"before post-process, b_rpeaks = {b_rpeaks}")
            check = True
            dist_thr_inds = _dist_thr[0] / model_spacing
            # merge pass: repeatedly drop the lower-probability peak of any pair
            # closer than the minimum distance, restarting the scan after each removal
            while check:
                check = False
                b_rpeaks_diff = np.diff(b_rpeaks)
                for r in range(len(b_rpeaks_diff)):
                    if b_rpeaks_diff[r] < dist_thr_inds: # 200 ms
                        prev_r_ind = b_rpeaks[r]
                        next_r_ind = b_rpeaks[r+1]
                        if b_prob[prev_r_ind] > b_prob[next_r_ind]:
                            del_ind = r+1
                        else:
                            del_ind = r
                        b_rpeaks = np.delete(b_rpeaks, del_ind)
                        check = True
                        break
            if len(_dist_thr) == 1:
                # no maximum-gap threshold: just clip to the valid region and finish this batch item
                b_rpeaks = b_rpeaks[np.where((b_rpeaks>=self.config.skip_dist) & (b_rpeaks<input_len-self.config.skip_dist))[0]]
                rpeaks.append(b_rpeaks)
                continue
            check = True
            # TODO: parallel the following block
            # CAUTION !!!
            # this part is extremely slow in some cases (long duration and low SNR)
            dist_thr_inds = _dist_thr[1] / model_spacing
            # recovery pass: for gaps wider than the maximum distance, search the
            # mask between the flanking qrs intervals for a missed complex
            while check:
                check = False
                b_rpeaks_diff = np.diff(b_rpeaks)
                for r in range(len(b_rpeaks_diff)):
                    if b_rpeaks_diff[r] >= dist_thr_inds: # 1200 ms
                        prev_r_ind = b_rpeaks[r]
                        next_r_ind = b_rpeaks[r+1]
                        prev_qrs = [itv for itv in b_qrs_intervals if itv[0]<=prev_r_ind<=itv[1]][0]
                        next_qrs = [itv for itv in b_qrs_intervals if itv[0]<=next_r_ind<=itv[1]][0]
                        check_itv = [prev_qrs[1], next_qrs[0]]
                        l_new_itv = mask_to_intervals(b_mask[check_itv[0]: check_itv[1]], 1)
                        if len(l_new_itv) == 0:
                            continue
                        l_new_itv = [[itv[0]+check_itv[0], itv[1]+check_itv[0]] for itv in l_new_itv]
                        # prefer the widest interval; break width ties by peak probability
                        new_itv = max(l_new_itv, key=lambda itv: itv[1]-itv[0])
                        new_max_prob = (b_prob[new_itv[0]:new_itv[1]]).max()
                        for itv in l_new_itv:
                            itv_prob = (b_prob[itv[0]:itv[1]]).max()
                            if itv[1] - itv[0] == new_itv[1] - new_itv[0] and itv_prob > new_max_prob:
                                new_itv = itv
                                new_max_prob = itv_prob
                        # NOTE(review): inserted index is 4*(start+end) == 8*midpoint,
                        # while the candidates above use (start+end)//2 — confirm the
                        # intended scale factor.
                        b_rpeaks = np.insert(b_rpeaks, r+1, 4*(new_itv[0]+new_itv[1]))
                        check = True
                        break
            b_rpeaks = b_rpeaks[np.where((b_rpeaks>=self.config.skip_dist) & (b_rpeaks<input_len-self.config.skip_dist))[0]]
            rpeaks.append(b_rpeaks)
        return rpeaks
    def inference_CPSC2019(self, input:Union[np.ndarray,Tensor], bin_pred_thr:float=0.5, duration_thr:int=4*16, dist_thr:Union[int,Sequence[int]]=200, correction:bool=False) -> Tuple[np.ndarray, List[np.ndarray]]:
        """
        alias of `self.inference`
        """
        return self.inference(input, bin_pred_thr, duration_thr, dist_thr, correction)
class ECG_UNET_CPSC2019(ECG_UNET):
    """
    Vanilla U-Net QRS detector configured for the CPSC2019 challenge.
    """
    __DEBUG__ = True
    __name__ = "ECG_UNET_CPSC2019"
    def __init__(self, n_leads:int, config:Optional[ED]=None) -> NoReturn:
        """ finished, checked,
        Parameters:
        -----------
        n_leads: int,
            number of leads (number of input channels)
        config: dict, optional,
            other hyper-parameters, including kernel sizes, etc.
            ref. the corresponding config file
        """
        # start from the project defaults, then overlay caller-supplied overrides
        model_config = deepcopy(ModelCfg.unet)
        model_config.update(deepcopy(config) or {})
        super().__init__(model_config.classes, n_leads, model_config)
    @torch.no_grad()
    def inference(self, input:Union[np.ndarray,Tensor], bin_pred_thr:float=0.5, duration_thr:int=4*16, dist_thr:Union[int,Sequence[int]]=200, correction:bool=False) -> Tuple[np.ndarray, List[np.ndarray]]:
        """ finished, NOT checked,
        auxiliary function to `forward`, for CPSC2019,
        NOTE: each segment of input be better filtered using `_remove_spikes_naive`,
        and normalized to a suitable mean and std
        Parameters:
        -----------
        input: ndarray or Tensor,
            input tensor, of shape (batch_size, channels, seq_len)
        bin_pred_thr: float, default 0.5,
            the threshold for making binary predictions from scalar predictions
        duration_thr: int, default 4*16,
            minimum duration for a "true" qrs complex, units in ms
        dist_thr: int or sequence of int, default 200,
            if is sequence of int,
            (0-th element). minimum distance for two consecutive qrs complexes, units in ms;
            (1st element).(optional) maximum distance for checking missing qrs complexes, units in ms,
            e.g. [200, 1200]
            if is int, then is the case of (0-th element).
        correction: bool, default False,
            if True, correct rpeaks to local maximum in a small nbh
            of rpeaks detected by DL model using `BSE.correct_rpeaks`
        Returns:
        --------
        pred: ndarray,
            the array of scalar predictions
        rpeaks: list of ndarray,
            list of rpeak indices for each batch element
        """
        if torch.cuda.is_available():
            device = torch.device("cuda")
        else:
            device = torch.device("cpu")
        self.to(device)
        batch_size, channels, seq_len = input.shape
        if isinstance(input, np.ndarray):
            _input = torch.from_numpy(input).to(device)
        else:
            _input = input.to(device)
        pred = self.forward(_input)
        pred = self.sigmoid(pred)
        pred = pred.cpu().detach().numpy().squeeze(-1)
        # prob --> qrs mask --> qrs intervals --> rpeaks
        rpeaks = self._inference_post_process(
            pred=pred,
            bin_pred_thr=bin_pred_thr,
            duration_thr=duration_thr,
            dist_thr=dist_thr
        )
        if correction:
            # NOTE(review): `_input.detach().numpy()` raises when `_input` lives on
            # a CUDA device; a `.cpu()` before `.numpy()` looks needed — confirm.
            rpeaks = [
                BSE.correct_rpeaks(
                    signal=b_input,
                    rpeaks=b_rpeaks,
                    sampling_rate=self.config.fs,
                    tol=0.05,
                )[0] for b_input, b_rpeaks in zip(_input.detach().numpy().squeeze(1), rpeaks)
            ]
        return pred, rpeaks
    def _inference_post_process(self, pred:np.ndarray, bin_pred_thr:float=0.5, duration_thr:int=4*16, dist_thr:Union[int,Sequence[int]]=200) -> List[np.ndarray]:
        """ finished, checked,
        prob --> qrs mask --> qrs intervals --> rpeaks
        Parameters: ref. `self.inference`
        """
        batch_size, prob_arr_len = pred.shape
        input_len = prob_arr_len
        model_spacing = 1000 / self.config.fs  # units in ms
        _duration_thr = duration_thr / model_spacing
        _dist_thr = [dist_thr] if isinstance(dist_thr, int) else dist_thr
        assert len(_dist_thr) <= 2
        # mask = (pred >= bin_pred_thr).astype(int)
        rpeaks = []
        for b_idx in range(batch_size):
            b_prob = pred[b_idx,...]
            b_mask = (b_prob >= bin_pred_thr).astype(int)
            b_qrs_intervals = mask_to_intervals(b_mask, 1)
            # candidate rpeak = midpoint of each sufficiently long qrs interval
            b_rpeaks = np.array([
                (itv[0]+itv[1])//2 for itv in b_qrs_intervals if itv[1]-itv[0] >= _duration_thr
            ])
            # print(f"before post-process, b_qrs_intervals = {b_qrs_intervals}")
            # print(f"before post-process, b_rpeaks = {b_rpeaks}")
            check = True
            dist_thr_inds = _dist_thr[0] / model_spacing
            # merge pass: repeatedly drop the lower-probability peak of any pair
            # closer than the minimum distance, restarting the scan after each removal
            while check:
                check = False
                b_rpeaks_diff = np.diff(b_rpeaks)
                for r in range(len(b_rpeaks_diff)):
                    if b_rpeaks_diff[r] < dist_thr_inds: # 200 ms
                        prev_r_ind = b_rpeaks[r]
                        next_r_ind = b_rpeaks[r+1]
                        if b_prob[prev_r_ind] > b_prob[next_r_ind]:
                            del_ind = r+1
                        else:
                            del_ind = r
                        b_rpeaks = np.delete(b_rpeaks, del_ind)
                        check = True
                        break
            if len(_dist_thr) == 1:
                # no maximum-gap threshold: just clip to the valid region and finish this batch item
                b_rpeaks = b_rpeaks[np.where((b_rpeaks>=self.config.skip_dist) & (b_rpeaks<input_len-self.config.skip_dist))[0]]
                rpeaks.append(b_rpeaks)
                continue
            check = True
            # TODO: parallel the following block
            # CAUTION !!!
            # this part is extremely slow in some cases (long duration and low SNR)
            dist_thr_inds = _dist_thr[1] / model_spacing
            # recovery pass: for gaps wider than the maximum distance, search the
            # mask between the flanking qrs intervals for a missed complex
            while check:
                check = False
                b_rpeaks_diff = np.diff(b_rpeaks)
                for r in range(len(b_rpeaks_diff)):
                    if b_rpeaks_diff[r] >= dist_thr_inds: # 1200 ms
                        prev_r_ind = b_rpeaks[r]
                        next_r_ind = b_rpeaks[r+1]
                        prev_qrs = [itv for itv in b_qrs_intervals if itv[0]<=prev_r_ind<=itv[1]][0]
                        next_qrs = [itv for itv in b_qrs_intervals if itv[0]<=next_r_ind<=itv[1]][0]
                        check_itv = [prev_qrs[1], next_qrs[0]]
                        l_new_itv = mask_to_intervals(b_mask[check_itv[0]: check_itv[1]], 1)
                        if len(l_new_itv) == 0:
                            continue
                        l_new_itv = [[itv[0]+check_itv[0], itv[1]+check_itv[0]] for itv in l_new_itv]
                        # prefer the widest interval; break width ties by peak probability
                        new_itv = max(l_new_itv, key=lambda itv: itv[1]-itv[0])
                        new_max_prob = (b_prob[new_itv[0]:new_itv[1]]).max()
                        for itv in l_new_itv:
                            itv_prob = (b_prob[itv[0]:itv[1]]).max()
                            if itv[1] - itv[0] == new_itv[1] - new_itv[0] and itv_prob > new_max_prob:
                                new_itv = itv
                                new_max_prob = itv_prob
                        # NOTE(review): inserted index is 4*(start+end) == 8*midpoint,
                        # while the candidates above use (start+end)//2 — confirm the
                        # intended scale factor.
                        b_rpeaks = np.insert(b_rpeaks, r+1, 4*(new_itv[0]+new_itv[1]))
                        check = True
                        break
            b_rpeaks = b_rpeaks[np.where((b_rpeaks>=self.config.skip_dist) & (b_rpeaks<input_len-self.config.skip_dist))[0]]
            rpeaks.append(b_rpeaks)
        return rpeaks
    def inference_CPSC2019(self, input:Union[np.ndarray,Tensor], bin_pred_thr:float=0.5, duration_thr:int=4*16, dist_thr:Union[int,Sequence[int]]=200, correction:bool=False) -> Tuple[np.ndarray, List[np.ndarray]]:
        """
        alias of `self.inference`
        """
        return self.inference(input, bin_pred_thr, duration_thr, dist_thr, correction)
| 43.032086
| 213
| 0.55791
| 2,088
| 16,094
| 4.048372
| 0.108716
| 0.051343
| 0.021294
| 0.014196
| 0.951851
| 0.935999
| 0.935999
| 0.935999
| 0.935999
| 0.935999
| 0
| 0.023383
| 0.340997
| 16,094
| 373
| 214
| 43.147453
| 0.773619
| 0.227538
| 0
| 0.88
| 0
| 0
| 0.008557
| 0.004449
| 0
| 0
| 0
| 0.005362
| 0.008889
| 1
| 0.035556
| false
| 0
| 0.057778
| 0
| 0.146667
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
fdbd72ed7bd0dbe7b951fb5a54f8e04d992201f9
| 647
|
py
|
Python
|
tests/parser/recursive_aggregates.13.test.py
|
veltri/DLV2
|
944aaef803aa75e7ec51d7e0c2b0d964687fdd0e
|
[
"Apache-2.0"
] | null | null | null |
tests/parser/recursive_aggregates.13.test.py
|
veltri/DLV2
|
944aaef803aa75e7ec51d7e0c2b0d964687fdd0e
|
[
"Apache-2.0"
] | null | null | null |
tests/parser/recursive_aggregates.13.test.py
|
veltri/DLV2
|
944aaef803aa75e7ec51d7e0c2b0d964687fdd0e
|
[
"Apache-2.0"
] | null | null | null |
# DLV2 parser round-trip fixture: recursive aggregates (#sum) in the classic
# company-controls example; the parser is expected to echo the program unchanged,
# so `output` is byte-identical to `input`.
input = """
% Company Controls
cv(X,X,Y,S) :- owns_stock(X,Y,S).
cv(X,Z,Y,S) :- company(X),controls(X,Z),owns_stock(Z,Y,S).
controls(X,Y) :-
50 < #sum{S,Z : cv(X,Z,Y,S) }, company(X),company(Y).
company(a).
company(b).
company(c).
% from [Van Gelder '93]
owns_stock(a,b,40).
owns_stock(b,b,20).
"""
output = """
% Company Controls
cv(X,X,Y,S) :- owns_stock(X,Y,S).
cv(X,Z,Y,S) :- company(X),controls(X,Z),owns_stock(Z,Y,S).
controls(X,Y) :-
50 < #sum{S,Z : cv(X,Z,Y,S) }, company(X),company(Y).
company(a).
company(b).
company(c).
% from [Van Gelder '93]
owns_stock(a,b,40).
owns_stock(b,b,20).
"""
| 18.485714
| 59
| 0.564142
| 128
| 647
| 2.789063
| 0.171875
| 0.056022
| 0.05042
| 0.056022
| 0.969188
| 0.969188
| 0.969188
| 0.969188
| 0.969188
| 0.969188
| 0
| 0.029907
| 0.173107
| 647
| 34
| 60
| 19.029412
| 0.637383
| 0
| 0
| 0.923077
| 0
| 0.153846
| 0.949757
| 0.210697
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
8b4c9137b55d3a85848151be88131344ab4ae698
| 154
|
py
|
Python
|
ramda/prop_satisfies_test.py
|
jakobkolb/ramda.py
|
982b2172f4bb95b9a5b09eff8077362d6f2f0920
|
[
"MIT"
] | 56
|
2018-08-06T08:44:58.000Z
|
2022-03-17T09:49:03.000Z
|
ramda/prop_satisfies_test.py
|
jakobkolb/ramda.py
|
982b2172f4bb95b9a5b09eff8077362d6f2f0920
|
[
"MIT"
] | 28
|
2019-06-17T11:09:52.000Z
|
2022-02-18T16:59:21.000Z
|
ramda/prop_satisfies_test.py
|
jakobkolb/ramda.py
|
982b2172f4bb95b9a5b09eff8077362d6f2f0920
|
[
"MIT"
] | 5
|
2019-09-18T09:24:38.000Z
|
2021-07-21T08:40:23.000Z
|
from ramda import *
from ramda.private.asserts import *
def prop_satisfies_test():
    """prop_satisfies holds when the predicate is true of the named property."""
    subject = {"x": 1, "y": 2}
    result = prop_satisfies(lt(2), "x", subject)
    assert_equal(result, True)
| 22
| 68
| 0.675325
| 24
| 154
| 4.166667
| 0.708333
| 0.18
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.022901
| 0.149351
| 154
| 6
| 69
| 25.666667
| 0.740458
| 0
| 0
| 0
| 0
| 0
| 0.019481
| 0
| 0
| 0
| 0
| 0
| 0.5
| 1
| 0.25
| true
| 0
| 0.5
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 1
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
8b824c74fd67d682c53c5cb67442024490be67e7
| 3,551
|
py
|
Python
|
tests/cpp/python/python_http_tests.py
|
exasol/r-exasol
|
478693febe4650d02f77238ff04f315f48a55d08
|
[
"BSD-3-Clause"
] | 12
|
2018-12-07T12:47:07.000Z
|
2022-01-02T08:38:06.000Z
|
tests/cpp/python/python_http_tests.py
|
exasol/r-exasol
|
478693febe4650d02f77238ff04f315f48a55d08
|
[
"BSD-3-Clause"
] | 50
|
2019-03-21T13:13:57.000Z
|
2022-03-29T11:17:10.000Z
|
tests/cpp/python/python_http_tests.py
|
exasol/r-exasol
|
478693febe4650d02f77238ff04f315f48a55d08
|
[
"BSD-3-Clause"
] | 3
|
2021-05-25T07:42:43.000Z
|
2021-07-06T07:20:14.000Z
|
import socket
import string
import subprocess
import socket_wrapper
def reading_test(protocol: str):
    """Drive the C++ 'Import<protocol>' test binary from the server side.

    Sends one chunk of 'CHUNK DATA;' * 20 in the EXASolution chunked framing
    (hex length line, payload + two zero bytes, then a zero-length terminator)
    and asserts the client answers HTTP 200 and exits cleanly.
    Note: the annotation previously read `string` (the stdlib module); `str`
    is the intended type.
    """
    serversocket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    serversocket.bind(("localhost", 5000))
    # become a server socket
    serversocket.listen(5)
    p_unit_test = subprocess.Popen(["./r_exasol_tests", "Import" + protocol])
    (clientsocket, address) = serversocket.accept()
    clientsocket = socket_wrapper.wrap(clientsocket, protocol)
    data = 'CHUNK DATA;' * 20
    # chunk header: payload length in hex, newline-terminated
    b = bytearray(f'{hex(len(data))}\n', 'UTF-8')
    clientsocket.send(b)
    d = bytearray(data, 'UTF-8')
    d.append(0)
    d.append(0)
    clientsocket.send(d)
    # Send zero-length chunk as terminator
    b = bytearray(f'{0}\n', 'UTF-8')
    clientsocket.send(b)
    recvmsg = clientsocket.recv(100)
    assert recvmsg == b'HTTP/1.1 200 OK\r\nServer: EXASolution R Package\r\nConnection: close\r\n\r\n'
    socket_wrapper.unwrap(clientsocket, protocol)
    p_unit_test.wait()
    try:
        clientsocket.shutdown(socket.SHUT_RDWR)
    except socket.error:
        pass # Under MacOS shutdown can throw an exception if the client has closed the socket already
    assert p_unit_test.returncode == 0
    serversocket.close()
def reading_test_big(protocol: str):
    """Drive the C++ 'ImportBig<protocol>' test binary with 1000 large chunks.

    Same framing as `reading_test` but streams 1000 chunks of
    'CHUNK DATA;' * 20000 before the zero-length terminator, to exercise the
    client's bulk-read path. Annotation fixed from `string` (module) to `str`.
    """
    serversocket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    serversocket.bind(("localhost", 5000))
    # become a server socket
    serversocket.listen(5)
    p_unit_test = subprocess.Popen(["./r_exasol_tests", "ImportBig" + protocol])
    (clientsocket, address) = serversocket.accept()
    clientsocket = socket_wrapper.wrap(clientsocket, protocol)
    for idxChunk in range(1000):
        data = 'CHUNK DATA;' * 20000
        # chunk header: payload length in hex, newline-terminated
        b = bytearray(f'{hex(len(data))}\n', 'UTF-8')
        clientsocket.send(b)
        d = bytearray(data, 'UTF-8')
        d.append(0)
        d.append(0)
        clientsocket.send(d)
    # Send zero-length chunk as terminator
    b = bytearray(f'{0}\n', 'UTF-8')
    clientsocket.send(b)
    recvmsg = clientsocket.recv(100)
    assert recvmsg == b'HTTP/1.1 200 OK\r\nServer: EXASolution R Package\r\nConnection: close\r\n\r\n'
    socket_wrapper.unwrap(clientsocket, protocol)
    p_unit_test.wait()
    try:
        clientsocket.shutdown(socket.SHUT_RDWR)
    except socket.error:
        pass # Under MacOS shutdown can throw an exception if the client has closed the socket already
    assert p_unit_test.returncode == 0
    serversocket.close()
def writing_test(protocol: str):
    """Drive the C++ 'Export<protocol>' test binary from the server side.

    Receives the client's HTTP export header, then one chunk of
    'CHUNK DATA;' * 20, asserting both match exactly and the client exits
    cleanly. Annotation fixed from `string` (module) to `str`.
    """
    serversocket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    serversocket.bind(("localhost", 5000))
    # become a server socket
    serversocket.listen(5)
    p_unit_test = subprocess.Popen(["./r_exasol_tests", "Export" + protocol])
    (clientsocket, address) = serversocket.accept()
    clientsocket = socket_wrapper.wrap(clientsocket, protocol)
    header = b'HTTP/1.1 200 OK\r\nServer: EXASolution R Package\r\nContent-type: application/octet-stream\r\nContent-disposition: attachment; filename=data.csv\r\nConnection: close\r\n\r\n'
    recvmsg = clientsocket.recv(len(header))
    assert recvmsg == header
    data = 'CHUNK DATA;' * 20
    recvmsg = clientsocket.recv(len(data))
    assert data.encode('UTF-8') == recvmsg
    socket_wrapper.unwrap(clientsocket, protocol)
    p_unit_test.wait()
    try:
        clientsocket.shutdown(socket.SHUT_RDWR)
    except socket.error:
        pass # Under MacOS shutdown can throw an exception if the client has closed the socket already
    assert p_unit_test.returncode == 0
    serversocket.close()
| 32.87963
| 189
| 0.689946
| 465
| 3,551
| 5.174194
| 0.223656
| 0.018703
| 0.033666
| 0.028263
| 0.844555
| 0.844555
| 0.844555
| 0.835827
| 0.835827
| 0.835827
| 0
| 0.022648
| 0.191777
| 3,551
| 107
| 190
| 33.186916
| 0.815679
| 0.105322
| 0
| 0.782051
| 0
| 0.038462
| 0.169508
| 0.046717
| 0
| 0
| 0
| 0
| 0.089744
| 1
| 0.038462
| false
| 0.038462
| 0.076923
| 0
| 0.115385
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
47331ee65014989b93a9bed428aff41571c645ac
| 49
|
py
|
Python
|
instance/config.py
|
edwinkim16/NewsHighlight
|
b9309a048797332fec5696e397a98ef06bd809bd
|
[
"MIT"
] | null | null | null |
instance/config.py
|
edwinkim16/NewsHighlight
|
b9309a048797332fec5696e397a98ef06bd809bd
|
[
"MIT"
] | null | null | null |
instance/config.py
|
edwinkim16/NewsHighlight
|
b9309a048797332fec5696e397a98ef06bd809bd
|
[
"MIT"
] | null | null | null |
# SECURITY: hard-coded API key committed to source control. Rotate this key and
# load it from an environment variable or an untracked instance config instead.
NEWS_API_KEY = '4232538dea47480c930c501d928386b4'
| 49
| 49
| 0.897959
| 4
| 49
| 10.5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.531915
| 0.040816
| 49
| 1
| 49
| 49
| 0.361702
| 0
| 0
| 0
| 0
| 0
| 0.64
| 0.64
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
47764a8ce08386fb12f68b398cff2a203aa6ac55
| 184
|
py
|
Python
|
scripts/cuboid.py
|
gauravd327/rubiks-cube
|
ea7fc1e65c292ecce75edef6a366b739279ea4a7
|
[
"MIT"
] | null | null | null |
scripts/cuboid.py
|
gauravd327/rubiks-cube
|
ea7fc1e65c292ecce75edef6a366b739279ea4a7
|
[
"MIT"
] | null | null | null |
scripts/cuboid.py
|
gauravd327/rubiks-cube
|
ea7fc1e65c292ecce75edef6a366b739279ea4a7
|
[
"MIT"
] | null | null | null |
class Cuboid:
    """A single colored cell (sticker) of a Rubik's cube face.

    Improvements over the original: idiomatic `class Cuboid:` header and a
    `__repr__` for debuggability; the getter/setter pair is kept unchanged
    for existing callers.
    """

    def __init__(self, color):
        # color: opaque label (e.g. a string); stored as-is
        self.color = color

    def __repr__(self):
        return f"{type(self).__name__}({self.color!r})"

    def getColor(self):
        """Return this cuboid's current color."""
        return self.color

    def setColor(self, color):
        """Replace this cuboid's color."""
        self.color = color
| 18.4
| 30
| 0.592391
| 22
| 184
| 4.772727
| 0.409091
| 0.428571
| 0.247619
| 0.342857
| 0.438095
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.304348
| 184
| 10
| 31
| 18.4
| 0.820313
| 0
| 0
| 0.285714
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.428571
| false
| 0
| 0
| 0.142857
| 0.714286
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
47b0deee86988e477baef43d04eb21994bca1291
| 189
|
py
|
Python
|
server/problem_sets/gen/__init__.py
|
iiridescent/problem-sets
|
e906fe7509cd158ecdb5920853636339d4d531c3
|
[
"MIT"
] | null | null | null |
server/problem_sets/gen/__init__.py
|
iiridescent/problem-sets
|
e906fe7509cd158ecdb5920853636339d4d531c3
|
[
"MIT"
] | 5
|
2021-03-09T10:36:59.000Z
|
2022-02-26T14:36:08.000Z
|
server/problem_sets/gen/__init__.py
|
vinhowe/problem-sets
|
e906fe7509cd158ecdb5920853636339d4d531c3
|
[
"MIT"
] | null | null | null |
# Copyright (c) 2019 Thomas Howe
from .format import *
from .gen import *
from .gen_problem import *
from .manager import *
from .math import *
from .testbed import *
from .util import *
| 18.9
| 33
| 0.719577
| 27
| 189
| 5
| 0.518519
| 0.444444
| 0.192593
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.026144
| 0.190476
| 189
| 9
| 34
| 21
| 0.856209
| 0.15873
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
d03b1b62d2e903614e1430c546314094689d72b2
| 530
|
py
|
Python
|
eval_ricord1a_timm-regnetx_002_RandomSnow.py
|
BrunoKrinski/segtool
|
cb604b5f38104c43a76450136e37c3d1c4b6d275
|
[
"MIT"
] | null | null | null |
eval_ricord1a_timm-regnetx_002_RandomSnow.py
|
BrunoKrinski/segtool
|
cb604b5f38104c43a76450136e37c3d1c4b6d275
|
[
"MIT"
] | null | null | null |
eval_ricord1a_timm-regnetx_002_RandomSnow.py
|
BrunoKrinski/segtool
|
cb604b5f38104c43a76450136e37c3d1c4b6d275
|
[
"MIT"
] | null | null | null |
import os

# Run the ricord1a / unetplusplus / timm-regnetx_002 evaluation under the
# RandomSnow corruption, one command per cross-validation fold (0-4).
ls=["python main.py --configs configs/eval_ricord1a_unetplusplus_timm-regnetx_002_0_RandomSnow.yml",
    "python main.py --configs configs/eval_ricord1a_unetplusplus_timm-regnetx_002_1_RandomSnow.yml",
    "python main.py --configs configs/eval_ricord1a_unetplusplus_timm-regnetx_002_2_RandomSnow.yml",
    "python main.py --configs configs/eval_ricord1a_unetplusplus_timm-regnetx_002_3_RandomSnow.yml",
    "python main.py --configs configs/eval_ricord1a_unetplusplus_timm-regnetx_002_4_RandomSnow.yml",
    ]
for l in ls:
    # os.system returns the command's exit status; the original discarded it,
    # silently hiding failed folds. Report failures but keep running the rest.
    status = os.system(l)
    if status != 0:
        print("command failed with status {}: {}".format(status, l))
| 48.181818
| 100
| 0.843396
| 80
| 530
| 5.2125
| 0.3
| 0.119904
| 0.143885
| 0.227818
| 0.892086
| 0.892086
| 0.892086
| 0.892086
| 0.892086
| 0.892086
| 0
| 0.0501
| 0.058491
| 530
| 11
| 101
| 48.181818
| 0.785571
| 0
| 0
| 0
| 0
| 0
| 0.875706
| 0.640301
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.111111
| 0
| 0.111111
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
d05465e4a7e17d1860cdf197c84020d7ad706163
| 206
|
py
|
Python
|
platform/radio/efr32_multiphy_configurator/pylib_multi_phy_model/protocol_reference_files/parts/nixi/z_wave_ANZ_validation_testing.py
|
PascalGuenther/gecko_sdk
|
2e82050dc8823c9fe0e8908c1b2666fb83056230
|
[
"Zlib"
] | 82
|
2016-06-29T17:24:43.000Z
|
2021-04-16T06:49:17.000Z
|
platform/radio/efr32_multiphy_configurator/pylib_multi_phy_model/protocol_reference_files/parts/nixi/z_wave_ANZ_validation_testing.py
|
PascalGuenther/gecko_sdk
|
2e82050dc8823c9fe0e8908c1b2666fb83056230
|
[
"Zlib"
] | 6
|
2022-01-12T18:22:08.000Z
|
2022-03-25T10:19:27.000Z
|
platform/radio/efr32_multiphy_configurator/pylib_multi_phy_model/protocol_reference_files/parts/nixi/z_wave_ANZ_validation_testing.py
|
PascalGuenther/gecko_sdk
|
2e82050dc8823c9fe0e8908c1b2666fb83056230
|
[
"Zlib"
] | 56
|
2016-08-02T10:50:50.000Z
|
2021-07-19T08:57:34.000Z
|
from ..nerio.z_wave_ANZ_validation_testing import z_wave_ANZ_validation_testing as z_wave_ANZ_validation_testing_nerio
class z_wave_ANZ_validation_testing(z_wave_ANZ_validation_testing_nerio):
    """ANZ-region Z-Wave validation-testing configuration for the nixi part.

    Intentionally empty: the nixi part reuses the nerio implementation unchanged.
    """
    pass
| 25.75
| 118
| 0.88835
| 33
| 206
| 4.878788
| 0.333333
| 0.15528
| 0.248447
| 0.559006
| 0.838509
| 0.372671
| 0
| 0
| 0
| 0
| 0
| 0
| 0.082524
| 206
| 7
| 119
| 29.428571
| 0.851852
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.333333
| 0.333333
| 0
| 0.666667
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
|
0
| 7
|
d091628e8fac9bccf6ae2bdab112f1f6b229ecc8
| 80
|
py
|
Python
|
django_extensions/__init__.py
|
yutayamazaki/django-extensions
|
bc111bd21cba74ab2ab3ad9347d9950d0cf3f23d
|
[
"MIT"
] | null | null | null |
django_extensions/__init__.py
|
yutayamazaki/django-extensions
|
bc111bd21cba74ab2ab3ad9347d9950d0cf3f23d
|
[
"MIT"
] | null | null | null |
django_extensions/__init__.py
|
yutayamazaki/django-extensions
|
bc111bd21cba74ab2ab3ad9347d9950d0cf3f23d
|
[
"MIT"
] | null | null | null |
from django_extensions import middlewares
from django_extensions import version
| 26.666667
| 41
| 0.9
| 10
| 80
| 7
| 0.6
| 0.285714
| 0.571429
| 0.742857
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.1
| 80
| 2
| 42
| 40
| 0.972222
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
19038c764992bf056aed9a9929915e56c84f38b9
| 4,204
|
py
|
Python
|
lastgui/data.py
|
jc855/lastgraph
|
a2917e73f0e0b9409e897e4a83944e72161a33ce
|
[
"BSD-3-Clause"
] | 77
|
2015-01-03T20:26:28.000Z
|
2021-07-07T15:08:25.000Z
|
lastgui/data.py
|
jc855/lastgraph
|
a2917e73f0e0b9409e897e4a83944e72161a33ce
|
[
"BSD-3-Clause"
] | 1
|
2021-06-10T23:42:31.000Z
|
2021-06-10T23:42:31.000Z
|
lastgui/data.py
|
jc855/lastgraph
|
a2917e73f0e0b9409e897e4a83944e72161a33ce
|
[
"BSD-3-Clause"
] | 20
|
2015-01-17T16:33:41.000Z
|
2021-12-23T03:40:36.000Z
|
hotlink_png = '\x89PNG\r\n\x1a\n\x00\x00\x00\rIHDR\x00\x00\x000\x00\x00\x000\x08\x06\x00\x00\x00W\x02\xf9\x87\x00\x00\x00\x04sBIT\x08\x08\x08\x08|\x08d\x88\x00\x00\x00\tpHYs\x00\x00\na\x00\x00\na\x01\xfc\xccJ%\x00\x00\x00\x19tEXtSoftware\x00www.inkscape.org\x9b\xee<\x1a\x00\x00\x05-IDATh\x81\xed\x98[l\x14e\x18\x86\x9fofK\xa1T\xc1\x9e\xb7\x05b\x88\x1c\xc4\x84\x18\xa3D\xe4\xa2\xb4\xa5R\x12\xa5Z\xba\x8b\xc4h\x08\x17V\x13\xbd\xf1\x82\x84\x0b\xa8\xab\t\xdc\xe95\xc6\x08\x1a\x89\t\x14\xe5\xa0\t\x14\x0c1\xd1\x96\x14\xbc\xc1\x90xK\xdar\xd8\xd2\x1a\x13\xda\xb2\xdb\xdd\xf9\xbc\xd8\x1d2\x9d\xecnwf\xc7P\x82o2\xd9\x9d\xff4\xef\xfb\xbf\xff\xff\xcf7\x9f\xa8*\x8f2\x8c\x87M\xa0T\xfc/\xe0a\xe3\xf1\x15\x10\x13\tL|)c\xf9\xea\xb8_\xe4h\n\xceFEL\xbf\x0f\xb6\x11\x151Spv\xbf\xc8Q?\xfd\xc5\xeb1\xba_\xe4h5\xec(\x07\xe3\x16\\2a{\xaf\xaa\xe5\xe7\xe11\x11#\rg\xc2\xd0\x92\x00k\x1cN~\xa6\xba\xdb\xcb\x18\x9e\x1c8 r\xa4\x1a\xba\x9a\xa0\xb2\x06*\x1a\xa0%\x05\xa7\xfd,\x81\x98\x88\x91\x82\xd3\r\xd0R\x03\x15MPY\r]\x07D\x8ex\x19\xa7\xe8\x07\xc7DB\n\xab\xca\xa1\xdc.\xab\x85\x8a0\xb4z\x15a\x93\x0fCk-T\xd8\xe5\xe5P\xae\xb0*&\x12\n\\@\xafj*\x04\xad7a\xe8.$\x9d"\x1a=\x88\xb0\xc97\xba\xc8\xdf\x85\xe4M\x18\nAk\xafj*p\x01Y\x11\xc9\x10\xb4\xb9E\xd4d\x9d\x98\x81S\x85D\xc4D\x8c\x198\x15\x86\xd6\x9a\xdc\xe4\xdbzU\x93\xf9\xfa\x97,\xa0\x90\x88\xac\x13m\xf9D\xd8\xe4\x1b\xa1-\xcf\xcc{&\x0f>N!\x07\xa1\x05)\xf8\xa5\x11^\xaaq\xec\x8b1\x98\xba\r\x17Mx\xd3>\x9d\xb2\xa7\xcd\x8f\r\xb0\xc5E>q\x13\xae\xf8%_\x92\x80"D\\0\xa1\x0b \r?4@{\xd0\xe4K\x16`\x8bH\xc3\xc50l\xc8%\x02 \x17\xf9[0d\xc2\x96R\xc8C\x00\x02\xa0\xb0\x08\xc8\xec\x0f\xbb,H\xf2\x10\x90\x00\xc8/\xc2\x89\xa0\xc9C\x80\xd1h\xafj\xd2\x84-\xb7`h\x0c\x12\xee\xfa\xb1\xff\x80<\x04\x1cN_\x874p\x8f\xcc\xaf\x1bi\xe0\xde\xf5\xdcu\xbe\x11\x98\x80\xa8\x88\xb9\x06\xce\xd4C\xb3s\xcd\xdb\xa8\x85\x8azh^\x03g\x82\x88bm\x04"\xc0&\xdf\x00\x9b\xeb\x1c\xe4\'\xe0\xfe\x04\xdc\xb7\xef\xeb2\x01\xe0\xe6 
E\x94\xbc\x89\xa3"\xe6Z8[\x0f\xcdn\xf2\xa3\xf0;@\x13l\xaa\x82\x85v]\x1c\xa6\xee\xc0\xaf\x7f\xc1\xeb\xc7UKZR%9P\x88\xfc\x08\x0c\x98\xd0aB\xc7\x08\x0c\xb8\x9d\xa8\x87\xe6\xb5\x01|\x14\xf9v \x1f\xf9q\x98\x1e\x85\xc1\x10l\xb5\xa3\xca\x98H(\x05\xe7\x9b`c5,\xb2\xdb\x06\xe1\x84/\x07\xbc\x90\x87\x07\xa1\xf8\xd6Q\x18\x1c\x87i\xbb<\x08\'<;`\x93\xaf\x83\xe6\xfa\xd9\xcbfz\x04\x06\xe3\xd0qXu&W\xdf\x1e\x91\xb2:8\xb7\x0c6V9\x9c\xb8\x03Sq\x9fNxr\xc09\xf3n\xf2\xa3p\xb9\x10y\x80\xc3\xaa3q\xe8\x18\x85\xcb\x13\x0e\'\xeaKp\xa2h\x07\xb2!\xf1O~f\xde\x8d\xb9\x9c0\xe1\xb5b\x13\x05^\xf7\xc0\xac\x10`\x02\xa6\x87\x8b\x98y7l\'\x86]N\xe4z\xc6\\\xf0\xb4\x07\xb2\xdf\xb3\'\xeb\xa0}\x01\x18\xc3py\x0c\xb6z!\xefD\x8fHY-\x9c_\x0e/\'\xc1\x8a\xc3\x85\x10\xec\xf0\x92\xa6\xf1\xbc\x89\xb3K\xa9O\xe1\xa98\xbc\xea\x97\xbc\x8d\xecr\xea\x17\xf8\xdb\x84n\xaf9&_\xef\x81\xec7\xaf\xe1%{0\xc7x!\xc0\xf2\x93 \x0b\xec{\xe0a\xe1\xf1\xcdN\xcf\x17x\x16\xd0)\xf2DTdE1m\xbbE\xd6\x03DE\x96\xb8\xeb\xde\x15Y\xdc-\xb2\xd2\xbe\x8f\x8aT\xfa\xc9\xb1z\xeeP\x06\xcf(\xbc\x9d\xaf>*\xb2""\xd2\x99\x1d\xfc`K&\xa7\xda\xe5n\x97\x84&\x03\xf68\x8a6\xfc\tO{\xe53+\x89\x1a\x11\xd9\x9b\xfdk\x9eP=\x14\x15y^\xe1-`\xf0\x84\xea\xe9l\x87\x19\x0b^\xdc)\xf2E\x1a\x8e\xf5\xa9^\x8d\x8a|\xa0\x10\xb6\xe0\xcb\x10\xbc\x01\xb4\xef\x10\xf9\xc3\x84\xd4%\xd5TD\xa4v\x97Hc\n\xde7`\xa9\xc2w&\xc4\x013"\xb2\r\xf8\x07(7`qD\xa4\x07\xa8\x04\x96T\xc2\xc1iX\xa7\xf0\xa1\xc2$\xf0\xf3q\xd5sN\xce\x86\xebf}\x08\xbe7`mT\xa4\x12\xf8\xb8\x0f\xf6\t\xbc\xb3]\xe4A\xf8\xa00^\x06\xbd&|\x14\x15Y\xa7\xb04\x04_\x87`\x9f\x05W\x15\xfaO\xaa\x8e\xd8\xed\x05^I@B\xe0Y\x85O\x14z\xb2\xe3\xac4\xa0\xf3\x84\xea\x80\xc0j\x85j\x03\x96\x19p\xc5\x00k\x12V\xa5a\x8f\x05\xdf\x02\xed\xcfA\xbf\xdb\x81Y\x02\x14\xac\'\xe1\xb6\x05\x89\x04\x08\xb0T3\xe7\xecx\x19T9\x08\xdd\x18\x85)\x0b\x0c\x85\xb0\x01\x93\x0b\xe1\xb6BC>\xab\x17\x81\x05\x8c\x01\xd3Ffl\x8c\xcc\xacV\xb58\xd2\xe9\n\x96d9X 
\n\x9f\x0b\x1c\x00\xf6\xe6zO\xcc\xb5\x07\xfa#"\x9f\x02\xf7\x9c3\xea\xc4$\xfcf\xc1\x0bS\x10#\xe3\xc2\xb0\xc0\xce]"\xcb\x013\xd7\x06v\x90\xbdc\xc1W5p(_\x1b\x81\x15\xc05\x85m9\xc7R\xd5\x82\xd7{P6W\x1bw\xbb\xdd\xb0PU\x89\x80YL\xdfBW\x04\x8euCg\x14\xbe\xd9\t\xab\xdd\xf5\xf3\xfeM\x1c\x15\xa9\x12\xd8\x94\x86\x1b}\xaa\xd7\xdc\xf5\xf3^\xc0\\x\xfc\xde\xc4\xf3\r\x8f\xbc\x80\x7f\x01\xc8\xe8\xca\xc4&0\x97\xff\x00\x00\x00\x00IEND\xaeB`\x82'
| 4,204
| 4,204
| 0.738344
| 962
| 4,204
| 3.217256
| 0.404366
| 0.031018
| 0.01454
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.258403
| 0.002141
| 4,204
| 1
| 4,204
| 4,204
| 0.47938
| 0
| 0
| 0
| 0
| 4
| 0.890369
| 0.888942
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
4be67d30009010ead9f077b54860f8562adef30d
| 162
|
py
|
Python
|
unicorn_binance_local_depth_cache/__init__.py
|
gitter-badger/unicorn-binance-local-depth-cache
|
3c016c78317128e8fbe597eaf814b3b48d442cc3
|
[
"MIT"
] | 12
|
2022-03-02T20:25:19.000Z
|
2022-03-30T14:44:12.000Z
|
unicorn_binance_local_depth_cache/__init__.py
|
gitter-badger/unicorn-binance-local-depth-cache
|
3c016c78317128e8fbe597eaf814b3b48d442cc3
|
[
"MIT"
] | 10
|
2022-03-04T15:14:22.000Z
|
2022-03-20T02:30:27.000Z
|
unicorn_binance_local_depth_cache/__init__.py
|
gitter-badger/unicorn-binance-local-depth-cache
|
3c016c78317128e8fbe597eaf814b3b48d442cc3
|
[
"MIT"
] | 2
|
2022-03-20T15:20:41.000Z
|
2022-03-20T21:55:57.000Z
|
from unicorn_binance_local_depth_cache.manager import BinanceLocalDepthCacheManager
from unicorn_binance_local_depth_cache.exceptions import DepthCacheOutOfSync
| 40.5
| 83
| 0.932099
| 18
| 162
| 7.944444
| 0.611111
| 0.153846
| 0.251748
| 0.321678
| 0.461538
| 0.461538
| 0
| 0
| 0
| 0
| 0
| 0
| 0.055556
| 162
| 3
| 84
| 54
| 0.934641
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
ef423ee51e95b3b9b096fb78d48c4e2e6c36561c
| 15,291
|
py
|
Python
|
tests/keras/test_keras_iris.py
|
Clearbox-AI/clearbox-wrapper
|
3629923763629d2a12cb96723695b5ab497ad756
|
[
"Apache-2.0"
] | 14
|
2020-06-26T13:38:54.000Z
|
2022-03-16T18:30:09.000Z
|
tests/keras/test_keras_iris.py
|
Clearbox-AI/clearbox-wrapper
|
3629923763629d2a12cb96723695b5ab497ad756
|
[
"Apache-2.0"
] | 9
|
2020-09-21T07:20:59.000Z
|
2021-09-02T13:49:45.000Z
|
tests/keras/test_keras_iris.py
|
Clearbox-AI/clearbox-wrapper
|
3629923763629d2a12cb96723695b5ab497ad756
|
[
"Apache-2.0"
] | null | null | null |
import os
from sys import version_info
import numpy as np
import pytest
import sklearn.datasets as datasets
import sklearn.preprocessing as sk_preprocessing
from tensorflow.keras.layers import Dense
from tensorflow.keras.models import Sequential
import yaml
import clearbox_wrapper as cbw
@pytest.fixture
def model_path(tmpdir):
return os.path.join(str(tmpdir), "model")
@pytest.fixture(scope="module")
def iris_data():
iris = datasets.load_iris()
x = iris.data
y = iris.target
return x, y
@pytest.fixture()
def sk_function_transformer():
def simple_preprocessor(numpy_x):
return numpy_x ** 2
transformer = sk_preprocessing.FunctionTransformer(
simple_preprocessor, validate=True
)
return transformer
@pytest.fixture()
def custom_transformer():
def simple_preprocessor(numpy_x):
transformed_x = numpy_x + 1.0
return transformed_x
return simple_preprocessor
@pytest.fixture()
def drop_column_transformer():
def drop_column(numpy_x):
transformed_x = np.delete(numpy_x, 0, axis=1)
return transformed_x
return drop_column
@pytest.fixture()
def keras_model():
keras_clf = Sequential()
keras_clf.add(Dense(8, input_dim=4, activation="relu"))
keras_clf.add(Dense(3, activation="softmax"))
keras_clf.compile(
optimizer="adam", loss="sparse_categorical_crossentropy", metrics=["accuracy"]
)
return keras_clf
def test_iris_keras_no_preprocessing(iris_data, keras_model, model_path):
x, y = iris_data
model = keras_model
model.fit(x, y, epochs=10, batch_size=10)
cbw.save_model(model_path, model, zip=False)
loaded_model = cbw.load_model(model_path)
original_model_predictions = model.predict(x)
loaded_model_predictions = loaded_model.predict(x)
np.testing.assert_array_equal(original_model_predictions, loaded_model_predictions)
@pytest.mark.parametrize(
"sk_transformer",
[
(sk_preprocessing.StandardScaler()),
(sk_preprocessing.QuantileTransformer(random_state=0, n_quantiles=50)),
(sk_preprocessing.KBinsDiscretizer(n_bins=2, encode="ordinal")),
(sk_preprocessing.RobustScaler()),
(sk_preprocessing.MaxAbsScaler()),
],
)
def test_iris_keras_preprocessing(sk_transformer, iris_data, keras_model, model_path):
x, y = iris_data
x_transformed = sk_transformer.fit_transform(x)
model = keras_model
model.fit(x_transformed, y)
cbw.save_model(model_path, model, preprocessing=sk_transformer, zip=False)
loaded_model = cbw.load_model(model_path)
original_model_predictions = model.predict(x_transformed)
loaded_model_predictions = loaded_model.predict(x)
np.testing.assert_array_equal(original_model_predictions, loaded_model_predictions)
def test_iris_keras_preprocessing_with_function_transformer(
sk_function_transformer, iris_data, keras_model, model_path
):
x, y = iris_data
x_transformed = sk_function_transformer.fit_transform(x)
model = keras_model
model.fit(x_transformed, y)
cbw.save_model(model_path, model, preprocessing=sk_function_transformer, zip=False)
loaded_model = cbw.load_model(model_path)
original_model_predictions = model.predict(x_transformed)
loaded_model_predictions = loaded_model.predict(x)
np.testing.assert_array_equal(original_model_predictions, loaded_model_predictions)
def test_iris_keras_preprocessing_with_custom_transformer(
custom_transformer, iris_data, keras_model, model_path
):
x, y = iris_data
x_transformed = custom_transformer(x)
model = keras_model
model.fit(x_transformed, y)
cbw.save_model(model_path, model, preprocessing=custom_transformer, zip=False)
loaded_model = cbw.load_model(model_path)
original_model_predictions = model.predict(x_transformed)
loaded_model_predictions = loaded_model.predict(x)
np.testing.assert_array_equal(original_model_predictions, loaded_model_predictions)
@pytest.mark.parametrize(
"preprocessor",
[
(sk_preprocessing.StandardScaler()),
(sk_preprocessing.QuantileTransformer(random_state=0, n_quantiles=50)),
(sk_preprocessing.KBinsDiscretizer(n_bins=2, encode="ordinal")),
(sk_preprocessing.RobustScaler()),
(sk_preprocessing.MaxAbsScaler()),
],
)
def test_iris_keras_data_preparation_and_preprocessing(
preprocessor, drop_column_transformer, iris_data, model_path
):
x, y = iris_data
x_prepared = drop_column_transformer(x)
x_transformed = preprocessor.fit_transform(x_prepared)
model = Sequential()
model.add(Dense(8, input_dim=x_transformed.shape[1], activation="relu"))
model.add(Dense(3, activation="softmax"))
model.compile(
optimizer="adam", loss="sparse_categorical_crossentropy", metrics=["accuracy"]
)
model.fit(x_transformed, y)
cbw.save_model(
model_path,
model,
preprocessing=preprocessor,
data_preparation=drop_column_transformer,
zip=False,
)
loaded_model = cbw.load_model(model_path)
original_model_predictions = model.predict(x_transformed)
loaded_model_predictions = loaded_model.predict(x)
np.testing.assert_array_equal(original_model_predictions, loaded_model_predictions)
def test_iris_keras_data_preparation_without_preprocessing(
iris_data, keras_model, model_path
):
x, y = iris_data
sk_transformer = sk_preprocessing.StandardScaler()
x_transformed = sk_transformer.fit_transform(x)
model = keras_model
model.fit(x_transformed, y)
with pytest.raises(ValueError):
cbw.save_model(
model_path, model, data_preparation=drop_column_transformer, zip=False
)
def test_iris_keras_load_preprocessing_without_preprocessing(
iris_data, keras_model, model_path
):
x, y = iris_data
model = keras_model
model.fit(x, y)
cbw.save_model(model_path, model, zip=False)
loaded_model = cbw.load_model(model_path)
with pytest.raises(cbw.ClearboxWrapperException):
loaded_model.preprocess_data(x)
def test_iris_keras_load_data_preparation_without_data_preparation(
iris_data, keras_model, model_path
):
x, y = iris_data
sk_transformer = sk_preprocessing.StandardScaler()
x_transformed = sk_transformer.fit_transform(x)
model = keras_model
model.fit(x_transformed, y)
cbw.save_model(model_path, model, preprocessing=sk_transformer, zip=False)
loaded_model = cbw.load_model(model_path)
with pytest.raises(cbw.ClearboxWrapperException):
loaded_model.prepare_data(x)
@pytest.mark.parametrize(
"preprocessor",
[
(sk_preprocessing.StandardScaler()),
(sk_preprocessing.QuantileTransformer(random_state=0, n_quantiles=50)),
(sk_preprocessing.KBinsDiscretizer(n_bins=2, encode="ordinal")),
(sk_preprocessing.RobustScaler()),
(sk_preprocessing.MaxAbsScaler()),
],
)
def test_iris_keras_get_preprocessed_data(
preprocessor, iris_data, keras_model, model_path
):
x, y = iris_data
x_transformed = preprocessor.fit_transform(x)
model = keras_model
model.fit(x_transformed, y)
cbw.save_model(model_path, model, preprocessing=preprocessor, zip=False)
loaded_model = cbw.load_model(model_path)
x_transformed_by_loaded_preprocessing = loaded_model.preprocess_data(x)
np.testing.assert_array_equal(x_transformed, x_transformed_by_loaded_preprocessing)
@pytest.mark.parametrize(
"preprocessor",
[
(sk_preprocessing.StandardScaler()),
(sk_preprocessing.QuantileTransformer(random_state=0, n_quantiles=50)),
(sk_preprocessing.KBinsDiscretizer(n_bins=2, encode="ordinal")),
(sk_preprocessing.RobustScaler()),
(sk_preprocessing.MaxAbsScaler()),
],
)
def test_iris_keras_get_prepared_data(
preprocessor, drop_column_transformer, iris_data, model_path
):
x, y = iris_data
x_prepared = drop_column_transformer(x)
x_transformed = preprocessor.fit_transform(x_prepared)
model = Sequential()
model.add(Dense(8, input_dim=x_transformed.shape[1], activation="relu"))
model.add(Dense(3, activation="softmax"))
model.compile(
optimizer="adam", loss="sparse_categorical_crossentropy", metrics=["accuracy"]
)
model.fit(x_transformed, y)
cbw.save_model(
model_path,
model,
preprocessing=preprocessor,
data_preparation=drop_column_transformer,
zip=False,
)
loaded_model = cbw.load_model(model_path)
x_prepared_by_loaded_data_preparation = loaded_model.prepare_data(x)
np.testing.assert_array_equal(x_prepared, x_prepared_by_loaded_data_preparation)
@pytest.mark.parametrize(
"preprocessor",
[
(sk_preprocessing.StandardScaler()),
(sk_preprocessing.QuantileTransformer(random_state=0, n_quantiles=50)),
(sk_preprocessing.KBinsDiscretizer(n_bins=2, encode="ordinal")),
(sk_preprocessing.RobustScaler()),
(sk_preprocessing.MaxAbsScaler()),
],
)
def test_iris_keras_get_prepared_and_processed_data(
preprocessor, drop_column_transformer, iris_data, model_path
):
x, y = iris_data
x_prepared = drop_column_transformer(x)
x_transformed = preprocessor.fit_transform(x_prepared)
model = Sequential()
model.add(Dense(8, input_dim=x_transformed.shape[1], activation="relu"))
model.add(Dense(3, activation="softmax"))
model.compile(
optimizer="adam", loss="sparse_categorical_crossentropy", metrics=["accuracy"]
)
model.fit(x_transformed, y)
cbw.save_model(
model_path,
model,
preprocessing=preprocessor,
data_preparation=drop_column_transformer,
zip=False,
)
loaded_model = cbw.load_model(model_path)
x_prepared_by_loaded_data_preparation = loaded_model.prepare_data(x)
x_transformed_by_loaded_preprocessing = loaded_model.preprocess_data(
x_prepared_by_loaded_data_preparation
)
np.testing.assert_array_equal(x_transformed, x_transformed_by_loaded_preprocessing)
@pytest.mark.parametrize(
"preprocessor",
[
(sk_preprocessing.StandardScaler()),
(sk_preprocessing.QuantileTransformer(random_state=0, n_quantiles=50)),
(sk_preprocessing.KBinsDiscretizer(n_bins=2, encode="ordinal")),
(sk_preprocessing.RobustScaler()),
(sk_preprocessing.MaxAbsScaler()),
],
)
def test_iris_keras_predict_without_preprocessing(
preprocessor, drop_column_transformer, iris_data, model_path
):
x, y = iris_data
x_prepared = drop_column_transformer(x)
x_transformed = preprocessor.fit_transform(x_prepared)
model = Sequential()
model.add(Dense(8, input_dim=x_transformed.shape[1], activation="relu"))
model.add(Dense(3, activation="softmax"))
model.compile(
optimizer="adam", loss="sparse_categorical_crossentropy", metrics=["accuracy"]
)
model.fit(x_transformed, y)
cbw.save_model(
model_path,
model,
preprocessing=preprocessor,
data_preparation=drop_column_transformer,
zip=False,
)
loaded_model = cbw.load_model(model_path)
original_model_predictions = model.predict(x_transformed)
loaded_model_predictions = loaded_model.predict(x, preprocess=False)
np.testing.assert_raises(
AssertionError,
np.testing.assert_array_equal,
original_model_predictions,
loaded_model_predictions,
)
@pytest.mark.parametrize(
"preprocessor",
[
(sk_preprocessing.StandardScaler()),
(sk_preprocessing.QuantileTransformer(random_state=0, n_quantiles=50)),
(sk_preprocessing.KBinsDiscretizer(n_bins=2, encode="ordinal")),
(sk_preprocessing.RobustScaler()),
(sk_preprocessing.MaxAbsScaler()),
],
)
def test_iris_keras_predict_without_data_preparation(
preprocessor, drop_column_transformer, iris_data, model_path
):
x, y = iris_data
x_prepared = drop_column_transformer(x)
x_transformed = preprocessor.fit_transform(x_prepared)
model = Sequential()
model.add(Dense(8, input_dim=x_transformed.shape[1], activation="relu"))
model.add(Dense(3, activation="softmax"))
model.compile(
optimizer="adam", loss="sparse_categorical_crossentropy", metrics=["accuracy"]
)
model.fit(x_transformed, y)
cbw.save_model(
model_path,
model,
preprocessing=preprocessor,
data_preparation=drop_column_transformer,
zip=False,
)
loaded_model = cbw.load_model(model_path)
with pytest.raises(ValueError):
loaded_model.predict(x, prepare_data=False)
def test_iris_keras_conda_env(iris_data, keras_model, model_path):
import cloudpickle
import tensorflow
x, y = iris_data
model = keras_model
model.fit(x, y)
cbw.save_model(model_path, model, zip=False)
with open(model_path + "/conda.yaml", "r") as f:
conda_env = yaml.safe_load(f)
python_version = "{major}.{minor}.{micro}".format(
major=version_info.major, minor=version_info.minor, micro=version_info.micro
)
tf_version = tensorflow.__version__
cloudpickle_version = cloudpickle.__version__
channels_list = ["defaults", "conda-forge"]
dependencies = [
"python={}".format(python_version),
"pip",
{
"pip": [
"cloudpickle=={}".format(cloudpickle_version),
"tensorflow=={}".format(tf_version),
]
},
]
assert conda_env["channels"] == channels_list
assert conda_env["dependencies"] == dependencies
def test_iris_keras_conda_env_additional_deps(iris_data, keras_model, model_path):
import cloudpickle
import tensorflow
x, y = iris_data
model = keras_model
model.fit(x, y)
add_deps = [
"torch==1.6.0",
"fake_package=2.1.0",
"fastapi==0.52.1",
"my_package==1.23.1",
]
cbw.save_model(model_path, model, additional_deps=add_deps, zip=False)
with open(model_path + "/conda.yaml", "r") as f:
conda_env = yaml.safe_load(f)
python_version = "{major}.{minor}.{micro}".format(
major=version_info.major, minor=version_info.minor, micro=version_info.micro
)
tf_version = tensorflow.__version__
cloudpickle_version = cloudpickle.__version__
channels_list = ["defaults", "conda-forge"]
dependencies = [
"python={}".format(python_version),
"pip",
{
"pip": [
"cloudpickle=={}".format(cloudpickle_version),
"torch==1.6.0",
"fake_package=2.1.0",
"fastapi==0.52.1",
"my_package==1.23.1",
"tensorflow=={}".format(tf_version),
]
},
]
assert conda_env["channels"] == channels_list
assert conda_env["dependencies"] == dependencies
def test_iris_keras_conda_env_additional_pip_deps_with_duplicates(
iris_data, keras_model, model_path
):
x, y = iris_data
model = keras_model
model.fit(x, y)
add_deps = ["torch==1.6.0", "torch==1.6.2"]
with pytest.raises(ValueError):
cbw.save_model(model_path, model, additional_deps=add_deps, zip=False)
| 30.890909
| 87
| 0.710876
| 1,833
| 15,291
| 5.584834
| 0.084561
| 0.048842
| 0.053336
| 0.025007
| 0.883462
| 0.869688
| 0.849663
| 0.845365
| 0.842825
| 0.836182
| 0
| 0.007155
| 0.186515
| 15,291
| 494
| 88
| 30.953441
| 0.815821
| 0
| 0
| 0.700495
| 0
| 0
| 0.055981
| 0.015172
| 0
| 0
| 0
| 0
| 0.037129
| 1
| 0.061881
| false
| 0
| 0.034653
| 0.00495
| 0.118812
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
ef63bfb0d05fe0ebf1cf8810cc8fb84fac4e2d31
| 28,959
|
py
|
Python
|
unit_tests/disabled_test_db_vm.py
|
hep-gc/cloud-scheduler-2
|
180d9dc4f8751cf8c8254518e46f83f118187e84
|
[
"Apache-2.0"
] | 3
|
2020-03-03T03:25:36.000Z
|
2021-12-03T15:31:39.000Z
|
unit_tests/disabled_test_db_vm.py
|
hep-gc/cloud-scheduler-2
|
180d9dc4f8751cf8c8254518e46f83f118187e84
|
[
"Apache-2.0"
] | 341
|
2017-06-08T17:27:59.000Z
|
2022-01-28T19:37:57.000Z
|
unit_tests/disabled_test_db_vm.py
|
hep-gc/cloud-scheduler-2
|
180d9dc4f8751cf8c8254518e46f83f118187e84
|
[
"Apache-2.0"
] | 3
|
2018-04-25T16:13:20.000Z
|
2020-04-15T20:03:46.000Z
|
# The name of this file has been changed to prevent ./run_tests from running it.
from unit_test_common import execute_csv2_request, execute_csv2_command, initialize_csv2_request, ut_id
from sys import argv
# lno: VV - error code identifier.
def main(gvar, user_secret):
if not gvar:
gvar = {}
if len(argv) > 1:
initialize_csv2_request(gvar, selections=argv[1])
else:
initialize_csv2_request(gvar)
# 1
execute_csv2_request(
gvar, 0, None, None,
'/vm/list/', group=ut_id(gvar, 'dtg1'), expected_list='vm_list', list_filter={'group_name': ut_id(gvar, 'dtg1'), 'cloud_name': ut_id(gvar, 'dtc1'), 'vmid': 'vmid1'},
values={'power_status': 1,
'authurl': 'vm-test-authurl',
'manual_control': 0,
'project': 'vm-test-project',
'status': 'ACTIVE',
'retire_request_time': 0,
'group_name': ut_id(gvar, 'dtg1'),
'terminate': 0,
'flavor_id': '5',
'poller_status': 'unregistered',
'foreign_vm': 0,
'hostname': 'vm-test-group--vm-test-cloud--vmid1',
'vmid': 'vmid1',
'keep_alive': 0,
'cloud_name': ut_id(gvar, 'dtc1')
},
server_user=ut_id(gvar, 'dtu1'), server_pw=user_secret
)
# 2
execute_csv2_request(
gvar, 0, None, None,
'/vm/list/', group=ut_id(gvar, 'dtg1'), expected_list='vm_list', list_filter={'group_name': ut_id(gvar, 'dtg1'), 'cloud_name': ut_id(gvar, 'dtc1'), 'vmid': 'vmid2'},
values={'power_status': 1,
'authurl': 'vm-test-authurl',
'manual_control': 0,
'project': 'vm-test-project',
'status': 'ACTIVE',
'retire_request_time': 0,
'group_name': ut_id(gvar, 'dtg1'),
'terminate': 0,
'flavor_id': '4',
'poller_status': 'unregistered',
'foreign_vm': 0,
'hostname': 'vm-test-group--vm-test-cloud--vmid2',
'vmid': 'vmid2',
'keep_alive':0,
'cloud_name': ut_id(gvar, 'dtc1')
},
server_user=ut_id(gvar, 'dtu1'), server_pw=user_secret
)
# 3
execute_csv2_request(
gvar, 0, None, None,
'/vm/list/', group=ut_id(gvar, 'dtg1'), expected_list='vm_list', list_filter={'group_name': ut_id(gvar, 'dtg1'), 'cloud_name': ut_id(gvar, 'dtc1'), 'vmid': 'vmid3'},
values={'power_status': 1,
'authurl': 'vm-test-authurl',
'manual_control': 0,
'project': 'vm-test-project',
'status': 'ACTIVE',
'retire_request_time': 0,
'group_name': ut_id(gvar, 'dtg1'),
'terminate': 0,
'flavor_id': '4',
'poller_status': 'unregistered',
'foreign_vm': 0,
'hostname': 'vm-test-group--vm-test-cloud--vmid3',
'vmid': 'vmid3',
'keep_alive':0,
'cloud_name': ut_id(gvar, 'dtc1')
},
server_user=ut_id(gvar, 'dtu1'), server_pw=user_secret
)
# 4
execute_csv2_request(
gvar, 0, None, None,
'/vm/list/', group=ut_id(gvar, 'dtg1'), expected_list='vm_list', list_filter={'group_name': ut_id(gvar, 'dtg1'), 'cloud_name': ut_id(gvar, 'dtc1'), 'vmid': 'vmid4'},
values={'power_status': 1,
'authurl': 'vm-test-authurl',
'manual_control': 0,
'project': 'vm-test-project',
'status': 'ACTIVE',
'retire_request_time': None,
'group_name': ut_id(gvar, 'dtg1'),
'terminate': 0,
'flavor_id': '4',
'poller_status': 'foreign',
'foreign_vm': 1,
'hostname': 'foreign-cloud--vmid4',
'vmid': 'vmid4',
'keep_alive':0,
'cloud_name': ut_id(gvar, 'dtc1')
},
server_user=ut_id(gvar, 'dtu1'), server_pw=user_secret
)
# 5
execute_csv2_request(
gvar, 0, None, None,
'/vm/list/', group=ut_id(gvar, 'dtg1'), expected_list='vm_list', list_filter={'group_name': ut_id(gvar, 'dtg1'), 'cloud_name': ut_id(gvar, 'dtc1'), 'vmid': 'vmid5'},
values={'power_status': 1,
'authurl': 'vm-test-authurl',
'manual_control': 0,
'project': 'vm-test-project',
'status': 'ACTIVE',
'retire_request_time': None,
'group_name': ut_id(gvar, 'dtg1'),
'terminate': 0,
'flavor_id': '4',
'poller_status': 'foreign',
'foreign_vm': 1,
'hostname': 'foreign-cloud--vmid5',
'vmid': 'vmid5',
'keep_alive':0,
'cloud_name': ut_id(gvar, 'dtc1')
},
server_user=ut_id(gvar, 'dtu1'), server_pw=user_secret
)
# 6
execute_csv2_request(
gvar, 0, None, None,
'/vm/list/', group=ut_id(gvar, 'dtg1'), expected_list='vm_list', list_filter={'group_name': ut_id(gvar, 'dtg1'), 'cloud_name': ut_id(gvar, 'dtc1'), 'vmid': 'vmid6'},
values={'power_status': 1,
'authurl': 'vm-test-authurl',
'manual_control': 0,
'project': 'vm-test-project',
'status': 'ACTIVE',
'retire_request_time': None,
'group_name': ut_id(gvar, 'dtg1'),
'terminate': 0,
'flavor_id': '4',
'poller_status': 'foreign',
'foreign_vm': 1,
'hostname': 'foreign-cloud--vmid6',
'vmid': 'vmid6',
'keep_alive':0,
'cloud_name': ut_id(gvar, 'dtc1')
},
server_user=ut_id(gvar, 'dtu1'), server_pw=user_secret
)
# 7
# Setting Manual Control
execute_csv2_request(
gvar, 0, None, "vm update, VMs set to manual control: 0.",
'/vm/update/', group=ut_id(gvar, 'dtg1'),
form_data={'vm_option': 'manctl', 'hostname': 'foreign-cloud--vmid4'}
)
# 8
execute_csv2_command(
gvar, 0, None, 'vm update, VMs set to manual control: 0.',
['cloudscheduler', 'vm', 'update', '-vo', 'manctl', '-vh', 'foreign-cloud--vmid4']
)
# 9
execute_csv2_request(
gvar, 0, None, "vm update, VMs set to manual control: 1.",
'/vm/update/', group=ut_id(gvar, 'dtg1'), form_data={'vm_option': 'manctl', 'hostname': 'vm-test-group--vm-test-cloud--vmid1'}
)
# 10
execute_csv2_command(
gvar, 0, None, 'vm update, VMs set to manual control: 1.',
['cloudscheduler', 'vm', 'update', '-vo', 'manctl', '-vh', 'vm-test-group--vm-test-cloud--vmid2']
)
# 11
execute_csv2_request(
gvar, 0, None, "vm update, VMs set to manual control: 3.",
'/vm/update/', group=ut_id(gvar, 'dtg1'), form_data={'vm_option': 'manctl'}
)
# 12
execute_csv2_command(
gvar, 0, None, 'vm update, VMs set to manual control: 3.',
['cloudscheduler', 'vm', 'update', '-vo', 'manctl']
)
# 13
execute_csv2_request(
gvar, 0, None, None,
'/vm/list/', group=ut_id(gvar, 'dtg1'), expected_list='vm_list', list_filter={'group_name': ut_id(gvar, 'dtg1'), 'cloud_name': ut_id(gvar, 'dtc1'), 'vmid': 'vmid1'},
values={'power_status': 1,
'authurl': 'vm-test-authurl',
'manual_control': 1,
'project': 'vm-test-project',
'status': 'ACTIVE',
'retire_request_time': 0,
'group_name': ut_id(gvar, 'dtg1'),
'terminate': 0,
'flavor_id': '5',
'poller_status': 'manual',
'foreign_vm': 0,
'hostname': 'vm-test-group--vm-test-cloud--vmid1',
'vmid': 'vmid1',
'keep_alive':0,
'cloud_name': ut_id(gvar, 'dtc1')
},
server_user=ut_id(gvar, 'dtu1'), server_pw=user_secret
)
# 14
execute_csv2_request(
gvar, 0, None, None,
'/vm/list/', group=ut_id(gvar, 'dtg1'), expected_list='vm_list', list_filter={'group_name': ut_id(gvar, 'dtg1'), 'cloud_name': ut_id(gvar, 'dtc1'), 'vmid': 'vmid2'},
values={'power_status': 1,
'authurl': 'vm-test-authurl',
'manual_control': 1,
'project': 'vm-test-project',
'status': 'ACTIVE',
'retire_request_time': 0,
'group_name': ut_id(gvar, 'dtg1'),
'terminate': 0,
'flavor_id': '4',
'poller_status': 'manual',
'foreign_vm': 0,
'hostname': 'vm-test-group--vm-test-cloud--vmid2',
'vmid': 'vmid2',
'keep_alive':0,
'cloud_name': ut_id(gvar, 'dtc1')
},
server_user=ut_id(gvar, 'dtu1'), server_pw=user_secret
)
# 15
execute_csv2_request(
gvar, 0, None, None,
'/vm/list/', group=ut_id(gvar, 'dtg1'), expected_list='vm_list', list_filter={'group_name': ut_id(gvar, 'dtg1'), 'cloud_name': ut_id(gvar, 'dtc1'), 'vmid': 'vmid3'},
values={'power_status': 1,
'authurl': 'vm-test-authurl',
'manual_control': 1,
'project': 'vm-test-project',
'status': 'ACTIVE',
'retire_request_time': 0,
'group_name': ut_id(gvar, 'dtg1'),
'terminate': 0,
'flavor_id': '4',
'poller_status': 'manual',
'foreign_vm': 0,
'hostname': 'vm-test-group--vm-test-cloud--vmid3',
'vmid': 'vmid3',
'keep_alive':0,
'cloud_name': ut_id(gvar, 'dtc1')
},
server_user=ut_id(gvar, 'dtu1'), server_pw=user_secret
)
# 16
execute_csv2_request(
gvar, 0, None, None,
'/vm/list/', group=ut_id(gvar, 'dtg1'), expected_list='vm_list', list_filter={'group_name': ut_id(gvar, 'dtg1'), 'cloud_name': ut_id(gvar, 'dtc1'), 'vmid': 'vmid4'},
values={'power_status': 1,
'authurl': 'vm-test-authurl',
'manual_control': 0,
'project': 'vm-test-project',
'status': 'ACTIVE',
'retire_request_time': None,
'group_name': ut_id(gvar, 'dtg1'),
'terminate': 0,
'flavor_id': '4',
'poller_status': 'foreign',
'foreign_vm': 1,
'hostname': 'foreign-cloud--vmid4',
'vmid': 'vmid4',
'keep_alive':0,
'cloud_name': ut_id(gvar, 'dtc1')
},
server_user=ut_id(gvar, 'dtu1'), server_pw=user_secret
)
# 17
execute_csv2_request(
gvar, 0, None, None,
'/vm/list/', group=ut_id(gvar, 'dtg1'), expected_list='vm_list', list_filter={'group_name': ut_id(gvar, 'dtg1'), 'cloud_name': ut_id(gvar, 'dtc1'), 'vmid': 'vmid5'},
values={'power_status': 1,
'authurl': 'vm-test-authurl',
'manual_control': 0,
'project': 'vm-test-project',
'status': 'ACTIVE',
'retire_request_time': None,
'group_name': ut_id(gvar, 'dtg1'),
'terminate': 0,
'flavor_id': '4',
'poller_status': 'foreign',
'foreign_vm': 1,
'hostname': 'foreign-cloud--vmid5',
'vmid': 'vmid5',
'keep_alive':0,
'cloud_name': ut_id(gvar, 'dtc1')
},
server_user=ut_id(gvar, 'dtu1'), server_pw=user_secret
)
# 18
execute_csv2_request(
gvar, 0, None, None,
'/vm/list/', group=ut_id(gvar, 'dtg1'), expected_list='vm_list', list_filter={'group_name': ut_id(gvar, 'dtg1'), 'cloud_name': ut_id(gvar, 'dtc1'), 'vmid': 'vmid6'},
values={'power_status': 1,
'authurl': 'vm-test-authurl',
'manual_control': 0,
'project': 'vm-test-project',
'status': 'ACTIVE',
'retire_request_time': None,
'group_name': ut_id(gvar, 'dtg1'),
'terminate': 0,
'flavor_id': '4',
'poller_status': 'foreign',
'foreign_vm': 1,
'hostname': 'foreign-cloud--vmid6',
'vmid': 'vmid6',
'keep_alive':0,
'cloud_name': ut_id(gvar, 'dtc1')
},
server_user=ut_id(gvar, 'dtu1'), server_pw=user_secret
)
# 19
# Setting System Control
execute_csv2_request(
gvar, 0, None, "vm update, VMs set to system control: 0.",
'/vm/update/', group=(ut_id(gvar, 'dtg1')), form_data={'vm_option': 'sysctl', 'hostname': 'foreign-cloud--vmid4'}
)
# 20
execute_csv2_command(
gvar, 0, None, 'vm update, VMs set to system control: 0.',
['cloudscheduler', 'vm', 'update', '-vo', 'sysctl', '-vh', 'foreign-cloud--vmid4']
)
# 21
execute_csv2_request(
gvar, 0, None, "vm update, VMs set to system control: 1.",
'/vm/update/', group=(ut_id(gvar, 'dtg1')), form_data={'vm_option': 'sysctl', 'hostname': 'vm-test-group--vm-test-cloud--vmid2'}
)
# 22
execute_csv2_command(
gvar, 0, None, 'vm update, VMs set to system control: 1.',
['cloudscheduler', 'vm', 'update', '-vo', 'sysctl', '-vh', 'vm-test-group--vm-test-cloud--vmid3']
)
# 23
execute_csv2_request(
gvar, 0, None, "vm update, VMs set to system control: 3.",
'/vm/update/', group=(ut_id(gvar, 'dtg1')), form_data={'vm_option': 'sysctl'}
)
# 24
execute_csv2_command(
gvar, 0, None, 'vm update, VMs set to system control: 3.',
['cloudscheduler', 'vm', 'update', '-vo', 'sysctl']
)
# 25
execute_csv2_request(
gvar, 0, None, None,
'/vm/list/', group=ut_id(gvar, 'dtg1'), expected_list='vm_list', list_filter={'group_name': ut_id(gvar, 'dtg1'), 'cloud_name': ut_id(gvar, 'dtc1'), 'vmid': 'vmid1'},
values={'power_status': 1,
'authurl': 'vm-test-authurl',
'manual_control': 0,
'project': 'vm-test-project',
'status': 'ACTIVE',
'retire_request_time': 0,
'group_name': ut_id(gvar, 'dtg1'),
'terminate': 0,
'flavor_id': '5',
'poller_status': 'unregistered',
'foreign_vm': 0,
'hostname': 'vm-test-group--vm-test-cloud--vmid1',
'vmid': 'vmid1',
'keep_alive':0,
'cloud_name': ut_id(gvar, 'dtc1')
},
server_user=ut_id(gvar, 'dtu1'), server_pw=user_secret
)
# 26
execute_csv2_request(
gvar, 0, None, None,
'/vm/list/', group=ut_id(gvar, 'dtg1'), expected_list='vm_list', list_filter={'group_name': ut_id(gvar, 'dtg1'), 'cloud_name': ut_id(gvar, 'dtc1'), 'vmid': 'vmid2'},
values={'power_status': 1,
'authurl': 'vm-test-authurl',
'manual_control': 0,
'project': 'vm-test-project',
'status': 'ACTIVE',
'retire_request_time': 0,
'group_name': ut_id(gvar, 'dtg1'),
'terminate': 0,
'flavor_id': '4',
'poller_status': 'unregistered',
'foreign_vm': 0,
'hostname': 'vm-test-group--vm-test-cloud--vmid2',
'vmid': 'vmid2',
'keep_alive':0,
'cloud_name': ut_id(gvar, 'dtc1')
},
server_user=ut_id(gvar, 'dtu1'), server_pw=user_secret
)
# 27
execute_csv2_request(
gvar, 0, None, None,
'/vm/list/', group=ut_id(gvar, 'dtg1'), expected_list='vm_list', list_filter={'group_name': ut_id(gvar, 'dtg1'), 'cloud_name': ut_id(gvar, 'dtc1'), 'vmid': 'vmid3'},
values={'power_status': 1,
'authurl': 'vm-test-authurl',
'manual_control': 0,
'project': 'vm-test-project',
'status': 'ACTIVE',
'retire_request_time': 0,
'group_name': ut_id(gvar, 'dtg1'),
'terminate': 0,
'flavor_id': '4',
'poller_status': 'unregistered',
'foreign_vm': 0,
'hostname': 'vm-test-group--vm-test-cloud--vmid3',
'vmid': 'vmid3',
'keep_alive':0,
'cloud_name': ut_id(gvar, 'dtc1')
},
server_user=ut_id(gvar, 'dtu1'), server_pw=user_secret
)
# 28
execute_csv2_request(
gvar, 0, None, None,
'/vm/list/', group=ut_id(gvar, 'dtg1'), expected_list='vm_list', list_filter={'group_name': ut_id(gvar, 'dtg1'), 'cloud_name': ut_id(gvar, 'dtc1'), 'vmid': 'vmid4'},
values={'power_status': 1,
'authurl': 'vm-test-authurl',
'manual_control': 0,
'project': 'vm-test-project',
'status': 'ACTIVE',
'retire_request_time': None,
'group_name': ut_id(gvar, 'dtg1'),
'terminate': 0,
'flavor_id': '4',
'poller_status': 'foreign',
'foreign_vm': 1,
'hostname': 'foreign-cloud--vmid4',
'vmid': 'vmid4',
'keep_alive':0,
'cloud_name': ut_id(gvar, 'dtc1')
},
server_user=ut_id(gvar, 'dtu1'), server_pw=user_secret
)
# 29
execute_csv2_request(
gvar, 0, None, None,
'/vm/list/', group=ut_id(gvar, 'dtg1'), expected_list='vm_list', list_filter={'group_name': ut_id(gvar, 'dtg1'), 'cloud_name': ut_id(gvar, 'dtc1'), 'vmid': 'vmid5'},
values={'power_status': 1,
'authurl': 'vm-test-authurl',
'manual_control': 0,
'project': 'vm-test-project',
'status': 'ACTIVE',
'retire_request_time': None,
'group_name': ut_id(gvar, 'dtg1'),
'terminate': 0,
'flavor_id': '4',
'poller_status': 'foreign',
'foreign_vm': 1,
'hostname': 'foreign-cloud--vmid5',
'vmid': 'vmid5',
'keep_alive':0,
'cloud_name': ut_id(gvar, 'dtc1')
},
server_user=ut_id(gvar, 'dtu1'), server_pw=user_secret
)
# 30
execute_csv2_request(
gvar, 0, None, None,
'/vm/list/', group=ut_id(gvar, 'dtg1'), expected_list='vm_list', list_filter={'group_name': ut_id(gvar, 'dtg1'), 'cloud_name': ut_id(gvar, 'dtc1'), 'vmid': 'vmid6'},
values={'power_status': 1,
'authurl': 'vm-test-authurl',
'manual_control': 0,
'project': 'vm-test-project',
'status': 'ACTIVE',
'retire_request_time': None,
'group_name': ut_id(gvar, 'dtg1'),
'terminate': 0,
'flavor_id': '4',
'poller_status': 'foreign',
'foreign_vm': 1,
'hostname': 'foreign-cloud--vmid6',
'vmid': 'vmid6',
'keep_alive':0,
'cloud_name': ut_id(gvar, 'dtc1')
},
server_user=ut_id(gvar, 'dtu1'), server_pw=user_secret
)
# 31
# Setting Retire
execute_csv2_request(
gvar, 0, None, "vm update, VMs retired: 0.",
'/vm/update/', group=(ut_id(gvar, 'dtg1')), form_data={'vm_option': 'retire', 'hostname': 'foreign-cloud--vmid4'}
)
# 32
execute_csv2_command(
gvar, 0, None, 'vm update, VMs retired: 0.',
['cloudscheduler', 'vm', 'update', '-vo', 'retire', '-vh', 'foreign-cloud--vmid4']
)
# 33
execute_csv2_request(
gvar, 0, None, "vm update, VMs retired: 1.",
'/vm/update/', group=(ut_id(gvar, 'dtg1')), form_data={'vm_option': 'retire', 'hostname': 'vm-test-group--vm-test-cloud--vmid3'}
)
# 34
execute_csv2_command(
gvar, 0, None, 'vm update, VMs retired: 1.',
['cloudscheduler', 'vm', 'update', '-vo', 'retire', '-vh', 'vm-test-group--vm-test-cloud--vmid1']
)
# 35
execute_csv2_request(
gvar, 0, None, "vm update, VMs retired: 3.",
'/vm/update/', group=(ut_id(gvar, 'dtg1')), form_data={'vm_option': 'retire'}
)
# 36
execute_csv2_command(
gvar, 0, None, 'vm update, VMs retired: 3.',
['cloudscheduler', 'vm', 'update', '-vo', 'retire']
)
# 37
execute_csv2_request(
gvar, 0, None, None,
'/vm/list/', group=ut_id(gvar, 'dtg1'), expected_list='vm_list', list_filter={'group_name': ut_id(gvar, 'dtg1'), 'cloud_name': ut_id(gvar, 'dtc1'), 'vmid': 'vmid4'},
values={'power_status': 1,
'authurl': 'vm-test-authurl',
'manual_control': 0,
'project': 'vm-test-project',
'status': 'ACTIVE',
'retire_request_time': None,
'group_name': ut_id(gvar, 'dtg1'),
'terminate': 0,
'flavor_id': '4',
'poller_status': 'foreign',
'foreign_vm': 1,
'hostname': 'foreign-cloud--vmid4',
'vmid': 'vmid4',
'keep_alive':0,
'cloud_name': ut_id(gvar, 'dtc1')
},
server_user=ut_id(gvar, 'dtu1'), server_pw=user_secret
)
# 38
execute_csv2_request(
gvar, 0, None, None,
'/vm/list/', group=ut_id(gvar, 'dtg1'), expected_list='vm_list', list_filter={'group_name': ut_id(gvar, 'dtg1'), 'cloud_name': ut_id(gvar, 'dtc1'), 'vmid': 'vmid5'},
values={'power_status': 1,
'authurl': 'vm-test-authurl',
'manual_control': 0,
'project': 'vm-test-project',
'status': 'ACTIVE',
'retire_request_time': None,
'group_name': ut_id(gvar, 'dtg1'),
'terminate': 0,
'flavor_id': '4',
'poller_status': 'foreign',
'foreign_vm': 1,
'hostname': 'foreign-cloud--vmid5',
'vmid': 'vmid5',
'keep_alive':0,
'cloud_name': ut_id(gvar, 'dtc1')
},
server_user=ut_id(gvar, 'dtu1'), server_pw=user_secret
)
# 39
execute_csv2_request(
gvar, 0, None, None,
'/vm/list/', group=ut_id(gvar, 'dtg1'), expected_list='vm_list', list_filter={'group_name': ut_id(gvar, 'dtg1'), 'cloud_name': ut_id(gvar, 'dtc1'), 'vmid': 'vmid6'},
values={'power_status': 1,
'authurl': 'vm-test-authurl',
'manual_control': 0,
'project': 'vm-test-project',
'status': 'ACTIVE',
'retire_request_time': None,
'group_name': ut_id(gvar, 'dtg1'),
'terminate': 0,
'flavor_id': '4',
'poller_status': 'foreign',
'foreign_vm': 1,
'hostname': 'foreign-cloud--vmid6',
'vmid': 'vmid6',
'keep_alive':0,
'cloud_name': ut_id(gvar, 'dtc1')
},
server_user=ut_id(gvar, 'dtu1'), server_pw=user_secret
)
# 40
# Setting Kill
execute_csv2_request(
gvar, 0, None, "vm update, VMs killed: 0.",
'/vm/update/', group=(ut_id(gvar, 'dtg1')), form_data={'vm_option': 'kill', 'hostname': 'foreign-cloud--vmid4'}
)
# 41
execute_csv2_command(
gvar, 0, None, 'vm update, VMs killed: 0.',
['cloudscheduler', 'vm', 'update', '-vo', 'kill', '-vh', 'foreign-cloud--vmid4']
)
# 42
execute_csv2_request(
gvar, 0, None, "vm update, VMs killed: 1.",
'/vm/update/', group=(ut_id(gvar, 'dtg1')), form_data={'vm_option': 'kill', 'hostname': 'vm-test-group--vm-test-cloud--vmid1'}
)
# 43
execute_csv2_command(
gvar, 0, None, 'vm update, VMs killed: 1.',
['cloudscheduler', 'vm', 'update', '-vo', 'kill', '-vh', 'vm-test-group--vm-test-cloud--vmid2']
)
# 44
execute_csv2_request(
gvar, 0, None, "vm update, VMs killed: 3.",
'/vm/update/', group=(ut_id(gvar, 'dtg1')), form_data={'vm_option': 'kill'}
)
# 45
execute_csv2_command(
gvar, 0, None, 'vm update, VMs killed: 3.',
['cloudscheduler', 'vm', 'update', '-vo', 'kill']
)
# 46
execute_csv2_request(
gvar, 0, None, None,
'/vm/list/', group=ut_id(gvar, 'dtg1'), expected_list='vm_list', list_filter={'group_name': ut_id(gvar, 'dtg1'), 'cloud_name': ut_id(gvar, 'dtc1'), 'vmid': 'vmid1'},
values={'power_status': 1,
'authurl': 'vm-test-authurl',
'manual_control': 0,
'project': 'vm-test-project',
'status': 'ACTIVE',
'group_name': ut_id(gvar, 'dtg1'),
'terminate': 1,
'flavor_id': '5',
'poller_status': 'unregistered',
'foreign_vm': 0,
'hostname': 'vm-test-group--vm-test-cloud--vmid1',
'vmid': 'vmid1',
'keep_alive':0,
'cloud_name': ut_id(gvar, 'dtc1')
},
server_user=ut_id(gvar, 'dtu1'), server_pw=user_secret
)
# 47
execute_csv2_request(
gvar, 0, None, None,
'/vm/list/', group=ut_id(gvar, 'dtg1'), expected_list='vm_list', list_filter={'group_name': ut_id(gvar, 'dtg1'), 'cloud_name': ut_id(gvar, 'dtc1'), 'vmid': 'vmid2'},
values={'power_status': 1,
'authurl': 'vm-test-authurl',
'manual_control': 0,
'project': 'vm-test-project',
'status': 'ACTIVE',
'group_name': ut_id(gvar, 'dtg1'),
'terminate': 1,
'flavor_id': '4',
'poller_status': 'unregistered',
'foreign_vm': 0,
'hostname': 'vm-test-group--vm-test-cloud--vmid2',
'vmid': 'vmid2',
'keep_alive':0,
'cloud_name': ut_id(gvar, 'dtc1')
},
server_user=ut_id(gvar, 'dtu1'), server_pw=user_secret
)
# 48
execute_csv2_request(
gvar, 0, None, None,
'/vm/list/', group=ut_id(gvar, 'dtg1'), expected_list='vm_list', list_filter={'group_name': ut_id(gvar, 'dtg1'), 'cloud_name': ut_id(gvar, 'dtc1'), 'vmid': 'vmid3'},
values={'power_status': 1,
'authurl': 'vm-test-authurl',
'manual_control': 0,
'project': 'vm-test-project',
'status': 'ACTIVE',
'group_name': ut_id(gvar, 'dtg1'),
'terminate': 1,
'flavor_id': '4',
'poller_status': 'unregistered',
'foreign_vm': 0,
'hostname': 'vm-test-group--vm-test-cloud--vmid3',
'vmid': 'vmid3',
'keep_alive':0,
'cloud_name': ut_id(gvar, 'dtc1')
},
server_user=ut_id(gvar, 'dtu1'), server_pw=user_secret
)
# 49
execute_csv2_request(
gvar, 0, None, None,
'/vm/list/', group=ut_id(gvar, 'dtg1'), expected_list='vm_list', list_filter={'group_name': ut_id(gvar, 'dtg1'), 'cloud_name': ut_id(gvar, 'dtc1'), 'vmid': 'vmid4'},
values={'power_status': 1,
'authurl': 'vm-test-authurl',
'manual_control': 0,
'project': 'vm-test-project',
'status': 'ACTIVE',
'retire_request_time': None,
'group_name': ut_id(gvar, 'dtg1'),
'terminate': 0,
'flavor_id': '4',
'poller_status': 'foreign',
'foreign_vm': 1,
'hostname': 'foreign-cloud--vmid4',
'vmid': 'vmid4',
'keep_alive':0,
'cloud_name': ut_id(gvar, 'dtc1')
},
server_user=ut_id(gvar, 'dtu1'), server_pw=user_secret
)
# 50
execute_csv2_request(
gvar, 0, None, None,
'/vm/list/', group=ut_id(gvar, 'dtg1'), expected_list='vm_list', list_filter={'group_name': ut_id(gvar, 'dtg1'), 'cloud_name': ut_id(gvar, 'dtc1'), 'vmid': 'vmid5'},
values={'power_status': 1,
'authurl': 'vm-test-authurl',
'manual_control': 0,
'project': 'vm-test-project',
'status': 'ACTIVE',
'retire_request_time': None,
'group_name': ut_id(gvar, 'dtg1'),
'terminate': 0,
'flavor_id': '4',
'poller_status': 'foreign',
'foreign_vm': 1,
'hostname': 'foreign-cloud--vmid5',
'vmid': 'vmid5',
'keep_alive':0,
'cloud_name': ut_id(gvar, 'dtc1')
},
server_user=ut_id(gvar, 'dtu1'), server_pw=user_secret
)
# 51
execute_csv2_request(
gvar, 0, None, None,
'/vm/list/', group=ut_id(gvar, 'dtg1'), expected_list='vm_list', list_filter={'group_name': ut_id(gvar, 'dtg1'), 'cloud_name': ut_id(gvar, 'dtc1'), 'vmid': 'vmid6'},
values={'power_status': 1,
'authurl': 'vm-test-authurl',
'manual_control': 0,
'project': 'vm-test-project',
'status': 'ACTIVE',
'retire_request_time': None,
'group_name': ut_id(gvar, 'dtg1'),
'terminate': 0,
'flavor_id': '4',
'poller_status': 'foreign',
'foreign_vm': 1,
'hostname': 'foreign-cloud--vmid6',
'vmid': 'vmid6',
'keep_alive':0,
'cloud_name': ut_id(gvar, 'dtc1')
},
server_user=ut_id(gvar, 'dtu1'), server_pw=user_secret
)
# Script entry point: run the test set when this module is executed
# directly (the `main` function is defined earlier in the file).
if __name__ == "__main__":
    main(None)
| 36.9375
| 173
| 0.519631
| 3,386
| 28,959
| 4.207029
| 0.043709
| 0.04914
| 0.097719
| 0.090979
| 0.958091
| 0.953177
| 0.932678
| 0.928607
| 0.916532
| 0.916532
| 0
| 0.03303
| 0.301633
| 28,959
| 783
| 174
| 36.984674
| 0.671331
| 0.011361
| 0
| 0.794643
| 0
| 0
| 0.347508
| 0.0245
| 0
| 0
| 0
| 0
| 0
| 1
| 0.001488
| false
| 0
| 0.002976
| 0
| 0.004464
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
326d008254b8ea145d28406365750862437dfece
| 27,986
|
py
|
Python
|
blockchain-workbench/rest-api-samples/python/swagger_client/api/contracts_api.py
|
chaosmail/blockchain
|
c78799d548c0d5deb86e03d16bf919df508d09fd
|
[
"MIT"
] | 738
|
2018-05-07T15:37:38.000Z
|
2022-03-30T08:16:04.000Z
|
blockchain-workbench/rest-api-samples/python/swagger_client/api/contracts_api.py
|
chaosmail/blockchain
|
c78799d548c0d5deb86e03d16bf919df508d09fd
|
[
"MIT"
] | 156
|
2018-05-08T14:01:25.000Z
|
2022-01-31T22:03:32.000Z
|
blockchain-workbench/rest-api-samples/python/swagger_client/api/contracts_api.py
|
cocoytech/blockchain
|
4a64a41275cf149c0ad66b7fd9864498ec6a7ed9
|
[
"MIT"
] | 682
|
2018-05-07T16:45:10.000Z
|
2022-03-31T16:50:13.000Z
|
# coding: utf-8
"""
Azure Blockchain Workbench REST API
The Azure Blockchain Workbench REST API is a Workbench extensibility point, which allows developers to create and manage blockchain applications, manage users and organizations within a consortium, integrate blockchain applications into services and platforms, perform transactions on a blockchain, and retrieve transactional and contract data from a blockchain. # noqa: E501
OpenAPI spec version: v1
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from swagger_client.api_client import ApiClient
class ContractsApi(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
def contract_action_get(self, contract_id, action_id, **kwargs): # noqa: E501
""" # noqa: E501
Gets the action matching the specified action ID. Users get the action if the user can take the action given the current state of the specified smart contract instance and the user's associated application role or smart contract instance role. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.contract_action_get(contract_id, action_id, async=True)
>>> result = thread.get()
:param async bool
:param int contract_id: The id of the contract (required)
:param int action_id: The id of the action (required)
:return: WorkflowStateTransition
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.contract_action_get_with_http_info(contract_id, action_id, **kwargs) # noqa: E501
else:
(data) = self.contract_action_get_with_http_info(contract_id, action_id, **kwargs) # noqa: E501
return data
    def contract_action_get_with_http_info(self, contract_id, action_id, **kwargs):  # noqa: E501
        """Get the action matching the specified action ID.  # noqa: E501

        Gets the action matching the specified action ID. Users get the action if the user can take the action given the current state of the specified smart contract instance and the user's associated application role or smart contract instance role.  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async=True
        >>> thread = api.contract_action_get_with_http_info(contract_id, action_id, async=True)
        >>> result = thread.get()

        :param async bool
        :param int contract_id: The id of the contract (required)
        :param int action_id: The id of the action (required)
        :return: WorkflowStateTransition
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Accepted keyword arguments: the documented API parameters plus the
        # internal flags understood by ApiClient.call_api.
        all_params = ['contract_id', 'action_id']  # noqa: E501
        all_params.append('async')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        # locals() is snapshotted here, before any further locals exist; it
        # holds self, contract_id, action_id, kwargs and all_params.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method contract_action_get" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'contract_id' is set
        if ('contract_id' not in params or
                params['contract_id'] is None):
            raise ValueError("Missing the required parameter `contract_id` when calling `contract_action_get`")  # noqa: E501
        # verify the required parameter 'action_id' is set
        if ('action_id' not in params or
                params['action_id'] is None):
            raise ValueError("Missing the required parameter `action_id` when calling `contract_action_get`")  # noqa: E501

        collection_formats = {}

        # Map the Python-style argument names onto the URL path placeholders.
        path_params = {}
        if 'contract_id' in params:
            path_params['contractId'] = params['contract_id']  # noqa: E501
        if 'action_id' in params:
            path_params['actionId'] = params['action_id']  # noqa: E501

        query_params = []

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        # Authentication setting
        auth_settings = []  # noqa: E501

        # NOTE(review): 'async' became a reserved keyword in Python 3.7, so
        # this generated call only parses on older interpreters; newer
        # swagger-codegen releases rename the flag to 'async_req'.
        return self.api_client.call_api(
            '/api/v1/contracts/{contractId}/actions/{actionId}', 'GET',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='WorkflowStateTransition',  # noqa: E501
            auth_settings=auth_settings,
            async=params.get('async'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
def contract_action_post(self, contract_id, **kwargs): # noqa: E501
""" # noqa: E501
Executes an action for the specified smart contract instance and action ID. Users are only able to execute the action given the current state of the specified smart contract instance and the user's associated application role or smart contract instance role. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.contract_action_post(contract_id, async=True)
>>> result = thread.get()
:param async bool
:param int contract_id: The id of the workflow instance (required)
:param WorkflowActionInput action_information: Parameters for a particular action
:return: LedgerActionOutput
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.contract_action_post_with_http_info(contract_id, **kwargs) # noqa: E501
else:
(data) = self.contract_action_post_with_http_info(contract_id, **kwargs) # noqa: E501
return data
    def contract_action_post_with_http_info(self, contract_id, **kwargs):  # noqa: E501
        """Execute an action on a smart contract instance.  # noqa: E501

        Executes an action for the specified smart contract instance and action ID. Users are only able to execute the action given the current state of the specified smart contract instance and the user's associated application role or smart contract instance role.  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async=True
        >>> thread = api.contract_action_post_with_http_info(contract_id, async=True)
        >>> result = thread.get()

        :param async bool
        :param int contract_id: The id of the workflow instance (required)
        :param WorkflowActionInput action_information: Parameters for a particular action
        :return: LedgerActionOutput
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Accepted keyword arguments: the documented API parameters plus the
        # internal flags understood by ApiClient.call_api.
        all_params = ['contract_id', 'action_information']  # noqa: E501
        all_params.append('async')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        # locals() is snapshotted here, before any further locals exist; it
        # holds self, contract_id, kwargs and all_params.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method contract_action_post" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'contract_id' is set
        if ('contract_id' not in params or
                params['contract_id'] is None):
            raise ValueError("Missing the required parameter `contract_id` when calling `contract_action_post`")  # noqa: E501

        collection_formats = {}

        # Map the Python-style argument name onto the URL path placeholder.
        path_params = {}
        if 'contract_id' in params:
            path_params['contractId'] = params['contract_id']  # noqa: E501

        query_params = []

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        # The optional action payload is sent as the JSON request body.
        if 'action_information' in params:
            body_params = params['action_information']
        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            ['application/json'])  # noqa: E501
        # Authentication setting
        auth_settings = []  # noqa: E501

        # NOTE(review): 'async' became a reserved keyword in Python 3.7, so
        # this generated call only parses on older interpreters; newer
        # swagger-codegen releases rename the flag to 'async_req'.
        return self.api_client.call_api(
            '/api/v1/contracts/{contractId}/actions', 'POST',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='LedgerActionOutput',  # noqa: E501
            auth_settings=auth_settings,
            async=params.get('async'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
def contract_actions_get(self, contract_id, **kwargs): # noqa: E501
""" # noqa: E501
Lists all actions, which can be taken by the given user and current state of the specified smart contract instance. Users get all applicable actions if the user has an associated application role or is associated with a smart contract instance role for the current state of the specified smart contract instance. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.contract_actions_get(contract_id, async=True)
>>> result = thread.get()
:param async bool
:param int contract_id: The id of the contract (required)
:param int top: The maximum number of items to return
:param int skip: The number of items to skip before returning
:return: WorkflowStateTransitionList
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.contract_actions_get_with_http_info(contract_id, **kwargs) # noqa: E501
else:
(data) = self.contract_actions_get_with_http_info(contract_id, **kwargs) # noqa: E501
return data
    def contract_actions_get_with_http_info(self, contract_id, **kwargs):  # noqa: E501
        """List the actions currently available on a contract instance.  # noqa: E501

        Lists all actions, which can be taken by the given user and current state of the specified smart contract instance. Users get all applicable actions if the user has an associated application role or is associated with a smart contract instance role for the current state of the specified smart contract instance.  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async=True
        >>> thread = api.contract_actions_get_with_http_info(contract_id, async=True)
        >>> result = thread.get()

        :param async bool
        :param int contract_id: The id of the contract (required)
        :param int top: The maximum number of items to return
        :param int skip: The number of items to skip before returning
        :return: WorkflowStateTransitionList
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Accepted keyword arguments: the documented API parameters plus the
        # internal flags understood by ApiClient.call_api.
        all_params = ['contract_id', 'top', 'skip']  # noqa: E501
        all_params.append('async')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        # locals() is snapshotted here, before any further locals exist; it
        # holds self, contract_id, kwargs and all_params.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method contract_actions_get" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'contract_id' is set
        if ('contract_id' not in params or
                params['contract_id'] is None):
            raise ValueError("Missing the required parameter `contract_id` when calling `contract_actions_get`")  # noqa: E501

        collection_formats = {}

        # Map the Python-style argument name onto the URL path placeholder.
        path_params = {}
        if 'contract_id' in params:
            path_params['contractId'] = params['contract_id']  # noqa: E501

        # Optional paging arguments are passed through as query parameters.
        query_params = []
        if 'top' in params:
            query_params.append(('top', params['top']))  # noqa: E501
        if 'skip' in params:
            query_params.append(('skip', params['skip']))  # noqa: E501

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        # Authentication setting
        auth_settings = []  # noqa: E501

        # NOTE(review): 'async' became a reserved keyword in Python 3.7, so
        # this generated call only parses on older interpreters; newer
        # swagger-codegen releases rename the flag to 'async_req'.
        return self.api_client.call_api(
            '/api/v1/contracts/{contractId}/actions', 'GET',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='WorkflowStateTransitionList',  # noqa: E501
            auth_settings=auth_settings,
            async=params.get('async'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
def contract_get(self, contract_id, **kwargs): # noqa: E501
""" # noqa: E501
Creates and deploys a new smart contract instance by adding the instance to the Workbench database and sending a transaction to the blockchain. This method can only be performed by users who are specified within the Initiators collection of the workflow within the Workbench application configuration. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.contract_get(contract_id, async=True)
>>> result = thread.get()
:param async bool
:param int contract_id: The id of the contract (required)
:return: Contract
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.contract_get_with_http_info(contract_id, **kwargs) # noqa: E501
else:
(data) = self.contract_get_with_http_info(contract_id, **kwargs) # noqa: E501
return data
    def contract_get_with_http_info(self, contract_id, **kwargs):  # noqa: E501
        """Get the smart contract instance matching a contract ID.  # noqa: E501

        Issues GET /api/v1/contracts/{contractId} and returns the matching
        Contract. (NOTE(review): the generated summary on this method
        described the POST endpoint; corrected here to match the verb, path
        and response_type below.)
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async=True
        >>> thread = api.contract_get_with_http_info(contract_id, async=True)
        >>> result = thread.get()

        :param async bool
        :param int contract_id: The id of the contract (required)
        :return: Contract
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Accepted keyword arguments: the documented API parameter plus the
        # internal flags understood by ApiClient.call_api.
        all_params = ['contract_id']  # noqa: E501
        all_params.append('async')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        # locals() is snapshotted here, before any further locals exist; it
        # holds self, contract_id, kwargs and all_params.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method contract_get" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'contract_id' is set
        if ('contract_id' not in params or
                params['contract_id'] is None):
            raise ValueError("Missing the required parameter `contract_id` when calling `contract_get`")  # noqa: E501

        collection_formats = {}

        # Map the Python-style argument name onto the URL path placeholder.
        path_params = {}
        if 'contract_id' in params:
            path_params['contractId'] = params['contract_id']  # noqa: E501

        query_params = []

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        # Authentication setting
        auth_settings = []  # noqa: E501

        # NOTE(review): 'async' became a reserved keyword in Python 3.7, so
        # this generated call only parses on older interpreters; newer
        # swagger-codegen releases rename the flag to 'async_req'.
        return self.api_client.call_api(
            '/api/v1/contracts/{contractId}', 'GET',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='Contract',  # noqa: E501
            auth_settings=auth_settings,
            async=params.get('async'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
def contract_post(self, **kwargs): # noqa: E501
""" # noqa: E501
Gets the smart contract instance matching a specific workflow instance ID. Users who are Workbench administrators get the smart contract instance. Non-Workbench administrators get the smart contract instance if they have at least one associated application role or is associated with the smart contract instance. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.contract_post(async=True)
>>> result = thread.get()
:param async bool
:param WorkflowActionInput workflow_action_input: The set of all contract action parameters.
:param int workflow_id: The ID of the workflow.
:param int contract_code_id: The ID of the ledger implementation.
:param int connection_id: The ID of chain instance running on the ledger.
:return: WorkflowActionInput
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.contract_post_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.contract_post_with_http_info(**kwargs) # noqa: E501
return data
    def contract_post_with_http_info(self, **kwargs):  # noqa: E501
        """Create and deploy a new smart contract instance.  # noqa: E501

        Issues POST /api/v1/contracts with the supplied workflow action
        input as the JSON body. (NOTE(review): the generated summary on
        this method described the GET endpoint; corrected here to match
        the verb and path below.)
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async=True
        >>> thread = api.contract_post_with_http_info(async=True)
        >>> result = thread.get()

        :param async bool
        :param WorkflowActionInput workflow_action_input: The set of all contract action parameters.
        :param int workflow_id: The ID of the workflow.
        :param int contract_code_id: The ID of the ledger implementation.
        :param int connection_id: The ID of chain instance running on the ledger.
        :return: WorkflowActionInput
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Accepted keyword arguments: the documented API parameters plus the
        # internal flags understood by ApiClient.call_api.
        all_params = ['workflow_action_input', 'workflow_id', 'contract_code_id', 'connection_id']  # noqa: E501
        all_params.append('async')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        # locals() is snapshotted here, before any further locals exist; it
        # holds self, kwargs and all_params.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method contract_post" % key
                )
            params[key] = val
        del params['kwargs']

        collection_formats = {}

        path_params = {}

        # Optional identifiers are passed through as camelCase query
        # parameters.
        query_params = []
        if 'workflow_id' in params:
            query_params.append(('workflowId', params['workflow_id']))  # noqa: E501
        if 'contract_code_id' in params:
            query_params.append(('contractCodeId', params['contract_code_id']))  # noqa: E501
        if 'connection_id' in params:
            query_params.append(('connectionId', params['connection_id']))  # noqa: E501

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        # The optional action payload is sent as the JSON request body.
        if 'workflow_action_input' in params:
            body_params = params['workflow_action_input']
        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            ['application/json'])  # noqa: E501
        # Authentication setting
        auth_settings = []  # noqa: E501

        # NOTE(review): 'async' became a reserved keyword in Python 3.7, so
        # this generated call only parses on older interpreters; newer
        # swagger-codegen releases rename the flag to 'async_req'.
        return self.api_client.call_api(
            '/api/v1/contracts', 'POST',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='WorkflowActionInput',  # noqa: E501
            auth_settings=auth_settings,
            async=params.get('async'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
def contracts_get(self, **kwargs):  # noqa: E501
    """List the smart contract instances of the specified workflow.  # noqa: E501

    Workbench administrators receive every smart contract instance;
    non-administrators receive only the instances for which they hold at
    least one associated application role or contract-instance role.

    Synchronous by default; pass async=True to receive the request thread
    instead of the response data.

    :param async bool
    :param int top: The maximum number of items to return
    :param int skip: The number of items to skip before returning
    :param int workflow_id: The ID of the associated workflow
    :return: ContractList
        If the method is called asynchronously,
        returns the request thread.
    """
    # This convenience wrapper only ever wants the deserialized payload,
    # not the full (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    if kwargs.get('async'):
        # Asynchronous call: hand the request thread straight back.
        return self.contracts_get_with_http_info(**kwargs)  # noqa: E501
    # Synchronous call: unwrap and return the response data.
    response = self.contracts_get_with_http_info(**kwargs)  # noqa: E501
    return response
def contracts_get_with_http_info(self, **kwargs):  # noqa: E501
    """List the smart contract instances of the specified workflow.  # noqa: E501

    Workbench administrators receive every smart contract instance;
    non-administrators receive only the instances for which they hold at
    least one associated application role or contract-instance role.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async=True.

    :param async bool
    :param int top: The maximum number of items to return
    :param int skip: The number of items to skip before returning
    :param int workflow_id: The ID of the associated workflow
    :return: ContractList
        If the method is called asynchronously,
        returns the request thread.
    :raises TypeError: if an unrecognized keyword argument is supplied.
    """
    # Endpoint parameters plus the client-level extras every method accepts.
    all_params = ['top', 'skip', 'workflow_id']  # noqa: E501
    all_params.append('async')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Validate the caller's kwargs; dict.items() works on both Python 2
    # and 3, so there is no need for six.iteritems here.
    params = {}
    for key, val in kwargs.items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method contracts_get" % key
            )
        params[key] = val

    collection_formats = {}
    path_params = {}

    # Translate snake_case Python arguments to the API's camelCase
    # query-string keys, keeping only the arguments actually supplied.
    query_params = []
    if 'top' in params:
        query_params.append(('top', params['top']))  # noqa: E501
    if 'skip' in params:
        query_params.append(('skip', params['skip']))  # noqa: E501
    if 'workflow_id' in params:
        query_params.append(('workflowId', params['workflow_id']))  # noqa: E501

    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    # Authentication setting
    auth_settings = []  # noqa: E501

    return self.api_client.call_api(
        '/api/v1/contracts', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='ContractList',  # noqa: E501
        auth_settings=auth_settings,
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats,
        # 'async' became a reserved keyword in Python 3.7, so it cannot be
        # written as a literal keyword argument; unpack it from a dict.
        **{'async': params.get('async')})
| 45.066023
| 380
| 0.63128
| 3,307
| 27,986
| 5.146356
| 0.071061
| 0.041836
| 0.03455
| 0.025383
| 0.932017
| 0.921441
| 0.916388
| 0.910571
| 0.907691
| 0.889183
| 0
| 0.014121
| 0.291503
| 27,986
| 620
| 381
| 45.13871
| 0.84421
| 0.043486
| 0
| 0.756098
| 0
| 0
| 0.191209
| 0.051511
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.012195
| null | null | 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
328b4bdcd36684375aa1ec40185550acbc5efc76
| 1,981
|
py
|
Python
|
romodel/tests/test_e2e.py
|
tsaycal/romodel
|
0d9bd0616dad41b1683452a0c7ff8e31caf47742
|
[
"MIT"
] | null | null | null |
romodel/tests/test_e2e.py
|
tsaycal/romodel
|
0d9bd0616dad41b1683452a0c7ff8e31caf47742
|
[
"MIT"
] | null | null | null |
romodel/tests/test_e2e.py
|
tsaycal/romodel
|
0d9bd0616dad41b1683452a0c7ff8e31caf47742
|
[
"MIT"
] | null | null | null |
import pyutilib.th as unittest
import pyomo.environ as pe
import romodel.examples
class TestE2E(unittest.TestCase):
    """End-to-end checks: each example model solves with each solver backend."""

    @staticmethod
    def _make_solver(name, nonconvex=False):
        # Build a solver by name; enable Gurobi's NonConvex mode when asked.
        solver = pe.SolverFactory(name)
        if nonconvex:
            solver.options['NonConvex'] = 2
        return solver

    def test_knapsack_reformulation(self):
        solver = self._make_solver('romodel.reformulation', nonconvex=True)
        model = romodel.examples.Knapsack()
        solver.solve(model, tee=False)
        # Solve again with the polyhedral uncertainty set swapped in.
        model = romodel.examples.Knapsack()
        model.w.uncset = model.P
        solver.solve(model, tee=False)

    def test_knapsack_cuts(self):
        solver = self._make_solver('romodel.cuts')
        model = romodel.examples.Knapsack()
        solver.solve(model, tee=False)

    def test_portfolio_reformulation(self):
        solver = self._make_solver('romodel.reformulation', nonconvex=True)
        model = romodel.examples.Portfolio()
        solver.solve(model, tee=False)

    def test_portfolio_cuts(self):
        solver = self._make_solver('romodel.cuts')
        model = romodel.examples.Portfolio()
        solver.solve(model, tee=False)

    def test_pooling_reformulation_ellipsoidal(self):
        solver = self._make_solver('romodel.reformulation', nonconvex=True)
        model = romodel.examples.Pooling()
        solver.solve(model, tee=False)

    def test_pooling_reformulation_polyhedral(self):
        solver = self._make_solver('romodel.reformulation', nonconvex=True)
        model = romodel.examples.Pooling()
        model.price_product.uncset = model.P
        solver.solve(model, tee=False)

    def test_pooling_cuts(self):
        solver = self._make_solver('romodel.cuts', nonconvex=True)
        model = romodel.examples.Pooling()
        solver.solve(model, tee=False)
        # Re-solve the same model after switching to the polyhedral set.
        model.price_product.uncset = model.P
        solver.solve(model, tee=False)

    def test_pooling_convex_cuts(self):
        solver = self._make_solver('romodel.cuts', nonconvex=True)
        model = romodel.examples.Pooling()
        model.price_product.uncset = model.C
        solver.solve(model, tee=False)
| 33.576271
| 58
| 0.646643
| 233
| 1,981
| 5.403433
| 0.167382
| 0.119142
| 0.095314
| 0.119142
| 0.862589
| 0.819698
| 0.819698
| 0.819698
| 0.805401
| 0.667196
| 0
| 0.00462
| 0.235235
| 1,981
| 58
| 59
| 34.155172
| 0.826403
| 0
| 0
| 0.714286
| 0
| 0
| 0.093892
| 0.042403
| 0
| 0
| 0
| 0
| 0
| 1
| 0.163265
| false
| 0
| 0.061224
| 0
| 0.244898
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
3ea18cd3363436169c3752227682f07f4a7b1698
| 1,025
|
py
|
Python
|
WPICtf/fuckkkkkkkkkkk.py
|
TrvvrT/ctf-writeups
|
dc93eb5e1296b07de4520881803d25ac757d4408
|
[
"MIT"
] | null | null | null |
WPICtf/fuckkkkkkkkkkk.py
|
TrvvrT/ctf-writeups
|
dc93eb5e1296b07de4520881803d25ac757d4408
|
[
"MIT"
] | null | null | null |
WPICtf/fuckkkkkkkkkkk.py
|
TrvvrT/ctf-writeups
|
dc93eb5e1296b07de4520881803d25ac757d4408
|
[
"MIT"
] | null | null | null |
# CTF helper: the hex blob below is a raw ELF executable (it begins with the
# magic bytes 7F 45 4C 46 == b"\x7fELF").
hex_string = "7F454C4602010100000000000000000002003E000100000080004000000000004000000000000000E0000000000000000000000040003800010040000400030001000000070000008000000000000000800040000000000080004000000000004900000000000000490000000000000010000000000000000000000000000000B801000000BF01000000BEB400400031C9678B140E83C2316789140EFFC183F91575EEBA150000000F05B83C00000031FF0F0500261F184A3B034200470A0042032E0432414431044C002E7368737472746162002E74657874002E6461746100000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000B00000001000000070000000000000080004000000000008000000000000000330000000000000000000000000000001000000000000000000000000000000011000000010000000300000000000000B400400000000000B4000000000000001500000000000000000000000000000004000000000000000000000000000000010000000300000000000000000000000000000000000000C9000000000000001700000000000000000000000000000001000000000000000000000000000000"

# Strictly decoding raw machine code as UTF-8 raises UnicodeDecodeError on the
# first invalid byte sequence (e.g. the lone 0xB8 continuation byte), so the
# original one-liner crashed before printing anything. errors="replace" keeps
# the readable ASCII fragments (section names, embedded strings) visible.
print(bytes.fromhex(hex_string).decode("utf-8", errors="replace"))
| 341.666667
| 975
| 0.982439
| 11
| 1,025
| 91.363636
| 0.818182
| 0.01791
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.900098
| 0.003902
| 1,025
| 2
| 976
| 512.5
| 0.084231
| 0
| 0
| 0
| 0
| 0
| 0.941463
| 0.936585
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.5
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 10
|
412a63e10a5daafa50ed2f3b7c0fabb500c0269e
| 244
|
py
|
Python
|
Python/tempCodeRunnerFile.py
|
marriagav/TC1001-team-7
|
1b543c3fcd4f81a1897b072147624ed016de7478
|
[
"MIT"
] | null | null | null |
Python/tempCodeRunnerFile.py
|
marriagav/TC1001-team-7
|
1b543c3fcd4f81a1897b072147624ed016de7478
|
[
"MIT"
] | null | null | null |
Python/tempCodeRunnerFile.py
|
marriagav/TC1001-team-7
|
1b543c3fcd4f81a1897b072147624ed016de7478
|
[
"MIT"
] | null | null | null |
ghosts = [
[vector(-180, 160), vector(5, 0)],
[vector(-180, -160), vector(0, 5)],
[vector(100, 160), vector(0, -5)],
[vector(100, -160), vector(-5, 0)],
[vector(50, 160), vector(-5, 0)],
[vector(50, -160), vector(-5, 0)]
| 34.857143
| 39
| 0.508197
| 37
| 244
| 3.351351
| 0.216216
| 0.435484
| 0.322581
| 0.354839
| 0.854839
| 0.717742
| 0.717742
| 0.717742
| 0.395161
| 0.395161
| 0
| 0.237113
| 0.204918
| 244
| 7
| 40
| 34.857143
| 0.402062
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 11
|
f5bd9066e6b97e63894cb825d1b5d750adc51d07
| 101,675
|
py
|
Python
|
router_traffic.py
|
mattsteinberg13/heuristic-qubit-mapping-algorithm
|
a58019e283c85e3ddc994c47b3fa3ec4dc99c8e8
|
[
"MIT"
] | null | null | null |
router_traffic.py
|
mattsteinberg13/heuristic-qubit-mapping-algorithm
|
a58019e283c85e3ddc994c47b3fa3ec4dc99c8e8
|
[
"MIT"
] | null | null | null |
router_traffic.py
|
mattsteinberg13/heuristic-qubit-mapping-algorithm
|
a58019e283c85e3ddc994c47b3fa3ec4dc99c8e8
|
[
"MIT"
] | null | null | null |
import networkx as nx
import matplotlib.pyplot as plt; plt.rcdefaults()
import numpy as np
from networkx.algorithms import shortest_paths
import time
import itertools as iter
import operator
from parser import parser
###################################################################################################
#create QPU graph
def noise_graph():
noise = nx.Graph()
qpu_size = 3
if qpu_size == 3:
noise.add_node(0,weight=np.random.rand()/1000,read_out=np.random.rand()/100)
noise.add_node(1,weight=np.random.rand()/1000,read_out=np.random.rand()/100)
noise.add_node(2,weight=np.random.rand()/1000,read_out=np.random.rand()/100)
noise.add_node(3,weight=np.random.rand()/1000,read_out=np.random.rand()/100)
noise.add_node(4,weight=np.random.rand()/1000,read_out=np.random.rand()/100)
noise.add_node(5,weight=np.random.rand()/1000,read_out=np.random.rand()/100)
noise.add_node(6,weight=np.random.rand()/1000,read_out=np.random.rand()/100)
noise.add_node(7,weight=np.random.rand()/1000,read_out=np.random.rand()/100)
noise.add_node(8,weight=np.random.rand()/1000,read_out=np.random.rand()/100)
noise.add_edge(0,1,weight=np.random.rand()/100)
noise.add_edge(0,5,weight=np.random.rand()/100)
noise.add_edge(1,2,weight=np.random.rand()/100)
noise.add_edge(1,4,weight=np.random.rand()/100)
noise.add_edge(2,3,weight=np.random.rand()/100)
noise.add_edge(3,4,weight=np.random.rand()/100)
noise.add_edge(3,8,weight=np.random.rand()/100)
noise.add_edge(4,5,weight=np.random.rand()/100)
noise.add_edge(4,7,weight=np.random.rand()/100)
noise.add_edge(5,6,weight=np.random.rand()/100)
noise.add_edge(6,7,weight=np.random.rand()/100)
noise.add_edge(7,8,weight=np.random.rand()/100)
elif qpu_size == 4:
noise.add_node(0,weight=4*np.random.rand()/1000,read_out=4*np.random.rand()/100)
noise.add_node(1,weight=4*np.random.rand()/1000,read_out=4*np.random.rand()/100)
noise.add_node(2,weight=4*np.random.rand()/1000,read_out=4*np.random.rand()/100)
noise.add_node(3,weight=4*np.random.rand()/1000,read_out=4*np.random.rand()/100)
noise.add_node(4,weight=4*np.random.rand()/1000,read_out=4*np.random.rand()/100)
noise.add_node(5,weight=4*np.random.rand()/1000,read_out=4*np.random.rand()/100)
noise.add_node(6,weight=4*np.random.rand()/1000,read_out=4*np.random.rand()/100)
noise.add_node(7,weight=4*np.random.rand()/1000,read_out=4*np.random.rand()/100)
noise.add_node(8,weight=4*np.random.rand()/1000,read_out=4*np.random.rand()/100)
noise.add_node(9,weight=4*np.random.rand()/1000,read_out=4*np.random.rand()/100)
noise.add_node(10,weight=4*np.random.rand()/1000,read_out=4*np.random.rand()/100)
noise.add_node(11,weight=4*np.random.rand()/1000,read_out=4*np.random.rand()/100)
noise.add_node(12,weight=4*np.random.rand()/1000,read_out=4*np.random.rand()/100)
noise.add_node(13,weight=4*np.random.rand()/1000,read_out=4*np.random.rand()/100)
noise.add_node(14,weight=4*np.random.rand()/1000,read_out=4*np.random.rand()/100)
noise.add_node(15,weight=4*np.random.rand()/1000,read_out=4*np.random.rand()/100)
noise.add_edge(0,1,weight=4*np.random.rand()/100)
noise.add_edge(0,7,weight=4*np.random.rand()/100)
noise.add_edge(1,2,weight=4*np.random.rand()/100)
noise.add_edge(1,6,weight=4*np.random.rand()/100)
noise.add_edge(2,5,weight=4*np.random.rand()/100)
noise.add_edge(3,2,weight=4*np.random.rand()/100)
noise.add_edge(3,4,weight=4*np.random.rand()/100)
noise.add_edge(4,5,weight=4*np.random.rand()/100)
noise.add_edge(4,11,weight=4*np.random.rand()/100)
noise.add_edge(5,8,weight=4*np.random.rand()/100)
noise.add_edge(6,5,weight=4*np.random.rand()/100)
noise.add_edge(5,10,weight=4*np.random.rand()/100)
noise.add_edge(6,9,weight=4*np.random.rand()/100)
noise.add_edge(6,7,weight=4*np.random.rand()/100)
noise.add_edge(8,7,weight=4*np.random.rand()/100)
noise.add_edge(8,15,weight=4*np.random.rand()/100)
noise.add_edge(8,9,weight=4*np.random.rand()/100)
noise.add_edge(9,10,weight=4*np.random.rand()/100)
noise.add_edge(9,14,weight=4*np.random.rand()/100)
noise.add_edge(10,13,weight=4*np.random.rand()/100)
noise.add_edge(11,10,weight=4*np.random.rand()/100)
noise.add_edge(11,12,weight=4*np.random.rand()/100)
noise.add_edge(12,13,weight=4*np.random.rand()/100)
noise.add_edge(13,14,weight=4*np.random.rand()/100)
noise.add_edge(14,15,weight=4*np.random.rand()/100)
elif qpu_size == 5:
noise.add_node(0,weight=4*np.random.rand()/1000,read_out=4*np.random.rand()/100)
noise.add_node(1,weight=4*np.random.rand()/1000,read_out=4*np.random.rand()/100)
noise.add_node(2,weight=4*np.random.rand()/1000,read_out=4*np.random.rand()/100)
noise.add_node(3,weight=4*np.random.rand()/1000,read_out=4*np.random.rand()/100)
noise.add_node(4,weight=4*np.random.rand()/1000,read_out=4*np.random.rand()/100)
noise.add_node(5,weight=4*np.random.rand()/1000,read_out=4*np.random.rand()/100)
noise.add_node(6,weight=4*np.random.rand()/1000,read_out=4*np.random.rand()/100)
noise.add_node(7,weight=4*np.random.rand()/1000,read_out=4*np.random.rand()/100)
noise.add_node(8,weight=4*np.random.rand()/1000,read_out=4*np.random.rand()/100)
noise.add_node(9,weight=4*np.random.rand()/1000,read_out=4*np.random.rand()/100)
noise.add_node(10,weight=4*np.random.rand()/1000,read_out=4*np.random.rand()/100)
noise.add_node(11,weight=4*np.random.rand()/1000,read_out=4*np.random.rand()/100)
noise.add_node(12,weight=4*np.random.rand()/1000,read_out=4*np.random.rand()/100)
noise.add_node(13,weight=4*np.random.rand()/1000,read_out=4*np.random.rand()/100)
noise.add_node(14,weight=4*np.random.rand()/1000,read_out=4*np.random.rand()/100)
noise.add_node(15,weight=4*np.random.rand()/1000,read_out=4*np.random.rand()/100)
noise.add_node(16,weight=4*np.random.rand()/1000,read_out=4*np.random.rand()/100)
noise.add_node(17,weight=4*np.random.rand()/1000,read_out=4*np.random.rand()/100)
noise.add_node(18,weight=4*np.random.rand()/1000,read_out=4*np.random.rand()/100)
noise.add_node(19,weight=4*np.random.rand()/1000,read_out=4*np.random.rand()/100)
noise.add_node(20,weight=4*np.random.rand()/1000,read_out=4*np.random.rand()/100)
noise.add_node(21,weight=4*np.random.rand()/1000,read_out=4*np.random.rand()/100)
noise.add_node(22,weight=4*np.random.rand()/1000,read_out=4*np.random.rand()/100)
noise.add_node(23,weight=4*np.random.rand()/1000,read_out=4*np.random.rand()/100)
noise.add_node(24,weight=4*np.random.rand()/1000,read_out=4*np.random.rand()/100)
noise.add_edge(0,1,weight=4*np.random.rand()/100)
noise.add_edge(0,9,weight=4*np.random.rand()/100)
noise.add_edge(1,2,weight=4*np.random.rand()/100)
noise.add_edge(1,8,weight=4*np.random.rand()/100)
noise.add_edge(2,3,weight=4*np.random.rand()/100)
noise.add_edge(2,7,weight=4*np.random.rand()/100)
noise.add_edge(3,6,weight=4*np.random.rand()/100)
noise.add_edge(3,4,weight=4*np.random.rand()/100)
noise.add_edge(4,5,weight=4*np.random.rand()/100)
noise.add_edge(5,14,weight=4*np.random.rand()/100)
noise.add_edge(5,6,weight=4*np.random.rand()/100)
noise.add_edge(6,13,weight=4*np.random.rand()/100)
noise.add_edge(6,7,weight=4*np.random.rand()/100)
noise.add_edge(7,12,weight=4*np.random.rand()/100)
noise.add_edge(7,8,weight=4*np.random.rand()/100)
noise.add_edge(8,11,weight=4*np.random.rand()/100)
noise.add_edge(8,9,weight=4*np.random.rand()/100)
noise.add_edge(9,10,weight=4*np.random.rand()/100)
noise.add_edge(10,19,weight=4*np.random.rand()/100)
noise.add_edge(10,11,weight=4*np.random.rand()/100)
noise.add_edge(11,18,weight=4*np.random.rand()/100)
noise.add_edge(12,11,weight=4*np.random.rand()/100)
noise.add_edge(12,17,weight=4*np.random.rand()/100)
noise.add_edge(12,13,weight=4*np.random.rand()/100)
noise.add_edge(13,16,weight=4*np.random.rand()/100)
noise.add_edge(13,14,weight=4*np.random.rand()/100)
noise.add_edge(14,15,weight=4*np.random.rand()/100)
noise.add_edge(15,24,weight=4*np.random.rand()/100)
noise.add_edge(15,16,weight=4*np.random.rand()/100)
noise.add_edge(16,23,weight=4*np.random.rand()/100)
noise.add_edge(16,17,weight=4*np.random.rand()/100)
noise.add_edge(17,18,weight=4*np.random.rand()/100)
noise.add_edge(17,22,weight=4*np.random.rand()/100)
noise.add_edge(18,21,weight=4*np.random.rand()/100)
noise.add_edge(18,19,weight=4*np.random.rand()/100)
noise.add_edge(20,19,weight=4*np.random.rand()/100)
noise.add_edge(20,21,weight=4*np.random.rand()/100)
noise.add_edge(22,21,weight=4*np.random.rand()/100)
noise.add_edge(22,23,weight=4*np.random.rand()/100)
noise.add_edge(24,23,weight=4*np.random.rand()/100)
elif qpu_size == 6:
noise.add_node(0,weight=4*np.random.rand()/1000,read_out=4*np.random.rand()/100)
noise.add_node(1,weight=4*np.random.rand()/1000,read_out=4*np.random.rand()/100)
noise.add_node(2,weight=4*np.random.rand()/1000,read_out=4*np.random.rand()/100)
noise.add_node(3,weight=4*np.random.rand()/1000,read_out=4*np.random.rand()/100)
noise.add_node(4,weight=4*np.random.rand()/1000,read_out=4*np.random.rand()/100)
noise.add_node(5,weight=4*np.random.rand()/1000,read_out=4*np.random.rand()/100)
noise.add_node(6,weight=4*np.random.rand()/1000,read_out=4*np.random.rand()/100)
noise.add_node(7,weight=4*np.random.rand()/1000,read_out=4*np.random.rand()/100)
noise.add_node(8,weight=4*np.random.rand()/1000,read_out=4*np.random.rand()/100)
noise.add_node(9,weight=4*np.random.rand()/1000,read_out=4*np.random.rand()/100)
noise.add_node(10,weight=4*np.random.rand()/1000,read_out=4*np.random.rand()/100)
noise.add_node(11,weight=4*np.random.rand()/1000,read_out=4*np.random.rand()/100)
noise.add_node(12,weight=4*np.random.rand()/1000,read_out=4*np.random.rand()/100)
noise.add_node(13,weight=4*np.random.rand()/1000,read_out=4*np.random.rand()/100)
noise.add_node(14,weight=4*np.random.rand()/1000,read_out=4*np.random.rand()/100)
noise.add_node(15,weight=4*np.random.rand()/1000,read_out=4*np.random.rand()/100)
noise.add_node(16,weight=4*np.random.rand()/1000,read_out=4*np.random.rand()/100)
noise.add_node(17,weight=4*np.random.rand()/1000,read_out=4*np.random.rand()/100)
noise.add_node(18,weight=4*np.random.rand()/1000,read_out=4*np.random.rand()/100)
noise.add_node(19,weight=4*np.random.rand()/1000,read_out=4*np.random.rand()/100)
noise.add_node(20,weight=4*np.random.rand()/1000,read_out=4*np.random.rand()/100)
noise.add_node(21,weight=4*np.random.rand()/1000,read_out=4*np.random.rand()/100)
noise.add_node(22,weight=4*np.random.rand()/1000,read_out=4*np.random.rand()/100)
noise.add_node(23,weight=4*np.random.rand()/1000,read_out=4*np.random.rand()/100)
noise.add_node(24,weight=4*np.random.rand()/1000,read_out=4*np.random.rand()/100)
noise.add_node(25,weight=4*np.random.rand()/1000,read_out=4*np.random.rand()/100)
noise.add_node(26,weight=4*np.random.rand()/1000,read_out=4*np.random.rand()/100)
noise.add_node(27,weight=4*np.random.rand()/1000,read_out=4*np.random.rand()/100)
noise.add_node(28,weight=4*np.random.rand()/1000,read_out=4*np.random.rand()/100)
noise.add_node(29,weight=4*np.random.rand()/1000,read_out=4*np.random.rand()/100)
noise.add_node(30,weight=4*np.random.rand()/1000,read_out=4*np.random.rand()/100)
noise.add_node(31,weight=4*np.random.rand()/1000,read_out=4*np.random.rand()/100)
noise.add_node(32,weight=4*np.random.rand()/1000,read_out=4*np.random.rand()/100)
noise.add_node(33,weight=4*np.random.rand()/1000,read_out=4*np.random.rand()/100)
noise.add_node(34,weight=4*np.random.rand()/1000,read_out=4*np.random.rand()/100)
noise.add_node(35,weight=4*np.random.rand()/1000,read_out=4*np.random.rand()/100)
noise.add_edge(0,1,weight=4*np.random.rand()/100)
noise.add_edge(0,11,weight=4*np.random.rand()/100)
noise.add_edge(2,1,weight=4*np.random.rand()/100)
noise.add_edge(10,1,weight=4*np.random.rand()/100)
noise.add_edge(2,3,weight=4*np.random.rand()/100)
noise.add_edge(2,9,weight=4*np.random.rand()/100)
noise.add_edge(3,8,weight=4*np.random.rand()/100)
noise.add_edge(3,4,weight=4*np.random.rand()/100)
noise.add_edge(4,5,weight=4*np.random.rand()/100)
noise.add_edge(4,7,weight=4*np.random.rand()/100)
noise.add_edge(5,6,weight=4*np.random.rand()/100)
noise.add_edge(6,17,weight=4*np.random.rand()/100)
noise.add_edge(6,7,weight=4*np.random.rand()/100)
noise.add_edge(7,16,weight=4*np.random.rand()/100)
noise.add_edge(7,8,weight=4*np.random.rand()/100)
noise.add_edge(8,15,weight=4*np.random.rand()/100)
noise.add_edge(8,9,weight=4*np.random.rand()/100)
noise.add_edge(9,10,weight=4*np.random.rand()/100)
noise.add_edge(9,14,weight=4*np.random.rand()/100)
noise.add_edge(10,11,weight=4*np.random.rand()/100)
noise.add_edge(10,13,weight=4*np.random.rand()/100)
noise.add_edge(12,11,weight=4*np.random.rand()/100)
noise.add_edge(12,23,weight=4*np.random.rand()/100)
noise.add_edge(12,13,weight=4*np.random.rand()/100)
noise.add_edge(13,22,weight=4*np.random.rand()/100)
noise.add_edge(13,14,weight=4*np.random.rand()/100)
noise.add_edge(14,15,weight=4*np.random.rand()/100)
noise.add_edge(14,21,weight=4*np.random.rand()/100)
noise.add_edge(20,15,weight=4*np.random.rand()/100)
noise.add_edge(15,16,weight=4*np.random.rand()/100)
noise.add_edge(16,19,weight=4*np.random.rand()/100)
noise.add_edge(17,16,weight=4*np.random.rand()/100)
noise.add_edge(17,18,weight=4*np.random.rand()/100)
noise.add_edge(19,18,weight=4*np.random.rand()/100)
noise.add_edge(29,18,weight=4*np.random.rand()/100)
noise.add_edge(20,19,weight=4*np.random.rand()/100)
noise.add_edge(28,19,weight=4*np.random.rand()/100)
noise.add_edge(20,21,weight=4*np.random.rand()/100)
noise.add_edge(20,27,weight=4*np.random.rand()/100)
noise.add_edge(21,26,weight=4*np.random.rand()/100)
noise.add_edge(21,22,weight=4*np.random.rand()/100)
noise.add_edge(22,25,weight=4*np.random.rand()/100)
noise.add_edge(22,23,weight=4*np.random.rand()/100)
noise.add_edge(23,24,weight=4*np.random.rand()/100)
noise.add_edge(24,35,weight=4*np.random.rand()/100)
noise.add_edge(24,25,weight=4*np.random.rand()/100)
noise.add_edge(25,34,weight=4*np.random.rand()/100)
noise.add_edge(25,26,weight=4*np.random.rand()/100)
noise.add_edge(26,33,weight=4*np.random.rand()/100)
noise.add_edge(27,26,weight=4*np.random.rand()/100)
noise.add_edge(27,32,weight=4*np.random.rand()/100)
noise.add_edge(27,28,weight=4*np.random.rand()/100)
noise.add_edge(28,31,weight=4*np.random.rand()/100)
noise.add_edge(28,29,weight=4*np.random.rand()/100)
noise.add_edge(29,30,weight=4*np.random.rand()/100)
noise.add_edge(30,31,weight=4*np.random.rand()/100)
noise.add_edge(32,31,weight=4*np.random.rand()/100)
noise.add_edge(33,32,weight=4*np.random.rand()/100)
noise.add_edge(34,33,weight=4*np.random.rand()/100)
noise.add_edge(34,35,weight=4*np.random.rand()/100)
elif qpu_size == 7:
noise.add_node(0,weight=4*np.random.rand()/1000,read_out=4*np.random.rand()/100)
noise.add_node(1,weight=4*np.random.rand()/1000,read_out=4*np.random.rand()/100)
noise.add_node(2,weight=4*np.random.rand()/1000,read_out=4*np.random.rand()/100)
noise.add_node(3,weight=4*np.random.rand()/1000,read_out=4*np.random.rand()/100)
noise.add_node(4,weight=4*np.random.rand()/1000,read_out=4*np.random.rand()/100)
noise.add_node(5,weight=4*np.random.rand()/1000,read_out=4*np.random.rand()/100)
noise.add_node(6,weight=4*np.random.rand()/1000,read_out=4*np.random.rand()/100)
noise.add_node(7,weight=4*np.random.rand()/1000,read_out=4*np.random.rand()/100)
noise.add_node(8,weight=4*np.random.rand()/1000,read_out=4*np.random.rand()/100)
noise.add_node(9,weight=4*np.random.rand()/1000,read_out=4*np.random.rand()/100)
noise.add_node(10,weight=4*np.random.rand()/1000,read_out=4*np.random.rand()/100)
noise.add_node(11,weight=4*np.random.rand()/1000,read_out=4*np.random.rand()/100)
noise.add_node(12,weight=4*np.random.rand()/1000,read_out=4*np.random.rand()/100)
noise.add_node(13,weight=4*np.random.rand()/1000,read_out=4*np.random.rand()/100)
noise.add_node(14,weight=4*np.random.rand()/1000,read_out=4*np.random.rand()/100)
noise.add_node(15,weight=4*np.random.rand()/1000,read_out=4*np.random.rand()/100)
noise.add_node(16,weight=4*np.random.rand()/1000,read_out=4*np.random.rand()/100)
noise.add_node(17,weight=4*np.random.rand()/1000,read_out=4*np.random.rand()/100)
noise.add_node(18,weight=4*np.random.rand()/1000,read_out=4*np.random.rand()/100)
noise.add_node(19,weight=4*np.random.rand()/1000,read_out=4*np.random.rand()/100)
noise.add_node(20,weight=4*np.random.rand()/1000,read_out=4*np.random.rand()/100)
noise.add_node(21,weight=4*np.random.rand()/1000,read_out=4*np.random.rand()/100)
noise.add_node(22,weight=4*np.random.rand()/1000,read_out=4*np.random.rand()/100)
noise.add_node(23,weight=4*np.random.rand()/1000,read_out=4*np.random.rand()/100)
noise.add_node(24,weight=4*np.random.rand()/1000,read_out=4*np.random.rand()/100)
noise.add_node(25,weight=4*np.random.rand()/1000,read_out=4*np.random.rand()/100)
noise.add_node(26,weight=4*np.random.rand()/1000,read_out=4*np.random.rand()/100)
noise.add_node(27,weight=4*np.random.rand()/1000,read_out=4*np.random.rand()/100)
noise.add_node(28,weight=4*np.random.rand()/1000,read_out=4*np.random.rand()/100)
noise.add_node(29,weight=4*np.random.rand()/1000,read_out=4*np.random.rand()/100)
noise.add_node(30,weight=4*np.random.rand()/1000,read_out=4*np.random.rand()/100)
noise.add_node(31,weight=4*np.random.rand()/1000,read_out=4*np.random.rand()/100)
noise.add_node(32,weight=4*np.random.rand()/1000,read_out=4*np.random.rand()/100)
noise.add_node(33,weight=4*np.random.rand()/1000,read_out=4*np.random.rand()/100)
noise.add_node(34,weight=4*np.random.rand()/1000,read_out=4*np.random.rand()/100)
noise.add_node(35,weight=4*np.random.rand()/1000,read_out=4*np.random.rand()/100)
noise.add_node(36,weight=4*np.random.rand()/1000,read_out=4*np.random.rand()/100)
noise.add_node(37,weight=4*np.random.rand()/1000,read_out=4*np.random.rand()/100)
noise.add_node(38,weight=4*np.random.rand()/1000,read_out=4*np.random.rand()/100)
noise.add_node(39,weight=4*np.random.rand()/1000,read_out=4*np.random.rand()/100)
noise.add_node(40,weight=4*np.random.rand()/1000,read_out=4*np.random.rand()/100)
noise.add_node(41,weight=4*np.random.rand()/1000,read_out=4*np.random.rand()/100)
noise.add_node(42,weight=4*np.random.rand()/1000,read_out=4*np.random.rand()/100)
noise.add_node(43,weight=4*np.random.rand()/1000,read_out=4*np.random.rand()/100)
noise.add_node(44,weight=4*np.random.rand()/1000,read_out=4*np.random.rand()/100)
noise.add_node(45,weight=4*np.random.rand()/1000,read_out=4*np.random.rand()/100)
noise.add_node(46,weight=4*np.random.rand()/1000,read_out=4*np.random.rand()/100)
noise.add_node(47,weight=4*np.random.rand()/1000,read_out=4*np.random.rand()/100)
noise.add_node(48,weight=4*np.random.rand()/1000,read_out=4*np.random.rand()/100)
noise.add_edge(0,1,weight=4*np.random.rand()/100)
noise.add_edge(0,13,weight=4*np.random.rand()/100)
noise.add_edge(2,1,weight=4*np.random.rand()/100)
noise.add_edge(12,1,weight=4*np.random.rand()/100)
noise.add_edge(2,3,weight=4*np.random.rand()/100)
noise.add_edge(2,11,weight=4*np.random.rand()/100)
noise.add_edge(3,10,weight=4*np.random.rand()/100)
noise.add_edge(3,4,weight=4*np.random.rand()/100)
noise.add_edge(4,9,weight=4*np.random.rand()/100)
noise.add_edge(5,4,weight=4*np.random.rand()/100)
noise.add_edge(5,8,weight=4*np.random.rand()/100)
noise.add_edge(6,5,weight=4*np.random.rand()/100)
noise.add_edge(6,7,weight=4*np.random.rand()/100)
noise.add_edge(7,20,weight=4*np.random.rand()/100)
noise.add_edge(7,8,weight=4*np.random.rand()/100)
noise.add_edge(8,19,weight=4*np.random.rand()/100)
noise.add_edge(8,9,weight=4*np.random.rand()/100)
noise.add_edge(9,18,weight=4*np.random.rand()/100)
noise.add_edge(9,10,weight=4*np.random.rand()/100)
noise.add_edge(10,11,weight=4*np.random.rand()/100)
noise.add_edge(10,17,weight=4*np.random.rand()/100)
noise.add_edge(16,11,weight=4*np.random.rand()/100)
noise.add_edge(12,11,weight=4*np.random.rand()/100)
noise.add_edge(12,15,weight=4*np.random.rand()/100)
noise.add_edge(13,12,weight=4*np.random.rand()/100)
noise.add_edge(13,14,weight=4*np.random.rand()/100)
noise.add_edge(14,27,weight=4*np.random.rand()/100)
noise.add_edge(14,15,weight=4*np.random.rand()/100)
noise.add_edge(15,26,weight=4*np.random.rand()/100)
# Remaining two-qubit coupling-map noise terms for this branch (the branch
# header and earlier node/edge statements precede this chunk).  Each edge
# gets an independent random gate-error weight in [0, 0.04).
# Data-driven form of 54 copy-pasted add_edge calls: the pairs below are
# listed in exactly the original statement order, so np.random.rand() is
# consumed identically and the resulting graph is the same.
for u, v in [
    (16, 15), (16, 25), (17, 16), (24, 17), (17, 18), (23, 18),
    (18, 19), (22, 19), (19, 20), (20, 21), (21, 34), (22, 21),
    (22, 33), (23, 22), (23, 32), (23, 24), (31, 24), (25, 24),
    (25, 30), (26, 29), (26, 27), (27, 28), (28, 41), (28, 29),
    (29, 40), (29, 30), (30, 39), (30, 31), (38, 31), (31, 32),
    (32, 37), (32, 33), (36, 33), (34, 33), (34, 35), (35, 48),
    (35, 36), (36, 47), (36, 37), (37, 46), (38, 37), (38, 45),
    (38, 39), (39, 44), (39, 40), (40, 43), (40, 41), (41, 42),
    (42, 43), (43, 44), (44, 45), (45, 46), (46, 47), (47, 48),
]:
    noise.add_edge(u, v, weight=4*np.random.rand()/100)
elif qpu_size == 8:
noise.add_node(0,weight=4*np.random.rand()/1000,read_out=4*np.random.rand()/100)
noise.add_node(1,weight=4*np.random.rand()/1000,read_out=4*np.random.rand()/100)
noise.add_node(2,weight=4*np.random.rand()/1000,read_out=4*np.random.rand()/100)
noise.add_node(3,weight=4*np.random.rand()/1000,read_out=4*np.random.rand()/100)
noise.add_node(4,weight=4*np.random.rand()/1000,read_out=4*np.random.rand()/100)
noise.add_node(5,weight=4*np.random.rand()/1000,read_out=4*np.random.rand()/100)
noise.add_node(6,weight=4*np.random.rand()/1000,read_out=4*np.random.rand()/100)
noise.add_node(7,weight=4*np.random.rand()/1000,read_out=4*np.random.rand()/100)
noise.add_node(8,weight=4*np.random.rand()/1000,read_out=4*np.random.rand()/100)
noise.add_node(9,weight=4*np.random.rand()/1000,read_out=4*np.random.rand()/100)
noise.add_node(10,weight=4*np.random.rand()/1000,read_out=4*np.random.rand()/100)
noise.add_node(11,weight=4*np.random.rand()/1000,read_out=4*np.random.rand()/100)
noise.add_node(12,weight=4*np.random.rand()/1000,read_out=4*np.random.rand()/100)
noise.add_node(13,weight=4*np.random.rand()/1000,read_out=4*np.random.rand()/100)
noise.add_node(14,weight=4*np.random.rand()/1000,read_out=4*np.random.rand()/100)
noise.add_node(15,weight=4*np.random.rand()/1000,read_out=4*np.random.rand()/100)
noise.add_node(16,weight=4*np.random.rand()/1000,read_out=4*np.random.rand()/100)
noise.add_node(17,weight=4*np.random.rand()/1000,read_out=4*np.random.rand()/100)
noise.add_node(18,weight=4*np.random.rand()/1000,read_out=4*np.random.rand()/100)
noise.add_node(19,weight=4*np.random.rand()/1000,read_out=4*np.random.rand()/100)
noise.add_node(20,weight=4*np.random.rand()/1000,read_out=4*np.random.rand()/100)
noise.add_node(21,weight=4*np.random.rand()/1000,read_out=4*np.random.rand()/100)
noise.add_node(22,weight=4*np.random.rand()/1000,read_out=4*np.random.rand()/100)
noise.add_node(23,weight=4*np.random.rand()/1000,read_out=4*np.random.rand()/100)
noise.add_node(24,weight=4*np.random.rand()/1000,read_out=4*np.random.rand()/100)
noise.add_node(25,weight=4*np.random.rand()/1000,read_out=4*np.random.rand()/100)
noise.add_node(26,weight=4*np.random.rand()/1000,read_out=4*np.random.rand()/100)
noise.add_node(27,weight=4*np.random.rand()/1000,read_out=4*np.random.rand()/100)
noise.add_node(28,weight=4*np.random.rand()/1000,read_out=4*np.random.rand()/100)
noise.add_node(29,weight=4*np.random.rand()/1000,read_out=4*np.random.rand()/100)
noise.add_node(30,weight=4*np.random.rand()/1000,read_out=4*np.random.rand()/100)
noise.add_node(31,weight=4*np.random.rand()/1000,read_out=4*np.random.rand()/100)
noise.add_node(32,weight=4*np.random.rand()/1000,read_out=4*np.random.rand()/100)
noise.add_node(33,weight=4*np.random.rand()/1000,read_out=4*np.random.rand()/100)
noise.add_node(34,weight=4*np.random.rand()/1000,read_out=4*np.random.rand()/100)
noise.add_node(35,weight=4*np.random.rand()/1000,read_out=4*np.random.rand()/100)
noise.add_node(36,weight=4*np.random.rand()/1000,read_out=4*np.random.rand()/100)
noise.add_node(37,weight=4*np.random.rand()/1000,read_out=4*np.random.rand()/100)
noise.add_node(38,weight=4*np.random.rand()/1000,read_out=4*np.random.rand()/100)
noise.add_node(39,weight=4*np.random.rand()/1000,read_out=4*np.random.rand()/100)
noise.add_node(40,weight=4*np.random.rand()/1000,read_out=4*np.random.rand()/100)
noise.add_node(41,weight=4*np.random.rand()/1000,read_out=4*np.random.rand()/100)
noise.add_node(42,weight=4*np.random.rand()/1000,read_out=4*np.random.rand()/100)
noise.add_node(43,weight=4*np.random.rand()/1000,read_out=4*np.random.rand()/100)
noise.add_node(44,weight=4*np.random.rand()/1000,read_out=4*np.random.rand()/100)
noise.add_node(45,weight=4*np.random.rand()/1000,read_out=4*np.random.rand()/100)
noise.add_node(46,weight=4*np.random.rand()/1000,read_out=4*np.random.rand()/100)
noise.add_node(47,weight=4*np.random.rand()/1000,read_out=4*np.random.rand()/100)
noise.add_node(48,weight=4*np.random.rand()/1000,read_out=4*np.random.rand()/100)
noise.add_node(49,weight=4*np.random.rand()/1000,read_out=4*np.random.rand()/100)
noise.add_node(50,weight=4*np.random.rand()/1000,read_out=4*np.random.rand()/100)
noise.add_node(51,weight=4*np.random.rand()/1000,read_out=4*np.random.rand()/100)
noise.add_node(52,weight=4*np.random.rand()/1000,read_out=4*np.random.rand()/100)
noise.add_node(53,weight=4*np.random.rand()/1000,read_out=4*np.random.rand()/100)
noise.add_node(54,weight=4*np.random.rand()/1000,read_out=4*np.random.rand()/100)
noise.add_node(55,weight=4*np.random.rand()/1000,read_out=4*np.random.rand()/100)
noise.add_node(56,weight=4*np.random.rand()/1000,read_out=4*np.random.rand()/100)
noise.add_node(57,weight=4*np.random.rand()/1000,read_out=4*np.random.rand()/100)
noise.add_node(58,weight=4*np.random.rand()/1000,read_out=4*np.random.rand()/100)
noise.add_node(59,weight=4*np.random.rand()/1000,read_out=4*np.random.rand()/100)
noise.add_node(60,weight=4*np.random.rand()/1000,read_out=4*np.random.rand()/100)
noise.add_node(61,weight=4*np.random.rand()/1000,read_out=4*np.random.rand()/100)
noise.add_node(62,weight=4*np.random.rand()/1000,read_out=4*np.random.rand()/100)
noise.add_node(63,weight=4*np.random.rand()/1000,read_out=4*np.random.rand()/100)
noise.add_edge(0,1,weight=4*np.random.rand()/100)
noise.add_edge(0,13,weight=4*np.random.rand()/100)
noise.add_edge(2,1,weight=4*np.random.rand()/100)
noise.add_edge(12,1,weight=4*np.random.rand()/100)
noise.add_edge(2,3,weight=4*np.random.rand()/100)
noise.add_edge(2,11,weight=4*np.random.rand()/100)
noise.add_edge(3,10,weight=4*np.random.rand()/100)
noise.add_edge(3,4,weight=4*np.random.rand()/100)
noise.add_edge(4,9,weight=4*np.random.rand()/100)
noise.add_edge(5,4,weight=4*np.random.rand()/100)
noise.add_edge(5,8,weight=4*np.random.rand()/100)
noise.add_edge(6,5,weight=4*np.random.rand()/100)
noise.add_edge(6,7,weight=4*np.random.rand()/100)
noise.add_edge(7,20,weight=4*np.random.rand()/100)
noise.add_edge(7,8,weight=4*np.random.rand()/100)
noise.add_edge(8,19,weight=4*np.random.rand()/100)
noise.add_edge(8,9,weight=4*np.random.rand()/100)
noise.add_edge(9,18,weight=4*np.random.rand()/100)
noise.add_edge(9,10,weight=4*np.random.rand()/100)
noise.add_edge(10,11,weight=4*np.random.rand()/100)
noise.add_edge(10,17,weight=4*np.random.rand()/100)
noise.add_edge(16,11,weight=4*np.random.rand()/100)
noise.add_edge(12,11,weight=4*np.random.rand()/100)
noise.add_edge(12,15,weight=4*np.random.rand()/100)
noise.add_edge(13,12,weight=4*np.random.rand()/100)
noise.add_edge(13,14,weight=4*np.random.rand()/100)
noise.add_edge(14,27,weight=4*np.random.rand()/100)
noise.add_edge(14,15,weight=4*np.random.rand()/100)
noise.add_edge(15,26,weight=4*np.random.rand()/100)
noise.add_edge(16,15,weight=4*np.random.rand()/100)
noise.add_edge(16,25,weight=4*np.random.rand()/100)
noise.add_edge(17,16,weight=4*np.random.rand()/100)
noise.add_edge(24,17,weight=4*np.random.rand()/100)
noise.add_edge(17,18,weight=4*np.random.rand()/100)
noise.add_edge(23,18,weight=4*np.random.rand()/100)
noise.add_edge(18,19,weight=4*np.random.rand()/100)
noise.add_edge(22,19,weight=4*np.random.rand()/100)
noise.add_edge(19,20,weight=4*np.random.rand()/100)
noise.add_edge(20,21,weight=4*np.random.rand()/100)
noise.add_edge(21,34,weight=4*np.random.rand()/100)
noise.add_edge(22,21,weight=4*np.random.rand()/100)
noise.add_edge(22,33,weight=4*np.random.rand()/100)
noise.add_edge(23,22,weight=4*np.random.rand()/100)
noise.add_edge(23,32,weight=4*np.random.rand()/100)
noise.add_edge(23,24,weight=4*np.random.rand()/100)
noise.add_edge(31,24,weight=4*np.random.rand()/100)
noise.add_edge(25,24,weight=4*np.random.rand()/100)
noise.add_edge(25,30,weight=4*np.random.rand()/100)
noise.add_edge(26,29,weight=4*np.random.rand()/100)
noise.add_edge(26,27,weight=4*np.random.rand()/100)
noise.add_edge(27,28,weight=4*np.random.rand()/100)
noise.add_edge(28,41,weight=4*np.random.rand()/100)
noise.add_edge(28,29,weight=4*np.random.rand()/100)
noise.add_edge(29,40,weight=4*np.random.rand()/100)
noise.add_edge(29,30,weight=4*np.random.rand()/100)
noise.add_edge(30,39,weight=4*np.random.rand()/100)
noise.add_edge(30,31,weight=4*np.random.rand()/100)
noise.add_edge(38,31,weight=4*np.random.rand()/100)
noise.add_edge(31,32,weight=4*np.random.rand()/100)
noise.add_edge(32,37,weight=4*np.random.rand()/100)
noise.add_edge(32,33,weight=4*np.random.rand()/100)
noise.add_edge(36,33,weight=4*np.random.rand()/100)
noise.add_edge(34,33,weight=4*np.random.rand()/100)
noise.add_edge(34,35,weight=4*np.random.rand()/100)
noise.add_edge(35,48,weight=4*np.random.rand()/100)
noise.add_edge(35,36,weight=4*np.random.rand()/100)
noise.add_edge(36,47,weight=4*np.random.rand()/100)
noise.add_edge(36,37,weight=4*np.random.rand()/100)
noise.add_edge(37,46,weight=4*np.random.rand()/100)
noise.add_edge(38,37,weight=4*np.random.rand()/100)
noise.add_edge(38,45,weight=4*np.random.rand()/100)
noise.add_edge(38,39,weight=4*np.random.rand()/100)
noise.add_edge(39,44,weight=4*np.random.rand()/100)
noise.add_edge(39,40,weight=4*np.random.rand()/100)
noise.add_edge(40,43,weight=4*np.random.rand()/100)
noise.add_edge(40,41,weight=4*np.random.rand()/100)
noise.add_edge(41,42,weight=4*np.random.rand()/100)
noise.add_edge(42,43,weight=4*np.random.rand()/100)
noise.add_edge(43,44,weight=4*np.random.rand()/100)
noise.add_edge(44,45,weight=4*np.random.rand()/100)
noise.add_edge(45,46,weight=4*np.random.rand()/100)
noise.add_edge(46,47,weight=4*np.random.rand()/100)
noise.add_edge(47,48,weight=4*np.random.rand()/100)
noise.add_edge(49,48,weight=4*np.random.rand()/100)
noise.add_edge(49,50,weight=4*np.random.rand()/100)
noise.add_edge(47,50,weight=4*np.random.rand()/100)
noise.add_edge(51,50,weight=4*np.random.rand()/100)
noise.add_edge(46,51,weight=4*np.random.rand()/100)
noise.add_edge(51,52,weight=4*np.random.rand()/100)
noise.add_edge(52,45,weight=4*np.random.rand()/100)
noise.add_edge(52,53,weight=4*np.random.rand()/100)
noise.add_edge(44,53,weight=4*np.random.rand()/100)
noise.add_edge(53,54,weight=4*np.random.rand()/100)
noise.add_edge(43,54,weight=4*np.random.rand()/100)
noise.add_edge(54,55,weight=4*np.random.rand()/100)
noise.add_edge(42,55,weight=4*np.random.rand()/100)
noise.add_edge(55,56,weight=4*np.random.rand()/100)
noise.add_edge(57,56,weight=4*np.random.rand()/100)
noise.add_edge(42,57,weight=4*np.random.rand()/100)
noise.add_edge(41,58,weight=4*np.random.rand()/100)
noise.add_edge(57,58,weight=4*np.random.rand()/100)
noise.add_edge(58,59,weight=4*np.random.rand()/100)
noise.add_edge(28,59,weight=4*np.random.rand()/100)
noise.add_edge(60,59,weight=4*np.random.rand()/100)
noise.add_edge(60,27,weight=4*np.random.rand()/100)
noise.add_edge(61,60,weight=4*np.random.rand()/100)
noise.add_edge(61,14,weight=4*np.random.rand()/100)
noise.add_edge(62,61,weight=4*np.random.rand()/100)
noise.add_edge(62,13,weight=4*np.random.rand()/100)
noise.add_edge(62,63,weight=4*np.random.rand()/100)
noise.add_edge(0,63,weight=4*np.random.rand()/100)
elif qpu_size == 9:
noise.add_node(0,weight=64*np.random.rand()/1000,read_out=64*np.random.rand()/100)
noise.add_node(1,weight=64*np.random.rand()/1000,read_out=64*np.random.rand()/100)
noise.add_node(2,weight=64*np.random.rand()/1000,read_out=64*np.random.rand()/100)
noise.add_node(3,weight=64*np.random.rand()/1000,read_out=64*np.random.rand()/100)
noise.add_node(4,weight=64*np.random.rand()/1000,read_out=64*np.random.rand()/100)
noise.add_node(5,weight=64*np.random.rand()/1000,read_out=64*np.random.rand()/100)
noise.add_node(6,weight=64*np.random.rand()/1000,read_out=64*np.random.rand()/100)
noise.add_node(7,weight=64*np.random.rand()/1000,read_out=64*np.random.rand()/100)
noise.add_node(8,weight=64*np.random.rand()/1000,read_out=64*np.random.rand()/100)
noise.add_node(9,weight=64*np.random.rand()/1000,read_out=64*np.random.rand()/100)
noise.add_node(10,weight=64*np.random.rand()/1000,read_out=64*np.random.rand()/100)
noise.add_node(11,weight=64*np.random.rand()/1000,read_out=64*np.random.rand()/100)
noise.add_node(12,weight=64*np.random.rand()/1000,read_out=64*np.random.rand()/100)
noise.add_node(13,weight=64*np.random.rand()/1000,read_out=64*np.random.rand()/100)
noise.add_node(14,weight=64*np.random.rand()/1000,read_out=64*np.random.rand()/100)
noise.add_node(15,weight=64*np.random.rand()/1000,read_out=64*np.random.rand()/100)
noise.add_node(16,weight=64*np.random.rand()/1000,read_out=64*np.random.rand()/100)
noise.add_node(17,weight=64*np.random.rand()/1000,read_out=64*np.random.rand()/100)
noise.add_node(18,weight=64*np.random.rand()/1000,read_out=64*np.random.rand()/100)
noise.add_node(19,weight=64*np.random.rand()/1000,read_out=64*np.random.rand()/100)
noise.add_node(20,weight=64*np.random.rand()/1000,read_out=64*np.random.rand()/100)
noise.add_node(21,weight=64*np.random.rand()/1000,read_out=64*np.random.rand()/100)
noise.add_node(22,weight=64*np.random.rand()/1000,read_out=64*np.random.rand()/100)
noise.add_node(23,weight=64*np.random.rand()/1000,read_out=64*np.random.rand()/100)
noise.add_node(24,weight=64*np.random.rand()/1000,read_out=64*np.random.rand()/100)
noise.add_node(25,weight=64*np.random.rand()/1000,read_out=64*np.random.rand()/100)
noise.add_node(26,weight=64*np.random.rand()/1000,read_out=64*np.random.rand()/100)
noise.add_node(27,weight=64*np.random.rand()/1000,read_out=64*np.random.rand()/100)
noise.add_node(28,weight=64*np.random.rand()/1000,read_out=64*np.random.rand()/100)
noise.add_node(29,weight=64*np.random.rand()/1000,read_out=64*np.random.rand()/100)
noise.add_node(30,weight=64*np.random.rand()/1000,read_out=64*np.random.rand()/100)
noise.add_node(31,weight=64*np.random.rand()/1000,read_out=64*np.random.rand()/100)
noise.add_node(32,weight=64*np.random.rand()/1000,read_out=64*np.random.rand()/100)
noise.add_node(33,weight=64*np.random.rand()/1000,read_out=64*np.random.rand()/100)
noise.add_node(34,weight=64*np.random.rand()/1000,read_out=64*np.random.rand()/100)
noise.add_node(35,weight=64*np.random.rand()/1000,read_out=64*np.random.rand()/100)
noise.add_node(36,weight=64*np.random.rand()/1000,read_out=64*np.random.rand()/100)
noise.add_node(37,weight=64*np.random.rand()/1000,read_out=64*np.random.rand()/100)
noise.add_node(38,weight=64*np.random.rand()/1000,read_out=64*np.random.rand()/100)
noise.add_node(39,weight=64*np.random.rand()/1000,read_out=64*np.random.rand()/100)
noise.add_node(40,weight=64*np.random.rand()/1000,read_out=64*np.random.rand()/100)
noise.add_node(41,weight=64*np.random.rand()/1000,read_out=64*np.random.rand()/100)
noise.add_node(42,weight=64*np.random.rand()/1000,read_out=64*np.random.rand()/100)
noise.add_node(43,weight=64*np.random.rand()/1000,read_out=64*np.random.rand()/100)
noise.add_node(44,weight=64*np.random.rand()/1000,read_out=64*np.random.rand()/100)
noise.add_node(45,weight=64*np.random.rand()/1000,read_out=64*np.random.rand()/100)
noise.add_node(46,weight=64*np.random.rand()/1000,read_out=64*np.random.rand()/100)
noise.add_node(47,weight=64*np.random.rand()/1000,read_out=64*np.random.rand()/100)
noise.add_node(48,weight=64*np.random.rand()/1000,read_out=64*np.random.rand()/100)
noise.add_node(49,weight=64*np.random.rand()/1000,read_out=64*np.random.rand()/100)
noise.add_node(50,weight=64*np.random.rand()/1000,read_out=64*np.random.rand()/100)
noise.add_node(51,weight=64*np.random.rand()/1000,read_out=64*np.random.rand()/100)
noise.add_node(52,weight=64*np.random.rand()/1000,read_out=64*np.random.rand()/100)
noise.add_node(53,weight=64*np.random.rand()/1000,read_out=64*np.random.rand()/100)
noise.add_node(54,weight=64*np.random.rand()/1000,read_out=64*np.random.rand()/100)
noise.add_node(55,weight=64*np.random.rand()/1000,read_out=64*np.random.rand()/100)
noise.add_node(56,weight=64*np.random.rand()/1000,read_out=64*np.random.rand()/100)
noise.add_node(57,weight=64*np.random.rand()/1000,read_out=64*np.random.rand()/100)
noise.add_node(58,weight=64*np.random.rand()/1000,read_out=64*np.random.rand()/100)
noise.add_node(59,weight=64*np.random.rand()/1000,read_out=64*np.random.rand()/100)
noise.add_node(60,weight=64*np.random.rand()/1000,read_out=64*np.random.rand()/100)
noise.add_node(61,weight=64*np.random.rand()/1000,read_out=64*np.random.rand()/100)
noise.add_node(62,weight=64*np.random.rand()/1000,read_out=64*np.random.rand()/100)
noise.add_node(63,weight=64*np.random.rand()/1000,read_out=64*np.random.rand()/100)
noise.add_node(64,weight=64*np.random.rand()/1000,read_out=64*np.random.rand()/100)
noise.add_node(65,weight=64*np.random.rand()/1000,read_out=64*np.random.rand()/100)
noise.add_node(66,weight=64*np.random.rand()/1000,read_out=64*np.random.rand()/100)
noise.add_node(67,weight=64*np.random.rand()/1000,read_out=64*np.random.rand()/100)
noise.add_node(68,weight=64*np.random.rand()/1000,read_out=64*np.random.rand()/100)
noise.add_node(69,weight=64*np.random.rand()/1000,read_out=64*np.random.rand()/100)
noise.add_node(70,weight=64*np.random.rand()/1000,read_out=64*np.random.rand()/100)
noise.add_node(71,weight=64*np.random.rand()/1000,read_out=64*np.random.rand()/100)
noise.add_node(72,weight=64*np.random.rand()/1000,read_out=64*np.random.rand()/100)
noise.add_node(73,weight=64*np.random.rand()/1000,read_out=64*np.random.rand()/100)
noise.add_node(74,weight=64*np.random.rand()/1000,read_out=64*np.random.rand()/100)
noise.add_node(75,weight=64*np.random.rand()/1000,read_out=64*np.random.rand()/100)
noise.add_node(76,weight=64*np.random.rand()/1000,read_out=64*np.random.rand()/100)
noise.add_node(77,weight=64*np.random.rand()/1000,read_out=64*np.random.rand()/100)
noise.add_node(78,weight=64*np.random.rand()/1000,read_out=64*np.random.rand()/100)
noise.add_node(79,weight=64*np.random.rand()/1000,read_out=64*np.random.rand()/100)
noise.add_node(80,weight=64*np.random.rand()/1000,read_out=64*np.random.rand()/100)
noise.add_edge(0,1,weight=64*np.random.rand()/100)
noise.add_edge(0,13,weight=64*np.random.rand()/100)
noise.add_edge(2,1,weight=64*np.random.rand()/100)
noise.add_edge(12,1,weight=64*np.random.rand()/100)
noise.add_edge(2,3,weight=64*np.random.rand()/100)
noise.add_edge(2,11,weight=64*np.random.rand()/100)
noise.add_edge(3,10,weight=64*np.random.rand()/100)
noise.add_edge(3,4,weight=64*np.random.rand()/100)
noise.add_edge(4,9,weight=64*np.random.rand()/100)
noise.add_edge(5,4,weight=64*np.random.rand()/100)
noise.add_edge(5,8,weight=64*np.random.rand()/100)
noise.add_edge(6,5,weight=64*np.random.rand()/100)
noise.add_edge(6,7,weight=64*np.random.rand()/100)
noise.add_edge(7,20,weight=64*np.random.rand()/100)
noise.add_edge(7,8,weight=64*np.random.rand()/100)
noise.add_edge(8,19,weight=64*np.random.rand()/100)
noise.add_edge(8,9,weight=64*np.random.rand()/100)
noise.add_edge(9,18,weight=64*np.random.rand()/100)
noise.add_edge(9,10,weight=64*np.random.rand()/100)
noise.add_edge(10,11,weight=64*np.random.rand()/100)
noise.add_edge(10,17,weight=64*np.random.rand()/100)
noise.add_edge(16,11,weight=64*np.random.rand()/100)
noise.add_edge(12,11,weight=64*np.random.rand()/100)
noise.add_edge(12,15,weight=64*np.random.rand()/100)
noise.add_edge(13,12,weight=64*np.random.rand()/100)
noise.add_edge(13,14,weight=64*np.random.rand()/100)
noise.add_edge(14,27,weight=64*np.random.rand()/100)
noise.add_edge(14,15,weight=64*np.random.rand()/100)
noise.add_edge(15,26,weight=64*np.random.rand()/100)
noise.add_edge(16,15,weight=64*np.random.rand()/100)
noise.add_edge(16,25,weight=64*np.random.rand()/100)
noise.add_edge(17,16,weight=64*np.random.rand()/100)
noise.add_edge(24,17,weight=64*np.random.rand()/100)
noise.add_edge(17,18,weight=64*np.random.rand()/100)
noise.add_edge(23,18,weight=64*np.random.rand()/100)
noise.add_edge(18,19,weight=64*np.random.rand()/100)
noise.add_edge(22,19,weight=64*np.random.rand()/100)
noise.add_edge(19,20,weight=64*np.random.rand()/100)
noise.add_edge(20,21,weight=64*np.random.rand()/100)
noise.add_edge(21,34,weight=64*np.random.rand()/100)
noise.add_edge(22,21,weight=64*np.random.rand()/100)
noise.add_edge(22,33,weight=64*np.random.rand()/100)
noise.add_edge(23,22,weight=64*np.random.rand()/100)
noise.add_edge(23,32,weight=64*np.random.rand()/100)
noise.add_edge(23,24,weight=64*np.random.rand()/100)
noise.add_edge(31,24,weight=64*np.random.rand()/100)
noise.add_edge(25,24,weight=64*np.random.rand()/100)
noise.add_edge(25,30,weight=64*np.random.rand()/100)
noise.add_edge(26,29,weight=64*np.random.rand()/100)
noise.add_edge(26,27,weight=64*np.random.rand()/100)
noise.add_edge(27,28,weight=64*np.random.rand()/100)
noise.add_edge(28,41,weight=64*np.random.rand()/100)
noise.add_edge(28,29,weight=64*np.random.rand()/100)
noise.add_edge(29,40,weight=64*np.random.rand()/100)
noise.add_edge(29,30,weight=64*np.random.rand()/100)
noise.add_edge(30,39,weight=64*np.random.rand()/100)
noise.add_edge(30,31,weight=64*np.random.rand()/100)
noise.add_edge(38,31,weight=64*np.random.rand()/100)
noise.add_edge(31,32,weight=64*np.random.rand()/100)
noise.add_edge(32,37,weight=64*np.random.rand()/100)
noise.add_edge(32,33,weight=64*np.random.rand()/100)
noise.add_edge(36,33,weight=64*np.random.rand()/100)
noise.add_edge(34,33,weight=64*np.random.rand()/100)
noise.add_edge(34,35,weight=64*np.random.rand()/100)
noise.add_edge(35,48,weight=64*np.random.rand()/100)
noise.add_edge(35,36,weight=64*np.random.rand()/100)
noise.add_edge(36,47,weight=64*np.random.rand()/100)
noise.add_edge(36,37,weight=64*np.random.rand()/100)
noise.add_edge(37,46,weight=64*np.random.rand()/100)
noise.add_edge(38,37,weight=64*np.random.rand()/100)
noise.add_edge(38,45,weight=64*np.random.rand()/100)
noise.add_edge(38,39,weight=64*np.random.rand()/100)
noise.add_edge(39,44,weight=64*np.random.rand()/100)
noise.add_edge(39,40,weight=64*np.random.rand()/100)
noise.add_edge(40,43,weight=64*np.random.rand()/100)
noise.add_edge(40,41,weight=64*np.random.rand()/100)
noise.add_edge(41,42,weight=64*np.random.rand()/100)
noise.add_edge(42,43,weight=64*np.random.rand()/100)
noise.add_edge(43,44,weight=64*np.random.rand()/100)
noise.add_edge(44,45,weight=64*np.random.rand()/100)
noise.add_edge(45,46,weight=64*np.random.rand()/100)
noise.add_edge(46,47,weight=64*np.random.rand()/100)
noise.add_edge(47,48,weight=64*np.random.rand()/100)
noise.add_edge(49,48,weight=64*np.random.rand()/100)
noise.add_edge(49,50,weight=64*np.random.rand()/100)
noise.add_edge(47,50,weight=64*np.random.rand()/100)
noise.add_edge(51,50,weight=64*np.random.rand()/100)
noise.add_edge(46,51,weight=64*np.random.rand()/100)
noise.add_edge(51,52,weight=64*np.random.rand()/100)
noise.add_edge(52,45,weight=64*np.random.rand()/100)
noise.add_edge(52,53,weight=64*np.random.rand()/100)
noise.add_edge(44,53,weight=64*np.random.rand()/100)
noise.add_edge(53,54,weight=64*np.random.rand()/100)
noise.add_edge(43,54,weight=64*np.random.rand()/100)
noise.add_edge(54,55,weight=64*np.random.rand()/100)
noise.add_edge(42,55,weight=64*np.random.rand()/100)
noise.add_edge(55,56,weight=64*np.random.rand()/100)
noise.add_edge(57,56,weight=64*np.random.rand()/100)
noise.add_edge(42,57,weight=64*np.random.rand()/100)
noise.add_edge(41,58,weight=64*np.random.rand()/100)
noise.add_edge(57,58,weight=64*np.random.rand()/100)
noise.add_edge(58,59,weight=64*np.random.rand()/100)
noise.add_edge(28,59,weight=64*np.random.rand()/100)
noise.add_edge(60,59,weight=64*np.random.rand()/100)
noise.add_edge(60,27,weight=64*np.random.rand()/100)
noise.add_edge(61,60,weight=64*np.random.rand()/100)
noise.add_edge(61,14,weight=64*np.random.rand()/100)
noise.add_edge(62,61,weight=64*np.random.rand()/100)
noise.add_edge(62,13,weight=64*np.random.rand()/100)
noise.add_edge(62,63,weight=64*np.random.rand()/100)
noise.add_edge(0,63,weight=64*np.random.rand()/100)
noise.add_edge(64,63,weight=64*np.random.rand()/100)
noise.add_edge(65,64,weight=64*np.random.rand()/100)
noise.add_edge(62,65,weight=64*np.random.rand()/100)
noise.add_edge(65,66,weight=64*np.random.rand()/100)
noise.add_edge(61,66,weight=64*np.random.rand()/100)
noise.add_edge(66,67,weight=64*np.random.rand()/100)
noise.add_edge(60,67,weight=64*np.random.rand()/100)
noise.add_edge(0,63,weight=64*np.random.rand()/100)
noise.add_edge(67,68,weight=64*np.random.rand()/100)
noise.add_edge(59,68,weight=64*np.random.rand()/100)
noise.add_edge(68,69,weight=64*np.random.rand()/100)
noise.add_edge(58,69,weight=64*np.random.rand()/100)
noise.add_edge(69,70,weight=64*np.random.rand()/100)
noise.add_edge(57,70,weight=64*np.random.rand()/100)
noise.add_edge(71,70,weight=64*np.random.rand()/100)
noise.add_edge(71,72,weight=64*np.random.rand()/100)
noise.add_edge(71,56,weight=64*np.random.rand()/100)
noise.add_edge(72,73,weight=64*np.random.rand()/100)
noise.add_edge(56,73,weight=64*np.random.rand()/100)
noise.add_edge(73,74,weight=64*np.random.rand()/100)
noise.add_edge(55,74,weight=64*np.random.rand()/100)
noise.add_edge(75,74,weight=64*np.random.rand()/100)
noise.add_edge(75,54,weight=64*np.random.rand()/100)
noise.add_edge(75,76,weight=64*np.random.rand()/100)
noise.add_edge(76,53,weight=64*np.random.rand()/100)
noise.add_edge(76,77,weight=64*np.random.rand()/100)
noise.add_edge(77,52,weight=64*np.random.rand()/100)
noise.add_edge(78,77,weight=64*np.random.rand()/100)
noise.add_edge(51,78,weight=64*np.random.rand()/100)
noise.add_edge(79,78,weight=64*np.random.rand()/100)
noise.add_edge(79,50,weight=64*np.random.rand()/100)
noise.add_edge(79,80,weight=64*np.random.rand()/100)
noise.add_edge(49,80,weight=64*np.random.rand()/100)
elif qpu_size == 10:
    # Largest device size in this chain: per-qubit noise terms with factor
    # 128 — gate-error `weight` in [0, 0.128), readout error `read_out` in
    # [0, 1.28).  NOTE(review): this branch is truncated in this chunk —
    # nodes 0..91 are visible here and the branch presumably continues
    # with more add_node/add_edge calls below; code left byte-identical.
    noise.add_node(0,weight=128*np.random.rand()/1000,read_out=128*np.random.rand()/100)
    noise.add_node(1,weight=128*np.random.rand()/1000,read_out=128*np.random.rand()/100)
    noise.add_node(2,weight=128*np.random.rand()/1000,read_out=128*np.random.rand()/100)
    noise.add_node(3,weight=128*np.random.rand()/1000,read_out=128*np.random.rand()/100)
    noise.add_node(4,weight=128*np.random.rand()/1000,read_out=128*np.random.rand()/100)
    noise.add_node(5,weight=128*np.random.rand()/1000,read_out=128*np.random.rand()/100)
    noise.add_node(6,weight=128*np.random.rand()/1000,read_out=128*np.random.rand()/100)
    noise.add_node(7,weight=128*np.random.rand()/1000,read_out=128*np.random.rand()/100)
    noise.add_node(8,weight=128*np.random.rand()/1000,read_out=128*np.random.rand()/100)
    noise.add_node(9,weight=128*np.random.rand()/1000,read_out=128*np.random.rand()/100)
    noise.add_node(10,weight=128*np.random.rand()/1000,read_out=128*np.random.rand()/100)
    noise.add_node(11,weight=128*np.random.rand()/1000,read_out=128*np.random.rand()/100)
    noise.add_node(12,weight=128*np.random.rand()/1000,read_out=128*np.random.rand()/100)
    noise.add_node(13,weight=128*np.random.rand()/1000,read_out=128*np.random.rand()/100)
    noise.add_node(14,weight=128*np.random.rand()/1000,read_out=128*np.random.rand()/100)
    noise.add_node(15,weight=128*np.random.rand()/1000,read_out=128*np.random.rand()/100)
    noise.add_node(16,weight=128*np.random.rand()/1000,read_out=128*np.random.rand()/100)
    noise.add_node(17,weight=128*np.random.rand()/1000,read_out=128*np.random.rand()/100)
    noise.add_node(18,weight=128*np.random.rand()/1000,read_out=128*np.random.rand()/100)
    noise.add_node(19,weight=128*np.random.rand()/1000,read_out=128*np.random.rand()/100)
    noise.add_node(20,weight=128*np.random.rand()/1000,read_out=128*np.random.rand()/100)
    noise.add_node(21,weight=128*np.random.rand()/1000,read_out=128*np.random.rand()/100)
    noise.add_node(22,weight=128*np.random.rand()/1000,read_out=128*np.random.rand()/100)
    noise.add_node(23,weight=128*np.random.rand()/1000,read_out=128*np.random.rand()/100)
    noise.add_node(24,weight=128*np.random.rand()/1000,read_out=128*np.random.rand()/100)
    noise.add_node(25,weight=128*np.random.rand()/1000,read_out=128*np.random.rand()/100)
    noise.add_node(26,weight=128*np.random.rand()/1000,read_out=128*np.random.rand()/100)
    noise.add_node(27,weight=128*np.random.rand()/1000,read_out=128*np.random.rand()/100)
    noise.add_node(28,weight=128*np.random.rand()/1000,read_out=128*np.random.rand()/100)
    noise.add_node(29,weight=128*np.random.rand()/1000,read_out=128*np.random.rand()/100)
    noise.add_node(30,weight=128*np.random.rand()/1000,read_out=128*np.random.rand()/100)
    noise.add_node(31,weight=128*np.random.rand()/1000,read_out=128*np.random.rand()/100)
    noise.add_node(32,weight=128*np.random.rand()/1000,read_out=128*np.random.rand()/100)
    noise.add_node(33,weight=128*np.random.rand()/1000,read_out=128*np.random.rand()/100)
    noise.add_node(34,weight=128*np.random.rand()/1000,read_out=128*np.random.rand()/100)
    noise.add_node(35,weight=128*np.random.rand()/1000,read_out=128*np.random.rand()/100)
    noise.add_node(36,weight=128*np.random.rand()/1000,read_out=128*np.random.rand()/100)
    noise.add_node(37,weight=128*np.random.rand()/1000,read_out=128*np.random.rand()/100)
    noise.add_node(38,weight=128*np.random.rand()/1000,read_out=128*np.random.rand()/100)
    noise.add_node(39,weight=128*np.random.rand()/1000,read_out=128*np.random.rand()/100)
    noise.add_node(40,weight=128*np.random.rand()/1000,read_out=128*np.random.rand()/100)
    noise.add_node(41,weight=128*np.random.rand()/1000,read_out=128*np.random.rand()/100)
    noise.add_node(42,weight=128*np.random.rand()/1000,read_out=128*np.random.rand()/100)
    noise.add_node(43,weight=128*np.random.rand()/1000,read_out=128*np.random.rand()/100)
    noise.add_node(44,weight=128*np.random.rand()/1000,read_out=128*np.random.rand()/100)
    noise.add_node(45,weight=128*np.random.rand()/1000,read_out=128*np.random.rand()/100)
    noise.add_node(46,weight=128*np.random.rand()/1000,read_out=128*np.random.rand()/100)
    noise.add_node(47,weight=128*np.random.rand()/1000,read_out=128*np.random.rand()/100)
    noise.add_node(48,weight=128*np.random.rand()/1000,read_out=128*np.random.rand()/100)
    noise.add_node(49,weight=128*np.random.rand()/1000,read_out=128*np.random.rand()/100)
    noise.add_node(50,weight=128*np.random.rand()/1000,read_out=128*np.random.rand()/100)
    noise.add_node(51,weight=128*np.random.rand()/1000,read_out=128*np.random.rand()/100)
    noise.add_node(52,weight=128*np.random.rand()/1000,read_out=128*np.random.rand()/100)
    noise.add_node(53,weight=128*np.random.rand()/1000,read_out=128*np.random.rand()/100)
    noise.add_node(54,weight=128*np.random.rand()/1000,read_out=128*np.random.rand()/100)
    noise.add_node(55,weight=128*np.random.rand()/1000,read_out=128*np.random.rand()/100)
    noise.add_node(56,weight=128*np.random.rand()/1000,read_out=128*np.random.rand()/100)
    noise.add_node(57,weight=128*np.random.rand()/1000,read_out=128*np.random.rand()/100)
    noise.add_node(58,weight=128*np.random.rand()/1000,read_out=128*np.random.rand()/100)
    noise.add_node(59,weight=128*np.random.rand()/1000,read_out=128*np.random.rand()/100)
    noise.add_node(60,weight=128*np.random.rand()/1000,read_out=128*np.random.rand()/100)
    noise.add_node(61,weight=128*np.random.rand()/1000,read_out=128*np.random.rand()/100)
    noise.add_node(62,weight=128*np.random.rand()/1000,read_out=128*np.random.rand()/100)
    noise.add_node(63,weight=128*np.random.rand()/1000,read_out=128*np.random.rand()/100)
    noise.add_node(64,weight=128*np.random.rand()/1000,read_out=128*np.random.rand()/100)
    noise.add_node(65,weight=128*np.random.rand()/1000,read_out=128*np.random.rand()/100)
    noise.add_node(66,weight=128*np.random.rand()/1000,read_out=128*np.random.rand()/100)
    noise.add_node(67,weight=128*np.random.rand()/1000,read_out=128*np.random.rand()/100)
    noise.add_node(68,weight=128*np.random.rand()/1000,read_out=128*np.random.rand()/100)
    noise.add_node(69,weight=128*np.random.rand()/1000,read_out=128*np.random.rand()/100)
    noise.add_node(70,weight=128*np.random.rand()/1000,read_out=128*np.random.rand()/100)
    noise.add_node(71,weight=128*np.random.rand()/1000,read_out=128*np.random.rand()/100)
    noise.add_node(72,weight=128*np.random.rand()/1000,read_out=128*np.random.rand()/100)
    noise.add_node(73,weight=128*np.random.rand()/1000,read_out=128*np.random.rand()/100)
    noise.add_node(74,weight=128*np.random.rand()/1000,read_out=128*np.random.rand()/100)
    noise.add_node(75,weight=128*np.random.rand()/1000,read_out=128*np.random.rand()/100)
    noise.add_node(76,weight=128*np.random.rand()/1000,read_out=128*np.random.rand()/100)
    noise.add_node(77,weight=128*np.random.rand()/1000,read_out=128*np.random.rand()/100)
    noise.add_node(78,weight=128*np.random.rand()/1000,read_out=128*np.random.rand()/100)
    noise.add_node(79,weight=128*np.random.rand()/1000,read_out=128*np.random.rand()/100)
    noise.add_node(80,weight=128*np.random.rand()/1000,read_out=128*np.random.rand()/100)
    noise.add_node(81,weight=128*np.random.rand()/1000,read_out=128*np.random.rand()/100)
    noise.add_node(82,weight=128*np.random.rand()/1000,read_out=128*np.random.rand()/100)
    noise.add_node(83,weight=128*np.random.rand()/1000,read_out=128*np.random.rand()/100)
    noise.add_node(84,weight=128*np.random.rand()/1000,read_out=128*np.random.rand()/100)
    noise.add_node(85,weight=128*np.random.rand()/1000,read_out=128*np.random.rand()/100)
    noise.add_node(86,weight=128*np.random.rand()/1000,read_out=128*np.random.rand()/100)
    noise.add_node(87,weight=128*np.random.rand()/1000,read_out=128*np.random.rand()/100)
    noise.add_node(88,weight=128*np.random.rand()/1000,read_out=128*np.random.rand()/100)
    noise.add_node(89,weight=128*np.random.rand()/1000,read_out=128*np.random.rand()/100)
    noise.add_node(90,weight=128*np.random.rand()/1000,read_out=128*np.random.rand()/100)
    noise.add_node(91,weight=128*np.random.rand()/1000,read_out=128*np.random.rand()/100)
noise.add_node(92,weight=128*np.random.rand()/1000,read_out=128*np.random.rand()/100)
noise.add_node(93,weight=128*np.random.rand()/1000,read_out=128*np.random.rand()/100)
noise.add_node(94,weight=128*np.random.rand()/1000,read_out=128*np.random.rand()/100)
noise.add_node(95,weight=128*np.random.rand()/1000,read_out=128*np.random.rand()/100)
noise.add_node(96,weight=128*np.random.rand()/1000,read_out=128*np.random.rand()/100)
noise.add_node(97,weight=128*np.random.rand()/1000,read_out=128*np.random.rand()/100)
noise.add_node(98,weight=128*np.random.rand()/1000,read_out=128*np.random.rand()/100)
noise.add_node(99,weight=128*np.random.rand()/1000,read_out=128*np.random.rand()/100)
noise.add_edge(0,1,weight=128*np.random.rand()/100)
noise.add_edge(0,13,weight=128*np.random.rand()/100)
noise.add_edge(2,1,weight=128*np.random.rand()/100)
noise.add_edge(12,1,weight=128*np.random.rand()/100)
noise.add_edge(2,3,weight=128*np.random.rand()/100)
noise.add_edge(2,11,weight=128*np.random.rand()/100)
noise.add_edge(3,10,weight=128*np.random.rand()/100)
noise.add_edge(3,4,weight=128*np.random.rand()/100)
noise.add_edge(4,9,weight=128*np.random.rand()/100)
noise.add_edge(5,4,weight=128*np.random.rand()/100)
noise.add_edge(5,8,weight=128*np.random.rand()/100)
noise.add_edge(6,5,weight=128*np.random.rand()/100)
noise.add_edge(6,7,weight=128*np.random.rand()/100)
noise.add_edge(7,20,weight=128*np.random.rand()/100)
noise.add_edge(7,8,weight=128*np.random.rand()/100)
noise.add_edge(8,19,weight=128*np.random.rand()/100)
noise.add_edge(8,9,weight=128*np.random.rand()/100)
noise.add_edge(9,18,weight=128*np.random.rand()/100)
noise.add_edge(9,10,weight=128*np.random.rand()/100)
noise.add_edge(10,11,weight=128*np.random.rand()/100)
noise.add_edge(10,17,weight=128*np.random.rand()/100)
noise.add_edge(16,11,weight=128*np.random.rand()/100)
noise.add_edge(12,11,weight=128*np.random.rand()/100)
noise.add_edge(12,15,weight=128*np.random.rand()/100)
noise.add_edge(13,12,weight=128*np.random.rand()/100)
noise.add_edge(13,14,weight=128*np.random.rand()/100)
noise.add_edge(14,27,weight=128*np.random.rand()/100)
noise.add_edge(14,15,weight=128*np.random.rand()/100)
noise.add_edge(15,26,weight=128*np.random.rand()/100)
noise.add_edge(16,15,weight=128*np.random.rand()/100)
noise.add_edge(16,25,weight=128*np.random.rand()/100)
noise.add_edge(17,16,weight=128*np.random.rand()/100)
noise.add_edge(24,17,weight=128*np.random.rand()/100)
noise.add_edge(17,18,weight=128*np.random.rand()/100)
noise.add_edge(23,18,weight=128*np.random.rand()/100)
noise.add_edge(18,19,weight=128*np.random.rand()/100)
noise.add_edge(22,19,weight=128*np.random.rand()/100)
noise.add_edge(19,20,weight=128*np.random.rand()/100)
noise.add_edge(20,21,weight=128*np.random.rand()/100)
noise.add_edge(21,34,weight=128*np.random.rand()/100)
noise.add_edge(22,21,weight=128*np.random.rand()/100)
noise.add_edge(22,33,weight=128*np.random.rand()/100)
noise.add_edge(23,22,weight=128*np.random.rand()/100)
noise.add_edge(23,32,weight=128*np.random.rand()/100)
noise.add_edge(23,24,weight=128*np.random.rand()/100)
noise.add_edge(31,24,weight=128*np.random.rand()/100)
noise.add_edge(25,24,weight=128*np.random.rand()/100)
noise.add_edge(25,30,weight=128*np.random.rand()/100)
noise.add_edge(26,29,weight=128*np.random.rand()/100)
noise.add_edge(26,27,weight=128*np.random.rand()/100)
noise.add_edge(27,28,weight=128*np.random.rand()/100)
noise.add_edge(28,41,weight=128*np.random.rand()/100)
noise.add_edge(28,29,weight=128*np.random.rand()/100)
noise.add_edge(29,40,weight=128*np.random.rand()/100)
noise.add_edge(29,30,weight=128*np.random.rand()/100)
noise.add_edge(30,39,weight=128*np.random.rand()/100)
noise.add_edge(30,31,weight=128*np.random.rand()/100)
noise.add_edge(38,31,weight=128*np.random.rand()/100)
noise.add_edge(31,32,weight=128*np.random.rand()/100)
noise.add_edge(32,37,weight=128*np.random.rand()/100)
noise.add_edge(32,33,weight=128*np.random.rand()/100)
noise.add_edge(36,33,weight=128*np.random.rand()/100)
noise.add_edge(34,33,weight=128*np.random.rand()/100)
noise.add_edge(34,35,weight=128*np.random.rand()/100)
noise.add_edge(35,48,weight=128*np.random.rand()/100)
noise.add_edge(35,36,weight=128*np.random.rand()/100)
noise.add_edge(36,47,weight=128*np.random.rand()/100)
noise.add_edge(36,37,weight=128*np.random.rand()/100)
noise.add_edge(37,46,weight=128*np.random.rand()/100)
noise.add_edge(38,37,weight=128*np.random.rand()/100)
noise.add_edge(38,45,weight=128*np.random.rand()/100)
noise.add_edge(38,39,weight=128*np.random.rand()/100)
noise.add_edge(39,44,weight=128*np.random.rand()/100)
noise.add_edge(39,40,weight=128*np.random.rand()/100)
noise.add_edge(40,43,weight=128*np.random.rand()/100)
noise.add_edge(40,41,weight=128*np.random.rand()/100)
noise.add_edge(41,42,weight=128*np.random.rand()/100)
noise.add_edge(42,43,weight=128*np.random.rand()/100)
noise.add_edge(43,44,weight=128*np.random.rand()/100)
noise.add_edge(44,45,weight=128*np.random.rand()/100)
noise.add_edge(45,46,weight=128*np.random.rand()/100)
noise.add_edge(46,47,weight=128*np.random.rand()/100)
noise.add_edge(47,48,weight=128*np.random.rand()/100)
noise.add_edge(49,48,weight=128*np.random.rand()/100)
noise.add_edge(49,50,weight=128*np.random.rand()/100)
noise.add_edge(47,50,weight=128*np.random.rand()/100)
noise.add_edge(51,50,weight=128*np.random.rand()/100)
noise.add_edge(46,51,weight=128*np.random.rand()/100)
noise.add_edge(51,52,weight=128*np.random.rand()/100)
noise.add_edge(52,45,weight=128*np.random.rand()/100)
noise.add_edge(52,53,weight=128*np.random.rand()/100)
noise.add_edge(44,53,weight=128*np.random.rand()/100)
noise.add_edge(53,54,weight=128*np.random.rand()/100)
noise.add_edge(43,54,weight=128*np.random.rand()/100)
noise.add_edge(54,55,weight=128*np.random.rand()/100)
noise.add_edge(42,55,weight=128*np.random.rand()/100)
noise.add_edge(55,56,weight=128*np.random.rand()/100)
noise.add_edge(57,56,weight=128*np.random.rand()/100)
noise.add_edge(42,57,weight=128*np.random.rand()/100)
noise.add_edge(41,58,weight=128*np.random.rand()/100)
noise.add_edge(57,58,weight=128*np.random.rand()/100)
noise.add_edge(58,59,weight=128*np.random.rand()/100)
noise.add_edge(28,59,weight=128*np.random.rand()/100)
noise.add_edge(60,59,weight=128*np.random.rand()/100)
noise.add_edge(60,27,weight=128*np.random.rand()/100)
noise.add_edge(61,60,weight=128*np.random.rand()/100)
noise.add_edge(61,14,weight=128*np.random.rand()/100)
noise.add_edge(62,61,weight=128*np.random.rand()/100)
noise.add_edge(62,13,weight=128*np.random.rand()/100)
noise.add_edge(62,63,weight=128*np.random.rand()/100)
noise.add_edge(0,63,weight=128*np.random.rand()/100)
noise.add_edge(64,63,weight=128*np.random.rand()/100)
noise.add_edge(65,64,weight=128*np.random.rand()/100)
noise.add_edge(62,65,weight=128*np.random.rand()/100)
noise.add_edge(65,66,weight=128*np.random.rand()/100)
noise.add_edge(61,66,weight=128*np.random.rand()/100)
noise.add_edge(66,67,weight=128*np.random.rand()/100)
noise.add_edge(60,67,weight=128*np.random.rand()/100)
noise.add_edge(0,63,weight=128*np.random.rand()/100)
noise.add_edge(67,68,weight=128*np.random.rand()/100)
noise.add_edge(59,68,weight=128*np.random.rand()/100)
noise.add_edge(68,69,weight=128*np.random.rand()/100)
noise.add_edge(58,69,weight=128*np.random.rand()/100)
noise.add_edge(69,70,weight=128*np.random.rand()/100)
noise.add_edge(57,70,weight=128*np.random.rand()/100)
noise.add_edge(71,70,weight=128*np.random.rand()/100)
noise.add_edge(71,72,weight=128*np.random.rand()/100)
noise.add_edge(71,56,weight=128*np.random.rand()/100)
noise.add_edge(72,73,weight=128*np.random.rand()/100)
noise.add_edge(56,73,weight=128*np.random.rand()/100)
noise.add_edge(73,74,weight=128*np.random.rand()/100)
noise.add_edge(55,74,weight=128*np.random.rand()/100)
noise.add_edge(75,74,weight=128*np.random.rand()/100)
noise.add_edge(75,54,weight=128*np.random.rand()/100)
noise.add_edge(75,76,weight=128*np.random.rand()/100)
noise.add_edge(76,53,weight=128*np.random.rand()/100)
noise.add_edge(76,77,weight=128*np.random.rand()/100)
noise.add_edge(77,52,weight=128*np.random.rand()/100)
noise.add_edge(78,77,weight=128*np.random.rand()/100)
noise.add_edge(51,78,weight=128*np.random.rand()/100)
noise.add_edge(79,78,weight=128*np.random.rand()/100)
noise.add_edge(79,50,weight=128*np.random.rand()/100)
noise.add_edge(79,80,weight=128*np.random.rand()/100)
noise.add_edge(49,80,weight=128*np.random.rand()/100)
noise.add_edge(81,80,weight=128*np.random.rand()/100)
noise.add_edge(81,82,weight=128*np.random.rand()/100)
noise.add_edge(79,82,weight=128*np.random.rand()/100)
noise.add_edge(83,82,weight=128*np.random.rand()/100)
noise.add_edge(78,83,weight=128*np.random.rand()/100)
noise.add_edge(84,83,weight=128*np.random.rand()/100)
noise.add_edge(77,84,weight=128*np.random.rand()/100)
noise.add_edge(85,84,weight=128*np.random.rand()/100)
noise.add_edge(76,85,weight=128*np.random.rand()/100)
noise.add_edge(85,86,weight=128*np.random.rand()/100)
noise.add_edge(75,86,weight=128*np.random.rand()/100)
noise.add_edge(86,87,weight=128*np.random.rand()/100)
noise.add_edge(74,87,weight=128*np.random.rand()/100)
noise.add_edge(88,87,weight=128*np.random.rand()/100)
noise.add_edge(73,88,weight=128*np.random.rand()/100)
noise.add_edge(88,89,weight=128*np.random.rand()/100)
noise.add_edge(89,72,weight=128*np.random.rand()/100)
noise.add_edge(90,89,weight=128*np.random.rand()/100)
noise.add_edge(91,90,weight=128*np.random.rand()/100)
noise.add_edge(72,91,weight=128*np.random.rand()/100)
noise.add_edge(92,91,weight=128*np.random.rand()/100)
noise.add_edge(92,71,weight=128*np.random.rand()/100)
noise.add_edge(93,92,weight=128*np.random.rand()/100)
noise.add_edge(93,70,weight=128*np.random.rand()/100)
noise.add_edge(94,93,weight=128*np.random.rand()/100)
noise.add_edge(69,94,weight=128*np.random.rand()/100)
noise.add_edge(94,95,weight=128*np.random.rand()/100)
noise.add_edge(95,68,weight=128*np.random.rand()/100)
noise.add_edge(96,95,weight=128*np.random.rand()/100)
noise.add_edge(67,96,weight=128*np.random.rand()/100)
noise.add_edge(97,96,weight=128*np.random.rand()/100)
noise.add_edge(66,97,weight=128*np.random.rand()/100)
noise.add_edge(97,98,weight=128*np.random.rand()/100)
noise.add_edge(99,98,weight=128*np.random.rand()/100)
noise.add_edge(98,65,weight=128*np.random.rand()/100)
noise.add_edge(64,99,weight=128*np.random.rand()/100)
return noise
###################################################################################################
#create traffic graph of our algorithm to be mapped
def traffic_graph(graph_counter):
#martonosi
# filename = ["hs2.qasm","hs4.qasm","hs6.qasm","bv4.qasm","bv6.qasm","bv8.qasm","toffoli.qasm","or.qasm","fredkin.qasm","peres.qasm","qft.qasm","adder.qasm"]
#3x3 - 7
# filename = ["linear3.qasm","linear4.qasm","linear5.qasm","linear6.qasm","linear7.qasm","linear8.qasm","linear9.qasm"]
#4x4 -10
# filename = ["linear3.qasm","linear4.qasm","linear5.qasm","linear6.qasm","linear7.qasm","linear8.qasm","linear9.qasm","linear10.qasm","linear15.qasm","linear16.qasm"]
#5x5 - 12
# filename = ["linear3.qasm","linear4.qasm","linear5.qasm","linear6.qasm","linear7.qasm","linear8.qasm","linear9.qasm","linear10.qasm","linear15.qasm","linear16.qasm","linear20.qasm","linear25.qasm"]
#6x6 - 15
# filename = ["linear3.qasm","linear4.qasm","linear5.qasm","linear6.qasm","linear7.qasm","linear8.qasm","linear9.qasm","linear10.qasm","linear15.qasm","linear16.qasm","linear20.qasm","linear25.qasm","linear30.qasm","linear35.qasm","linear36.qasm"]
#7x7 - 18
# filename = ["linear3.qasm","linear4.qasm","linear5.qasm","linear6.qasm","linear7.qasm","linear8.qasm","linear9.qasm","linear10.qasm","linear15.qasm","linear16.qasm","linear20.qasm","linear25.qasm","linear30.qasm","linear35.qasm","linear36.qasm","linear40.qasm","linear45.qasm","linear49.qasm"]
#8x8 - 22
# filename = ["linear3.qasm","linear4.qasm","linear5.qasm","linear6.qasm","linear7.qasm","linear8.qasm","linear9.qasm","linear10.qasm","linear15.qasm","linear16.qasm","linear20.qasm","linear25.qasm","linear30.qasm","linear35.qasm","linear36.qasm","linear40.qasm","linear45.qasm","linear49.qasm","linear50.qasm","linear55.qasm","linear60.qasm","linear64.qasm"]
#9x9 - 27
# filename = ["linear3.qasm","linear4.qasm","linear5.qasm","linear6.qasm","linear7.qasm","linear8.qasm","linear9.qasm","linear10.qasm","linear15.qasm","linear16.qasm","linear20.qasm","linear25.qasm","linear30.qasm","linear35.qasm","linear36.qasm","linear40.qasm","linear45.qasm","linear49.qasm","linear50.qasm","linear55.qasm","linear60.qasm","linear64.qasm","linear65.qasm","linear70.qasm","linear75.qasm","linear80.qasm","linear81.qasm"]
#10x10 - 31
# filename = ["linear3.qasm","linear4.qasm","linear5.qasm","linear6.qasm","linear7.qasm","linear8.qasm","linear9.qasm","linear10.qasm","linear15.qasm","linear16.qasm","linear20.qasm","linear25.qasm","linear30.qasm","linear35.qasm","linear36.qasm","linear40.qasm","linear45.qasm","linear49.qasm","linear50.qasm","linear55.qasm","linear60.qasm","linear64.qasm","linear65.qasm","linear70.qasm","linear75.qasm","linear80.qasm","linear81.qasm","linear85.qasm","linear90.qasm","linear95.qasm","linear100.qasm"]
#sequence 1 - bfs,heuristic, and trivial - 36
# filename = ["4QA.qasm","4QB.qasm","4QC.qasm","4QD.qasm","6QA.qasm","6QB.qasm","6QC.qasm","6QD.qasm","6QE.qasm","6QF.qasm","6QG.qasm","6QH.qasm","6QI.qasm","6QJ.qasm","6QK.qasm","8QA.qasm","8QB.qasm","8QC.qasm","8QD.qasm","8QE.qasm","8QF.qasm","8QG.qasm","8QH.qasm","8QI.qasm","8QJ.qasm","8QK.qasm","8QL.qasm","8QM.qasm","8QN.qasm","8QO.qasm","8QP.qasm","8QR.qasm","8QS.qasm","8QT.qasm","8QU.qasm","8QV.qasm"]
#sequence 1 - bfs,heuristic, and trivial - depth 2x - 36
filename = ["4QAD.qasm","4QBD.qasm","4QCD.qasm","4QDD.qasm","6QAD.qasm","6QBD.qasm","6QCD.qasm","6QDD.qasm","6QED.qasm","6QFD.qasm","6QGD.qasm","6QHD.qasm","6QID.qasm","6QJD.qasm","6QKD.qasm","8QAD.qasm","8QBD.qasm","8QCD.qasm","8QDD.qasm","8QED.qasm","8QFD.qasm","8QGD.qasm","8QHD.qasm","8QID.qasm","8QJD.qasm","8QKD.qasm","8QLD.qasm","8QMD.qasm","8QND.qasm","8QOD.qasm","8QPD.qasm","8QRD.qasm","8QSD.qasm","8QTD.qasm","8QUD.qasm","8QVD.qasm"]
#sequence 2 - bfs,heuristic, and trivial - 30
# filename = ["6QA.qasm","6QB.qasm","6QC2.qasm","6QD2.qasm","6QE2.qasm","6QF2.qasm","6QG2.qasm","6QH2.qasm","6QI2.qasm","6QJ2.qasm","6QK2.qasm","8QC2.qasm","8QD2.qasm","8QE2.qasm","8QF2.qasm","8QG2.qasm","8QH2.qasm","8QI2.qasm","8QJ2.qasm","8QK2.qasm","8QL2.qasm","8QM2.qasm","8QN2.qasm","8QO2.qasm","8QP2.qasm","8QR2.qasm","8QS2.qasm","8QT2.qasm","8QU2.qasm","8QV2.qasm"]
#sequence 2 - bfs,heuristic, and trivial - depth 2x - 28
# filename = ["6QC2D.qasm","6QD2D.qasm","6QE2D.qasm","6QF2D.qasm","6QG2D.qasm","6QH2D.qasm","6QI2D.qasm","6QJ2D.qasm","6QK2D.qasm","8QC2D.qasm","8QD2D.qasm","8QE2D.qasm","8QF2D.qasm","8QG2D.qasm","8QH2D.qasm","8QI2D.qasm","8QJ2D.qasm","8QK2D.qasm","8QL2D.qasm","8QM2D.qasm","8QN2D.qasm","8QO2D.qasm","8QP2D.qasm","8QR2D.qasm","8QS2D.qasm","8QT2D.qasm","8QU2D.qasm","8QV2D.qasm"]
traffic = parser(filename[graph_counter])
return traffic
###################################################################################################
#heuristic algorithm
#heuristic algorithm
class heuristic:
    """Greedy heuristic mapper: places virtual (algorithm) qubits onto
    physical qubits of the noise graph, starting from the best-connected,
    lowest-error physical edge and growing the mapping outward one
    neighbor at a time.

    Attributes set: ``edges``/``inv_edges`` (edge -> error weight and its
    inverse), ``nodes``/``inv_nodes`` (node -> error weight and its
    inverse), ``traf_qubits_and_coefs`` (virtual qubit -> normalized
    traffic coefficient) and ``final_mapping`` (virtual -> physical qubit).
    """
    def __init__(self,noise,traffic):
        # noise: networkx graph of the physical device (edge/node 'weight'
        # attributes are error rates); traffic: interaction graph of the
        # algorithm ('single'/'double' gate counts).
        self.noise = noise
        self.traffic = traffic
        self.create_node_and_edge_dicts()
        self.allocate()
    def create_node_and_edge_dicts(self):
        """Build forward and inverse lookup dicts for edge and node error
        weights of the noise graph.

        NOTE(review): the inverse dicts are keyed by the floating-point
        weight itself; two edges/nodes with identical weights would
        collide (last one wins). With continuous random weights this is
        unlikely but not impossible — confirm acceptable.
        """
        self.edges = dict(self.noise.edges())
        set_1 = list(self.edges.keys())
        set_2 = list(self.edges.values())
        set_3 = {}
        inv_set_3 = {}
        # edge tuple -> error weight
        for i in range(0,len(set_2)):
            set_3.update({set_1[i]:set_2[i]['weight']})
        # error weight -> edge tuple (inverse lookup)
        for j in range(0,len(set_2)):
            inv_set_3.update({set_2[j]['weight']:set_1[j]})
        self.edges = set_3
        self.inv_edges = inv_set_3
        # node -> error weight (default 1 when the attribute is missing)
        self.nodes = dict(self.noise.nodes(data='weight',default=1))
        self.inv_nodes = {}
        set_1 = list(self.nodes.keys())
        set_2 = list(self.nodes.values())
        for n in range(0,len(set_1)):
            self.inv_nodes.update({set_2[n]:set_1[n]})
    #calculate the traffic coefficient and allocate the first 2 qubits
    def allocate(self):
        """Compute normalized traffic coefficients for each virtual qubit,
        then greedily assign virtual qubits to physical qubits."""
        #designate an array of frequencies
        frequencies = []
        #start by calculating frequency for all nodes in traffic graph:
        #single-gate count plus twice the double-gate count of each
        #incident algorithmic edge
        for a in range(0,len(self.traffic)):
            frequencies.append(self.traffic.nodes[a]['single'])
            for b in range(0,len(self.traffic)):
                if b in self.traffic[a]:
                    frequencies[a] += 2*self.traffic.edges[a,b]['double']
        #calculate traffic coefficients: coef = 1 - 1/frequency, so busier
        #qubits get coefficients closer to 1
        traf_coefs = [1/r for r in frequencies]
        traf_coefs = [1-s for s in traf_coefs]
        #define and calculate the normalization constant
        normalization = 0
        for c in range(0,len(traf_coefs)):
            normalization += traf_coefs[c]
        normalization = 1/normalization
        #outputs the normalized traffic coefficents (sum to 1)
        normalized_traf_coefs = [normalization*u for u in traf_coefs]
        #calculation of traffic coefficents: virtual qubit -> coefficient,
        #plus an inverse map coefficient -> [virtual qubits]
        self.traf_qubits_and_coefs = {}
        self.traf_qubits_and_coefs_inverse = {}
        for i in range(0,len(traf_coefs)):
            self.traf_qubits_and_coefs.update({i:normalized_traf_coefs[i]})
        for key, value in self.traf_qubits_and_coefs.items():
            if value not in self.traf_qubits_and_coefs_inverse:
                self.traf_qubits_and_coefs_inverse[value] = [key]
            else:
                self.traf_qubits_and_coefs_inverse[value].append(key)
        #start the allocation process
        self.final_mapping = {}
        #just map a simple linear algorithm by selecting the best edges
        for i in range(0,len(self.traf_qubits_and_coefs)):
            if len(self.final_mapping) == 0:
                #first placement: need to ascertain the best connected nodes...
                degrees_noise = dict(self.noise.degree())
                degrees_noise = dict(sorted(degrees_noise.items(),key=operator.itemgetter(1),reverse=True))
                # NOTE(review): 'connectivity' is computed but never read
                # afterwards (the max() is recomputed on the next line).
                connectivity = max(degrees_noise.values())
                best_nodes = [keys for keys,values in degrees_noise.items() if int(values) >= max(degrees_noise.values())]
                #...and find the best edge error rate for one of these nodes, supremum error rate ... finds all edges linked to each best_connected node
                error_edges = list(self.noise.edges(best_nodes))
                error_rates = []
                #makes a list of all error rates
                for j in range(0,len(error_edges)):
                    error_rates.append(self.noise[error_edges[j][0]][error_edges[j][1]]['weight'])
                min_edge_rate = min(error_rates)
                #recover the edge from its weight via the inverse dict
                mapp_noise_qubit = self.inv_edges[min_edge_rate]
                #place the first virtual qubit on whichever endpoint of the
                #best edge is one of the best-connected nodes
                if mapp_noise_qubit[0] in best_nodes:
                    self.final_mapping.update({list(self.traf_qubits_and_coefs.keys())[i]:mapp_noise_qubit[0]})
                elif mapp_noise_qubit[0] not in best_nodes:
                    self.final_mapping.update({list(self.traf_qubits_and_coefs.keys())[i]:mapp_noise_qubit[1]})
            else:
                #find a list of all nodes that are distance "distance" away from first traff. coefficient qubit
                #(cutoff=1: immediate neighbors of the previously placed qubit)
                path_physical = nx.single_source_dijkstra_path_length(self.noise,self.final_mapping[list(self.traf_qubits_and_coefs.keys())[i-1]],cutoff=1)
                path_physical = dict(sorted(path_physical.items(),key=operator.itemgetter(1),reverse=False))
                path_physical = list(path_physical.keys())
                #cycle through the physically available nodes, and check availibility;
                #assign the current virtual qubit to the first free neighbor
                for k in range(0,len(path_physical)):
                    if path_physical[k] in self.final_mapping.values() and list(self.traf_qubits_and_coefs.keys())[i] in self.final_mapping.keys():
                        continue
                    elif path_physical[k] not in self.final_mapping.values() and list(self.traf_qubits_and_coefs.keys())[i] not in self.final_mapping.keys():
                        self.final_mapping.update({list(self.traf_qubits_and_coefs.keys())[i]:path_physical[k]})
###################################################################################################
#brute force algorithm
#brute force algorithm
class brute:
    """Exhaustive (brute-force) mapper: evaluates the success metric for
    every possible assignment of the algorithm's virtual qubits to physical
    qubits and keeps the assignment with the highest score.

    Attributes set: ``edges``/``inv_edges``, ``nodes``/``inv_nodes`` (same
    lookup dicts as the other mappers), ``final_mapping`` (best virtual ->
    physical assignment) and ``max_final_score`` (its metric value).
    """
    def __init__(self,noise,traffic):
        # noise: networkx graph of the physical device; traffic: interaction
        # graph of the algorithm to be mapped.
        self.noise = noise
        self.traffic = traffic
        self.create_node_and_edge_dicts()
        self.permute_and_generate()
    def create_node_and_edge_dicts(self):
        """Build forward and inverse lookup dicts for edge and node error
        weights of the noise graph (shared helper, same as in the other
        mapper classes)."""
        self.edges = dict(self.noise.edges())
        set_1 = list(self.edges.keys())
        set_2 = list(self.edges.values())
        set_3 = {}
        inv_set_3 = {}
        # edge tuple -> error weight
        for i in range(0,len(set_2)):
            set_3.update({set_1[i]:set_2[i]['weight']})
        # error weight -> edge tuple (inverse lookup; identical weights collide)
        for j in range(0,len(set_2)):
            inv_set_3.update({set_2[j]['weight']:set_1[j]})
        self.edges = set_3
        self.inv_edges = inv_set_3
        # node -> error weight (default 1 when the attribute is missing)
        self.nodes = dict(self.noise.nodes(data='weight',default=1))
        self.inv_nodes = {}
        set_1 = list(self.nodes.keys())
        set_2 = list(self.nodes.values())
        for n in range(0,len(set_1)):
            self.inv_nodes.update({set_2[n]:set_1[n]})
    def permute_and_generate(self):
        """Enumerate all assignments of the len(traffic) virtual qubits to
        physical qubits, score each with ``metric``, and keep the best.

        Improvement over the original implementation: the original built
        list(iter.permutations(self.nodes.keys())) — all n! orderings of the
        whole device — although only the first len(traffic) positions were
        ever used, so every r-prefix was re-scored (n-r)! times and the full
        list was held in memory.  Enumerating r-permutations lazily visits
        each candidate mapping exactly once and yields the same best mapping
        and the same maximum score (ties resolve to the same first-seen
        permutation, since prefix order is preserved).
        """
        num_virtual = len(self.traffic)
        self.final_mapping = {}
        best_score = None
        best_permutation = None
        #loops over all distinct r-permutations of the physical qubits
        for permutation_phys_qubits in iter.permutations(self.nodes.keys(), num_virtual):
            #assign every virtual qubit of the algorithm for this candidate
            for j in range(num_virtual):
                self.final_mapping.update({j:permutation_phys_qubits[j]})
            #evaluate the metric (product form) for this candidate mapping
            self.brute = metric(self.noise,self.traffic,self.final_mapping)
            score = self.brute.final_metric_product
            #keep the first candidate achieving the highest score
            if best_score is None or score > best_score:
                best_score = score
                best_permutation = permutation_phys_qubits
        self.max_final_score = best_score
        #rewrite final_mapping with the winning assignment
        for k in range(num_virtual):
            self.final_mapping.update({k:best_permutation[k]})
###################################################################################################
#no-mapping solution
#no-mapping solution
class nomapper:
    """Trivial mapper: assigns virtual qubit i to physical qubit i, as a
    baseline against the heuristic and brute-force mappers.

    Attributes set: ``edges``/``inv_edges`` (edge -> error weight and the
    inverse), ``nodes``/``inv_nodes`` (node -> error weight and the
    inverse) and ``final_mapping`` (the identity assignment).
    """
    def __init__(self,noise,traffic):
        # noise: networkx graph of the physical device; traffic: interaction
        # graph of the algorithm (kept for interface parity with the other
        # mappers).
        self.noise = noise
        self.traffic = traffic
        self.create_node_and_edge_dicts()
        self.assign()
    def create_node_and_edge_dicts(self):
        """Build forward and inverse lookup dicts for edge and node error
        weights of the noise graph (same contract as in the other mapper
        classes; inverse dicts are keyed by the float weight, so identical
        weights collide with last-one-wins)."""
        edge_data = dict(self.noise.edges())
        # edge tuple -> error weight, and the inverse weight -> edge tuple
        self.edges = {pair: attrs['weight'] for pair, attrs in edge_data.items()}
        self.inv_edges = {attrs['weight']: pair for pair, attrs in edge_data.items()}
        # node -> error weight (default 1 when the attribute is missing),
        # and the inverse weight -> node
        self.nodes = dict(self.noise.nodes(data='weight',default=1))
        self.inv_nodes = {weight: qubit for qubit, weight in self.nodes.items()}
    def assign(self):
        """Populate ``final_mapping`` with the identity assignment i -> i
        over all physical qubits."""
        self.final_mapping = {index: index for index in range(len(self.nodes))}
###################################################################################################
#calculate the basic product metric
#calculate the basic product metric
class metric:
    """Success-rate metric for a virtual->physical qubit mapping.

    Computes a product-form success probability combining single-gate,
    double-gate, SWAP-route and read-out error rates of the noise graph,
    weighted by the gate counts of the traffic graph.  The result is left
    in ``final_metric_product``.
    """
    def __init__(self,noise,traffic,final_mapping):
        # noise: networkx device graph with 'weight' (error rate) and
        # 'read_out' attributes; traffic: algorithm graph with 'single'
        # (node) and 'double' (edge) gate counts; final_mapping: dict
        # virtual qubit -> physical qubit.
        self.noise = noise
        self.traffic = traffic
        self.final_mapping = final_mapping
        self.create_node_and_edge_dicts()
        self.calculate_metric()
    def calculate_metric(self):
        """Run the two product-metric passes (single gates, then
        double/SWAP gates plus read-out)."""
        self.single_gate_product_metric()
        self.SWAP_double_gate_product_metric()
        # self.calculate_normalization()
    def create_node_and_edge_dicts(self):
        """Build forward and inverse lookup dicts for edge and node error
        weights of the noise graph (same helper as in the mapper classes;
        inverse dicts keyed by float weight, so equal weights collide)."""
        self.edges = dict(self.noise.edges())
        set_1 = list(self.edges.keys())
        set_2 = list(self.edges.values())
        set_3 = {}
        inv_set_3 = {}
        # edge tuple -> error weight
        for i in range(0,len(set_2)):
            set_3.update({set_1[i]:set_2[i]['weight']})
        # error weight -> edge tuple (inverse lookup)
        for j in range(0,len(set_2)):
            inv_set_3.update({set_2[j]['weight']:set_1[j]})
        self.edges = set_3
        self.inv_edges = inv_set_3
        # node -> error weight (default 1 when the attribute is missing)
        self.nodes = dict(self.noise.nodes(data='weight',default=1))
        self.inv_nodes = {}
        set_1 = list(self.nodes.keys())
        set_2 = list(self.nodes.values())
        for n in range(0,len(set_1)):
            self.inv_nodes.update({set_2[n]:set_1[n]})
    def single_gate_product_metric(self):
        """Compute ``single_product``: the product over all virtual qubits
        of (1 - node error rate)**(single-gate count)."""
        #dictionary of form {algorithm:single_gate_frequency...}
        single_invocation = nx.get_node_attributes(self.traffic,'single')
        # print("single_invocation = ",single_invocation)
        #single error rate for the product metric
        self.single_product = 1
        #cycles through the dictionary single_invocation
        for key,val in single_invocation.items():
            #extracts the error rate for the physical node this virtual
            #qubit is mapped to
            error_rate_single = self.nodes[self.final_mapping[key]]
            self.single_product = self.single_product*((1-error_rate_single)**single_invocation[key])
    def SWAP_double_gate_product_metric(self):
        """Compute ``double_product`` (adjacent two-qubit gates),
        ``swap_product`` (routed two-qubit gates) and fold them with
        ``single_product`` and the read-out success into
        ``final_metric_product``."""
        #set initial overall double-gate and SWAP errors
        double_invocation = nx.get_edge_attributes(self.traffic,'double')
        #outputs list form of the algorithm edges in tuples
        algorithmic_edges = list(double_invocation.keys())
        #set summations for the gate invocations
        # NOTE(review): these two accumulators are never updated or read
        # below — apparently dead state.
        current_swap_invocation_number_sum = 0
        double_gate_invocations_sum = 0
        #set the error rate for each portion of the product metric
        self.double_product = 1
        self.swap_product = 1
        self.final_metric_product = 1
        #cycle through list of tuples and find their positions on physical lattice
        for j in range(0, len(algorithmic_edges)):
            #find the distance between the two nodes á la Dijkstra
            #(shortest_path returns the node list; len == 2 means adjacent)
            distance = nx.shortest_path(self.noise,source=self.final_mapping[algorithmic_edges[j][0]],target=self.final_mapping[algorithmic_edges[j][1]])
            #if the size of distance is 1 - then it is a double-gate error problem
            if len(distance) == 2:
                #find the error rate for physical edge used
                error_rate_product = self.noise[distance[0]][distance[1]]['weight']
                #find number of double gate iterations
                double_gate_invocations = self.traffic[algorithmic_edges[j][0]][algorithmic_edges[j][1]]['double']
                #multiply by the swap and double-gate error rate
                self.double_product = self.double_product*((1-error_rate_product)**double_gate_invocations)
            #if the size of distance is not 1 - then it is a SWAP-gate error problem
            elif len(distance) > 2:
                list_error_rates_product = []
                #cycle through the list of physical qubits used and find error rates
                for i in range(0,len(distance)-1):
                    #find the error rate for physical edge used
                    list_error_rates_product.append([distance[i],distance[i+1]])
                # NOTE(review): only the FIRST hop of the route is kept here;
                # the remaining hops collected above are discarded. Confirm
                # whether the whole route was meant to contribute.
                list_error_rates_product = list_error_rates_product[0]
                #adds the physical qubits to new variable for only the corresponding error rates
                list_error_rates_metric = self.noise[list_error_rates_product[0]][list_error_rates_product[1]]
                list_error_rates_product = []
                #multiply the error rates together
                list_error_rates_metric_base = 1
                list_error_rates_metric_base = (1-list_error_rates_metric['weight'])*list_error_rates_metric_base
                success_rates = 1
                # NOTE(review): this multiplies (1-w) by the base that already
                # contains (1-w), i.e. success_rates == (1-w)**2 for the same
                # edge — looks like it may have been intended to use a second
                # edge's weight; verify.
                success_rates = (1-list_error_rates_metric['weight'])*list_error_rates_metric_base
                #find the number of double-gate invocations needed for the SWAP route - multiply everything by this number
                current_swap_invocation_number = self.traffic[algorithmic_edges[j][0]][algorithmic_edges[j][1]]['double']
                #final calculation
                self.swap_product = self.swap_product*(success_rates**(current_swap_invocation_number*2))
        #final calculation for the overall error rate for single, double, and swap gates
        self.final_metric_product = self.swap_product*self.double_product*self.single_product
        # print("error_rate_before_adding_measurement_errors = ",self.final_metric_product)
        #incorporate measurement errors
        read_out_invocation = nx.get_node_attributes(self.noise,'read_out')
        self.read_out_error = 1
        #cycles through the dictionary read_out_invocation
        #NOTE(review): iterates over final_mapping, so with the identity
        #mapper this includes every physical qubit, not only those used by
        #the algorithm — confirm intended.
        for key,val in self.final_mapping.items():
            #extracts the read-out error rate for the mapped physical node
            error_rate_read_out = read_out_invocation[self.final_mapping[key]]
            self.read_out_error = self.read_out_error*(1-error_rate_read_out)
        self.final_metric_product = self.final_metric_product*self.read_out_error
###################################################################################################
#calculation of statistics
#calculation of statistics
def data_analysis(set_metric_evals,set_metric_evals_brute,set_number_benchmarks,set_metric_evals_nomapper=None):
    """Average the per-trial success rates of each mapper over all trials,
    print them, and return them.

    Parameters:
        set_metric_evals: 2-D array (trials x benchmarks) of heuristic scores.
        set_metric_evals_brute: same shape, brute-force scores.
        set_number_benchmarks: number of benchmark columns to average.
        set_metric_evals_nomapper: same shape, trivial-mapping scores.
            Optional for backward compatibility — the original function read
            this from the module-level global of the same name, which is
            still the fallback when None is passed.

    Returns:
        (average_metric_eval, average_metric_eval_brute,
         average_metric_eval_nomapper): three lists of per-benchmark means.
    """
    if set_metric_evals_nomapper is None:
        # Preserve the original (implicit-global) behavior for existing callers.
        set_metric_evals_nomapper = globals()['set_metric_evals_nomapper']
    average_metric_eval = []
    average_metric_eval_brute = []
    average_metric_eval_nomapper = []
    for graph_counter in range(0,set_number_benchmarks):
        # column mean = sum over trials / number of trials
        average_metric_eval.append(sum(set_metric_evals[:,graph_counter])/len(set_metric_evals))
        average_metric_eval_brute.append(sum(set_metric_evals_brute[:,graph_counter])/len(set_metric_evals_brute))
        average_metric_eval_nomapper.append(sum(set_metric_evals_nomapper[:,graph_counter])/len(set_metric_evals_nomapper))
    print("Heuristic Results = ",average_metric_eval)
    print("Brute-Force Results = ",average_metric_eval_brute)
    print("Trivial-Mapping Results = ",average_metric_eval_nomapper)
    #bar labels for the optional plot below, one commented list per suite
    #3x3
    # objects = ["hL3","tL3","hL4","tL4","hL5","tL5","hL6","tL6","hL7","tL7","hL8","tL8","hL9","tL9"]
    #4x4
    # objects = ["hL3","tL3","hL4","tL4","hL5","tL5","hL6","tL6","hL7","tL7","hL8","tL8","hL9","tL9","hL10","tL10","hL15","tL15","hL16","tL16"]
    #5x5
    # objects = ["hL3","tL3","hL4","tL4","hL5","tL5","hL6","tL6","hL7","tL7","hL8","tL8","hL9","tL9","hL10","tL10","hL15","tL15","hL16","tL16","hL20","tL20","hL25","tL25"]
    #6x6
    # objects = ["hL3","tL3","hL4","tL4","hL5","tL5","hL6","tL6","hL7","tL7","hL8","tL8","hL9","tL9","hL10","tL10","hL15","tL15","hL16","tL16","hL20","tL20","hL25","tL25","hL30","tL30","hL35","tL35","hL36","tL36"]
    #7x7
    # objects = ["hL3","tL3","hL4","tL4","hL5","tL5","hL6","tL6","hL7","tL7","hL8","tL8","hL9","tL9","hL10","tL10","hL15","tL15","hL16","tL16","hL20","tL20","hL25","tL25","hL30","tL30","hL35","tL35","hL36","tL36","hL40","tL40","hL45","tL45","hL49","tL49"]
    #8x8
    # objects = ["hL3","tL3","hL4","tL4","hL5","tL5","hL6","tL6","hL7","tL7","hL8","tL8","hL9","tL9","hL10","tL10","hL15","tL15","hL16","tL16","hL20","tL20","hL25","tL25","hL30","tL30","hL35","tL35","hL36","tL36","hL40","tL40","hL45","tL45","hL49","tL49","hL50","tL50","hL55","tL55","hL60","tL60","hL64","tL64"]
    #9x9
    # objects = ["hL3","tL3","hL4","tL4","hL5","tL5","hL6","tL6","hL7","tL7","hL8","tL8","hL9","tL9","hL10","tL10","hL15","tL15","hL16","tL16","hL20","tL20","hL25","tL25","hL30","tL30","hL35","tL35","hL36","tL36","hL40","tL40","hL45","tL45","hL49","tL49","hL50","tL50","hL55","tL55","hL60","tL60","hL64","tL64","hL64","tL645","hL65","tL70","hL70","tL75","hL75","tL80","hL81","tL81"]
    #10x10
    # objects = ["hL3","tL3","hL4","tL4","hL5","tL5","hL6","tL6","hL7","tL7","hL8","tL8","hL9","tL9","hL10","tL10","hL15","tL15","hL16","tL16","hL20","tL20","hL25","tL25","hL30","tL30","hL35","tL35","hL36","tL36","hL40","tL40","hL45","tL45","hL49","tL49","hL50","tL50","hL55","tL55","hL60","tL60","hL64","tL64","hL64","tL645","hL65","tL70","hL70","tL75","hL75","tL80","hL81","tL81","hL85","tL85","hL90","tL90","hL95","tL95","hL100","tL100"]
    #sequence1
    # objects = ["b4QA","h4QA","t4QA","b4QB","h4QB","t4QB","b4QC","h4QC","t4QC","b4QD","h4QD","t4QD","b6QA","h6QA","t6QA","b6QB","h6QB","t6QB","b6QC","h6QC","t6QC","b6QD","h6QD","t6QD","b6QE","h6QE","t6QE","b6QF","h6QF","t6QF","b6QG","h6QG","t6QG","b6QH","h6QH","t6QH","b6QI","h6QI","t6QI","b6QJ","h6QJ","t6QJ","b6QK","h6QK","t6QK","b8QA","h8QA","t8QA","b8QB","h8QB","t8QB","b8QC","h8QC","t8QC","b8QD","h8QD","t8QD","b8QE","h8QE","t8QE","b8QF","h8QF","t8QF","b8QG","h8QG","t8QG","b8QH","h8QH","t8QH","b8QI","h8QI","t8QI","b8QJ","h8QJ","t8QJ","b8QK","h8QK","t8QK","b8QL","h8QL","t8QL","b8QM","h8QM","t8QM","b8QN","h8QN","t8QN","b8QO","h8QO","t8QO","b8QP","h8QP","t8QP","b8QR","h8QR","t8QR","b8QS","h8QS","t8QS","b8QT","h8QT","t8QT","b8QU","h8QU","t8QU","b8QV","h8QV","t8QV"]
    #sequence2
    # objects = ["b6QC","h6QC","t6QC","b6QD","h6QD","t6QD","b6QE","h6QE","t6QE","b6QF","h6QF","t6QF","b6QG","h6QG","t6QG","b6QH","h6QH","t6QH","b6QI","h6QI","t6QI","b6QJ","h6QJ","t6QJ","b6QK","h6QK","t6QK","b8QC","h8QC","t8QC","b8QD","h8QD","t8QD","b8QE","h8QE","t8QE","b8QF","h8QF","t8QF","b8QG","h8QG","t8QG","b8QH","h8QH","t8QH","b8QI","h8QI","t8QI","b8QJ","h8QJ","t8QJ","b8QK","h8QK","t8QK","b8QL","h8QL","t8QL","b8QM","h8QM","t8QM","b8QN","h8QN","t8QN","b8QO","h8QO","t8QO","b8QP","h8QP","t8QP","b8QR","h8QR","t8QR","b8QS","h8QS","t8QS","b8QT","h8QT","t8QT","b8QU","h8QU","t8QU","b8QV","h8QV","t8QV"]
    # performance = []
    # colors = []
    # for alpha in range(0,set_number_benchmarks):
    #     # brute = average_metric_eval_brute[alpha].round(decimals=3)
    #     heuristic = average_metric_eval[alpha].round(decimals=3)
    #     nomapper = average_metric_eval_nomapper[alpha].round(decimals=3)
    #     # performance.append(brute)
    #     performance.append(heuristic)
    #     performance.append(nomapper)
    #     # colors.append('r')
    #     colors.append('b')
    #     colors.append('g')
    # #plotter portion
    # y_pos = np.arange(len(objects))
    # bars = plt.bar(y_pos, performance, width=0.90, align='center', alpha=0.5, color=colors)
    # plt.xticks(y_pos, objects, fontsize=7,rotation=90)
    # plt.ylabel('success rate')
    # plt.yticks(np.arange(0, 1, 0.05))
    # plt.tick_params(labeltop=False, labelright=True,labelbottom=True,labelleft=True)
    # plt.title('Success-Rate Results')
    # plt.ylim(0.0, 1.0)
    # for bar in bars:
    #     yval = bar.get_height()
    #     plt.text(bar.get_x(), yval + .005, yval)
    # plt.tight_layout()
    # plt.show()
    return average_metric_eval, average_metric_eval_brute, average_metric_eval_nomapper
###################################################################################################
# Benchmark driver: for each benchmark traffic graph, run `trials` random
# noise graphs through three mappers (heuristic, exhaustive brute force,
# trivial no-mapping baseline), recording each solver's metric score and
# wall-clock time, then report mean timings and hand the score matrices
# to data_analysis().
trials = 100
set_number_benchmarks = 36
set_metric_evals = np.zeros((trials, set_number_benchmarks), float)
set_metric_evals_brute = np.zeros((trials, set_number_benchmarks), float)
set_metric_evals_nomapper = np.zeros((trials, set_number_benchmarks), float)
time_brute = []
time_heuristic = []
time_trivial = []

for graph_counter in range(set_number_benchmarks):
    for trial_idx in range(trials):
        # One shared random-noise / benchmark-traffic pair per trial, so
        # all three solvers are compared on identical inputs.
        noise = noise_graph()
        traffic = traffic_graph(graph_counter)

        # Heuristic mapper.
        t0 = time.time()
        solver = heuristic(noise, traffic)
        score = metric(solver.noise, solver.traffic, solver.final_mapping)
        set_metric_evals[trial_idx, graph_counter] = score.final_metric_product
        time_heuristic.append(time.time() - t0)

        # Exhaustive (brute-force) mapper.
        t0 = time.time()
        solver = brute(noise, traffic)
        score = metric(solver.noise, solver.traffic, solver.final_mapping)
        set_metric_evals_brute[trial_idx, graph_counter] = score.final_metric_product
        time_brute.append(time.time() - t0)

        # Trivial baseline: no mapping performed.
        t0 = time.time()
        solver = nomapper(noise, traffic)
        score = metric(solver.noise, solver.traffic, solver.final_mapping)
        set_metric_evals_nomapper[trial_idx, graph_counter] = score.final_metric_product
        time_trivial.append(time.time() - t0)
    print("graph completed = ", graph_counter)

# Collapse the per-run timing lists to their means (each name is rebound
# from list to float, as in the original flow).
time_brute = sum(time_brute) / len(time_brute)
time_heuristic = sum(time_heuristic) / len(time_heuristic)
time_trivial = sum(time_trivial) / len(time_trivial)
print("BFS time: ", time_brute)
print("Heuristic time: ", time_heuristic)
print("Trivial time: ", time_trivial)
data_analysis(set_metric_evals, set_metric_evals_brute, set_number_benchmarks)
| 66.065627
| 773
| 0.657664
| 17,288
| 101,675
| 3.748843
| 0.043903
| 0.174664
| 0.261997
| 0.239546
| 0.878136
| 0.865453
| 0.851998
| 0.844222
| 0.840997
| 0.830967
| 0
| 0.131361
| 0.141293
| 101,675
| 1,538
| 774
| 66.108583
| 0.610945
| 0.108178
| 0
| 0.39104
| 0
| 0
| 0.006574
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.012908
| false
| 0
| 0.006074
| 0
| 0.023538
| 0.005315
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
eb10f6da1643d54b936423169ef96044af6bfe78
| 8,906
|
py
|
Python
|
test.py
|
jsmaniac/hash-files
|
295f145c005676730ee5610f43f2e657e0769c54
|
[
"CC0-1.0"
] | null | null | null |
test.py
|
jsmaniac/hash-files
|
295f145c005676730ee5610f43f2e657e0769c54
|
[
"CC0-1.0"
] | null | null | null |
test.py
|
jsmaniac/hash-files
|
295f145c005676730ee5610f43f2e657e0769c54
|
[
"CC0-1.0"
] | 1
|
2021-10-02T16:07:35.000Z
|
2021-10-02T16:07:35.000Z
|
#!/usr/bin/env python3
"""Integration tests for ./hash-files.py.

Each case builds a fresh directory tree inside a temporary directory,
hashes ``test/foo`` with ``./hash-files.py`` and compares the output to a
known-good digest.  The expected digests rely on the inputs being
reproducible (fixed git author/committer identity and dates; sqlite
files built by identical command sequences).  On the first mismatch the
script prints a failure line and exits with status 1.
"""
import os, subprocess, tempfile


def _make_tree(tempdir):
    """Create the common test/foo/{bar,baz,baz/quux} directory skeleton."""
    os.mkdir(tempdir+'/test/foo/bar')
    os.mkdir(tempdir+'/test/foo/baz')
    os.mkdir(tempdir+'/test/foo/baz/quux')


def _make_git_repos(tempdir):
    """Create work-dir and bare git repositories under baz.

    One commit with a pinned author/committer identity and date so the
    resulting git object store (and therefore its hash) is reproducible.
    """
    os.system('git init '+tempdir+'/test/foo/baz/git_workdir -b branchname --quiet')
    os.system('git init '+tempdir+'/test/foo/baz/git_workdir_empty -b branchname --quiet')
    os.system('git init --bare '+tempdir+'/test/foo/baz/git_bare -b branchname --quiet')
    os.system('git init --bare '+tempdir+'/test/foo/baz/git_bare_empty -b branchname --quiet')
    os.system('cd '+tempdir+'/test/foo/baz/git_workdir && echo a > toto')
    os.system('cd '+tempdir+'/test/foo/baz/git_workdir && git add toto')
    os.system('cd '+tempdir+'/test/foo/baz/git_workdir&& GIT_COMMITTER_DATE="Sun Feb 21 18:00 2020 +0000" GIT_AUTHOR_NAME="Suzanne Soy" GIT_AUTHOR_EMAIL="example@suzanne.soy" GIT_COMMITTER_NAME="Suzanne Soy" GIT_COMMITTER_EMAIL="example@suzanne.soy" git commit -m "example commit for tests" --date="Sun Feb 21 18:00 2020 +0000" --quiet')
    os.system('cd '+tempdir+'/test/foo/baz/git_workdir && git push ../git_bare branchname --quiet')


def _make_sqlite_db(tempdir, tbl_insert):
    """Build a sqlite database at baz/db; *tbl_insert* fills table tbl.

    The small and big variants differ only in the insert statement for
    ``tbl``.  It seems that sqlite databases are quite reproducible;
    running the same command sequence produces identical files!
    """
    os.system('sqlite3 '+tempdir+'/test/foo/baz/db "create table digits(d);"')
    for i in range(10):
        os.system('sqlite3 '+tempdir+'/test/foo/baz/db "insert into digits(d) values('+str(i)+');"')
    os.system('sqlite3 '+tempdir+'/test/foo/baz/db "create table tbl(x);"')
    os.system('sqlite3 '+tempdir+'/test/foo/baz/db "'+tbl_insert+'"')
    os.system('sqlite3 '+tempdir+'/test/foo/baz/db "create table rnd(x);"')
    os.system('sqlite3 '+tempdir+'/test/foo/baz/db "insert into rnd(x) select x from tbl order by random();"')
    os.system('sqlite3 '+tempdir+'/test/foo/baz/db "create table tbl2(x);"')
    os.system('sqlite3 '+tempdir+'/test/foo/baz/db "insert into tbl2(x) select x from rnd order by x;"')
    os.system('sqlite3 '+tempdir+'/test/foo/baz/db "drop table rnd;"')
    #os.system('sqlite3 '+tempdir+'/test/foo/baz/db "vacuum;"')


def _run_case(label, expected, setup):
    """Run one case: build test/foo in a temp dir, apply *setup*, hash, compare.

    Prints "test passed: <label>" on success; on mismatch prints the
    failure line (in the uniform "<label>: got hash" format — the original
    sqlite cases were missing the colon) and exits with status 1.
    """
    with tempfile.TemporaryDirectory(prefix="test", dir="/tmp") as tempdir:
        os.mkdir(tempdir+'/test')
        os.mkdir(tempdir+'/test/foo')
        setup(tempdir)
        h = subprocess.check_output([os.path.abspath('./hash-files.py'), 'test/foo'], cwd=tempdir).strip()
        if h == expected:
            print("test passed: " + label + "\n", flush=True)
        else:
            print("TEST FAILED: " + label + ": got hash " + repr(h) + "\n", flush=True)
            exit(1)


def _setup_empty(tempdir):
    """Empty test/foo — nothing beyond the base directories."""
    pass


def _setup_plain(tempdir):
    """A single plain text file directly under test/foo."""
    os.system('echo a > '+tempdir+'/test/foo/x')


def _setup_subdirs(tempdir):
    """Plain text file plus empty folders in a subdirectory tree."""
    _make_tree(tempdir)
    os.system('echo a > '+tempdir+'/test/foo/baz/titi')


def _setup_git(tempdir):
    """Subdirectory tree with populated and empty git repositories."""
    _make_tree(tempdir)
    _make_git_repos(tempdir)
    os.system('echo a > '+tempdir+'/test/foo/baz/titi')


def _setup_sqlite(tempdir):
    """Git repos plus a small (1000-row) sqlite database."""
    _make_tree(tempdir)
    _make_git_repos(tempdir)
    _make_sqlite_db(tempdir, 'insert into tbl(x) select d4.d * 100 + d5.d * 10 + d6.d from digits d4, digits d5, digits d6;')
    os.system('echo a > '+tempdir+'/test/foo/baz/titi')


def _setup_sqlite_big(tempdir):
    """Git repos plus a big (10,000,000-row) sqlite database."""
    _make_tree(tempdir)
    _make_git_repos(tempdir)
    _make_sqlite_db(tempdir, 'insert into tbl(x) select d0.d * 1000000 + d1.d * 100000 + d2.d * 10000 + d3.d * 1000 + d4.d * 100 + d5.d * 10 + d6.d from digits d0, digits d1, digits d2, digits d3, digits d4, digits d5, digits d6;')
    os.system('echo a > '+tempdir+'/test/foo/baz/titi')


_run_case("empty", b'dc99f8161ccf245e178102a00264e4f4f43cd0048ea525b6c9e226777414352f', _setup_empty)
_run_case("plain text file", b'6b393b2233479ccc54975f83f4de0d39592d5ab78cd02b19597e7bbe97f43cf1', _setup_plain)
_run_case("plain text and empty folder in subdirectory", b'0bb2f31bf05eb215ebef32abcc62cddbfad2d8b0a1221bb335da0acaf3455558', _setup_subdirs)
_run_case("git", b'f31eb7e1bcb25e79be0d1305d58eeadbe3fd9bf38ecbd0449789e8c91b5f4340', _setup_git)
_run_case("sqlite", b'b775b5c3ad1b403c08fa88e43be42bd76143f93c26bf42cb8881c595161a5509', _setup_sqlite)
_run_case("sqlite big table", b'7d6917fef222456552b6359ddc4eee235a0cdca089c0a6d9b4b2f6a747987eb9', _setup_sqlite_big)
| 66.462687
| 337
| 0.677296
| 1,328
| 8,906
| 4.490211
| 0.107681
| 0.134664
| 0.157303
| 0.159651
| 0.895019
| 0.883783
| 0.883783
| 0.858964
| 0.847225
| 0.825423
| 0
| 0.052728
| 0.141815
| 8,906
| 134
| 338
| 66.462687
| 0.727463
| 0.051651
| 0
| 0.810345
| 0
| 0.077586
| 0.564604
| 0.173898
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.051724
| 0.008621
| 0
| 0.008621
| 0.103448
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 8
|
de46d141b2ac926f02651bc88b99efff65eb6c4b
| 97
|
py
|
Python
|
packages/core/minos-microservice-common/tests/ImportedModule.py
|
sorasful/minos-python
|
1189330eebf6444627a2af6b29f347670f95a4dd
|
[
"MIT"
] | 247
|
2022-01-24T14:55:30.000Z
|
2022-03-25T12:06:17.000Z
|
packages/core/minos-microservice-common/tests/ImportedModule.py
|
sorasful/minos-python
|
1189330eebf6444627a2af6b29f347670f95a4dd
|
[
"MIT"
] | 400
|
2021-04-03T08:51:40.000Z
|
2022-01-28T11:51:22.000Z
|
packages/core/minos-microservice-common/tests/ImportedModule.py
|
sorasful/minos-python
|
1189330eebf6444627a2af6b29f347670f95a4dd
|
[
"MIT"
] | 21
|
2022-02-06T17:25:58.000Z
|
2022-03-27T04:50:29.000Z
|
class ImportedClassTest(object):
    """Dummy fixture class: importable, with one method returning a marker."""

    def return_test_example(self):
        """Return the fixed marker string used by the import tests."""
        marker = "test passed"
        return marker
| 24.25
| 34
| 0.721649
| 11
| 97
| 6.181818
| 0.818182
| 0.294118
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.195876
| 97
| 3
| 35
| 32.333333
| 0.871795
| 0
| 0
| 0
| 0
| 0
| 0.113402
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0.333333
| 0.333333
| 0.333333
| 1.333333
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 1
| 1
| 1
| 0
|
0
| 8
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.