hexsha
string | size
int64 | ext
string | lang
string | max_stars_repo_path
string | max_stars_repo_name
string | max_stars_repo_head_hexsha
string | max_stars_repo_licenses
list | max_stars_count
int64 | max_stars_repo_stars_event_min_datetime
string | max_stars_repo_stars_event_max_datetime
string | max_issues_repo_path
string | max_issues_repo_name
string | max_issues_repo_head_hexsha
string | max_issues_repo_licenses
list | max_issues_count
int64 | max_issues_repo_issues_event_min_datetime
string | max_issues_repo_issues_event_max_datetime
string | max_forks_repo_path
string | max_forks_repo_name
string | max_forks_repo_head_hexsha
string | max_forks_repo_licenses
list | max_forks_count
int64 | max_forks_repo_forks_event_min_datetime
string | max_forks_repo_forks_event_max_datetime
string | content
string | avg_line_length
float64 | max_line_length
int64 | alphanum_fraction
float64 | qsc_code_num_words_quality_signal
int64 | qsc_code_num_chars_quality_signal
float64 | qsc_code_mean_word_length_quality_signal
float64 | qsc_code_frac_words_unique_quality_signal
float64 | qsc_code_frac_chars_top_2grams_quality_signal
float64 | qsc_code_frac_chars_top_3grams_quality_signal
float64 | qsc_code_frac_chars_top_4grams_quality_signal
float64 | qsc_code_frac_chars_dupe_5grams_quality_signal
float64 | qsc_code_frac_chars_dupe_6grams_quality_signal
float64 | qsc_code_frac_chars_dupe_7grams_quality_signal
float64 | qsc_code_frac_chars_dupe_8grams_quality_signal
float64 | qsc_code_frac_chars_dupe_9grams_quality_signal
float64 | qsc_code_frac_chars_dupe_10grams_quality_signal
float64 | qsc_code_frac_chars_replacement_symbols_quality_signal
float64 | qsc_code_frac_chars_digital_quality_signal
float64 | qsc_code_frac_chars_whitespace_quality_signal
float64 | qsc_code_size_file_byte_quality_signal
float64 | qsc_code_num_lines_quality_signal
float64 | qsc_code_num_chars_line_max_quality_signal
float64 | qsc_code_num_chars_line_mean_quality_signal
float64 | qsc_code_frac_chars_alphabet_quality_signal
float64 | qsc_code_frac_chars_comments_quality_signal
float64 | qsc_code_cate_xml_start_quality_signal
float64 | qsc_code_frac_lines_dupe_lines_quality_signal
float64 | qsc_code_cate_autogen_quality_signal
float64 | qsc_code_frac_lines_long_string_quality_signal
float64 | qsc_code_frac_chars_string_length_quality_signal
float64 | qsc_code_frac_chars_long_word_length_quality_signal
float64 | qsc_code_frac_lines_string_concat_quality_signal
float64 | qsc_code_cate_encoded_data_quality_signal
float64 | qsc_code_frac_chars_hex_words_quality_signal
float64 | qsc_code_frac_lines_prompt_comments_quality_signal
float64 | qsc_code_frac_lines_assert_quality_signal
float64 | qsc_codepython_cate_ast_quality_signal
float64 | qsc_codepython_frac_lines_func_ratio_quality_signal
float64 | qsc_codepython_cate_var_zero_quality_signal
bool | qsc_codepython_frac_lines_pass_quality_signal
float64 | qsc_codepython_frac_lines_import_quality_signal
float64 | qsc_codepython_frac_lines_simplefunc_quality_signal
float64 | qsc_codepython_score_lines_no_logic_quality_signal
float64 | qsc_codepython_frac_lines_print_quality_signal
float64 | qsc_code_num_words
int64 | qsc_code_num_chars
int64 | qsc_code_mean_word_length
int64 | qsc_code_frac_words_unique
null | qsc_code_frac_chars_top_2grams
int64 | qsc_code_frac_chars_top_3grams
int64 | qsc_code_frac_chars_top_4grams
int64 | qsc_code_frac_chars_dupe_5grams
int64 | qsc_code_frac_chars_dupe_6grams
int64 | qsc_code_frac_chars_dupe_7grams
int64 | qsc_code_frac_chars_dupe_8grams
int64 | qsc_code_frac_chars_dupe_9grams
int64 | qsc_code_frac_chars_dupe_10grams
int64 | qsc_code_frac_chars_replacement_symbols
int64 | qsc_code_frac_chars_digital
int64 | qsc_code_frac_chars_whitespace
int64 | qsc_code_size_file_byte
int64 | qsc_code_num_lines
int64 | qsc_code_num_chars_line_max
int64 | qsc_code_num_chars_line_mean
int64 | qsc_code_frac_chars_alphabet
int64 | qsc_code_frac_chars_comments
int64 | qsc_code_cate_xml_start
int64 | qsc_code_frac_lines_dupe_lines
int64 | qsc_code_cate_autogen
int64 | qsc_code_frac_lines_long_string
int64 | qsc_code_frac_chars_string_length
int64 | qsc_code_frac_chars_long_word_length
int64 | qsc_code_frac_lines_string_concat
null | qsc_code_cate_encoded_data
int64 | qsc_code_frac_chars_hex_words
int64 | qsc_code_frac_lines_prompt_comments
int64 | qsc_code_frac_lines_assert
int64 | qsc_codepython_cate_ast
int64 | qsc_codepython_frac_lines_func_ratio
int64 | qsc_codepython_cate_var_zero
int64 | qsc_codepython_frac_lines_pass
int64 | qsc_codepython_frac_lines_import
int64 | qsc_codepython_frac_lines_simplefunc
int64 | qsc_codepython_score_lines_no_logic
int64 | qsc_codepython_frac_lines_print
int64 | effective
string | hits
int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
56f0adb6ffb23ba292badc3740453920de7e1ff6
| 42,030
|
py
|
Python
|
src/move_arm/src/projik_example.py
|
citronella3alain/baxterDraw
|
c050254e8b4b8d4f5087e8743a34289844138e0c
|
[
"MIT"
] | null | null | null |
src/move_arm/src/projik_example.py
|
citronella3alain/baxterDraw
|
c050254e8b4b8d4f5087e8743a34289844138e0c
|
[
"MIT"
] | null | null | null |
src/move_arm/src/projik_example.py
|
citronella3alain/baxterDraw
|
c050254e8b4b8d4f5087e8743a34289844138e0c
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
import rospy
from moveit_msgs.srv import GetPositionIK, GetPositionIKRequest, GetPositionIKResponse
from geometry_msgs.msg import PoseStamped
from moveit_commander import MoveGroupCommander
import numpy as np
from numpy import linalg
import sys
def make0(robo, arm, xi, upper):
    """Build an IK request placing the end effector on an ellipse (digit '0').

    Args:
        robo: robot name; 'sawyer' selects the '<arm>_gripper_tip' link,
            anything else uses the bare '<arm>_gripper' link.
        arm: 'left' or 'right'; selects MoveIt group "<arm>_arm".
        xi: x coordinate along the ellipse, metres in the 'base' frame
            (rounded to 3 decimals before use).
        upper: truthy for the upper half of the ellipse, falsy for the lower.

    Returns:
        GetPositionIKRequest with position (xi, y, -0.1) and a fixed
        downward-pointing orientation (x=0, y=1, z=0, w=0).
    """
    request = GetPositionIKRequest()
    request.ik_request.group_name = arm + "_arm"
    # If a Sawyer does not have a gripper, replace '_gripper_tip' with '_wrist' instead
    link = arm + "_gripper"
    if robo == 'sawyer':
        link += '_tip'
    request.ik_request.ik_link_name = link
    request.ik_request.attempts = 20
    request.ik_request.pose_stamped.header.frame_id = "base"
    # Ellipse parameters: semi-axes b (y) and a (x), centred at (h, k).
    b = 0.048
    a = 0.068
    k = 0.193
    h = 0.765
    xi = np.round(xi, 3)
    # y = k +/- b*sqrt(1 - ((xi-h)/a)^2).  np.sqrt yields NaN when xi lies
    # outside the ellipse's x-range, so fall back to the centre-line y; at
    # the centre x (0.765) pin y to the ellipse's top/bottom extreme.
    if upper:
        y = np.round(np.sqrt(b**2 - (b**2 / a**2) * (xi - h)**2) + k, 3)
        print(xi)
        print(y)
        if np.isnan(y):
            y = 0.193
        if xi == 0.765:
            y = 0.293  # top of the ellipse (k + 0.1 used by the drawing)
    else:
        print("im here")
        y = np.round(-np.sqrt(b**2 - (b**2 / a**2) * (xi - h)**2) + k, 3)
        print(xi)
        print(y)
        if np.isnan(y):
            y = 0.193
        if xi == 0.765:
            y = 0.093  # bottom of the ellipse
    request.ik_request.pose_stamped.pose.position.x = xi
    request.ik_request.pose_stamped.pose.position.y = y
    request.ik_request.pose_stamped.pose.position.z = -0.1
    request.ik_request.pose_stamped.pose.orientation.x = 0.0
    request.ik_request.pose_stamped.pose.orientation.y = 1.0
    request.ik_request.pose_stamped.pose.orientation.z = 0.0
    request.ik_request.pose_stamped.pose.orientation.w = 0.0
    return request
def make1(robo, arm, xi, xc, yc):
    """Build an IK request for the digit '1' stroke at x = xi.

    NOTE(review): xc is accepted for signature parity with the other
    make* helpers but is not used here.
    """
    req = GetPositionIKRequest()
    ik = req.ik_request
    ik.group_name = arm + "_arm"
    # Sawyer's end-effector link carries a '_tip' suffix; Baxter does not.
    tip = arm + "_gripper"
    if robo == 'sawyer':
        tip = tip + '_tip'
    ik.ik_link_name = tip
    ik.attempts = 20
    ik.pose_stamped.header.frame_id = "base"
    pose = ik.pose_stamped.pose
    pose.position.x = xi
    pose.position.y = yc + 0.059
    pose.position.z = -0.1
    pose.orientation.x = 0.0
    pose.orientation.y = 1.0
    pose.orientation.z = 0.0
    pose.orientation.w = 0.0
    return req
def make2(robo, arm, xi, upper, mid, center_x=0.691, center_y=0.259):
    """Build an IK request for one segment of the digit '2'.

    The (upper, mid) flag pair selects the stroke; xi sweeps along x,
    except on the bottom bar where xi is interpreted as the y coordinate
    and x is pinned past the centre.
    """
    req = GetPositionIKRequest()
    ik = req.ik_request
    ik.group_name = arm + "_arm"
    # Sawyer's end-effector link carries a '_tip' suffix; Baxter does not.
    tip = arm + "_gripper"
    if robo == 'sawyer':
        tip += '_tip'
    ik.ik_link_name = tip
    ik.attempts = 20
    ik.pose_stamped.header.frame_id = "base"
    if upper and mid:
        # Left half of the top arc.
        y = -np.sqrt((-xi + center_x) / -20) + center_y
    elif upper:
        # Right half of the top arc.
        y = np.sqrt((-xi + center_x) / -20) + center_y
    elif mid:
        # Diagonal stroke down toward the baseline.
        y = -xi + center_y + .05 + center_x + .05
    else:
        # Bottom bar: xi carries the y coordinate here.
        y = xi
        xi = center_x + .15
    pose = ik.pose_stamped.pose
    pose.position.x = xi
    pose.position.y = y
    pose.position.z = -0.1
    pose.orientation.x = 0.0
    pose.orientation.y = 1.0
    pose.orientation.z = 0.0
    pose.orientation.w = 0.0
    return req
def make3(robo, arm, xi, upper, xc=0.691, yc=0.259):
    """Build an IK request for the digit '3': one of two parabolic lobes."""
    req = GetPositionIKRequest()
    ik = req.ik_request
    ik.group_name = arm + "_arm"
    # Sawyer's end-effector link carries a '_tip' suffix; Baxter does not.
    tip = arm + "_gripper"
    if robo == 'sawyer':
        tip += '_tip'
    ik.ik_link_name = tip
    ik.attempts = 20
    ik.pose_stamped.header.frame_id = "base"
    xi = round(xi, 3)
    # Upper lobe peaks at xc; the lower lobe is shifted right by 10%.
    vertex_x = xc if upper else xc * 1.1
    y = round(-30 * ((xi - vertex_x) ** 2) + yc, 3)
    pose = ik.pose_stamped.pose
    pose.position.x = xi
    pose.position.y = y
    pose.position.z = -0.1
    pose.orientation.x = 0.0
    pose.orientation.y = 1.0
    pose.orientation.z = 0.0
    pose.orientation.w = 0.0
    return req
def make4(robo, arm, xi, xc=0.691, yc=0.259):
    """Build an IK request for the digit '4', drawn as discrete waypoints.

    Here xi is a waypoint index (0-6), not a coordinate: each index maps
    to a fixed (dx, dy) offset from (xc, yc). Any other index leaves the
    pose at its message defaults, matching the original behaviour.
    """
    req = GetPositionIKRequest()
    ik = req.ik_request
    ik.group_name = arm + "_arm"
    # Sawyer's end-effector link carries a '_tip' suffix; Baxter does not.
    tip = arm + "_gripper"
    if robo == 'sawyer':
        tip += '_tip'
    ik.ik_link_name = tip
    ik.attempts = 20
    ik.pose_stamped.header.frame_id = "base"
    # Waypoint index -> (x offset, y offset) relative to (xc, yc).
    offsets = {
        0: (0.0, 0.0),
        1: (0.0788 / 2.0, 0.0),
        2: (0.0788, 0.0),
        3: (0.0788, 0.059),
        4: (0.0, 0.059),
        5: (0.1577 / 2.0, 0.059),
        6: (0.1577, 0.059),
    }
    if xi in offsets:
        dx, dy = offsets[xi]
        pose = ik.pose_stamped.pose
        pose.position.x = xc + dx
        pose.position.y = yc + dy
        pose.position.z = -0.1
        pose.orientation.x = 0.0
        pose.orientation.y = 1.0
        pose.orientation.z = 0.0
        pose.orientation.w = 0.0
    return req
def make5(robo, arm, xi, upper, mid, xc=0.6467, yc=0.2):
    """Build an IK request for one stroke of the digit '5'."""
    req = GetPositionIKRequest()
    ik = req.ik_request
    ik.group_name = arm + "_arm"
    # Sawyer's end-effector link carries a '_tip' suffix; Baxter does not.
    tip = arm + "_gripper"
    if robo == 'sawyer':
        tip += '_tip'
    ik.ik_link_name = tip
    ik.attempts = 20
    ik.pose_stamped.header.frame_id = "base"
    xi = round(xi, 3)
    if upper:
        # Vertical stroke: xi carries the y coordinate; x is pinned at xc.
        xi, y = xc, xi
    elif mid:
        # Horizontal bar at the centre-line.
        y = yc
    else:
        # Bottom arc: downward parabola offset below the bar.
        y = round(-30 * ((xi - ((xc + 0.0443) * 1.1)) ** 2) + yc + 0.059, 3)
    pose = ik.pose_stamped.pose
    pose.position.x = xi
    pose.position.y = y
    pose.position.z = -0.1
    pose.orientation.x = 0.0
    pose.orientation.y = 1.0
    pose.orientation.z = 0.0
    pose.orientation.w = 0.0
    return req
def make6(robo, arm, xi, upper, center_x=0.6566, center_y=0.2235):
    """Build an IK request for one stroke of the digit '6'.

    `upper` is a stroke index 0-4 selecting the segment. As in the
    original, any other index leaves y unbound and raises NameError.
    """
    req = GetPositionIKRequest()
    ik = req.ik_request
    ik.group_name = arm + "_arm"
    # Sawyer's end-effector link carries a '_tip' suffix; Baxter does not.
    tip = arm + "_gripper"
    if robo == 'sawyer':
        tip += '_tip'
    ik.ik_link_name = tip
    ik.attempts = 20
    ik.pose_stamped.header.frame_id = "base"
    # Common parabola vertex base used by every stroke.
    cx = center_x + 0.0344
    if upper == 0:
        y = 30 * (xi - cx) ** 2 + center_y - 0.0355
    elif upper == 1:
        y = center_y - 0.0355
    elif upper == 2:
        y = 30 * (xi - cx * 1.1) ** 2 + center_y - 0.0355
    elif upper == 3:
        y = -30 * (xi - cx * 1.1) ** 2 + center_y + 0.0355
    elif upper == 4:
        y = 30 * (xi - cx * 1.1) ** 2 + center_y - 0.0355
    pose = ik.pose_stamped.pose
    pose.position.x = xi
    pose.position.y = y
    pose.position.z = -0.1
    pose.orientation.x = 0.0
    pose.orientation.y = 1.0
    pose.orientation.z = 0.0
    pose.orientation.w = 0.0
    return req
def make7(robo, arm, xi, upper, xc=0.6467, yc=0.2):
    """Build an IK request for the digit '7': a top bar and a vertical stroke."""
    req = GetPositionIKRequest()
    ik = req.ik_request
    ik.group_name = arm + "_arm"
    # Sawyer's end-effector link carries a '_tip' suffix; Baxter does not.
    tip = arm + "_gripper"
    if robo == 'sawyer':
        tip += '_tip'
    ik.ik_link_name = tip
    ik.attempts = 20
    ik.pose_stamped.header.frame_id = "base"
    xi = round(xi, 3)
    if upper:
        # Vertical stroke: xi carries the y coordinate; x is pinned at xc.
        xi, y = xc, xi
    else:
        # Top bar at a fixed y.
        y = yc + 0.059
    pose = ik.pose_stamped.pose
    pose.position.x = xi
    pose.position.y = y
    pose.position.z = -0.1
    pose.orientation.x = 0.0
    pose.orientation.y = 1.0
    pose.orientation.z = 0.0
    pose.orientation.w = 0.0
    return req
def make8(robo, arm, xi, upper, mid, center_x=0.6566, center_y=0.2235):
    """Build an IK request for one arc of the digit '8'.

    The (upper, mid) boolean pair selects which of the four parabolic
    arcs the point lies on.
    """
    req = GetPositionIKRequest()
    ik = req.ik_request
    ik.group_name = arm + "_arm"
    # Sawyer's end-effector link carries a '_tip' suffix; Baxter does not.
    tip = arm + "_gripper"
    if robo == 'sawyer':
        tip += '_tip'
    ik.ik_link_name = tip
    ik.attempts = 20
    ik.pose_stamped.header.frame_id = "base"
    # Upper lobe is centred at cx; lower lobe is shifted right by 10%.
    cx = center_x + 0.0344
    if upper:
        if mid:
            y = -30 * ((xi - cx) ** 2) + center_y + 0.0355
        else:
            y = 30 * (xi - cx) ** 2 + center_y - 0.0355
    else:
        if mid:
            y = -30 * (xi - cx * 1.1) ** 2 + center_y + 0.0355
        else:
            y = 30 * (xi - cx * 1.1) ** 2 + center_y - 0.0355
    pose = ik.pose_stamped.pose
    pose.position.x = xi
    pose.position.y = y
    pose.position.z = -0.1
    pose.orientation.x = 0.0
    pose.orientation.y = 1.0
    pose.orientation.z = 0.0
    pose.orientation.w = 0.0
    return req
def make9(robo, arm, xi, upper, mid, center_x=0.6566, center_y=0.2235):
    """Build an IK request for one stroke of the digit '9'.

    Both mid=True combinations produce the same upper arc, so the branch
    structure keys on mid first.
    """
    req = GetPositionIKRequest()
    ik = req.ik_request
    ik.group_name = arm + "_arm"
    # Sawyer's end-effector link carries a '_tip' suffix; Baxter does not.
    tip = arm + "_gripper"
    if robo == 'sawyer':
        tip += '_tip'
    ik.ik_link_name = tip
    ik.attempts = 20
    ik.pose_stamped.header.frame_id = "base"
    cx = center_x + 0.0344
    if mid:
        # Upper arc (identical for either value of `upper` in the original).
        y = -30 * ((xi - cx) ** 2) + center_y + 0.0355
    elif upper:
        # Straight segment at the arc's top level.
        y = center_y + 0.0355
    else:
        # Lower arc of the loop.
        y = 30 * (xi - cx) ** 2 + center_y - 0.0355
    pose = ik.pose_stamped.pose
    pose.position.x = xi
    pose.position.y = y
    pose.position.z = -0.1
    pose.orientation.x = 0.0
    pose.orientation.y = 1.0
    pose.orientation.z = 0.0
    pose.orientation.w = 0.0
    return req
def main(robo):
# Wait for the IK service to become available
rospy.wait_for_service('compute_ik')
rospy.init_node('service_query')
# Set up the right gripper
right_gripper = robot_gripper.Gripper('right')
# Calibrate the gripper (other commands won't work unless you do this first)
print('Calibrating...')
right_gripper.calibrate()
rospy.sleep(2.0)
arm = 'left'
# Create the function used to call the service
compute_ik = rospy.ServiceProxy('compute_ik', GetPositionIK)
if robo == 'sawyer':
arm = 'right'
switch = True
number = 0 #change this to change the number drawn
while not rospy.is_shutdown():
raw_input('Press [ Enter ]: ')
if number == 9:
#Computer vision determines start point.
center_x = 0.6566
center_y = 0.2235
# for xi in np.linspace(0.641, 0.741, 3):
request = make9(robo, arm, center_x + 0.0344, upper=True, mid=True, center_x = 0.691, center_y = 0.259)
try:
# Send the request to the service
response = compute_ik(request)
# Print the response HERE
# print(response)
group = MoveGroupCommander(arm + "_arm")
# Setting position and orientation target
group.set_pose_target(request.ik_request.pose_stamped)
# Plan IK and execute
group.go()
rospy.sleep(1.0)
except rospy.ServiceException, e:
print "Service call failed: %s"%e
for xi in np.linspace(center_x + 0.0688, center_x, 3):
request = make9(robo, arm, xi, False, False, center_x, center_y)
try:
# Send the request to the service
response = compute_ik(request)
# Print the response HERE
# print(response)
group = MoveGroupCommander(arm + "_arm")
# Setting position and orientation target
group.set_pose_target(request.ik_request.pose_stamped)
# Plan IK and execute
group.go()
rospy.sleep(1.0)
except rospy.ServiceException, e:
print "Service call failed: %s"%e
for xi in np.linspace(center_x, center_x + 0.0344, 3):
request = make9(robo, arm, xi, False, True, center_x, center_y)
try:
# Send the request to the service
response = compute_ik(request)
# Print the response HERE
# print(response)
group = MoveGroupCommander(arm + "_arm")
# Setting position and orientation target
group.set_pose_target(request.ik_request.pose_stamped)
# Plan IK and execute
group.go()
rospy.sleep(1.0)
except rospy.ServiceException, e:
print "Service call failed: %s"%e
for xi in np.linspace(center_x + 0.0344, center_x + 0.1379, 3):
request = make9(robo, arm, xi, True, False, center_x, center_y)
try:
# Send the request to the service
response = compute_ik(request)
# Print the response HERE
# print(response)
group = MoveGroupCommander(arm + "_arm")
# Setting position and orientation target
group.set_pose_target(request.ik_request.pose_stamped)
# Plan IK and execute
group.go()
rospy.sleep(1.0)
except rospy.ServiceException, e:
print "Service call failed: %s"%e
if number == 8:
#Computer vision determines start point.
center_x = 0.691
center_y = 0.259
# for xi in np.linspace(0.641, 0.741, 3):
for xi in np.linspace(center_x, center_x + 0.0688, 3):
request = make8(robo, arm, xi, True, True, center_x, center_y)
try:
# Send the request to the service
response = compute_ik(request)
# Print the response HERE
# print(response)
group = MoveGroupCommander(arm + "_arm")
# Setting position and orientation target
group.set_pose_target(request.ik_request.pose_stamped)
# Plan IK and execute
group.go()
rospy.sleep(1.0)
except rospy.ServiceException, e:
print "Service call failed: %s"%e
for xi in np.linspace(center_x + 0.0688, center_x + 0.1379, 3):
request = make8(robo, arm, xi, False, False, center_x, center_y)
try:
# Send the request to the service
response = compute_ik(request)
# Print the response HERE
# print(response)
group = MoveGroupCommander(arm + "_arm")
# Setting position and orientation target
group.set_pose_target(request.ik_request.pose_stamped)
# Plan IK and execute
group.go()
rospy.sleep(1.0)
except rospy.ServiceException, e:
print "Service call failed: %s"%e
for xi in np.linspace(center_x + 0.1379, center_x + 0.0688, 3):
request = make8(robo, arm, xi, False, True, center_x, center_y)
try:
# Send the request to the service
response = compute_ik(request)
# Print the response HERE
# print(response)
group = MoveGroupCommander(arm + "_arm")
# Setting position and orientation target
group.set_pose_target(request.ik_request.pose_stamped)
# Plan IK and execute
group.go()
rospy.sleep(1.0)
except rospy.ServiceException, e:
print "Service call failed: %s"%e
for xi in np.linspace(center_x + 0.0688, center_x, 3):
request = make8(robo, arm, xi, True, False, center_x, center_y)
try:
# Send the request to the service
response = compute_ik(request)
# Print the response HERE
# print(response)
group = MoveGroupCommander(arm + "_arm")
# Setting position and orientation target
group.set_pose_target(request.ik_request.pose_stamped)
# Plan IK and execute
group.go()
rospy.sleep(1.0)
except rospy.ServiceException, e:
print "Service call failed: %s"%e
if number == 7:
center_x = 0.6467
center_y = 0.2
for xi in np.linspace(center_y, center_y+0.059, 3):
request = make7(robo, arm, xi, True, center_x, center_y)
try:
# Send the request to the service
response = compute_ik(request)
# Print the response HERE
# print(response)
group = MoveGroupCommander(arm + "_arm")
# Setting position and orientation target
group.set_pose_target(request.ik_request.pose_stamped)
# Plan IK and execute
group.go()
rospy.sleep(1.0)
except rospy.ServiceException, e:
print "Service call failed: %s"%e
for xi in np.linspace(center_x, center_x+0.1577, 3):
request = make7(robo, arm, xi, False, center_x, center_y)
try:
# Send the request to the service
response = compute_ik(request)
# Print the response HERE
# print(response)
group = MoveGroupCommander(arm + "_arm")
# Setting position and orientation target
group.set_pose_target(request.ik_request.pose_stamped)
# Plan IK and execute
group.go()
rospy.sleep(1.0)
except rospy.ServiceException, e:
print "Service call failed: %s"%e
if number == 6:
#Computer vision determines start point.
center_x = 0.691
center_y = 0.259
# for xi in np.linspace(0.641, 0.741, 3):
for xi in np.linspace(center_x, center_x + 0.0344, 3):
request = make6(robo, arm, xi, 0, center_x, center_y)
try:
# Send the request to the service
response = compute_ik(request)
# Print the response HERE
# print(response)
group = MoveGroupCommander(arm + "_arm")
# Setting position and orientation target
group.set_pose_target(request.ik_request.pose_stamped)
# Plan IK and execute
group.go()
rospy.sleep(1.0)
except rospy.ServiceException, e:
print "Service call failed: %s"%e
for xi in np.linspace(center_x + 0.0344, center_x + 0.1035, 3):
request = make6(robo, arm, xi, 1, center_x, center_y)
try:
# Send the request to the service
response = compute_ik(request)
# Print the response HERE
# print(response)
group = MoveGroupCommander(arm + "_arm")
# Setting position and orientation target
group.set_pose_target(request.ik_request.pose_stamped)
# Plan IK and execute
group.go()
rospy.sleep(1.0)
except rospy.ServiceException, e:
print "Service call failed: %s"%e
for xi in np.linspace(center_x + 0.1035, center_x + 0.1379, 3):
request = make6(robo, arm, xi, 2, center_x, center_y)
try:
# Send the request to the service
response = compute_ik(request)
# Print the response HERE
# print(response)
group = MoveGroupCommander(arm + "_arm")
# Setting position and orientation target
group.set_pose_target(request.ik_request.pose_stamped)
# Plan IK and execute
group.go()
rospy.sleep(1.0)
except rospy.ServiceException, e:
print "Service call failed: %s"%e
for xi in np.linspace(center_x + 0.1379, center_x + 0.0688, 3):
request = make6(robo, arm, xi, 3, center_x, center_y)
try:
# Send the request to the service
response = compute_ik(request)
# Print the response HERE
# print(response)
group = MoveGroupCommander(arm + "_arm")
# Setting position and orientation target
group.set_pose_target(request.ik_request.pose_stamped)
# Plan IK and execute
group.go()
rospy.sleep(1.0)
except rospy.ServiceException, e:
print "Service call failed: %s"%e
for xi in np.linspace(center_x + 0.0688, center_x + 0.1035, 3):
request = make6(robo, arm, xi, 4, center_x, center_y)
try:
# Send the request to the service
response = compute_ik(request)
# Print the response HERE
# print(response)
group = MoveGroupCommander(arm + "_arm")
# Setting position and orientation target
group.set_pose_target(request.ik_request.pose_stamped)
# Plan IK and execute
group.go()
rospy.sleep(1.0)
except rospy.ServiceException, e:
print "Service call failed: %s"%e
if number == 5:
center_x = 0.6467
center_y = 0.2
for xi in np.linspace(center_y+0.059, center_y, 3):
request = make5(robo, arm, xi, True, False, center_x, center_y)
try:
# Send the request to the service
response = compute_ik(request)
# Print the response HERE
# print(response)
group = MoveGroupCommander(arm + "_arm")
# Setting position and orientation target
group.set_pose_target(request.ik_request.pose_stamped)
# Plan IK and execute
group.go()
rospy.sleep(1.0)
except rospy.ServiceException, e:
print "Service call failed: %s"%e
for xi in np.linspace(center_x, center_x+0.0691, 3):
request = make5(robo, arm, xi, False, True, center_x, center_y)
try:
# Send the request to the service
response = compute_ik(request)
# Print the response HERE
# print(response)
group = MoveGroupCommander(arm + "_arm")
# Setting position and orientation target
group.set_pose_target(request.ik_request.pose_stamped)
# Plan IK and execute
group.go()
rospy.sleep(1.0)
except rospy.ServiceException, e:
print "Service call failed: %s"%e
for xi in np.linspace(center_x+0.0691, center_x+0.1577, 5):
request = make5(robo, arm, xi, False, False, center_x, center_y)
try:
# Send the request to the service
response = compute_ik(request)
# Print the response HERE
# print(response)
group = MoveGroupCommander(arm + "_arm")
# Setting position and orientation target
group.set_pose_target(request.ik_request.pose_stamped)
# Plan IK and execute
group.go()
rospy.sleep(1.0)
except rospy.ServiceException, e:
print "Service call failed: %s"%e
if number == 4:
center_x = 0.6467
center_y = 0.2
for xi in range(7):
request = make4(robo, arm, xi, center_x, center_y)
try:
# Send the request to the service
response = compute_ik(request)
# Print the response HERE
# print(response)
group = MoveGroupCommander(arm + "_arm")
# Setting position and orientation target
group.set_pose_target(request.ik_request.pose_stamped)
# Plan IK and execute
group.go()
rospy.sleep(1.0)
except rospy.ServiceException, e:
print "Service call failed: %s"%e
if number == 3:
center_x = 0.691
center_y = 0.259
for xi in np.linspace(center_x-0.0443, center_x+0.0345, 3):
request = make3(robo, arm, xi, True, center_x, center_y)
try:
# Send the request to the service
response = compute_ik(request)
# Print the response HERE
# print(response)
group = MoveGroupCommander(arm + "_arm")
# Setting position and orientation target
group.set_pose_target(request.ik_request.pose_stamped)
# Plan IK and execute
group.go()
rospy.sleep(1.0)
except rospy.ServiceException, e:
print "Service call failed: %s"%e
for xi in np.linspace(center_x+0.0345, center_x+0.0691, 3):
request = make3(robo, arm, xi, False, center_x, center_y)
try:
# Send the request to the service
response = compute_ik(request)
# Print the response HERE
# print(response)
group = MoveGroupCommander(arm + "_arm")
# Setting position and orientation target
group.set_pose_target(request.ik_request.pose_stamped)
# Plan IK and execute
group.go()
rospy.sleep(1.0)
except rospy.ServiceException, e:
print "Service call failed: %s"%e
for xi in np.linspace(center_x+0.0691, center_x+0.1134, 3):
request = make3(robo, arm, xi, False, center_x, center_y)
try:
# Send the request to the service
response = compute_ik(request)
# Print the response HERE
# print(response)
group = MoveGroupCommander(arm + "_arm")
# Setting position and orientation target
group.set_pose_target(request.ik_request.pose_stamped)
# Plan IK and execute
group.go()
rospy.sleep(1.0)
except rospy.ServiceException, e:
print "Service call failed: %s"%e
if number == 2:
#Computer vision determines start point.
center_x = 0.691
center_y = 0.259
# for xi in np.linspace(0.641, 0.741, 3):
for xi in np.linspace(center_x + 0.05, center_x, 3):
request = make2(robo, arm, xi, True, True, center_x, center_y)
try:
# Send the request to the service
response = compute_ik(request)
# Print the response HERE
# print(response)
group = MoveGroupCommander(arm + "_arm")
# Setting position and orientation target
group.set_pose_target(request.ik_request.pose_stamped)
# Plan IK and execute
group.go()
rospy.sleep(1.0)
except rospy.ServiceException, e:
print "Service call failed: %s"%e
for xi in np.linspace(center_x, center_x + 0.05, 3):
request = make2(robo, arm, xi, True, False, center_x, center_y)
try:
# Send the request to the service
response = compute_ik(request)
# Print the response HERE
# print(response)
group = MoveGroupCommander(arm + "_arm")
# Setting position and orientation target
group.set_pose_target(request.ik_request.pose_stamped)
# Plan IK and execute
group.go()
rospy.sleep(1.0)
except rospy.ServiceException, e:
print "Service call failed: %s"%e
for xi in np.linspace(center_x + 0.05, center_x + 0.15, 3):
request = make2(robo, arm, xi, False, True, center_x, center_y)
try:
# Send the request to the service
response = compute_ik(request)
# Print the response HERE
# print(response)
group = MoveGroupCommander(arm + "_arm")
# Setting position and orientation target
group.set_pose_target(request.ik_request.pose_stamped)
# Plan IK and execute
group.go()
rospy.sleep(1.0)
except rospy.ServiceException, e:
print "Service call failed: %s"%e
for xi in np.linspace(center_y - 0.05, center_y + 0.05, 3):
request = make2(robo, arm, xi, False, False, center_x, center_y)
try:
# Send the request to the service
response = compute_ik(request)
# Print the response HERE
# print(response)
group = MoveGroupCommander(arm + "_arm")
# Setting position and orientation target
group.set_pose_target(request.ik_request.pose_stamped)
# Plan IK and execute
group.go()
rospy.sleep(1.0)
except rospy.ServiceException, e:
print "Service call failed: %s"%e
# Set the desired orientation for the end effector HERE
if number==1:
center_x = 0.6467
center_y = 0.2
for xi in np.linspace(center_x, center_x+0.1577, 4):
request = make1(robo, arm, xi, center_x, center_y)
try:
# Send the request to the service
response = compute_ik(request)
# Print the response HERE
# print(response)
group = MoveGroupCommander(arm + "_arm")
# Setting position and orientation target
group.set_pose_target(request.ik_request.pose_stamped)
# Plan IK and execute
group.go()
rospy.sleep(1.0)
except rospy.ServiceException, e:
print "Service call failed: %s"%e
if number == 0:
for xi in np.linspace(0.692, 0.765, 3):
request = make0(robo, arm, xi, True)
try:
# Send the request to the service
response = compute_ik(request)
# Print the response HERE
# print(response)
group = MoveGroupCommander(arm + "_arm")
# Setting position and orientation target
group.set_pose_target(request.ik_request.pose_stamped)
# TRY THIS
# Setting just the position without specifying the orientation
# group.set_position_target([0.5, 0.5, 0.0])
# Plan IK and execute
group.go()
rospy.sleep(1.0)
except rospy.ServiceException, e:
print "Service call failed: %s"%e
for xi in np.linspace(0.765, 0.838, 3):
request = make0(robo, arm, xi, True)
try:
# Send the request to the service
response = compute_ik(request)
# Print the response HERE
# print(response)
group = MoveGroupCommander(arm + "_arm")
# Setting position and orientation target
group.set_pose_target(request.ik_request.pose_stamped)
# TRY THIS
# Setting just the position without specifying the orientation
# group.set_position_target([0.5, 0.5, 0.0])
# Plan IK and execute
group.go()
rospy.sleep(1.0)
except rospy.ServiceException, e:
print "Service call failed: %s"%e
for xi in np.linspace(0.838, 0.765, 3):
print("new")
request = make0(robo, arm, xi, False)
try:
# Send the request to the service
response = compute_ik(request)
# Print the response HERE
# print(response)
group = MoveGroupCommander(arm + "_arm")
# Setting position and orientation target
group.set_pose_target(request.ik_request.pose_stamped)
# TRY THIS
# Setting just the position without specifying the orientation
# group.set_position_target([0.5, 0.5, 0.0])
# Plan IK and execute
group.go()
rospy.sleep(1.0)
except rospy.ServiceException, e:
print "Service call failed: %s"%e
for xi in np.linspace(0.765, 0.692, 3):
request = make0(robo, arm, xi, False)
try:
# Send the request to the service
response = compute_ik(request)
# Print the response HERE
# print(response)
group = MoveGroupCommander(arm + "_arm")
# Setting position and orientation target
group.set_pose_target(request.ik_request.pose_stamped)
# TRY THIS
# Setting just the position without specifying the orientation
# group.set_position_target([0.5, 0.5, 0.0])
# Plan IK and execute
group.go()
rospy.sleep(1.0)
except rospy.ServiceException, e:
print "Service call failed: %s"%e
# Python's syntax for a main() method
if __name__ == '__main__':
    # The robot name is required: it selects which gripper SDK to import
    # (Sawyer ships intera_interface, everything else is assumed Baxter).
    # Without this guard a missing argument dies with a bare IndexError.
    if len(sys.argv) < 2:
        sys.exit("Usage: %s <robot>  (e.g. 'sawyer' or 'baxter')" % sys.argv[0])
    if sys.argv[1] == 'sawyer':
        from intera_interface import gripper as robot_gripper
    else:
        from baxter_interface import gripper as robot_gripper
    main(sys.argv[1])
| 39.688385
| 115
| 0.550036
| 5,155
| 42,030
| 4.336566
| 0.042483
| 0.086155
| 0.130977
| 0.136882
| 0.939119
| 0.936658
| 0.919257
| 0.916797
| 0.90955
| 0.90548
| 0
| 0.040975
| 0.356626
| 42,030
| 1,058
| 116
| 39.725898
| 0.785733
| 0.157697
| 0
| 0.796073
| 0
| 0
| 0.03425
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.013595
| null | null | 0.057402
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
854a55982872822a46d98df1de533deb387f7009
| 179
|
py
|
Python
|
musicbrainz/world/admin.py
|
tourdownunder/musicbrainz-django
|
bd8f8767ca0144dc10fd9ba2b59beb9de9788640
|
[
"BSD-3-Clause"
] | 1
|
2020-09-19T11:00:31.000Z
|
2020-09-19T11:00:31.000Z
|
musicbrainz/world/admin.py
|
tourdownunder/musicbrainz-django
|
bd8f8767ca0144dc10fd9ba2b59beb9de9788640
|
[
"BSD-3-Clause"
] | 8
|
2019-12-30T14:30:13.000Z
|
2021-09-22T18:03:10.000Z
|
musicbrainz/world/admin.py
|
tourdownunder/musicbrainz-django
|
bd8f8767ca0144dc10fd9ba2b59beb9de9788640
|
[
"BSD-3-Clause"
] | 1
|
2021-10-03T10:06:29.000Z
|
2021-10-03T10:06:29.000Z
|
# Register the WorldBorder geographic model with the Django admin.
from django.contrib.gis import admin
from .models import WorldBorder
# OSMGeoAdmin renders the geometry widget over OpenStreetMap tiles;
# the plain GeoModelAdmin alternative was tried first and kept for reference.
# admin.site.register(WorldBorder, admin.GeoModelAdmin)
admin.site.register(WorldBorder, admin.OSMGeoAdmin)
| 25.571429
| 55
| 0.826816
| 22
| 179
| 6.727273
| 0.545455
| 0.324324
| 0.22973
| 0.378378
| 0.445946
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.083799
| 179
| 6
| 56
| 29.833333
| 0.902439
| 0.296089
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
a46bdd40357855cde77818fa1c5c112a3e77af37
| 1,272
|
py
|
Python
|
Chapter 10/05 - Mocks and unittest.mock module/test_mailer.py
|
bernoli/Expert-Python-Programming-Fourth-Edition
|
05b4bd64c66bea3252f06afee7a7a1e2bd93d171
|
[
"MIT"
] | 56
|
2021-05-24T15:24:51.000Z
|
2022-03-21T19:38:27.000Z
|
Chapter 10/05 - Mocks and unittest.mock module/test_mailer.py
|
saibaldas/Expert-Python-Programming-Fourth-Edition
|
572d47a802e7b1fe429f782d9aeb62f411cb5307
|
[
"MIT"
] | 2
|
2020-11-03T12:53:26.000Z
|
2021-05-11T23:47:39.000Z
|
Chapter 10/05 - Mocks and unittest.mock module/test_mailer.py
|
saibaldas/Expert-Python-Programming-Fourth-Edition
|
572d47a802e7b1fe429f782d9aeb62f411cb5307
|
[
"MIT"
] | 37
|
2021-05-27T12:32:21.000Z
|
2022-03-10T23:05:54.000Z
|
from unittest.mock import patch, Mock
from mailer import send
import smtplib
def test_send_unittest():
    """Check mailer.send() delivers via smtplib.SMTP, mocking with unittest.mock.patch."""
    sender = "john.doe@example.com"
    to = "jane.doe@example.com"
    body = "Hello jane!"
    subject = "How are you?"
    with patch("smtplib.SMTP") as mock:
        client = mock.return_value
        client.sendmail.return_value = {}
        res = send(sender, to, subject, body)
        # The mocked SMTP client must actually have been asked to deliver.
        assert client.sendmail.called
        positional = client.sendmail.call_args[0]
        assert positional[0] == sender
        assert positional[1] == to
        assert subject in positional[2]
        assert body in positional[2]
        assert res == {}
def test_send(monkeypatch):
    """Check mailer.send() delivers via smtplib.SMTP, patching with pytest's monkeypatch."""
    sender = "john.doe@example.com"
    to = "jane.doe@example.com"
    body = "Hello jane!"
    subject = "How are you?"

    # Replace the SMTP class for the duration of the test.
    smtp = Mock()
    monkeypatch.setattr(smtplib, "SMTP", smtp)
    client = smtp.return_value
    client.sendmail.return_value = {}

    res = send(sender, to, subject, body)

    assert client.sendmail.called
    positional = client.sendmail.call_args[0]
    assert positional[0] == sender
    assert positional[1] == to
    assert subject in positional[2]
    assert body in positional[2]
    assert res == {}
| 28.266667
| 57
| 0.65173
| 175
| 1,272
| 4.651429
| 0.234286
| 0.206388
| 0.176904
| 0.216216
| 0.764128
| 0.764128
| 0.764128
| 0.764128
| 0.764128
| 0.764128
| 0
| 0.016211
| 0.224057
| 1,272
| 44
| 58
| 28.909091
| 0.808511
| 0
| 0
| 0.705882
| 0
| 0
| 0.111635
| 0
| 0
| 0
| 0
| 0
| 0.352941
| 1
| 0.058824
| false
| 0
| 0.088235
| 0
| 0.147059
| 0
| 0
| 0
| 0
| null | 1
| 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
a46bfc5a5b83914619c528c488aa8d9ed4d9753d
| 6,162
|
py
|
Python
|
workspace/.c9/metadata/workspace/crawl/urls.py
|
mannyhappenings/WebCrawler
|
1451fe8e6dc55346a654665d736a7df2115e0c50
|
[
"MIT"
] | null | null | null |
workspace/.c9/metadata/workspace/crawl/urls.py
|
mannyhappenings/WebCrawler
|
1451fe8e6dc55346a654665d736a7df2115e0c50
|
[
"MIT"
] | null | null | null |
workspace/.c9/metadata/workspace/crawl/urls.py
|
mannyhappenings/WebCrawler
|
1451fe8e6dc55346a654665d736a7df2115e0c50
|
[
"MIT"
] | null | null | null |
{"filter":false,"title":"urls.py","tooltip":"/crawl/urls.py","undoManager":{"mark":41,"position":41,"stack":[[{"group":"doc","deltas":[{"start":{"row":5,"column":42},"end":{"row":6,"column":0},"action":"insert","lines":["",""]},{"start":{"row":6,"column":0},"end":{"row":6,"column":4},"action":"insert","lines":[" "]}]}],[{"group":"doc","deltas":[{"start":{"row":6,"column":4},"end":{"row":6,"column":42},"action":"insert","lines":["url(r'^$', views.index, name='index'),"]}]}],[{"group":"doc","deltas":[{"start":{"row":6,"column":11},"end":{"row":6,"column":12},"action":"insert","lines":["c"]}]}],[{"group":"doc","deltas":[{"start":{"row":6,"column":11},"end":{"row":6,"column":12},"action":"remove","lines":["c"]}]}],[{"group":"doc","deltas":[{"start":{"row":6,"column":11},"end":{"row":6,"column":12},"action":"insert","lines":["s"]}]}],[{"group":"doc","deltas":[{"start":{"row":6,"column":12},"end":{"row":6,"column":13},"action":"insert","lines":["t"]}]}],[{"group":"doc","deltas":[{"start":{"row":6,"column":13},"end":{"row":6,"column":14},"action":"insert","lines":["a"]}]}],[{"group":"doc","deltas":[{"start":{"row":6,"column":14},"end":{"row":6,"column":15},"action":"insert","lines":["t"]}]}],[{"group":"doc","deltas":[{"start":{"row":6,"column":15},"end":{"row":6,"column":16},"action":"insert","lines":["u"]}]}],[{"group":"doc","deltas":[{"start":{"row":6,"column":16},"end":{"row":6,"column":17},"action":"insert","lines":["s"]}]}],[{"group":"doc","deltas":[{"start":{"row":6,"column":27},"end":{"row":6,"column":32},"action":"remove","lines":["index"]},{"start":{"row":6,"column":27},"end":{"row":6,"column":28},"action":"insert","lines":["s"]}]}],[{"group":"doc","deltas":[{"start":{"row":6,"column":28},"end":{"row":6,"column":29},"action":"insert","lines":["t"]}]}],[{"group":"doc","deltas":[{"start":{"row":6,"column":29},"end":{"row":6,"column":30},"action":"insert","lines":["a"]}]}],[{"group":"doc","deltas":[{"start":{"row":6,"column":30},"end":{"row":6,"column":31},"action":"
insert","lines":["t"]}]}],[{"group":"doc","deltas":[{"start":{"row":6,"column":31},"end":{"row":6,"column":32},"action":"insert","lines":["u"]}]}],[{"group":"doc","deltas":[{"start":{"row":6,"column":32},"end":{"row":6,"column":33},"action":"insert","lines":["s"]}]}],[{"group":"doc","deltas":[{"start":{"row":6,"column":41},"end":{"row":6,"column":46},"action":"remove","lines":["index"]},{"start":{"row":6,"column":41},"end":{"row":6,"column":42},"action":"insert","lines":["s"]}]}],[{"group":"doc","deltas":[{"start":{"row":6,"column":42},"end":{"row":6,"column":43},"action":"insert","lines":["t"]}]}],[{"group":"doc","deltas":[{"start":{"row":6,"column":43},"end":{"row":6,"column":44},"action":"insert","lines":["a"]}]}],[{"group":"doc","deltas":[{"start":{"row":6,"column":44},"end":{"row":6,"column":45},"action":"insert","lines":["u"]}]}],[{"group":"doc","deltas":[{"start":{"row":6,"column":45},"end":{"row":6,"column":46},"action":"insert","lines":["t"]}]}],[{"group":"doc","deltas":[{"start":{"row":6,"column":45},"end":{"row":6,"column":46},"action":"remove","lines":["t"]}]}],[{"group":"doc","deltas":[{"start":{"row":6,"column":44},"end":{"row":6,"column":45},"action":"remove","lines":["u"]}]}],[{"group":"doc","deltas":[{"start":{"row":6,"column":44},"end":{"row":6,"column":45},"action":"insert","lines":["t"]}]}],[{"group":"doc","deltas":[{"start":{"row":6,"column":45},"end":{"row":6,"column":46},"action":"insert","lines":["u"]}]}],[{"group":"doc","deltas":[{"start":{"row":6,"column":46},"end":{"row":6,"column":47},"action":"insert","lines":["s"]}]}],[{"group":"doc","deltas":[{"start":{"row":7,"column":0},"end":{"row":8,"column":0},"action":"insert","lines":[" url(r'^status$', views.status, 
name='status'),",""]}]}],[{"group":"doc","deltas":[{"start":{"row":7,"column":11},"end":{"row":7,"column":18},"action":"remove","lines":["status$"]},{"start":{"row":7,"column":11},"end":{"row":7,"column":12},"action":"insert","lines":["s"]}]}],[{"group":"doc","deltas":[{"start":{"row":7,"column":12},"end":{"row":7,"column":13},"action":"insert","lines":["t"]}]}],[{"group":"doc","deltas":[{"start":{"row":7,"column":13},"end":{"row":7,"column":14},"action":"insert","lines":["o"]}]}],[{"group":"doc","deltas":[{"start":{"row":7,"column":14},"end":{"row":7,"column":15},"action":"insert","lines":["p"]}]}],[{"group":"doc","deltas":[{"start":{"row":7,"column":15},"end":{"row":7,"column":16},"action":"insert","lines":["&"]}]}],[{"group":"doc","deltas":[{"start":{"row":7,"column":15},"end":{"row":7,"column":16},"action":"remove","lines":["&"]}]}],[{"group":"doc","deltas":[{"start":{"row":7,"column":15},"end":{"row":7,"column":16},"action":"insert","lines":["$"]}]}],[{"group":"doc","deltas":[{"start":{"row":7,"column":25},"end":{"row":7,"column":31},"action":"remove","lines":["status"]},{"start":{"row":7,"column":25},"end":{"row":7,"column":26},"action":"insert","lines":["s"]}]}],[{"group":"doc","deltas":[{"start":{"row":7,"column":26},"end":{"row":7,"column":27},"action":"insert","lines":["t"]}]}],[{"group":"doc","deltas":[{"start":{"row":7,"column":27},"end":{"row":7,"column":28},"action":"insert","lines":["o"]}]}],[{"group":"doc","deltas":[{"start":{"row":7,"column":28},"end":{"row":7,"column":29},"action":"insert","lines":["p"]}]}],[{"group":"doc","deltas":[{"start":{"row":7,"column":37},"end":{"row":7,"column":43},"action":"remove","lines":["status"]},{"start":{"row":7,"column":37},"end":{"row":7,"column":38},"action":"insert","lines":["s"]}]}],[{"group":"doc","deltas":[{"start":{"row":7,"column":38},"end":{"row":7,"column":39},"action":"insert","lines":["t"]}]}],[{"group":"doc","deltas":[{"start":{"row":7,"column":39},"end":{"row":7,"column":40},"action":"insert","lines"
:["o"]}]}],[{"group":"doc","deltas":[{"start":{"row":7,"column":40},"end":{"row":7,"column":41},"action":"insert","lines":["p"]}]}]]},"ace":{"folds":[],"scrolltop":0,"scrollleft":0,"selection":{"start":{"row":7,"column":41},"end":{"row":7,"column":41},"isBackwards":false},"options":{"guessTabSize":true,"useWrapMode":false,"wrapToView":true},"firstLineState":0},"timestamp":1428186714049,"hash":"9e5d3fcc85eb3cee304ad7fa0120fff49a0b2a85"}
| 6,162
| 6,162
| 0.542681
| 851
| 6,162
| 3.929495
| 0.099882
| 0.068182
| 0.170455
| 0.238636
| 0.80323
| 0.75299
| 0.737141
| 0.730861
| 0.672847
| 0.599282
| 0
| 0.053333
| 0.001947
| 6,162
| 1
| 6,162
| 6,162
| 0.490407
| 0
| 0
| 0
| 0
| 0
| 0.496349
| 0.00649
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
a4880c07ec70056b56e0d43278747671d02751da
| 4,716
|
py
|
Python
|
tests/middleware/test_http_to_https.py
|
ai-mocap/hypercorn
|
0c1a74a726d5e54a2a3876edba8ad2a0a547c5d5
|
[
"MIT"
] | 264
|
2018-06-02T17:49:46.000Z
|
2022-03-29T07:39:06.000Z
|
tests/middleware/test_http_to_https.py
|
ai-mocap/hypercorn
|
0c1a74a726d5e54a2a3876edba8ad2a0a547c5d5
|
[
"MIT"
] | 52
|
2018-06-14T19:30:00.000Z
|
2022-02-27T04:26:48.000Z
|
tests/middleware/test_http_to_https.py
|
nonebot/nonecorn
|
813408d385f11b6bbdaee63d6b6ace8c87586d25
|
[
"MIT"
] | 29
|
2018-06-13T23:54:48.000Z
|
2022-02-20T15:23:14.000Z
|
from __future__ import annotations
import pytest
from hypercorn.middleware import HTTPToHTTPSRedirectMiddleware
from hypercorn.typing import HTTPScope, WebsocketScope
from ..helpers import empty_framework
@pytest.mark.asyncio
@pytest.mark.parametrize("raw_path", [b"/abc", b"/abc%3C"])
async def test_http_to_https_redirect_middleware_http(raw_path: bytes) -> None:
    """An http-scope request is answered with a 307 redirect to the https URL."""
    middleware = HTTPToHTTPSRedirectMiddleware(empty_framework, "localhost")
    received = []

    async def send(message: dict) -> None:
        # Capture every ASGI event the middleware emits.
        received.append(message)

    scope: HTTPScope = {
        "type": "http",
        "asgi": {},
        "http_version": "2",
        "method": "GET",
        "scheme": "http",
        "path": raw_path.decode(),
        "raw_path": raw_path,
        "query_string": b"a=b",
        "root_path": "",
        "headers": [],
        "client": ("127.0.0.1", 80),
        "server": None,
        "extensions": {},
    }
    await middleware(scope, None, send)
    # The redirect must preserve the (possibly percent-encoded) path and query.
    assert received == [
        {
            "type": "http.response.start",
            "status": 307,
            "headers": [(b"location", b"https://localhost%s?a=b" % raw_path)],
        },
        {"type": "http.response.body"},
    ]
@pytest.mark.asyncio
@pytest.mark.parametrize("raw_path", [b"/abc", b"/abc%3C"])
async def test_http_to_https_redirect_middleware_websocket(raw_path: bytes) -> None:
    """A ws-scope request is rejected with a 307 redirect to the wss URL."""
    middleware = HTTPToHTTPSRedirectMiddleware(empty_framework, "localhost")
    received = []

    async def send(message: dict) -> None:
        # Capture every ASGI event the middleware emits.
        received.append(message)

    scope: WebsocketScope = {
        "type": "websocket",
        "asgi": {},
        "http_version": "1.1",
        "scheme": "ws",
        "path": raw_path.decode(),
        "raw_path": raw_path,
        "query_string": b"a=b",
        "root_path": "",
        "headers": [],
        "client": None,
        "server": None,
        "subprotocols": [],
        "extensions": {"websocket.http.response": {}},
    }
    await middleware(scope, None, send)
    # websocket.http.response support lets the middleware answer with HTTP.
    assert received == [
        {
            "type": "websocket.http.response.start",
            "status": 307,
            "headers": [(b"location", b"wss://localhost%s?a=b" % raw_path)],
        },
        {"type": "websocket.http.response.body"},
    ]
@pytest.mark.asyncio
async def test_http_to_https_redirect_middleware_websocket_http2() -> None:
    """A websocket over HTTP/2 redirects to https:// rather than wss://."""
    middleware = HTTPToHTTPSRedirectMiddleware(empty_framework, "localhost")
    received = []

    async def send(message: dict) -> None:
        # Capture every ASGI event the middleware emits.
        received.append(message)

    scope: WebsocketScope = {
        "type": "websocket",
        "asgi": {},
        "http_version": "2",
        "scheme": "ws",
        "path": "/abc",
        "raw_path": b"/abc",
        "query_string": b"a=b",
        "root_path": "",
        "headers": [],
        "client": None,
        "server": None,
        "subprotocols": [],
        "extensions": {"websocket.http.response": {}},
    }
    await middleware(scope, None, send)
    assert received == [
        {
            "type": "websocket.http.response.start",
            "status": 307,
            "headers": [(b"location", b"https://localhost/abc?a=b")],
        },
        {"type": "websocket.http.response.body"},
    ]
@pytest.mark.asyncio
async def test_http_to_https_redirect_middleware_websocket_no_rejection() -> None:
    """Without websocket.http.response support the connection is simply closed."""
    middleware = HTTPToHTTPSRedirectMiddleware(empty_framework, "localhost")
    received = []

    async def send(message: dict) -> None:
        # Capture every ASGI event the middleware emits.
        received.append(message)

    scope: WebsocketScope = {
        "type": "websocket",
        "asgi": {},
        "http_version": "2",
        "scheme": "ws",
        "path": "/abc",
        "raw_path": b"/abc",
        "query_string": b"a=b",
        "root_path": "",
        "headers": [],
        "client": None,
        "server": None,
        "subprotocols": [],
        "extensions": {},
    }
    await middleware(scope, None, send)
    assert received == [{"type": "websocket.close"}]
def test_http_to_https_redirect_new_url_header() -> None:
    """With no configured host, _new_url takes the host from the Host header."""
    middleware = HTTPToHTTPSRedirectMiddleware(empty_framework, None)
    scope = {
        "http_version": "1.1",
        "asgi": {},
        "method": "GET",
        "headers": [(b"host", b"localhost")],
        "path": "/",
        "root_path": "",
        "query_string": b"",
        "raw_path": b"/",
        "scheme": "http",
        "type": "http",
        "client": None,
        "server": None,
        "extensions": {},
    }
    new_url = middleware._new_url("https", scope)
    assert new_url == "https://localhost/"
| 27.578947
| 84
| 0.545589
| 475
| 4,716
| 5.218947
| 0.172632
| 0.064542
| 0.050827
| 0.02622
| 0.806777
| 0.786608
| 0.769665
| 0.756353
| 0.756353
| 0.752723
| 0
| 0.00806
| 0.289652
| 4,716
| 170
| 85
| 27.741176
| 0.73194
| 0
| 0
| 0.710345
| 0
| 0
| 0.227523
| 0.03838
| 0
| 0
| 0
| 0
| 0.034483
| 1
| 0.006897
| false
| 0
| 0.034483
| 0
| 0.041379
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
f13d9608f48b0c24994ffc9597596e461e4ed4e2
| 1,632
|
py
|
Python
|
mysite/home/models.py
|
Rudancy/My_Business
|
f2c5f51e697214a2699b40d37f206307d19af84b
|
[
"MIT"
] | null | null | null |
mysite/home/models.py
|
Rudancy/My_Business
|
f2c5f51e697214a2699b40d37f206307d19af84b
|
[
"MIT"
] | null | null | null |
mysite/home/models.py
|
Rudancy/My_Business
|
f2c5f51e697214a2699b40d37f206307d19af84b
|
[
"MIT"
] | null | null | null |
from django.db import models
# Create your models here.
class home_page(models.Model):
    # Content model backing the site's home page; every field is optional
    # (blank=True, empty default) so the page can be filled in gradually
    # through the Django admin.

    # Hero / masthead section.
    masthead_image = models.ImageField(upload_to='static/images', default='', blank=True)
    masthead = models.CharField(max_length=30, default='', blank=True)
    description = models.TextField(max_length=200, default='', blank=True)
    # Three icon blurbs (header + short description each).
    icon_1_header = models.CharField(max_length=30, default='', blank=True)
    icon_1_description = models.TextField(max_length=30, default='', blank=True)
    icon_2_header = models.CharField(max_length=30, default='', blank=True)
    icon_2_description = models.TextField(max_length=30, default='', blank=True)
    icon_3_header = models.CharField(max_length=30, default='', blank=True)
    icon_3_description = models.TextField(max_length=30, default='', blank=True)
    # Three showcase sections (header + image + long description each).
    showcase_1_header = models.CharField(max_length=30, default='', blank=True)
    showcase_1_image = models.ImageField(upload_to='static/images', default='', blank=True)
    showcase_1_description = models.TextField(max_length=700, default='', blank=True)
    showcase_2_header = models.CharField(max_length=30, default='', blank=True)
    showcase_2_image = models.ImageField(upload_to='static/images', default='', blank=True)
    showcase_2_description = models.TextField(max_length=700, default='', blank=True)
    showcase_3_header = models.CharField(max_length=30, default='', blank=True)
    showcase_3_image = models.ImageField(upload_to='static/images', default='', blank=True)
    showcase_3_description = models.TextField(max_length=700, default='', blank=True)

    def __str__(self):
        # Admin/list display uses the masthead headline as the object label.
        return self.masthead
| 58.285714
| 91
| 0.741422
| 217
| 1,632
| 5.327189
| 0.184332
| 0.186851
| 0.249135
| 0.155709
| 0.900519
| 0.851211
| 0.847751
| 0.847751
| 0.801903
| 0.709343
| 0
| 0.032844
| 0.123162
| 1,632
| 27
| 92
| 60.444444
| 0.774983
| 0.014706
| 0
| 0
| 0
| 0
| 0.032379
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.045455
| false
| 0
| 0.045455
| 0.045455
| 1
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 8
|
f16a774d5548b032544d49f97e7b259834c08fa6
| 580
|
py
|
Python
|
one cmd creator/defaults.py
|
JustRedTTG/one-command-block-creator
|
81f57da5be04d6b19332b7b0da2db6b2cee088a1
|
[
"MIT"
] | null | null | null |
one cmd creator/defaults.py
|
JustRedTTG/one-command-block-creator
|
81f57da5be04d6b19332b7b0da2db6b2cee088a1
|
[
"MIT"
] | null | null | null |
one cmd creator/defaults.py
|
JustRedTTG/one-command-block-creator
|
81f57da5be04d6b19332b7b0da2db6b2cee088a1
|
[
"MIT"
] | null | null | null |
# Boilerplate command snippets wrapped around the user's generated one-command
# contraption.  The string contents are Minecraft commands and must be
# reproduced exactly as the game expects them.
functions = {
    # Start marker: the entity/block the command minecart rides on.
    'default-start':"""@minecraft:activator_rail\n""",
    # Tear-down for a 2-block-tall build: a chain block clears the column,
    # then a command block removes the command-block minecart nearby.
    'default-end':"""setblock ~ ~1 ~ minecraft:chain_command_block[facing=up]{auto:1,Command:"fill ~ ~ ~ ~ ~-2 ~ air"}
setblock ~ ~ ~ minecraft:command_block[facing=up]{auto:1,Command:"kill @e[type=minecraft:command_block_minecart,distance=..2]"}\n""",
    # Same tear-down, but clears one extra block (3-block-tall build).
    'default-fullend':"""setblock ~ ~1 ~ minecraft:chain_command_block[facing=up]{auto:1,Command:"fill ~ ~ ~ ~ ~-3 ~ air"}
setblock ~ ~ ~ minecraft:command_block[facing=up]{auto:1,Command:"kill @e[type=minecraft:command_block_minecart,distance=..2]"}\n"""
}
| 72.5
| 133
| 0.687931
| 77
| 580
| 5.038961
| 0.337662
| 0.185567
| 0.185567
| 0.206186
| 0.819588
| 0.819588
| 0.819588
| 0.819588
| 0.819588
| 0.819588
| 0
| 0.018975
| 0.091379
| 580
| 7
| 134
| 82.857143
| 0.717268
| 0
| 0
| 0
| 0
| 0.571429
| 0.896552
| 0.646552
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 12
|
f16d85b67524c6dca5cdb704cfed6566472ca5d7
| 10,239
|
py
|
Python
|
tests/test_vmtkScripts/test_vmtksurfacecelldatatopointdata.py
|
ramtingh/vmtk
|
4d6f58ce65d73628353ba2b110cbc29a2e7aa7b3
|
[
"Apache-2.0"
] | null | null | null |
tests/test_vmtkScripts/test_vmtksurfacecelldatatopointdata.py
|
ramtingh/vmtk
|
4d6f58ce65d73628353ba2b110cbc29a2e7aa7b3
|
[
"Apache-2.0"
] | null | null | null |
tests/test_vmtkScripts/test_vmtksurfacecelldatatopointdata.py
|
ramtingh/vmtk
|
4d6f58ce65d73628353ba2b110cbc29a2e7aa7b3
|
[
"Apache-2.0"
] | 1
|
2019-06-18T23:41:11.000Z
|
2019-06-18T23:41:11.000Z
|
## Program: VMTK
## Language: Python
## Date: February 12, 2018
## Version: 1.4
## Copyright (c) Richard Izzo, Luca Antiga, All rights reserved.
## See LICENSE file for details.
## This software is distributed WITHOUT ANY WARRANTY; without even
## the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
## PURPOSE. See the above copyright notices for more information.
## Note: this code was contributed by
## Richard Izzo (Github @rlizzo)
## University at Buffalo
import pytest
import vmtk.vmtksurfacecelldatatopointdata as celltopoint
from vtk.numpy_interface import dataset_adapter as dsa
import numpy as np
@pytest.fixture(scope='module')
def centerline_pointdata(aorta_centerline_branches):
    """Run vmtkSurfaceCellDataToPointData on the branched aorta centerline (once per module)."""
    converter = celltopoint.vmtkSurfaceCellDataToPointData()
    converter.Surface = aorta_centerline_branches
    converter.Execute()
    return converter.Surface
@pytest.mark.parametrize("expectedKey", [
    'CenterlineIds',
    'TractIds',
    'Blanking',
    'GroupIds',
])
def test_expected_cell_data_keys(centerline_pointdata, expectedKey):
    """Every branch-analysis array must still be present as cell data."""
    wrapped = dsa.WrapDataObject(centerline_pointdata)
    assert expectedKey in wrapped.CellData.keys()
@pytest.mark.parametrize("expectedKey", [
    'MaximumInscribedSphereRadius',
    'EdgeArray',
    'EdgePCoordArray',
    'CenterlineIds',
    'TractIds',
    'Blanking',
    'GroupIds',
])
def test_expected_point_data_keys(centerline_pointdata, expectedKey):
    """Each cell array must have been converted into a point-data array."""
    wrapped = dsa.WrapDataObject(centerline_pointdata)
    assert expectedKey in wrapped.PointData.keys()
def test_number_of_cell_data_keys_is_4(centerline_pointdata):
    """Exactly the four branch arrays should remain as cell data."""
    wrapped = dsa.WrapDataObject(centerline_pointdata)
    assert len(wrapped.CellData.keys()) == 4
def test_number_of_point_data_keys_is_7(centerline_pointdata):
    """Exactly seven arrays should be exposed as point data after conversion."""
    wrapped = dsa.WrapDataObject(centerline_pointdata)
    assert len(wrapped.PointData.keys()) == 7
def test_expected_number_of_points_in_output(centerline_pointdata):
    """The converted surface keeps all 417 (x, y, z) centerline points."""
    wrapped = dsa.WrapDataObject(centerline_pointdata)
    assert wrapped.Points.shape == (417, 3)
def test_blanking_array_is_correct(centerline_pointdata):
    """Regression snapshot of the per-point Blanking array after conversion."""
    wp = dsa.WrapDataObject(centerline_pointdata)
    # Snapshot of the expected 417-element array (1 = blanked tract).
    expectedOutput = np.array([
        0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
        0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
        0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
        0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
        1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
        0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
        0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
        0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
        0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
        0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
        0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
        0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
        0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
        0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
        1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
        0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
        0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
        0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
        0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0])
    assert np.allclose(wp.PointData.GetArray('Blanking'), expectedOutput) == True
def test_centerlineids_array_is_correct(centerline_pointdata):
    """Regression snapshot of the per-point CenterlineIds array after conversion."""
    wp = dsa.WrapDataObject(centerline_pointdata)
    # Snapshot: which source centerline (0 or 1) each point belongs to.
    expectedOutput = np.array([
        0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
        0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
        0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
        0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
        0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
        0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
        0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
        0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
        0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
        0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1,
        1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
        1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
        1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
        1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
        1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
        1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
        1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
        1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
        1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1])
    assert np.allclose(wp.PointData.GetArray('CenterlineIds'), expectedOutput) == True
def test_groupids_array_is_correct(centerline_pointdata):
    """Regression snapshot of the per-point GroupIds array after conversion."""
    wp = dsa.WrapDataObject(centerline_pointdata)
    # Snapshot: branch-group id (0-3) assigned to each centerline point.
    expectedOutput = np.array([
        0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
        0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
        0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
        0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
        1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
        2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
        2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
        2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
        2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
        2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 0, 0, 0, 0, 0,
        0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
        0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
        0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
        0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
        1, 1, 1, 1, 1, 1, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3,
        3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3,
        3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3,
        3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3,
        3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3])
    assert np.allclose(wp.PointData.GetArray('GroupIds'), expectedOutput) == True
def test_tractids_array_is_correct(centerline_pointdata):
    """The 'TractIds' point-data array matches the known-good per-point labels.

    ``centerline_pointdata`` is a fixture providing a VTK data object; it is
    wrapped with the numpy_interface adapter so its arrays can be compared
    with numpy.
    """
    wp = dsa.WrapDataObject(centerline_pointdata)
    expectedOutput = np.array([ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2])
    # np.allclose already returns a plain bool; the previous `== True`
    # comparison was redundant and un-idiomatic.
    assert np.allclose(wp.PointData.GetArray('TractIds'), expectedOutput)
| 61.680723
| 97
| 0.377381
| 2,001
| 10,239
| 1.896552
| 0.058971
| 0.463241
| 0.688538
| 0.909618
| 0.704348
| 0.704348
| 0.667457
| 0.640053
| 0.623452
| 0.623452
| 0
| 0.288653
| 0.430218
| 10,239
| 165
| 98
| 62.054545
| 0.361844
| 0.046977
| 0
| 0.6875
| 0
| 0
| 0.019624
| 0.002877
| 0
| 0
| 0
| 0
| 0.070313
| 1
| 0.078125
| false
| 0
| 0.03125
| 0
| 0.117188
| 0
| 0
| 0
| 1
| null | 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
74d1fba55e38668fb6474e6c5a72b31dae8639fa
| 147
|
py
|
Python
|
feersum_nlu_util/__init__.py
|
praekelt/feersum-nlu-api-wrappers
|
6580e2bab2c8a764fe868a505330b3fee6029074
|
[
"BSD-3-Clause"
] | 9
|
2017-10-10T12:24:23.000Z
|
2021-08-18T14:07:51.000Z
|
feersum_nlu_util/__init__.py
|
praekelt/feersum-nlu-api-wrappers
|
6580e2bab2c8a764fe868a505330b3fee6029074
|
[
"BSD-3-Clause"
] | 1
|
2020-12-06T11:03:25.000Z
|
2021-04-14T05:21:23.000Z
|
feersum_nlu_util/__init__.py
|
praekelt/feersum-nlu-api-wrappers
|
6580e2bab2c8a764fe868a505330b3fee6029074
|
[
"BSD-3-Clause"
] | 2
|
2019-02-12T08:26:06.000Z
|
2022-02-01T09:39:47.000Z
|
# coding: utf-8
# flake8: noqa
"""
FeersumNLU API Utils
"""
from feersum_nlu_util import transfer
from feersum_nlu_util import image_utils
| 12.25
| 40
| 0.741497
| 21
| 147
| 4.952381
| 0.714286
| 0.211538
| 0.269231
| 0.346154
| 0.461538
| 0
| 0
| 0
| 0
| 0
| 0
| 0.016667
| 0.183673
| 147
| 11
| 41
| 13.363636
| 0.85
| 0.326531
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
2d092a4e4e1af0db3f5776c271d8aede971fce4f
| 17,330
|
py
|
Python
|
tests/webservice/test_client.py
|
3D-e-Chem/python-modified-tanimoto
|
618cc4ae3cb55d9cba2cc297e9c05212353b218e
|
[
"Apache-2.0"
] | 8
|
2017-05-25T19:40:37.000Z
|
2021-06-12T06:59:26.000Z
|
tests/webservice/test_client.py
|
3D-e-Chem/kripodb
|
618cc4ae3cb55d9cba2cc297e9c05212353b218e
|
[
"Apache-2.0"
] | 44
|
2016-02-05T14:02:57.000Z
|
2019-07-29T07:58:20.000Z
|
tests/webservice/test_client.py
|
3D-e-Chem/python-modified-tanimoto
|
618cc4ae3cb55d9cba2cc297e9c05212353b218e
|
[
"Apache-2.0"
] | 1
|
2016-05-05T08:47:49.000Z
|
2016-05-05T08:47:49.000Z
|
# Copyright 2016 Netherlands eScience Center
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
import pytest
import requests_mock
from rdkit.Chem.AllChem import Mol
from requests import HTTPError
from kripodb.webservice.client import WebserviceClient, IncompleteFragments, IncompletePharmacophores
from .test_server import expected_fragments_info, expected_fragments_info_with_mol
from ..test_pharmacophores import example1_phar, example3_phar
@pytest.fixture
def base_url():
    """Root URL of the (mocked) kripo webservice used by the tests."""
    service_root = 'http://localhost:8084/kripo'
    return service_root
@pytest.fixture
def client(base_url):
    """A WebserviceClient pointed at the mocked service root."""
    ws_client = WebserviceClient(base_url)
    return ws_client
def test_similar_fragments(base_url, client):
    """similar_fragments returns the JSON payload served by the endpoint unchanged."""
    with requests_mock.mock() as mocker:
        similar_hits = [
            {'query_frag_id': '3j7u_NDP_frag24', 'hit_frag_id': '3j7u_NDP_frag23', 'score': 0.8991},
        ]
        endpoint = base_url + '/fragments/3j7u_NDP_frag24/similar?cutoff=0.75&limit=1'
        mocker.get(endpoint, json=similar_hits)
        response = client.similar_fragments(fragment_id='3j7u_NDP_frag24', cutoff=0.75, limit=1)
        assert response == similar_hits
def test_fragments_by_id(base_url, client):
# Fetch fragment records by id: the webservice answers with JSON in which
# 'mol' is a V2000 molblock string; the client is expected to convert that
# string into an RDKit Mol object while leaving every other field untouched.
with requests_mock.mock() as m:
expected = [
{'smiles': '[*]C1OC(COP(=O)([O-])OP(=O)([O-])OCC2OC(N3C=CCC(C(N)=O)=C3)C(O)C2O)C(O)C1[*]',
'pdb_code': '3j7u',
'pdb_title': 'Catalase structure determined by electron crystallography of thin 3D crystals',
'atom_codes': 'PA,O1A,O2A,O5B,C5B,C4B,O4B,C3B,O3B,C2B,C1B,O3,PN,O1N,O2N,O5D,C5D,C4D,O4D,C3D,O3D,C2D,O2D,C1D,N1N,C2N,C3N,C7N,O7N,N7N,C4N,C5N,C6N',
'uniprot_acc': 'P00432',
'mol': '3j7u_NDP_frag24\n RDKit 3D\n\n 35 37 0 0 0 0 0 0 0 0999 V2000\n -15.1410 -11.1250 -79.4200 P 0 0 0 0 0 0 0 0 0 0 0 0\n -14.6900 -10.9960 -80.8600 O 0 0 0 0 0 0 0 0 0 0 0 0\n -16.5040 -11.6890 -79.0770 O 0 0 0 0 0 0 0 0 0 0 0 0\n -14.9990 -9.6870 -78.7060 O 0 0 0 0 0 0 0 0 0 0 0 0\n -15.1870 -8.4550 -79.4050 C 0 0 0 0 0 0 0 0 0 0 0 0\n -14.6700 -7.3160 -78.5260 C 0 0 0 0 0 0 0 0 0 0 0 0\n -13.2400 -7.2390 -78.5880 O 0 0 0 0 0 0 0 0 0 0 0 0\n -15.2130 -5.9510 -78.9460 C 0 0 0 0 0 0 0 0 0 0 0 0\n -16.1600 -5.4570 -77.9880 O 0 0 0 0 0 0 0 0 0 0 0 0\n -14.0000 -5.0420 -79.0650 C 0 0 0 0 0 0 0 0 0 0 0 0\n -14.1790 -3.8250 -78.3260 R 0 0 0 0 0 1 0 0 0 0 0 0\n -12.8370 -5.8690 -78.5180 C 0 0 0 0 0 0 0 0 0 0 0 0\n -11.5470 -5.6210 -79.2410 R 0 0 0 0 0 1 0 0 0 0 0 0\n -14.0270 -11.9960 -78.6490 O 0 0 0 0 0 0 0 0 0 0 0 0\n -14.1810 -13.5930 -78.4870 P 0 0 0 0 0 0 0 0 0 0 0 0\n -14.5480 -14.2030 -79.8230 O 0 0 0 0 0 0 0 0 0 0 0 0\n -15.0330 -13.8500 -77.2690 O 0 0 0 0 0 0 0 0 0 0 0 0\n -12.6800 -14.0730 -78.1770 O 0 0 0 0 0 0 0 0 0 0 0 0\n -12.1840 -14.2350 -76.8490 C 0 0 0 0 0 0 0 0 0 0 0 0\n -11.1340 -13.1670 -76.6050 C 0 0 0 0 0 0 0 0 0 0 0 0\n -11.6880 -11.8550 -76.6770 O 0 0 0 0 0 0 0 0 0 0 0 0\n -10.5070 -13.2750 -75.2350 C 0 0 0 0 0 0 0 0 0 0 0 0\n -9.4070 -14.1780 -75.3000 O 0 0 0 0 0 0 0 0 0 0 0 0\n -10.0970 -11.8400 -74.9280 C 0 0 0 0 0 0 0 0 0 0 0 0\n -8.6920 -11.6460 -75.1050 O 0 0 0 0 0 0 0 0 0 0 0 0\n -10.8280 -10.9760 -75.9460 C 0 0 0 0 0 0 0 0 0 0 0 0\n -11.5890 -9.8540 -75.3660 N 0 0 0 0 0 0 0 0 0 0 0 0\n -12.7860 -10.0630 -74.7850 C 0 0 0 0 0 0 0 0 0 0 0 0\n -13.5340 -9.0090 -74.2510 C 0 0 0 0 0 0 0 0 0 0 0 0\n -14.8620 -9.2740 -73.5990 C 0 0 0 0 0 0 0 0 0 0 0 0\n -15.1890 -10.4300 -73.3940 O 0 0 0 0 0 0 0 0 0 0 0 0\n -15.6600 -8.2650 -73.2400 N 0 0 0 0 0 0 0 0 0 0 0 0\n -13.0230 -7.5870 -74.3390 C 0 0 0 0 0 0 0 0 0 0 0 0\n -11.7130 -7.4960 -74.9740 C 0 0 0 0 0 0 0 0 0 0 0 0\n -11.0640 -8.6200 -75.4710 C 0 0 0 0 0 0 0 0 0 0 0 0\n 1 2 2 0\n 1 3 1 
4 1 0\n 1 14 1 0\n 4 5 1 0\n 5 6 1 0\n 6 7 1 0\n 6 8 1 0\n 7 12 1 0\n 8 9 1 0\n 8 10 1 0\n 10 11 1 0\n 10 12 1 0\n 12 13 1 0\n 14 15 1 0\n 15 16 2 0\n 15 17 1 0\n 15 18 1 0\n 18 19 1 0\n 19 20 1 0\n 20 21 1 0\n 20 22 1 0\n 21 26 1 0\n 22 23 1 0\n 22 24 1 0\n 24 25 1 0\n 24 26 1 0\n 26 27 1 0\n 27 28 1 0\n 27 35 1 0\n 28 29 2 0\n 29 30 1 0\n 29 33 1 0\n 30 31 2 0\n 30 32 1 0\n 33 34 1 0\n 34 35 2 0\nM CHG 2 3 -1 17 -1\nM END\n',
'prot_chain': 'A', 'het_seq_nr': 602, 'het_code': 'NDP', 'prot_name': 'Catalase',
'ec_number': '1.11.1.6', 'frag_nr': 24, 'frag_id': '3j7u_NDP_frag24', 'rowid': 7059,
'uniprot_name': 'Catalase', 'nr_r_groups': 2, 'het_chain': 'A', 'hash_code': '6ef5a609fb192dba'}
]
url = base_url + '/fragments?fragment_ids=3j7u_NDP_frag24,3j7u_NDP_frag23'
m.get(url, json=expected)
response = client.fragments_by_id(fragment_ids=['3j7u_NDP_frag24', '3j7u_NDP_frag23'])
# The molblock text must have been parsed into an RDKit Mol instance.
assert isinstance(response[0]['mol'], Mol)
# Mol objects do not compare by value, so drop them before comparing
# the remainder of the record dicts.
del response[0]['mol']
del expected[0]['mol']
assert response == expected
def test_fragments_by_pdb_codes(base_url, client):
# Fetch fragments for two PDB codes with chunk_size=1 so each code becomes
# its own HTTP request; both mocked responses carry the same molblock, and
# the client must parse each 'mol' into an RDKit Mol.
with requests_mock.mock() as m:
molblock = '3j7u_NDP_frag24\n RDKit 3D\n\n 35 37 0 0 0 0 0 0 0 0999 V2000\n -15.1410 -11.1250 -79.4200 P 0 0 0 0 0 0 0 0 0 0 0 0\n -14.6900 -10.9960 -80.8600 O 0 0 0 0 0 0 0 0 0 0 0 0\n -16.5040 -11.6890 -79.0770 O 0 0 0 0 0 0 0 0 0 0 0 0\n -14.9990 -9.6870 -78.7060 O 0 0 0 0 0 0 0 0 0 0 0 0\n -15.1870 -8.4550 -79.4050 C 0 0 0 0 0 0 0 0 0 0 0 0\n -14.6700 -7.3160 -78.5260 C 0 0 0 0 0 0 0 0 0 0 0 0\n -13.2400 -7.2390 -78.5880 O 0 0 0 0 0 0 0 0 0 0 0 0\n -15.2130 -5.9510 -78.9460 C 0 0 0 0 0 0 0 0 0 0 0 0\n -16.1600 -5.4570 -77.9880 O 0 0 0 0 0 0 0 0 0 0 0 0\n -14.0000 -5.0420 -79.0650 C 0 0 0 0 0 0 0 0 0 0 0 0\n -14.1790 -3.8250 -78.3260 R 0 0 0 0 0 1 0 0 0 0 0 0\n -12.8370 -5.8690 -78.5180 C 0 0 0 0 0 0 0 0 0 0 0 0\n -11.5470 -5.6210 -79.2410 R 0 0 0 0 0 1 0 0 0 0 0 0\n -14.0270 -11.9960 -78.6490 O 0 0 0 0 0 0 0 0 0 0 0 0\n -14.1810 -13.5930 -78.4870 P 0 0 0 0 0 0 0 0 0 0 0 0\n -14.5480 -14.2030 -79.8230 O 0 0 0 0 0 0 0 0 0 0 0 0\n -15.0330 -13.8500 -77.2690 O 0 0 0 0 0 0 0 0 0 0 0 0\n -12.6800 -14.0730 -78.1770 O 0 0 0 0 0 0 0 0 0 0 0 0\n -12.1840 -14.2350 -76.8490 C 0 0 0 0 0 0 0 0 0 0 0 0\n -11.1340 -13.1670 -76.6050 C 0 0 0 0 0 0 0 0 0 0 0 0\n -11.6880 -11.8550 -76.6770 O 0 0 0 0 0 0 0 0 0 0 0 0\n -10.5070 -13.2750 -75.2350 C 0 0 0 0 0 0 0 0 0 0 0 0\n -9.4070 -14.1780 -75.3000 O 0 0 0 0 0 0 0 0 0 0 0 0\n -10.0970 -11.8400 -74.9280 C 0 0 0 0 0 0 0 0 0 0 0 0\n -8.6920 -11.6460 -75.1050 O 0 0 0 0 0 0 0 0 0 0 0 0\n -10.8280 -10.9760 -75.9460 C 0 0 0 0 0 0 0 0 0 0 0 0\n -11.5890 -9.8540 -75.3660 N 0 0 0 0 0 0 0 0 0 0 0 0\n -12.7860 -10.0630 -74.7850 C 0 0 0 0 0 0 0 0 0 0 0 0\n -13.5340 -9.0090 -74.2510 C 0 0 0 0 0 0 0 0 0 0 0 0\n -14.8620 -9.2740 -73.5990 C 0 0 0 0 0 0 0 0 0 0 0 0\n -15.1890 -10.4300 -73.3940 O 0 0 0 0 0 0 0 0 0 0 0 0\n -15.6600 -8.2650 -73.2400 N 0 0 0 0 0 0 0 0 0 0 0 0\n -13.0230 -7.5870 -74.3390 C 0 0 0 0 0 0 0 0 0 0 0 0\n -11.7130 -7.4960 -74.9740 C 0 0 0 0 0 0 0 0 0 0 0 0\n -11.0640 -8.6200 -75.4710 C 0 0 0 0 0 0 0 0 0 0 0 0\n 1 2 2 0\n 1 3 1 
0\n 1 4 1 0\n 1 14 1 0\n 4 5 1 0\n 5 6 1 0\n 6 7 1 0\n 6 8 1 0\n 7 12 1 0\n 8 9 1 0\n 8 10 1 0\n 10 11 1 0\n 10 12 1 0\n 12 13 1 0\n 14 15 1 0\n 15 16 2 0\n 15 17 1 0\n 15 18 1 0\n 18 19 1 0\n 19 20 1 0\n 20 21 1 0\n 20 22 1 0\n 21 26 1 0\n 22 23 1 0\n 22 24 1 0\n 24 25 1 0\n 24 26 1 0\n 26 27 1 0\n 27 28 1 0\n 27 35 1 0\n 28 29 2 0\n 29 30 1 0\n 29 33 1 0\n 30 31 2 0\n 30 32 1 0\n 33 34 1 0\n 34 35 2 0\nM CHG 2 3 -1 17 -1\nM END\n'
m.get(base_url + '/fragments?pdb_codes=3j7u', json=[{'pdb_code': '3j7u', 'mol': molblock}])
m.get(base_url + '/fragments?pdb_codes=3wxm', json=[{'pdb_code': '3wxm', 'mol': molblock}])
response = client.fragments_by_pdb_codes(pdb_codes=['3j7u', '3wxm'], chunk_size=1)
# Each record's molblock should have been parsed into a Mol instance.
assert isinstance(response[0]['mol'], Mol)
assert isinstance(response[1]['mol'], Mol)
# Mol objects do not compare by value; remove them before the dict compare.
del response[0]['mol']
del response[1]['mol']
expected = [{'pdb_code': '3j7u'}, {'pdb_code': '3wxm'}]
assert response == expected
def test_fragments_by_id_withmolisnone(base_url, client):
    """When the server reports 'mol' as None, the client returns the record as-is."""
    with requests_mock.mock() as mocker:
        # Server payload: a single fragment record whose 'mol' and 'smiles'
        # are null — nothing for the client to parse into an RDKit Mol.
        payload = [
            {'smiles': None,
             'pdb_code': '3j7u',
             'pdb_title': 'Catalase structure determined by electron crystallography of thin 3D crystals',
             'atom_codes': 'PA,O1A,O2A,O5B,C5B,C4B,O4B,C3B,O3B,C2B,C1B,O3,PN,O1N,O2N,O5D,C5D,C4D,O4D,C3D,O3D,C2D,O2D,C1D,N1N,C2N,C3N,C7N,O7N,N7N,C4N,C5N,C6N',
             'uniprot_acc': 'P00432',
             'mol': None,
             'prot_chain': 'A', 'het_seq_nr': 602, 'het_code': 'NDP', 'prot_name': 'Catalase',
             'ec_number': '1.11.1.6', 'frag_nr': 24, 'frag_id': '3j7u_NDP_frag24', 'rowid': 7059,
             'uniprot_name': 'Catalase', 'nr_r_groups': 2, 'het_chain': 'A', 'hash_code': '6ef5a609fb192dba'}
        ]
        endpoint = base_url + '/fragments?fragment_ids=3j7u_NDP_frag24,3j7u_NDP_frag23'
        mocker.get(endpoint, json=payload)
        response = client.fragments_by_id(fragment_ids=['3j7u_NDP_frag24', '3j7u_NDP_frag23'])
        assert response == payload
def test_fragments_by_id___withsinglechunk_withsomenotfound(base_url, client, expected_fragments_info_with_mol):
# A 404 application/problem+json response that still carries partial
# results must raise IncompleteFragments exposing both the fragments that
# were found and the identifiers that were absent.
with requests_mock.mock() as m:
url = base_url + '/fragments?fragment_ids=3j7u_NDP_frag24,foo'
molblock = '3j7u_NDP_frag24\n RDKit 3D\n\n 35 37 0 0 0 0 0 0 0 0999 V2000\n -15.1410 -11.1250 -79.4200 P 0 0 0 0 0 0 0 0 0 0 0 0\n -14.6900 -10.9960 -80.8600 O 0 0 0 0 0 0 0 0 0 0 0 0\n -16.5040 -11.6890 -79.0770 O 0 0 0 0 0 0 0 0 0 0 0 0\n -14.9990 -9.6870 -78.7060 O 0 0 0 0 0 0 0 0 0 0 0 0\n -15.1870 -8.4550 -79.4050 C 0 0 0 0 0 0 0 0 0 0 0 0\n -14.6700 -7.3160 -78.5260 C 0 0 0 0 0 0 0 0 0 0 0 0\n -13.2400 -7.2390 -78.5880 O 0 0 0 0 0 0 0 0 0 0 0 0\n -15.2130 -5.9510 -78.9460 C 0 0 0 0 0 0 0 0 0 0 0 0\n -16.1600 -5.4570 -77.9880 O 0 0 0 0 0 0 0 0 0 0 0 0\n -14.0000 -5.0420 -79.0650 C 0 0 0 0 0 0 0 0 0 0 0 0\n -14.1790 -3.8250 -78.3260 R 0 0 0 0 0 1 0 0 0 0 0 0\n -12.8370 -5.8690 -78.5180 C 0 0 0 0 0 0 0 0 0 0 0 0\n -11.5470 -5.6210 -79.2410 R 0 0 0 0 0 1 0 0 0 0 0 0\n -14.0270 -11.9960 -78.6490 O 0 0 0 0 0 0 0 0 0 0 0 0\n -14.1810 -13.5930 -78.4870 P 0 0 0 0 0 0 0 0 0 0 0 0\n -14.5480 -14.2030 -79.8230 O 0 0 0 0 0 0 0 0 0 0 0 0\n -15.0330 -13.8500 -77.2690 O 0 0 0 0 0 0 0 0 0 0 0 0\n -12.6800 -14.0730 -78.1770 O 0 0 0 0 0 0 0 0 0 0 0 0\n -12.1840 -14.2350 -76.8490 C 0 0 0 0 0 0 0 0 0 0 0 0\n -11.1340 -13.1670 -76.6050 C 0 0 0 0 0 0 0 0 0 0 0 0\n -11.6880 -11.8550 -76.6770 O 0 0 0 0 0 0 0 0 0 0 0 0\n -10.5070 -13.2750 -75.2350 C 0 0 0 0 0 0 0 0 0 0 0 0\n -9.4070 -14.1780 -75.3000 O 0 0 0 0 0 0 0 0 0 0 0 0\n -10.0970 -11.8400 -74.9280 C 0 0 0 0 0 0 0 0 0 0 0 0\n -8.6920 -11.6460 -75.1050 O 0 0 0 0 0 0 0 0 0 0 0 0\n -10.8280 -10.9760 -75.9460 C 0 0 0 0 0 0 0 0 0 0 0 0\n -11.5890 -9.8540 -75.3660 N 0 0 0 0 0 0 0 0 0 0 0 0\n -12.7860 -10.0630 -74.7850 C 0 0 0 0 0 0 0 0 0 0 0 0\n -13.5340 -9.0090 -74.2510 C 0 0 0 0 0 0 0 0 0 0 0 0\n -14.8620 -9.2740 -73.5990 C 0 0 0 0 0 0 0 0 0 0 0 0\n -15.1890 -10.4300 -73.3940 O 0 0 0 0 0 0 0 0 0 0 0 0\n -15.6600 -8.2650 -73.2400 N 0 0 0 0 0 0 0 0 0 0 0 0\n -13.0230 -7.5870 -74.3390 C 0 0 0 0 0 0 0 0 0 0 0 0\n -11.7130 -7.4960 -74.9740 C 0 0 0 0 0 0 0 0 0 0 0 0\n -11.0640 -8.6200 -75.4710 C 0 0 0 0 0 0 0 0 0 0 0 0\n 1 2 2 0\n 1 3 1 
0\n 1 4 1 0\n 1 14 1 0\n 4 5 1 0\n 5 6 1 0\n 6 7 1 0\n 6 8 1 0\n 7 12 1 0\n 8 9 1 0\n 8 10 1 0\n 10 11 1 0\n 10 12 1 0\n 12 13 1 0\n 14 15 1 0\n 15 16 2 0\n 15 17 1 0\n 15 18 1 0\n 18 19 1 0\n 19 20 1 0\n 20 21 1 0\n 20 22 1 0\n 21 26 1 0\n 22 23 1 0\n 22 24 1 0\n 24 25 1 0\n 24 26 1 0\n 26 27 1 0\n 27 28 1 0\n 27 35 1 0\n 28 29 2 0\n 29 30 1 0\n 29 33 1 0\n 30 31 2 0\n 30 32 1 0\n 33 34 1 0\n 34 35 2 0\nM CHG 2 3 -1 17 -1\nM END\n'
# RFC 7807-style problem body: one resolved fragment plus one absent id.
mocked_body = {
'detail': "Fragment with identifier 'foo,bar' not found",
'absent_identifiers': ['foo'],
'fragments': [{
'smiles': '[*]C1OC(COP(=O)([O-])OP(=O)([O-])OCC2OC(N3C=CCC(C(N)=O)=C3)C(O)C2O)C(O)C1[*]',
'pdb_code': '3j7u',
'pdb_title': 'Catalase structure determined by electron crystallography of thin 3D crystals',
'atom_codes': 'PA,O1A,O2A,O5B,C5B,C4B,O4B,C3B,O3B,C2B,C1B,O3,PN,O1N,O2N,O5D,C5D,C4D,O4D,C3D,O3D,C2D,O2D,C1D,N1N,C2N,C3N,C7N,O7N,N7N,C4N,C5N,C6N',
'uniprot_acc': 'P00432',
'prot_chain': 'A', 'het_seq_nr': 602, 'het_code': 'NDP', 'prot_name': 'Catalase',
'ec_number': '1.11.1.6', 'frag_nr': 24, 'frag_id': '3j7u_NDP_frag24', 'rowid': 7059,
'uniprot_name': 'Catalase', 'nr_r_groups': 2, 'het_chain': 'A', 'hash_code': '6ef5a609fb192dba',
'mol': molblock
}],
'status': 404,
'title': 'Not Found',
'type': 'about:blank'
}
m.get(url, json=mocked_body, status_code=404, headers={'Content-Type': 'application/problem+json'})
with pytest.raises(IncompleteFragments) as e:
client.fragments_by_id(fragment_ids=['3j7u_NDP_frag24', 'foo'])
# The exception carries the partial results and the missing identifiers.
assert len(e.value.fragments) == 1
assert e.value.fragments[0]['frag_id'] == '3j7u_NDP_frag24'
assert e.value.absent_identifiers == ['foo']
def test_pharmacophores(base_url, client, example1_phar, example3_phar):
    """pharmacophores fetches each .phar document and returns them in request order."""
    with requests_mock.mock() as mocker:
        mocker.get(base_url + '/fragments/3j7u_NDP_frag24.phar', text=example1_phar)
        mocker.get(base_url + '/fragments/3j7u_NDP_frag23.phar', text=example3_phar)
        phars = client.pharmacophores(['3j7u_NDP_frag24', '3j7u_NDP_frag23'])
        assert phars == [example1_phar, example3_phar]
def test_pharmacophores_somenotfound_incomplete(base_url, client, example1_phar):
    """A 404 on one fragment raises IncompletePharmacophores with partial results."""
    with requests_mock.mock() as mocker:
        mocker.get(base_url + '/fragments/3j7u_NDP_frag24.phar', text=example1_phar)
        # application/problem+json body describing the fragment that is missing.
        problem_body = {
            'detail': "Fragment with identifier '3j7u_NDP_frag23' not found",
            'identifier': '3j7u_NDP_frag23',
            'status': 404,
            'title': 'Not Found',
            'type': 'about:blank'
        }
        mocker.get(base_url + '/fragments/3j7u_NDP_frag23.phar', status_code=404, json=problem_body, headers={'Content-Type': 'application/problem+json'})
        with pytest.raises(IncompletePharmacophores) as excinfo:
            client.pharmacophores(['3j7u_NDP_frag24', '3j7u_NDP_frag23'])
        # Found pharmacophores keep their slot; absent ones become None.
        assert excinfo.value.absent_identifiers == ['3j7u_NDP_frag23']
        assert excinfo.value.pharmacophores == [example1_phar, None]
def test_pharmacophores_server500(base_url, client):
    """A non-404 server error is not swallowed: it surfaces as requests.HTTPError."""
    with requests_mock.mock() as mocker:
        mocker.get(base_url + '/fragments/3j7u_NDP_frag24.phar', text='Internal server error', status_code=500)
        with pytest.raises(HTTPError) as excinfo:
            client.pharmacophores(['3j7u_NDP_frag24'])
        assert excinfo.value.response.status_code == 500
| 97.359551
| 3,145
| 0.540969
| 3,805
| 17,330
| 2.4
| 0.101708
| 0.254271
| 0.343955
| 0.408673
| 0.793145
| 0.774529
| 0.757994
| 0.733027
| 0.71901
| 0.672908
| 0
| 0.368292
| 0.343047
| 17,330
| 177
| 3,146
| 97.909605
| 0.433816
| 0.032429
| 0
| 0.456693
| 0
| 0.062992
| 0.711293
| 0.059448
| 0
| 0
| 0
| 0
| 0.110236
| 1
| 0.07874
| false
| 0
| 0.062992
| 0.015748
| 0.15748
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 12
|
740c8dc8dddfb897c3530278bf49ab1ef511472e
| 9,517
|
py
|
Python
|
articles/test_editarticle.py
|
kylejuliandev/dev_blog_assignment
|
272466cb591f9b45fb81c2a42e86b25bff3cd9ad
|
[
"MIT"
] | null | null | null |
articles/test_editarticle.py
|
kylejuliandev/dev_blog_assignment
|
272466cb591f9b45fb81c2a42e86b25bff3cd9ad
|
[
"MIT"
] | null | null | null |
articles/test_editarticle.py
|
kylejuliandev/dev_blog_assignment
|
272466cb591f9b45fb81c2a42e86b25bff3cd9ad
|
[
"MIT"
] | null | null | null |
from uuid import uuid4
from django.test import TestCase
from accounts.models import User
from articles.models import Article
from articles.testhelpers import create_user, create_mock_article, get_random_string
class EditArticleTest(TestCase):
    """Tests for the article edit endpoint (/article/<id>/edit).

    Covers authorization (anonymous / non-author / author / admin),
    missing-article handling, form-validation limits, and successful edits.

    NOTE(review): the previous version overrode ``__init__`` with
    ``methodName: str = ...`` — a literal ``Ellipsis`` default that breaks
    direct instantiation. The inherited ``TestCase.__init__`` already does
    the right thing, so the override has been removed (the test runner
    always passes ``methodName`` explicitly, so no caller-visible change).
    """

    def setUp(self) -> None:
        """Sets up test data. Creates three users and a mock article"""
        create_user('TestUser', 'TestUser_P', False, False)
        author = create_user('AuthorTestUser', 'AuthorTestUser_P', True, False)
        create_user('AdminTestUser', 'AdminTestUser_P', False, True)
        create_mock_article(author)
        return super().setUp()

    def tearDown(self) -> None:
        """Deletes every user and article created during a test."""
        User.objects.all().delete()
        Article.objects.all().delete()
        return super().tearDown()

    # ------------------------------------------------------------------
    # Private helpers shared by the tests below.
    # ------------------------------------------------------------------

    @staticmethod
    def _edit_url(identifier) -> str:
        """Build the edit endpoint URL for an article id (or any uuid)."""
        return '/article/' + str(identifier) + '/edit'

    def _assert_stored_matches(self, stored, article) -> None:
        """Assert the stored article's fields and timestamps are unchanged."""
        self.assertEqual(stored.title, article.title)
        self.assertEqual(stored.summary, article.summary)
        self.assertEqual(stored.content, article.content)
        self.assertEqual(stored.created_on, article.created_on)
        self.assertEqual(stored.updated_on, article.updated_on)

    def _assert_form_prepopulated(self, response, article) -> None:
        """Assert the edit form's initial values mirror the article's fields."""
        form = response.context['form']
        self.assertEqual(str(form.fields['title'].initial), article.title)
        self.assertEqual(str(form.fields['summary'].initial), article.summary)
        self.assertEqual(str(form.fields['content'].initial), article.content)

    def test_edit_article_get_as_unauthenticated(self):
        """
        Given an article exists,
        When I try to edit the article as an unauthenticated user,
        Then I am redirected to the home page
        """
        article = Article.objects.all().first()
        response = self.client.get(self._edit_url(article.id))
        self.assertEqual(response.status_code, 302)

    def test_edit_article_get_as_authenticated_but_not_original_author(self):
        """
        Given an article exists,
        When I try to edit the article as not the original article author,
        Then I am redirected to the home page
        """
        article = Article.objects.all().first()
        self.client.login(username='testuser', password='TestUser_P')
        response = self.client.get(self._edit_url(article.id))
        self.assertEqual(response.status_code, 302)

    def test_edit_article_get_as_authenticated_as_original_author(self):
        """
        Given an article exists,
        When I try to edit the article as the original article author,
        Then I am redirected to the article edit page
        """
        article = Article.objects.all().first()
        self.client.login(username='authortestuser', password='AuthorTestUser_P')
        response = self.client.get(self._edit_url(article.id))
        checkArticle = Article.objects.get(id=article.id)
        self.assertEqual(response.status_code, 200)
        # A plain GET must not alter the stored article.
        self._assert_stored_matches(checkArticle, article)
        self._assert_form_prepopulated(response, article)

    def test_edit_article_get_as_authenticated_as_admin(self):
        """
        Given an article exists,
        When I try to edit the article as an admin,
        Then I am redirected to the article edit page
        """
        article = Article.objects.all().first()
        self.client.login(username='admintestuser', password='AdminTestUser_P')
        response = self.client.get(self._edit_url(article.id))
        checkArticle = Article.objects.get(id=article.id)
        self.assertEqual(response.status_code, 200)
        self._assert_stored_matches(checkArticle, article)
        self._assert_form_prepopulated(response, article)

    def test_edit_article_get_with_missing_article(self):
        """
        Given article does not exist,
        When I try to edit the article as an author,
        Then I am redirected to the home page
        """
        self.client.login(username='authortestuser', password='AuthorTestUser_P')
        response = self.client.get(self._edit_url(uuid4()))
        self.assertEqual(response.status_code, 302)

    def test_edit_article_get_with_missing_article_as_admin(self):
        """
        Given article does not exist,
        When I try to edit the article as an admin,
        Then I am redirected to the home page
        """
        self.client.login(username='admintestuser', password='AdminTestUser_P')
        response = self.client.get(self._edit_url(uuid4()))
        self.assertEqual(response.status_code, 302)

    def test_edit_article_with_title_too_long(self):
        """
        Given an article exist,
        When I try to edit the article with a title that is too long,
        Then I am redirected to the article edit page and my changes do not save
        """
        article = Article.objects.all().first()
        title = get_random_string(201)  # presumably one past the title limit — confirm against Article model
        data = {
            'title': title,
            'summary': 'New summary',
            'content': 'New content'
        }
        self.client.login(username='authortestuser', password='AuthorTestUser_P')
        response = self.client.post(self._edit_url(article.id), data=data)
        checkArticle = Article.objects.get(id=article.id)
        self.assertEqual(response.status_code, 200)
        # None of the submitted values may have been persisted.
        self.assertNotEqual(checkArticle.title, title)
        self.assertNotEqual(checkArticle.summary, 'New summary')
        self.assertNotEqual(checkArticle.content, 'New content')
        self.assertEqual(checkArticle.created_on, article.created_on)
        self.assertEqual(checkArticle.updated_on, article.updated_on)

    def test_edit_article_with_summary_too_long(self):
        """
        Given an article exist,
        When I try to edit the article with a summary that is too long,
        Then I am redirected to the article edit page and my changes do not save
        """
        article = Article.objects.all().first()
        summary = get_random_string(256)  # presumably one past the summary limit — confirm against Article model
        data = {
            'title': 'New title',
            'summary': summary,
            'content': 'New content'
        }
        self.client.login(username='authortestuser', password='AuthorTestUser_P')
        response = self.client.post(self._edit_url(article.id), data=data)
        checkArticle = Article.objects.get(id=article.id)
        self.assertEqual(response.status_code, 200)
        # None of the submitted values may have been persisted.
        self.assertNotEqual(checkArticle.title, 'New title')
        self.assertNotEqual(checkArticle.summary, summary)
        self.assertNotEqual(checkArticle.content, 'New content')
        self.assertEqual(checkArticle.created_on, article.created_on)
        self.assertEqual(checkArticle.updated_on, article.updated_on)

    def test_edit_article(self):
        """
        Given an article exist,
        When I try to edit the article as the author,
        Then I am redirected to the article edit page and my changes save
        """
        article = Article.objects.all().first()
        data = {
            'title': 'New title',
            'summary': 'New summary',
            'content': 'New content'
        }
        self.client.login(username='authortestuser', password='AuthorTestUser_P')
        response = self.client.post(self._edit_url(article.id), data=data)
        checkArticle = Article.objects.get(id=article.id)
        self.assertEqual(response.status_code, 200)
        self.assertEqual(checkArticle.title, 'New title')
        self.assertEqual(checkArticle.summary, 'New summary')
        self.assertEqual(checkArticle.content, 'New content')
        # created_on is immutable; updated_on must move forward on edit.
        self.assertEqual(checkArticle.created_on, article.created_on)
        self.assertNotEqual(checkArticle.updated_on, article.updated_on)

    def test_edit_article_as_admin(self):
        """
        Given an article exist,
        When I try to edit the article as an admin,
        Then I am redirected to the article edit page and my changes save
        """
        article = Article.objects.all().first()
        data = {
            'title': 'New title',
            'summary': 'New summary',
            'content': 'New content'
        }
        self.client.login(username='admintestuser', password='AdminTestUser_P')
        response = self.client.post(self._edit_url(article.id), data=data)
        checkArticle = Article.objects.get(id=article.id)
        self.assertEqual(response.status_code, 200)
        self.assertEqual(checkArticle.title, 'New title')
        self.assertEqual(checkArticle.summary, 'New summary')
        self.assertEqual(checkArticle.content, 'New content')
        self.assertEqual(checkArticle.created_on, article.created_on)
        self.assertNotEqual(checkArticle.updated_on, article.updated_on)
| 41.199134
| 87
| 0.651151
| 1,101
| 9,517
| 5.500454
| 0.099909
| 0.094122
| 0.098085
| 0.029723
| 0.881275
| 0.859974
| 0.854194
| 0.854194
| 0.844287
| 0.842801
| 0
| 0.005414
| 0.243039
| 9,517
| 231
| 88
| 41.199134
| 0.83523
| 0.144268
| 0
| 0.70073
| 0
| 0
| 0.104601
| 0
| 0
| 0
| 0
| 0
| 0.335766
| 1
| 0.094891
| false
| 0.065693
| 0.036496
| 0
| 0.153285
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 8
|
7440d94d7871edd47482f8141c9c0e81af6169de
| 16,430
|
py
|
Python
|
codes/tests/qs_test.py
|
madokast/cctpy
|
b02c64220ea533a4fc9cad0b882d1be6edadf1c0
|
[
"MIT"
] | 1
|
2021-12-27T13:20:43.000Z
|
2021-12-27T13:20:43.000Z
|
codes/tests/qs_test.py
|
madokast/cctpy
|
b02c64220ea533a4fc9cad0b882d1be6edadf1c0
|
[
"MIT"
] | null | null | null |
codes/tests/qs_test.py
|
madokast/cctpy
|
b02c64220ea533a4fc9cad0b882d1be6edadf1c0
|
[
"MIT"
] | null | null | null |
import unittest
import numpy as np
from cctpy.baseutils import Vectors, Equal, Stream
from cctpy.constant import M, MM, YI, XI, Protons, ZI, MRAD
from cctpy.particle import RunningParticle, ParticleFactory, PhaseSpaceParticle, ParticleRunner
from cctpy.qs_hard_edge_magnet import QsHardEdgeMagnet
from cctpy.abstract_classes import LocalCoordinateSystem
from cctpy.plotuils import Plot2
class QsTest(unittest.TestCase):
def test_quad_0(self):
"""
Test the quadrupole field of the qs hard-edge magnet (positive gradient).

Builds a 0.2 m magnet with quadrupole gradient g = 10 and zero
second-order gradient, then checks magnetic_field_at() against
reference vectors on and off the mid-plane (units per project
convention — TODO confirm).
"""
length = 0.2 * M
aper = 30 * MM
g = 10.
L = 0
# Local frame: main axis along YI, second axis along -XI.
lc = LocalCoordinateSystem(main_direction=YI, second_direction=-XI)
qs = QsHardEdgeMagnet(length, g, L, aper, lc)
m = qs.magnetic_field_at(Vectors.create(10 * MM, 0.1, 0))
self.assertTrue(Equal.equal_vector(m, Vectors.create(0.0, 0.0, -0.1)))
m = qs.magnetic_field_at(Vectors.create(15 * MM, 0.1, 0))
self.assertTrue(Equal.equal_vector(m, Vectors.create(0.0, 0.0, -0.15)))
# Off mid-plane (z = 5 mm) a horizontal component appears; the tiny
# second component is floating-point noise from the reference run.
m = qs.magnetic_field_at(Vectors.create(15 * MM, 0.1, 5 * MM))
self.assertTrue(Equal.equal_vector(m, Vectors.create(-0.05, -3.061616997868383E-18, -0.15)))
def test_quad_1(self):
"""
Test the quadrupole field of the qs hard-edge magnet (negative gradient).

Same setup as test_quad_0 but with g = -45.7; the expected field
components flip sign and scale with the gradient.
"""
length = 0.2 * M
aper = 30 * MM
g = -45.7
L = 0
# Local frame: main axis along YI, second axis along -XI.
lc = LocalCoordinateSystem(main_direction=YI, second_direction=-XI)
qs = QsHardEdgeMagnet(length, g, L, aper, lc)
m = qs.magnetic_field_at(Vectors.create(10 * MM, 0.1, 0))
self.assertTrue(Equal.equal_vector(m, Vectors.create(0.0, 0.0, 0.457)))
m = qs.magnetic_field_at(Vectors.create(15 * MM, 0.1, 0))
self.assertTrue(Equal.equal_vector(m, Vectors.create(0.0, 0.0, 0.6855)))
# Off mid-plane (z = 5 mm); the tiny second component is floating-point
# noise from the reference run.
m = qs.magnetic_field_at(Vectors.create(15 * MM, 0.1, 5 * MM))
self.assertTrue(Equal.equal_vector(m, Vectors.create(0.2285, 1.399158968025851E-17, 0.6855)))
def test_second_0(self):
"""
Sweep the second-order gradient over [-100, 100] with g = 0 and check
the field sampled on the mid-plane (z = 0) against reference values.
"""
length = 0.2 * M
aper = 30 * MM
g = 0
lc = LocalCoordinateSystem(main_direction=YI, second_direction=-XI)
# Build one magnet per second-gradient value and collect each field
# component at the same point (10 mm, 0.1, 0).
mx = Stream.linspace(-100, 100, 10).map(lambda k: QsHardEdgeMagnet(length, g, k, aper, lc)).map(
lambda qs: qs.magnetic_field_at(Vectors.create(10 * MM, 0.1, 0))).map(lambda m: m[0]).to_vector()
my = Stream.linspace(-100, 100, 10).map(lambda k: QsHardEdgeMagnet(length, g, k, aper, lc)).map(
lambda qs: qs.magnetic_field_at(Vectors.create(10 * MM, 0.1, 0))).map(lambda m: m[1]).to_vector()
mz = Stream.linspace(-100, 100, 10).map(lambda k: QsHardEdgeMagnet(length, g, k, aper, lc)).map(
lambda qs: qs.magnetic_field_at(Vectors.create(10 * MM, 0.1, 0))).map(lambda m: m[2]).to_vector()
# On the mid-plane the transverse components vanish; only the third
# component varies (linearly, per the reference numbers) with the sweep.
self.assertTrue(Equal.equal_vector(mx, np.array([-0.0, -0.0, -0.0, -0.0, -0.0, 0.0, 0.0, 0.0, 0.0, 0.0])))
self.assertTrue(Equal.equal_vector(my, np.array([-0.0, -0.0, -0.0, -0.0, -0.0, 0.0, 0.0, 0.0, 0.0, 0.0])))
self.assertTrue(Equal.equal_vector(mz, np.array(
[-0.005, -0.0038888888888888888, -0.002777777777777778, -0.0016666666666666672, -5.555555555555558E-4,
5.555555555555558E-4, 0.001666666666666666, 0.0027777777777777775, 0.0038888888888888888, 0.005])))
def test_second_1(self):
"""
Same second-order-gradient sweep as test_second_0, but sampled above the
mid-plane (z = +1 mm), where a transverse component appears.
"""
length = 0.2 * M
aper = 30 * MM
g = 0
lc = LocalCoordinateSystem(main_direction=YI, second_direction=-XI)
# Build one magnet per second-gradient value and collect each field
# component at the same off-plane point (10 mm, 0.1, 1 mm).
mx = Stream.linspace(-100, 100, 10).map(lambda k: QsHardEdgeMagnet(length, g, k, aper, lc)).map(
lambda qs: qs.magnetic_field_at(Vectors.create(10 * MM, 0.1, 1 * MM))).map(lambda m: m[0]).to_vector()
my = Stream.linspace(-100, 100, 10).map(lambda k: QsHardEdgeMagnet(length, g, k, aper, lc)).map(
lambda qs: qs.magnetic_field_at(Vectors.create(10 * MM, 0.1, 1 * MM))).map(lambda m: m[1]).to_vector()
mz = Stream.linspace(-100, 100, 10).map(lambda k: QsHardEdgeMagnet(length, g, k, aper, lc)).map(
lambda qs: qs.magnetic_field_at(Vectors.create(10 * MM, 0.1, 1 * MM))).map(lambda m: m[2]).to_vector()
self.assertTrue(Equal.equal_vector(mx, np.array(
[-0.001, -7.777777777777777E-4, -5.555555555555557E-4, -3.3333333333333343E-4, -1.1111111111111116E-4,
1.1111111111111116E-4, 3.3333333333333316E-4, 5.555555555555554E-4, 7.777777777777777E-4, 0.001]
)))
# The second component is at the 1e-20 level — floating-point noise in
# the reference data, kept verbatim for exact comparison.
self.assertTrue(Equal.equal_vector(my, np.array(
[-6.123233995736766E-20, -4.762515330017485E-20, -3.4017966642982043E-20, -2.0410779985789227E-20,
-6.80359332859641E-21, 6.80359332859641E-21, 2.041077998578921E-20, 3.4017966642982025E-20,
4.762515330017485E-20, 6.123233995736766E-20]
)))
self.assertTrue(Equal.equal_vector(mz, np.array(
[-0.00495, -0.00385, -0.0027500000000000003, -0.0016500000000000006, -5.500000000000002E-4,
5.500000000000002E-4, 0.0016499999999999991, 0.0027499999999999994, 0.00385, 0.00495])))
def test_second_2(self):
length = 0.2 * M
aper = 30 * MM
g = 0
lc = LocalCoordinateSystem(main_direction=YI, second_direction=-XI)
mx = Stream.linspace(-100, 100, 10).map(lambda k: QsHardEdgeMagnet(length, g, k, aper, lc)).map(
lambda qs: qs.magnetic_field_at(Vectors.create(10 * MM, 0.1, -1 * MM))).map(lambda m: m[0]).to_vector()
my = Stream.linspace(-100, 100, 10).map(lambda k: QsHardEdgeMagnet(length, g, k, aper, lc)).map(
lambda qs: qs.magnetic_field_at(Vectors.create(10 * MM, 0.1, -1 * MM))).map(lambda m: m[1]).to_vector()
mz = Stream.linspace(-100, 100, 10).map(lambda k: QsHardEdgeMagnet(length, g, k, aper, lc)).map(
lambda qs: qs.magnetic_field_at(Vectors.create(10 * MM, 0.1, -1 * MM))).map(lambda m: m[2]).to_vector()
self.assertTrue(Equal.equal_vector(mx, np.array(
[0.001, 7.777777777777777E-4, 5.555555555555557E-4, 3.3333333333333343E-4, 1.1111111111111116E-4,
-1.1111111111111116E-4, -3.3333333333333316E-4, -5.555555555555554E-4, -7.777777777777777E-4, -0.001]
)))
self.assertTrue(Equal.equal_vector(my, np.array(
[6.123233995736766E-20, 4.762515330017485E-20, 3.4017966642982043E-20, 2.0410779985789227E-20,
6.80359332859641E-21, -6.80359332859641E-21, -2.041077998578921E-20, -3.4017966642982025E-20,
-4.762515330017485E-20, -6.123233995736766E-20]
)))
self.assertTrue(Equal.equal_vector(mz, np.array(
[-0.00495, -0.00385, -0.0027500000000000003, -0.0016500000000000006, -5.500000000000002E-4,
5.500000000000002E-4, 0.0016499999999999991, 0.0027499999999999994, 0.00385, 0.00495]
)))
def test_second_3(self):
length = 0.2 * M
aper = 30 * MM
g = 0
lc = LocalCoordinateSystem(main_direction=YI, second_direction=-XI)
mx = Stream.linspace(-100, 100, 10).map(lambda k: QsHardEdgeMagnet(length, g, k, aper, lc)).map(
lambda qs: qs.magnetic_field_at(Vectors.create(-5 * MM, 0.1, -1 * MM))).map(lambda m: m[0]).to_vector()
my = Stream.linspace(-100, 100, 10).map(lambda k: QsHardEdgeMagnet(length, g, k, aper, lc)).map(
lambda qs: qs.magnetic_field_at(Vectors.create(-5 * MM, 0.1, -1 * MM))).map(lambda m: m[1]).to_vector()
mz = Stream.linspace(-100, 100, 10).map(lambda k: QsHardEdgeMagnet(length, g, k, aper, lc)).map(
lambda qs: qs.magnetic_field_at(Vectors.create(-5 * MM, 0.1, -1 * MM))).map(lambda m: m[2]).to_vector()
self.assertTrue(Equal.equal_vector(mx, np.array(
[-5.0E-4, -3.8888888888888887E-4, -2.7777777777777783E-4, -1.6666666666666672E-4, -5.555555555555558E-5,
5.555555555555558E-5, 1.6666666666666658E-4, 2.777777777777777E-4, 3.8888888888888887E-4, 5.0E-4]
)))
self.assertTrue(Equal.equal_vector(my, np.array(
[-3.061616997868383E-20, -2.3812576650087424E-20, -1.7008983321491022E-20, -1.0205389992894614E-20,
-3.401796664298205E-21, 3.401796664298205E-21, 1.0205389992894605E-20, 1.7008983321491013E-20,
2.3812576650087424E-20, 3.061616997868383E-20]
)))
self.assertTrue(Equal.equal_vector(mz, np.array(
[-0.0012000000000000001, -9.333333333333333E-4, -6.666666666666668E-4, -4.0000000000000013E-4,
-1.3333333333333337E-4, 1.3333333333333337E-4, 3.999999999999998E-4, 6.666666666666665E-4,
9.333333333333333E-4, 0.0012000000000000001]
)))
def test_second_4(self):
length = 0.2 * M
aper = 30 * MM
g = 0
lc = LocalCoordinateSystem(main_direction=YI, second_direction=-XI)
mx = Stream.linspace(-100, 100, 10).map(lambda k: QsHardEdgeMagnet(length, g, k, aper, lc)).map(
lambda qs: qs.magnetic_field_at(Vectors.create(-8 * MM, 0.1, 1 * MM))).map(lambda m: m[0]).to_vector()
my = Stream.linspace(-100, 100, 10).map(lambda k: QsHardEdgeMagnet(length, g, k, aper, lc)).map(
lambda qs: qs.magnetic_field_at(Vectors.create(-8 * MM, 0.1, 1 * MM))).map(lambda m: m[1]).to_vector()
mz = Stream.linspace(-100, 100, 10).map(lambda k: QsHardEdgeMagnet(length, g, k, aper, lc)).map(
lambda qs: qs.magnetic_field_at(Vectors.create(-8 * MM, 0.1, 1 * MM))).map(lambda m: m[2]).to_vector()
self.assertTrue(Equal.equal_vector(mx, np.array(
[7.999999999999999E-4, 6.222222222222221E-4, 4.444444444444444E-4, 2.6666666666666673E-4,
8.88888888888889E-5, -8.88888888888889E-5, -2.666666666666665E-4, -4.444444444444443E-4,
-6.222222222222221E-4, -7.999999999999999E-4]
)))
self.assertTrue(Equal.equal_vector(my, np.array(
[4.8985871965894125E-20, 3.8100122640139875E-20, 2.7214373314385626E-20, 1.632862398863138E-20,
5.442874662877126E-21, -5.442874662877126E-21, -1.6328623988631368E-20, -2.721437331438562E-20,
-3.8100122640139875E-20, -4.8985871965894125E-20]
)))
self.assertTrue(Equal.equal_vector(mz, np.array(
[-0.00315, -0.00245, -0.00175, -0.0010500000000000004, -3.500000000000001E-4, 3.500000000000001E-4,
0.0010499999999999995, 0.0017499999999999996, 0.00245, 0.00315]
)))
def test_quad_and_second_0(self):
length = 0.2 * M
aper = 30 * MM
lc = LocalCoordinateSystem(main_direction=YI, second_direction=-XI)
p = Vectors.create(-8 * MM, 0.1, 1 * MM)
mx = Stream.linspace(-100, 100, 10).map(
lambda k: QsHardEdgeMagnet(length, np.sin(k / 180) * 20, (1.1 ** (k / 2)) * 2, aper, lc)).map(
lambda qs: qs.magnetic_field_at(p)).map(lambda m: m[0]).to_vector()
my = Stream.linspace(-100, 100, 10).map(
lambda k: QsHardEdgeMagnet(length, np.sin(k / 180) * 20, (1.1 ** (k / 2)) * 2, aper, lc)).map(
lambda qs: qs.magnetic_field_at(p)).map(lambda m: m[1]).to_vector()
mz = Stream.linspace(-100, 100, 10).map(
lambda k: QsHardEdgeMagnet(length, np.sin(k / 180) * 20, (1.1 ** (k / 2)) * 2, aper, lc)).map(
lambda qs: qs.magnetic_field_at(p)).map(lambda m: m[2]).to_vector()
self.assertTrue(Equal.equal_vector(mx, np.array(
[0.01054817141861684, 0.008375158765307863, 0.006074168017454833, 0.0036793034142077055,
0.0012243616386317005, -0.0012609533747681091, -0.003760913727964488, -0.006301201552951284,
-0.009026933421046367, -0.012426561361512444]
)))
self.assertTrue(Equal.equal_vector(my, np.array(
[6.458892182333354E-19, 5.128305687142586E-19, 3.7193552100296426E-19, 2.2529235746506973E-19,
7.497052808745602E-20, -7.721112571419089E-20, -2.302895479410525E-19, -3.8583731563020608E-19,
-5.52740256010035E-19, -7.609074297892195E-19]
)))
self.assertTrue(Equal.equal_vector(mz, np.array(
[-0.08438592505476789, -0.06700286672870447, -0.04859794794067867, -0.029447702336309195,
-0.009833171528290597, 0.009977251489327706, 0.029769042946726516, 0.04949189248669594,
0.06956922943567483, 0.09178208545491932]
)))
    def test_track_y(self):
        """
        Sextupole QS magnet tracking test, compared in the y direction.

        Tracks a ring of phase-space particles through a hard-edge QS
        magnet and checks the projected (y, y') coordinates against
        stored reference values.
        """
        plane = PhaseSpaceParticle.YYP_PLANE
        delta = 0.  # momentum deviation of the test particles
        number = 6  # particles distributed on the phase ellipse
        lc = LocalCoordinateSystem(main_direction=YI, second_direction=-XI)
        # Gradient 0 with second-gradient 20000: a pure sextupole; the huge
        # aperture keeps all test particles inside the field region.
        qs = QsHardEdgeMagnet(0.2, 0, 10000 * 2, 300000 * MM, lc)
        # Reference particle starting at (0, -0.5, 0) moving along +Y.
        rp = ParticleFactory.create_proton(
            Vectors.create(0, -0.5, 0), YI
        )
        # print(f"rp={rp}")
        # Ellipse of 3.5 mm x 7.2 mrad in the y-y' plane around the reference.
        pps = PhaseSpaceParticle.phase_space_particles_along_positive_ellipse_in_plane(
            plane, 3.5 * MM, 7.2 * MM, delta, number
        )
        # print(*pps, sep='\n', end='\n\n')
        pp = ParticleFactory.create_from_phase_space_particles(
            rp, rp.get_natural_coordinate_system(y_direction=ZI), pps
        )
        # print(*pp, sep='\n\n')
        # Track the ensemble first, then the reference particle; both rp and
        # pp are mutated in place by the runners.
        ParticleRunner.run_ps_only_cpu0(pp, qs, 1.2)
        ParticleRunner.run_only(rp, qs, 1.2)
        # print(f"rp={rp}")
        # print(*pp, sep='\n\n')
        # Convert tracked particles back to phase-space coordinates relative
        # to the (now advanced) reference particle.
        pps_end = PhaseSpaceParticle.create_from_running_particles(rp, rp.get_natural_coordinate_system(), pp)
        li = PhaseSpaceParticle.phase_space_particles_project_to_plane(pps_end, plane)
        # Express positions in mm and angles in mrad for comparison.
        li = np.array(
            [[x / MM, xp / MRAD] for x, xp in li]
        )
        x = li[:, 0]
        y = li[:, 1]
        # Reference values (mm / mrad) recorded from a trusted run.
        x0 = np.array(
            [4.571009592873671, 13.005311328487931, 4.473631539146663, -4.5763158484424205, -13.005311328486815,
             -4.473631539149022]
        )
        y0 = np.array(
            [1.9535672206449075, 13.092945863265955, 5.607514554681223, -1.9596240807758292, -13.092945863264303,
             -5.6075145546827025]
        )
        # 0.05 (mm / mrad) tolerance against the reference values.
        self.assertTrue(
            (np.abs(x.flatten() - x0.flatten()) < 0.05).all()
        )
        self.assertTrue(
            (np.abs(y.flatten() - y0.flatten()) < 0.05).all()
        )
        # Plot2.plot2d([(li, 'r.')])
        #
        # Plot2.plot2d([(np.column_stack((x0, y0)), 'k.')])
        #
        # Plot2.show()
    def test_track_x(self):
        """
        Sextupole QS magnet tracking test, compared in the x direction.

        Same setup as test_track_y, but the particle ellipse and the
        checked projection live in the x-x' plane.
        """
        plane = PhaseSpaceParticle.XXP_PLANE
        delta = 0.  # momentum deviation of the test particles
        number = 6  # particles distributed on the phase ellipse
        lc = LocalCoordinateSystem(main_direction=YI, second_direction=-XI)
        # Gradient 0 with second-gradient 20000: a pure sextupole; the huge
        # aperture keeps all test particles inside the field region.
        qs = QsHardEdgeMagnet(0.2, 0, 10000 * 2, 300000 * MM, lc)
        # Reference particle starting at (0, -0.5, 0) moving along +Y.
        rp = ParticleFactory.create_proton(
            Vectors.create(0, -0.5, 0), YI
        )
        # print(f"rp={rp}")
        # Ellipse of 3.5 mm x 7.2 mrad in the x-x' plane around the reference.
        pps = PhaseSpaceParticle.phase_space_particles_along_positive_ellipse_in_plane(
            plane, 3.5 * MM, 7.2 * MM, delta, number
        )
        # print(*pps, sep='\n', end='\n\n')
        pp = ParticleFactory.create_from_phase_space_particles(
            rp, rp.get_natural_coordinate_system(y_direction=ZI), pps
        )
        # print(*pp, sep='\n\n')
        # Track the ensemble first, then the reference particle; both rp and
        # pp are mutated in place by the runners.
        ParticleRunner.run_ps_only_cpu0(pp, qs, 1.2)
        ParticleRunner.run_only(rp, qs, 1.2)
        # print(f"rp={rp}")
        # print(*pp, sep='\n\n')
        # Convert tracked particles back to phase-space coordinates relative
        # to the (now advanced) reference particle.
        pps_end = PhaseSpaceParticle.create_from_running_particles(rp, rp.get_natural_coordinate_system(), pp)
        li = PhaseSpaceParticle.phase_space_particles_project_to_plane(pps_end, plane)
        # Express positions in mm and angles in mrad for comparison.
        li = np.array(
            [[x / MM, xp / MRAD] for x, xp in li]
        )
        x = li[:, 0]
        y = li[:, 1]
        # Reference values (mm / mrad) recorded from a trusted run.
        x0 = np.array(
            [-1.6363082716640025, -2.964662344582841, 3.848704911140664, -10.799631136000919, -29.05411958099093,
             -5.103782688758285]
        )
        y0 = np.array(
            [-8.439377477318738, -14.025193841237206, 4.508357473356099, -12.381715740031598, -40.36236303026269,
             -6.717141236931342]
        )
        # 0.05 (mm / mrad) tolerance against the reference values.
        self.assertTrue(
            (np.abs(x.flatten() - x0.flatten()) < 0.05).all()
        )
        self.assertTrue(
            (np.abs(y.flatten() - y0.flatten()) < 0.05).all()
        )
        # Plot2.plot2d([(li, 'r.')])
        #
        # Plot2.plot2d([(np.column_stack((x0, y0)), 'k.')])
        #
        # Plot2.show()
if __name__ == '__main__':
    # Discover and run all tests in this module when executed as a script.
    unittest.main()
| 43.12336
| 116
| 0.609251
| 2,174
| 16,430
| 4.50598
| 0.126955
| 0.011433
| 0.0147
| 0.01715
| 0.715394
| 0.712332
| 0.712332
| 0.712332
| 0.704369
| 0.687015
| 0
| 0.288805
| 0.240475
| 16,430
| 380
| 117
| 43.236842
| 0.496194
| 0.032623
| 0
| 0.537255
| 0
| 0
| 0.000509
| 0
| 0
| 0
| 0
| 0
| 0.109804
| 1
| 0.039216
| false
| 0
| 0.031373
| 0
| 0.07451
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
74737681cf0d8052b62c5b5de4ce70418a73d187
| 61
|
py
|
Python
|
ml/Graph/scratch.py
|
Shivams9/pythoncodecamp
|
e6cd27f4704a407ee360414a8c9236b254117a59
|
[
"MIT"
] | 6
|
2021-08-04T08:15:22.000Z
|
2022-02-02T11:15:56.000Z
|
ML/Graph/scratch.py
|
Maurya232Abhishek/Python-repository-for-basics
|
3dcec5c529a0847df07c9dcc1424675754ce6376
|
[
"MIT"
] | 14
|
2021-08-02T06:28:00.000Z
|
2022-03-25T10:44:15.000Z
|
ML/Graph/scratch.py
|
Maurya232Abhishek/Python-repository-for-basics
|
3dcec5c529a0847df07c9dcc1424675754ce6376
|
[
"MIT"
] | 6
|
2021-07-16T04:56:41.000Z
|
2022-02-16T04:40:06.000Z
|
# Scratch example: a small nested list plus two flat lists.
l = [list(row) for row in ((1, 2, 3), (2, 2, 3), (3, 3, 3))]
print(l)
a = [2, 3]
b = [4, 5]
print(b)
| 12.2
| 27
| 0.459016
| 20
| 61
| 1.4
| 0.45
| 0.214286
| 0.214286
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.22807
| 0.065574
| 61
| 5
| 28
| 12.2
| 0.263158
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.4
| 1
| 0
| 1
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
747d2dffa5879ddf1f60b8ac85c5568915e42c9d
| 178
|
py
|
Python
|
boxPrint/printers/printrun_printer.py
|
bobofei/Mohou_Box-master
|
3d1c320a6258422406e2ba2f96ec7986beba1330
|
[
"Apache-2.0"
] | null | null | null |
boxPrint/printers/printrun_printer.py
|
bobofei/Mohou_Box-master
|
3d1c320a6258422406e2ba2f96ec7986beba1330
|
[
"Apache-2.0"
] | null | null | null |
boxPrint/printers/printrun_printer.py
|
bobofei/Mohou_Box-master
|
3d1c320a6258422406e2ba2f96ec7986beba1330
|
[
"Apache-2.0"
] | null | null | null |
import threaded_printer
class Printer(threaded_printer.Printer):
    """Thin subclass of threaded_printer.Printer; adds no behaviour of its own."""

    def __init__(self, profile, usb_info):
        # Explicit unbound base-class call (kept for Python 2 compatibility —
        # the module uses an implicit relative import); simply forwards both
        # arguments to the threaded printer.
        threaded_printer.Printer.__init__(self, profile, usb_info)
| 22.25
| 66
| 0.769663
| 22
| 178
| 5.636364
| 0.454545
| 0.362903
| 0.354839
| 0.290323
| 0.354839
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.146067
| 178
| 7
| 67
| 25.428571
| 0.815789
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0.25
| 0
| 0.75
| 0.75
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 1
|
0
| 8
|
748c0c6816a34dc727849d503d55e952ab640086
| 87,762
|
py
|
Python
|
test/features/pit/fixtures_pit.py
|
NikkaZ/dbtvault_spark
|
383723cd2a35a0bc7b82fd4e77fb1eda0f68cb07
|
[
"Apache-2.0"
] | null | null | null |
test/features/pit/fixtures_pit.py
|
NikkaZ/dbtvault_spark
|
383723cd2a35a0bc7b82fd4e77fb1eda0f68cb07
|
[
"Apache-2.0"
] | null | null | null |
test/features/pit/fixtures_pit.py
|
NikkaZ/dbtvault_spark
|
383723cd2a35a0bc7b82fd4e77fb1eda0f68cb07
|
[
"Apache-2.0"
] | null | null | null |
from behave import fixture
@fixture
def pit(context):
    """
    Define the structures and metadata to perform PIT load.

    Configures one customer hub with three satellites (details, login,
    profile) and a PIT table that joins them via an AS_OF_DATE table.
    All attributes are stored on the behave ``context``.
    """
    context.vault_structure_type = "pit"
    # Hash-key derivation per staging view: one PK plus a HASHDIFF over
    # that satellite's payload columns.
    context.hashed_columns = {
        "STG_CUSTOMER_DETAILS": {
            "CUSTOMER_PK": "CUSTOMER_ID",
            "HASHDIFF": {"is_hashdiff": True,
                         "columns": ["CUSTOMER_ADDRESS", "CUSTOMER_DOB", "CUSTOMER_NAME"]
                         }
        },
        "STG_CUSTOMER_LOGIN": {
            "CUSTOMER_PK": "CUSTOMER_ID",
            "HASHDIFF": {"is_hashdiff": True,
                         "columns": ["DEVICE_USED", "LAST_LOGIN_DATE"]
                         }
        },
        "STG_CUSTOMER_PROFILE": {
            "CUSTOMER_PK": "CUSTOMER_ID",
            "HASHDIFF": {"is_hashdiff": True,
                         "columns": ["DASHBOARD_COLOUR", "DISPLAY_NAME"]
                         }
        }
    }
    # EFFECTIVE_FROM is derived from the load date in every staging view.
    context.derived_columns = {
        "STG_CUSTOMER_DETAILS": {
            "EFFECTIVE_FROM": "LOAD_DATE"
        },
        "STG_CUSTOMER_LOGIN": {
            "EFFECTIVE_FROM": "LOAD_DATE"
        },
        "STG_CUSTOMER_PROFILE": {
            "EFFECTIVE_FROM": "LOAD_DATE"
        }
    }
    # Raw-vault structures: the hub, its three satellites, and the PIT table.
    context.vault_structure_columns = {
        "HUB_CUSTOMER": {
            "source_model": ["STG_CUSTOMER_DETAILS",
                             "STG_CUSTOMER_LOGIN",
                             "STG_CUSTOMER_PROFILE"],
            "src_pk": "CUSTOMER_PK",
            "src_nk": "CUSTOMER_ID",
            "src_ldts": "LOAD_DATE",
            "src_source": "SOURCE"
        },
        "SAT_CUSTOMER_DETAILS": {
            "source_model": "STG_CUSTOMER_DETAILS",
            "src_pk": "CUSTOMER_PK",
            "src_hashdiff": "HASHDIFF",
            "src_payload": ["CUSTOMER_NAME", "CUSTOMER_ADDRESS", "CUSTOMER_DOB"],
            "src_eff": "EFFECTIVE_FROM",
            "src_ldts": "LOAD_DATE",
            "src_source": "SOURCE"
        },
        "SAT_CUSTOMER_LOGIN": {
            "source_model": "STG_CUSTOMER_LOGIN",
            "src_pk": "CUSTOMER_PK",
            "src_hashdiff": "HASHDIFF",
            "src_payload": ["LAST_LOGIN_DATE", "DEVICE_USED"],
            "src_eff": "EFFECTIVE_FROM",
            "src_ldts": "LOAD_DATE",
            "src_source": "SOURCE"
        },
        "SAT_CUSTOMER_PROFILE": {
            "source_model": "STG_CUSTOMER_PROFILE",
            "src_pk": "CUSTOMER_PK",
            "src_hashdiff": "HASHDIFF",
            "src_payload": ["DASHBOARD_COLOUR", "DISPLAY_NAME"],
            "src_eff": "EFFECTIVE_FROM",
            "src_ldts": "LOAD_DATE",
            "src_source": "SOURCE"
        },
        # The PIT joins all three satellites on PK/LDTS per AS_OF_DATE.
        "PIT_CUSTOMER": {
            "source_model": "HUB_CUSTOMER",
            "src_pk": "CUSTOMER_PK",
            "as_of_dates_table": "AS_OF_DATE",
            "satellites":
                {
                    "SAT_CUSTOMER_DETAILS": {
                        "pk":
                            {"PK": "CUSTOMER_PK"},
                        "ldts":
                            {"LDTS": "LOAD_DATE"}
                    },
                    "SAT_CUSTOMER_LOGIN": {
                        "pk":
                            {"PK": "CUSTOMER_PK"},
                        "ldts":
                            {"LDTS": "LOAD_DATE"}
                    },
                    "SAT_CUSTOMER_PROFILE": {
                        "pk":
                            {"PK": "CUSTOMER_PK"},
                        "ldts":
                            {"LDTS": "LOAD_DATE"}
                    }
                },
            "stage_tables":
                {
                    "STG_CUSTOMER_DETAILS": "LOAD_DATE",
                    "STG_CUSTOMER_LOGIN": "LOAD_DATE",
                    "STG_CUSTOMER_PROFILE": "LOAD_DATE"
                },
            "src_ldts": "LOAD_DATE"
        }
    }
    # Column lists for the raw staging seed files.
    context.stage_columns = {
        "RAW_STAGE_DETAILS":
            ["CUSTOMER_ID",
             "CUSTOMER_NAME",
             "CUSTOMER_ADDRESS",
             "CUSTOMER_DOB",
             "LOAD_DATE",
             "SOURCE"]
        ,
        "RAW_STAGE_LOGIN":
            ["CUSTOMER_ID",
             "LAST_LOGIN_DATE",
             "DEVICE_USED",
             "LOAD_DATE",
             "SOURCE"]
        ,
        "RAW_STAGE_PROFILE":
            ["CUSTOMER_ID",
             "DASHBOARD_COLOUR",
             "DISPLAY_NAME",
             "LOAD_DATE",
             "SOURCE"]
    }
    # dbt seed column types for every table involved in the load.
    context.seed_config = {
        "RAW_STAGE_DETAILS": {
            "+column_types": {
                "CUSTOMER_ID": "VARCHAR",
                "CUSTOMER_NAME": "VARCHAR",
                "CUSTOMER_ADDRESS": "VARCHAR",
                "CUSTOMER_DOB": "DATE",
                "LOAD_DATE": "DATETIME",
                "SOURCE": "VARCHAR"
            }
        },
        "RAW_STAGE_LOGIN": {
            "+column_types": {
                "CUSTOMER_ID": "VARCHAR",
                "LAST_LOGIN_DATE": "DATETIME",
                "DEVICE_USED": "VARCHAR",
                "LOAD_DATE": "DATETIME",
                "SOURCE": "VARCHAR"
            }
        },
        "RAW_STAGE_PROFILE": {
            "+column_types": {
                "CUSTOMER_ID": "VARCHAR",
                "DASHBOARD_COLOUR": "VARCHAR",
                "DISPLAY_NAME": "VARCHAR",
                "LOAD_DATE": "DATETIME",
                "SOURCE": "VARCHAR"
            }
        },
        "HUB_CUSTOMER": {
            "+column_types": {
                "CUSTOMER_PK": "BINARY(16)",
                "CUSTOMER_ID": "VARCHAR",
                "LOAD_DATE": "DATETIME",
                "SOURCE": "VARCHAR"
            }
        },
        "SAT_CUSTOMER_DETAILS": {
            "+column_types": {
                "CUSTOMER_PK": "BINARY(16)",
                "HASHDIFF": "BINARY(16)",
                "CUSTOMER_NAME": "VARCHAR",
                "CUSTOMER_ADDRESS": "VARCHAR",
                "CUSTOMER_DOB": "DATE",
                "EFFECTIVE_FROM": "DATETIME",
                "LOAD_DATE": "DATETIME",
                "SOURCE": "VARCHAR"
            }
        },
        "SAT_CUSTOMER_LOGIN": {
            "+column_types": {
                "CUSTOMER_PK": "BINARY(16)",
                "HASHDIFF": "BINARY(16)",
                "DEVICE_USED": "VARCHAR",
                "LAST_LOGIN_DATE": "DATETIME",
                "EFFECTIVE_FROM": "DATETIME",
                "LOAD_DATE": "DATETIME",
                "SOURCE": "VARCHAR"
            }
        },
        "SAT_CUSTOMER_PROFILE": {
            "+column_types": {
                "CUSTOMER_PK": "BINARY(16)",
                "HASHDIFF": "BINARY(16)",
                "DASHBOARD_COLOUR": "VARCHAR",
                "DISPLAY_NAME": "VARCHAR",
                "EFFECTIVE_FROM": "DATETIME",
                "LOAD_DATE": "DATETIME",
                "SOURCE": "VARCHAR"
            }
        },
        "AS_OF_DATE": {
            "+column_types": {
                "AS_OF_DATE": "DATETIME"
            }
        },
        "PIT_CUSTOMER": {
            "+column_types": {
                "AS_OF_DATE": "DATETIME",
                "CUSTOMER_PK": "BINARY(16)",
                "SAT_CUSTOMER_DETAILS_PK": "BINARY(16)",
                "SAT_CUSTOMER_DETAILS_LDTS": "DATETIME",
                "SAT_CUSTOMER_LOGIN_PK": "BINARY(16)",
                "SAT_CUSTOMER_LOGIN_LDTS": "DATETIME",
                "SAT_CUSTOMER_PROFILE_PK": "BINARY(16)",
                "SAT_CUSTOMER_PROFILE_LDTS": "DATETIME"
            }
        }
    }
@fixture
def pit_one_sat(context):
    """
    Define the structures and metadata to perform PIT load.

    Single-satellite variant: a DATE-grained structure set plus a
    parallel ``_TS`` set that uses LOAD_DATETIME throughout, with four
    PIT tables (plain, _TS, _LG, _HG) over the two hubs.
    """
    context.vault_structure_type = "pit"
    # Hash-key derivation: the _TS staging view mirrors the plain one.
    context.hashed_columns = {
        "STG_CUSTOMER_DETAILS": {
            "CUSTOMER_PK": "CUSTOMER_ID",
            "HASHDIFF": {"is_hashdiff": True,
                         "columns": ["CUSTOMER_ADDRESS", "CUSTOMER_DOB", "CUSTOMER_NAME"]
                         }
        },
        "STG_CUSTOMER_DETAILS_TS": {
            "CUSTOMER_PK": "CUSTOMER_ID",
            "HASHDIFF": {"is_hashdiff": True,
                         "columns": ["CUSTOMER_ADDRESS", "CUSTOMER_DOB", "CUSTOMER_NAME"]
                         }
        }
    }
    # EFFECTIVE_FROM derives from LOAD_DATE (plain) or LOAD_DATETIME (_TS).
    context.derived_columns = {
        "STG_CUSTOMER_DETAILS": {
            "EFFECTIVE_FROM": "LOAD_DATE"
        },
        "STG_CUSTOMER_DETAILS_TS": {
            "EFFECTIVE_FROM": "LOAD_DATETIME"
        }
    }
    # Raw-vault structures: two hubs, two satellites, four PIT tables.
    context.vault_structure_columns = {
        "HUB_CUSTOMER": {
            "source_model": ["STG_CUSTOMER_DETAILS",
                             ],
            "src_pk": "CUSTOMER_PK",
            "src_nk": "CUSTOMER_ID",
            "src_ldts": "LOAD_DATE",
            "src_source": "SOURCE"
        },
        "HUB_CUSTOMER_TS": {
            "source_model": ["STG_CUSTOMER_DETAILS_TS",
                             ],
            "src_pk": "CUSTOMER_PK",
            "src_nk": "CUSTOMER_ID",
            "src_ldts": "LOAD_DATETIME",
            "src_source": "SOURCE"
        },
        "SAT_CUSTOMER_DETAILS": {
            "source_model": "STG_CUSTOMER_DETAILS",
            "src_pk": "CUSTOMER_PK",
            "src_hashdiff": "HASHDIFF",
            "src_payload": ["CUSTOMER_NAME", "CUSTOMER_ADDRESS", "CUSTOMER_DOB"],
            "src_eff": "EFFECTIVE_FROM",
            "src_ldts": "LOAD_DATE",
            "src_source": "SOURCE"
        },
        "SAT_CUSTOMER_DETAILS_TS": {
            "source_model": "STG_CUSTOMER_DETAILS_TS",
            "src_pk": "CUSTOMER_PK",
            "src_hashdiff": "HASHDIFF",
            "src_payload": ["CUSTOMER_NAME", "CUSTOMER_ADDRESS", "CUSTOMER_DOB"],
            "src_eff": "EFFECTIVE_FROM",
            "src_ldts": "LOAD_DATETIME",
            "src_source": "SOURCE"
        },
        "PIT_CUSTOMER": {
            "source_model": "HUB_CUSTOMER",
            "src_pk": "CUSTOMER_PK",
            "as_of_dates_table": "AS_OF_DATE",
            "satellites":
                {
                    "SAT_CUSTOMER_DETAILS": {
                        "pk":
                            {"PK": "CUSTOMER_PK"},
                        "ldts":
                            {"LDTS": "LOAD_DATE"}
                    }
                },
            "stage_tables":
                {
                    "STG_CUSTOMER_DETAILS": "LOAD_DATE",
                },
            "src_ldts": "LOAD_DATE"
        },
        "PIT_CUSTOMER_TS": {
            "source_model": "HUB_CUSTOMER_TS",
            "src_pk": "CUSTOMER_PK",
            "as_of_dates_table": "AS_OF_DATE",
            "satellites":
                {
                    "SAT_CUSTOMER_DETAILS_TS": {
                        "pk":
                            {"PK": "CUSTOMER_PK"},
                        "ldts":
                            {"LDTS": "LOAD_DATETIME"}
                    }
                },
            "stage_tables":
                {
                    "STG_CUSTOMER_DETAILS_TS": "LOAD_DATETIME",
                },
            "src_ldts": "LOAD_DATETIME"
        },
        # _LG mirrors _TS; _HG mirrors the plain PIT (same config, distinct
        # target tables).
        "PIT_CUSTOMER_LG": {
            "source_model": "HUB_CUSTOMER_TS",
            "src_pk": "CUSTOMER_PK",
            "as_of_dates_table": "AS_OF_DATE",
            "satellites":
                {
                    "SAT_CUSTOMER_DETAILS_TS": {
                        "pk":
                            {"PK": "CUSTOMER_PK"},
                        "ldts":
                            {"LDTS": "LOAD_DATETIME"}
                    }
                },
            "stage_tables":
                {
                    "STG_CUSTOMER_DETAILS_TS": "LOAD_DATETIME",
                },
            "src_ldts": "LOAD_DATETIME"
        },
        "PIT_CUSTOMER_HG": {
            "source_model": "HUB_CUSTOMER",
            "src_pk": "CUSTOMER_PK",
            "as_of_dates_table": "AS_OF_DATE",
            "satellites":
                {
                    "SAT_CUSTOMER_DETAILS": {
                        "pk":
                            {"PK": "CUSTOMER_PK"},
                        "ldts":
                            {"LDTS": "LOAD_DATE"}
                    }
                },
            "stage_tables":
                {
                    "STG_CUSTOMER_DETAILS": "LOAD_DATE",
                },
            "src_ldts": "LOAD_DATE"
        }
    }
    # Column lists for the raw staging seed files.
    context.stage_columns = {
        "RAW_STAGE_DETAILS":
            ["CUSTOMER_ID",
             "CUSTOMER_NAME",
             "CUSTOMER_ADDRESS",
             "CUSTOMER_DOB",
             "LOAD_DATE",
             "SOURCE"],
        "RAW_STAGE_DETAILS_TS":
            ["CUSTOMER_ID",
             "CUSTOMER_NAME",
             "CUSTOMER_ADDRESS",
             "CUSTOMER_DOB",
             "LOAD_DATETIME",
             "SOURCE"]
    }
    # dbt seed column types: DATE for the plain set, DATETIME for _TS.
    context.seed_config = {
        "RAW_STAGE_DETAILS": {
            "+column_types": {
                "CUSTOMER_ID": "VARCHAR",
                "CUSTOMER_NAME": "VARCHAR",
                "CUSTOMER_ADDRESS": "VARCHAR",
                "CUSTOMER_DOB": "DATE",
                "LOAD_DATE": "DATE",
                "SOURCE": "VARCHAR"
            }
        },
        "RAW_STAGE_DETAILS_TS": {
            "+column_types": {
                "CUSTOMER_ID": "VARCHAR",
                "CUSTOMER_NAME": "VARCHAR",
                "CUSTOMER_ADDRESS": "VARCHAR",
                "CUSTOMER_DOB": "DATE",
                "LOAD_DATETIME": "DATETIME",
                "SOURCE": "VARCHAR"
            }
        },
        "HUB_CUSTOMER": {
            "+column_types": {
                "CUSTOMER_PK": "BINARY(16)",
                "CUSTOMER_ID": "VARCHAR",
                "LOAD_DATE": "DATE",
                "SOURCE": "VARCHAR"
            }
        },
        "HUB_CUSTOMER_TS": {
            "+column_types": {
                "CUSTOMER_PK": "BINARY(16)",
                "CUSTOMER_ID": "VARCHAR",
                "LOAD_DATETIME": "DATETIME",
                "SOURCE": "VARCHAR"
            }
        },
        "SAT_CUSTOMER_DETAILS": {
            "+column_types": {
                "CUSTOMER_PK": "BINARY(16)",
                "HASHDIFF": "BINARY(16)",
                "CUSTOMER_NAME": "VARCHAR",
                "CUSTOMER_ADDRESS": "VARCHAR",
                "CUSTOMER_DOB": "DATE",
                "EFFECTIVE_FROM": "DATE",
                "LOAD_DATE": "DATE",
                "SOURCE": "VARCHAR"
            }
        },
        "SAT_CUSTOMER_DETAILS_TS": {
            "+column_types": {
                "CUSTOMER_PK": "BINARY(16)",
                "HASHDIFF": "BINARY(16)",
                "CUSTOMER_NAME": "VARCHAR",
                "CUSTOMER_ADDRESS": "VARCHAR",
                "CUSTOMER_DOB": "DATE",
                "EFFECTIVE_FROM": "DATETIME",
                "LOAD_DATETIME": "DATETIME",
                "SOURCE": "VARCHAR"
            }
        },
        "AS_OF_DATE": {
            "+column_types": {
                "AS_OF_DATE": "DATETIME"
            }
        },
        "PIT_CUSTOMER": {
            "+column_types": {
                "AS_OF_DATE": "DATETIME",
                "CUSTOMER_PK": "BINARY(16)",
                "SAT_CUSTOMER_DETAILS_PK": "BINARY(16)",
                "SAT_CUSTOMER_DETAILS_LDTS": "DATETIME"
            }
        },
        "PIT_CUSTOMER_TS": {
            "+column_types": {
                "AS_OF_DATE": "DATETIME",
                "CUSTOMER_PK": "BINARY(16)",
                "SAT_CUSTOMER_DETAILS_TS_PK": "BINARY(16)",
                "SAT_CUSTOMER_DETAILS_TS_LDTS": "DATETIME"
            }
        },
        "PIT_CUSTOMER_LG": {
            "+column_types": {
                "AS_OF_DATE": "DATETIME",
                "CUSTOMER_PK": "BINARY(16)",
                "SAT_CUSTOMER_DETAILS_TS_PK": "BINARY(16)",
                "SAT_CUSTOMER_DETAILS_TS_LDTS": "DATETIME"
            }
        },
        "PIT_CUSTOMER_HG": {
            "+column_types": {
                "AS_OF_DATE": "DATETIME",
                "CUSTOMER_PK": "BINARY(16)",
                "SAT_CUSTOMER_DETAILS_PK": "BINARY(16)",
                "SAT_CUSTOMER_DETAILS_LDTS": "DATETIME"
            }
        }
    }
@fixture
def pit_two_sats(context):
    """
    Define the structures and metadata to perform PIT load.

    Two-satellite variant (details + login), each with a DATE-grained
    set and a parallel ``_TS`` set keyed on LOAD_DATETIME, plus four PIT
    tables (plain, _TS, _LG, _HG) over the two hubs.
    """
    context.vault_structure_type = "pit"
    # Hash-key derivation: each _TS staging view mirrors the plain one.
    context.hashed_columns = {
        "STG_CUSTOMER_DETAILS": {
            "CUSTOMER_PK": "CUSTOMER_ID",
            "HASHDIFF": {"is_hashdiff": True,
                         "columns": ["CUSTOMER_ADDRESS", "CUSTOMER_DOB", "CUSTOMER_NAME"]
                         }
        },
        "STG_CUSTOMER_DETAILS_TS": {
            "CUSTOMER_PK": "CUSTOMER_ID",
            "HASHDIFF": {"is_hashdiff": True,
                         "columns": ["CUSTOMER_ADDRESS", "CUSTOMER_DOB", "CUSTOMER_NAME"]
                         }
        },
        "STG_CUSTOMER_LOGIN": {
            "CUSTOMER_PK": "CUSTOMER_ID",
            "HASHDIFF": {"is_hashdiff": True,
                         "columns": ["DEVICE_USED", "LAST_LOGIN_DATE"]
                         }
        },
        "STG_CUSTOMER_LOGIN_TS": {
            "CUSTOMER_PK": "CUSTOMER_ID",
            "HASHDIFF": {"is_hashdiff": True,
                         "columns": ["DEVICE_USED", "LAST_LOGIN_DATE"]
                         }
        }
    }
    # EFFECTIVE_FROM derives from LOAD_DATE (plain) or LOAD_DATETIME (_TS).
    context.derived_columns = {
        "STG_CUSTOMER_DETAILS": {
            "EFFECTIVE_FROM": "LOAD_DATE"
        },
        "STG_CUSTOMER_DETAILS_TS": {
            "EFFECTIVE_FROM": "LOAD_DATETIME"
        },
        "STG_CUSTOMER_LOGIN": {
            "EFFECTIVE_FROM": "LOAD_DATE"
        },
        "STG_CUSTOMER_LOGIN_TS": {
            "EFFECTIVE_FROM": "LOAD_DATETIME"
        }
    }
    # Raw-vault structures: two hubs, four satellites, four PIT tables.
    context.vault_structure_columns = {
        "HUB_CUSTOMER": {
            "source_model": ["STG_CUSTOMER_DETAILS",
                             ],
            "src_pk": "CUSTOMER_PK",
            "src_nk": "CUSTOMER_ID",
            "src_ldts": "LOAD_DATE",
            "src_source": "SOURCE"
        },
        "HUB_CUSTOMER_TS": {
            "source_model": ["STG_CUSTOMER_DETAILS_TS",
                             ],
            "src_pk": "CUSTOMER_PK",
            "src_nk": "CUSTOMER_ID",
            "src_ldts": "LOAD_DATETIME",
            "src_source": "SOURCE"
        },
        "SAT_CUSTOMER_DETAILS": {
            "source_model": "STG_CUSTOMER_DETAILS",
            "src_pk": "CUSTOMER_PK",
            "src_hashdiff": "HASHDIFF",
            "src_payload": ["CUSTOMER_NAME", "CUSTOMER_ADDRESS", "CUSTOMER_DOB"],
            "src_eff": "EFFECTIVE_FROM",
            "src_ldts": "LOAD_DATE",
            "src_source": "SOURCE"
        },
        "SAT_CUSTOMER_DETAILS_TS": {
            "source_model": "STG_CUSTOMER_DETAILS_TS",
            "src_pk": "CUSTOMER_PK",
            "src_hashdiff": "HASHDIFF",
            "src_payload": ["CUSTOMER_NAME", "CUSTOMER_ADDRESS", "CUSTOMER_DOB"],
            "src_eff": "EFFECTIVE_FROM",
            "src_ldts": "LOAD_DATETIME",
            "src_source": "SOURCE"
        },
        "SAT_CUSTOMER_LOGIN": {
            "source_model": "STG_CUSTOMER_LOGIN",
            "src_pk": "CUSTOMER_PK",
            "src_hashdiff": "HASHDIFF",
            "src_payload": ["DEVICE_USED", "LAST_LOGIN_DATE"],
            "src_eff": "EFFECTIVE_FROM",
            "src_ldts": "LOAD_DATE",
            "src_source": "SOURCE"
        },
        "SAT_CUSTOMER_LOGIN_TS": {
            "source_model": "STG_CUSTOMER_LOGIN_TS",
            "src_pk": "CUSTOMER_PK",
            "src_hashdiff": "HASHDIFF",
            "src_payload": ["DEVICE_USED", "LAST_LOGIN_DATE"],
            "src_eff": "EFFECTIVE_FROM",
            "src_ldts": "LOAD_DATETIME",
            "src_source": "SOURCE"
        },
        "PIT_CUSTOMER": {
            "source_model": "HUB_CUSTOMER",
            "src_pk": "CUSTOMER_PK",
            "as_of_dates_table": "AS_OF_DATE",
            "satellites":
                {
                    "SAT_CUSTOMER_DETAILS": {
                        "pk":
                            {"PK": "CUSTOMER_PK"},
                        "ldts":
                            {"LDTS": "LOAD_DATE"}
                    },
                    "SAT_CUSTOMER_LOGIN": {
                        "pk":
                            {"PK": "CUSTOMER_PK"},
                        "ldts":
                            {"LDTS": "LOAD_DATE"}
                    }
                },
            "stage_tables":
                {
                    "STG_CUSTOMER_DETAILS": "LOAD_DATE",
                    "STG_CUSTOMER_LOGIN": "LOAD_DATE"
                },
            "src_ldts": "LOAD_DATE"
        },
        "PIT_CUSTOMER_TS": {
            "source_model": "HUB_CUSTOMER_TS",
            "src_pk": "CUSTOMER_PK",
            "as_of_dates_table": "AS_OF_DATE",
            "satellites":
                {
                    "SAT_CUSTOMER_DETAILS_TS": {
                        "pk":
                            {"PK": "CUSTOMER_PK"},
                        "ldts":
                            {"LDTS": "LOAD_DATETIME"}
                    },
                    "SAT_CUSTOMER_LOGIN_TS": {
                        "pk":
                            {"PK": "CUSTOMER_PK"},
                        "ldts":
                            {"LDTS": "LOAD_DATETIME"}
                    }
                },
            "stage_tables":
                {
                    "STG_CUSTOMER_DETAILS_TS": "LOAD_DATETIME",
                    "STG_CUSTOMER_LOGIN_TS": "LOAD_DATETIME",
                },
            "src_ldts": "LOAD_DATETIME"
        },
        # _LG mirrors _TS; _HG mirrors the plain PIT (same config, distinct
        # target tables).
        "PIT_CUSTOMER_LG": {
            "source_model": "HUB_CUSTOMER_TS",
            "src_pk": "CUSTOMER_PK",
            "as_of_dates_table": "AS_OF_DATE",
            "satellites":
                {
                    "SAT_CUSTOMER_DETAILS_TS": {
                        "pk":
                            {"PK": "CUSTOMER_PK"},
                        "ldts":
                            {"LDTS": "LOAD_DATETIME"}
                    },
                    "SAT_CUSTOMER_LOGIN_TS": {
                        "pk":
                            {"PK": "CUSTOMER_PK"},
                        "ldts":
                            {"LDTS": "LOAD_DATETIME"}
                    }
                },
            "stage_tables":
                {
                    "STG_CUSTOMER_DETAILS_TS": "LOAD_DATETIME",
                    "STG_CUSTOMER_LOGIN_TS": "LOAD_DATETIME",
                },
            "src_ldts": "LOAD_DATETIME"
        },
        "PIT_CUSTOMER_HG": {
            "source_model": "HUB_CUSTOMER",
            "src_pk": "CUSTOMER_PK",
            "as_of_dates_table": "AS_OF_DATE",
            "satellites":
                {
                    "SAT_CUSTOMER_DETAILS": {
                        "pk":
                            {"PK": "CUSTOMER_PK"},
                        "ldts":
                            {"LDTS": "LOAD_DATE"}
                    },
                    "SAT_CUSTOMER_LOGIN": {
                        "pk":
                            {"PK": "CUSTOMER_PK"},
                        "ldts":
                            {"LDTS": "LOAD_DATE"}
                    }
                },
            "stage_tables":
                {
                    "STG_CUSTOMER_DETAILS": "LOAD_DATE",
                    "STG_CUSTOMER_LOGIN": "LOAD_DATE",
                },
            "src_ldts": "LOAD_DATE"
        }
    }
    # Column lists for the raw staging seed files.
    context.stage_columns = {
        "RAW_STAGE_DETAILS":
            ["CUSTOMER_ID",
             "CUSTOMER_NAME",
             "CUSTOMER_ADDRESS",
             "CUSTOMER_DOB",
             "LOAD_DATE",
             "SOURCE"],
        "RAW_STAGE_DETAILS_TS":
            ["CUSTOMER_ID",
             "CUSTOMER_NAME",
             "CUSTOMER_ADDRESS",
             "CUSTOMER_DOB",
             "LOAD_DATETIME",
             "SOURCE"],
        "RAW_STAGE_LOGIN":
            ["CUSTOMER_ID",
             "LAST_LOGIN_DATE",
             "DEVICE_USED",
             "LOAD_DATE",
             "SOURCE"],
        "RAW_STAGE_LOGIN_TS":
            ["CUSTOMER_ID",
             "LAST_LOGIN_DATE",
             "DEVICE_USED",
             "LOAD_DATETIME",
             "SOURCE"]
    }
    # dbt seed column types: DATE for the plain set, DATETIME for _TS.
    context.seed_config = {
        "RAW_STAGE_DETAILS": {
            "+column_types": {
                "CUSTOMER_ID": "VARCHAR",
                "CUSTOMER_NAME": "VARCHAR",
                "CUSTOMER_ADDRESS": "VARCHAR",
                "CUSTOMER_DOB": "DATE",
                "LOAD_DATE": "DATE",
                "SOURCE": "VARCHAR"
            }
        },
        "RAW_STAGE_DETAILS_TS": {
            "+column_types": {
                "CUSTOMER_ID": "VARCHAR",
                "CUSTOMER_NAME": "VARCHAR",
                "CUSTOMER_ADDRESS": "VARCHAR",
                "CUSTOMER_DOB": "DATE",
                "LOAD_DATETIME": "DATETIME",
                "SOURCE": "VARCHAR"
            }
        },
        "RAW_STAGE_LOGIN": {
            "+column_types": {
                "CUSTOMER_ID": "VARCHAR",
                "LAST_LOGIN_DATE": "DATETIME",
                "DEVICE_USED": "VARCHAR",
                "LOAD_DATE": "DATE",
                "SOURCE": "VARCHAR"
            }
        },
        "RAW_STAGE_LOGIN_TS": {
            "+column_types": {
                "CUSTOMER_ID": "VARCHAR",
                "LAST_LOGIN_DATE": "DATETIME",
                "DEVICE_USED": "VARCHAR",
                "LOAD_DATETIME": "DATETIME",
                "SOURCE": "VARCHAR"
            }
        },
        "HUB_CUSTOMER": {
            "+column_types": {
                "CUSTOMER_PK": "BINARY(16)",
                "CUSTOMER_ID": "VARCHAR",
                "LOAD_DATE": "DATE",
                "SOURCE": "VARCHAR"
            }
        },
        "HUB_CUSTOMER_TS": {
            "+column_types": {
                "CUSTOMER_PK": "BINARY(16)",
                "CUSTOMER_ID": "VARCHAR",
                "LOAD_DATETIME": "DATETIME",
                "SOURCE": "VARCHAR"
            }
        },
        "SAT_CUSTOMER_DETAILS": {
            "+column_types": {
                "CUSTOMER_PK": "BINARY(16)",
                "HASHDIFF": "BINARY(16)",
                "CUSTOMER_NAME": "VARCHAR",
                "CUSTOMER_ADDRESS": "VARCHAR",
                "CUSTOMER_DOB": "DATE",
                "EFFECTIVE_FROM": "DATE",
                "LOAD_DATE": "DATE",
                "SOURCE": "VARCHAR"
            }
        },
        "SAT_CUSTOMER_DETAILS_TS": {
            "+column_types": {
                "CUSTOMER_PK": "BINARY(16)",
                "HASHDIFF": "BINARY(16)",
                "CUSTOMER_NAME": "VARCHAR",
                "CUSTOMER_ADDRESS": "VARCHAR",
                "CUSTOMER_DOB": "DATE",
                "EFFECTIVE_FROM": "DATETIME",
                "LOAD_DATETIME": "DATETIME",
                "SOURCE": "VARCHAR"
            }
        },
        "SAT_CUSTOMER_LOGIN": {
            "+column_types": {
                "CUSTOMER_PK": "BINARY(16)",
                "HASHDIFF": "BINARY(16)",
                "DEVICE_USED": "VARCHAR",
                "LAST_LOGIN_DATE": "DATETIME",
                "EFFECTIVE_FROM": "DATE",
                "LOAD_DATE": "DATE",
                "SOURCE": "VARCHAR"
            }
        },
        "SAT_CUSTOMER_LOGIN_TS": {
            "+column_types": {
                "CUSTOMER_PK": "BINARY(16)",
                "HASHDIFF": "BINARY(16)",
                "DEVICE_USED": "VARCHAR",
                "LAST_LOGIN_DATE": "DATETIME",
                "EFFECTIVE_FROM": "DATETIME",
                "LOAD_DATETIME": "DATETIME",
                "SOURCE": "VARCHAR"
            }
        },
        "AS_OF_DATE": {
            "+column_types": {
                "AS_OF_DATE": "DATETIME"
            }
        },
        "PIT_CUSTOMER": {
            "+column_types": {
                "AS_OF_DATE": "DATETIME",
                "CUSTOMER_PK": "BINARY(16)",
                "SAT_CUSTOMER_DETAILS_PK": "BINARY(16)",
                "SAT_CUSTOMER_DETAILS_LDTS": "DATETIME",
                "SAT_CUSTOMER_LOGIN_PK": "BINARY(16)",
                "SAT_CUSTOMER_LOGIN_LDTS": "DATETIME"
            }
        },
        "PIT_CUSTOMER_TS": {
            "+column_types": {
                "AS_OF_DATE": "DATETIME",
                "CUSTOMER_PK": "BINARY(16)",
                "SAT_CUSTOMER_DETAILS_TS_PK": "BINARY(16)",
                "SAT_CUSTOMER_DETAILS_TS_LDTS": "DATETIME",
                "SAT_CUSTOMER_LOGIN_TS_PK": "BINARY(16)",
                "SAT_CUSTOMER_LOGIN_TS_LDTS": "DATETIME"
            }
        },
        "PIT_CUSTOMER_LG": {
            "+column_types": {
                "AS_OF_DATE": "DATETIME",
                "CUSTOMER_PK": "BINARY(16)",
                "SAT_CUSTOMER_DETAILS_TS_PK": "BINARY(16)",
                "SAT_CUSTOMER_DETAILS_TS_LDTS": "DATETIME",
                "SAT_CUSTOMER_LOGIN_TS_PK": "BINARY(16)",
                "SAT_CUSTOMER_LOGIN_TS_LDTS": "DATETIME"
            }
        },
        "PIT_CUSTOMER_HG": {
            "+column_types": {
                "AS_OF_DATE": "DATETIME",
                "CUSTOMER_PK": "BINARY(16)",
                "SAT_CUSTOMER_DETAILS_PK": "BINARY(16)",
                "SAT_CUSTOMER_DETAILS_LDTS": "DATETIME",
                "SAT_CUSTOMER_LOGIN_PK": "BINARY(16)",
                "SAT_CUSTOMER_LOGIN_LDTS": "DATETIME"
            }
        }
    }
@fixture
def pit_bigquery(context):
    """
    Define the structures and metadata to perform a PIT (point-in-time) load
    with three satellites (details, login, profile) hanging off one hub.

    Seed column types use STRING/DATETIME, i.e. BigQuery-style types.
    All metadata is attached to the behave ``context`` object for later steps.
    """
    context.vault_structure_type = "pit"
    # Hash-key definitions per staging model: CUSTOMER_PK is hashed from the
    # natural key, HASHDIFF from the payload columns of each satellite.
    context.hashed_columns = {
        "STG_CUSTOMER_DETAILS": {
            "CUSTOMER_PK": "CUSTOMER_ID",
            "HASHDIFF": {"is_hashdiff": True,
                         "columns": ["CUSTOMER_ADDRESS", "CUSTOMER_DOB", "CUSTOMER_NAME"]
                         }
        },
        "STG_CUSTOMER_LOGIN": {
            "CUSTOMER_PK": "CUSTOMER_ID",
            "HASHDIFF": {"is_hashdiff": True,
                         "columns": ["DEVICE_USED", "LAST_LOGIN_DATE"]
                         }
        },
        "STG_CUSTOMER_PROFILE": {
            "CUSTOMER_PK": "CUSTOMER_ID",
            "HASHDIFF": {"is_hashdiff": True,
                         "columns": ["DASHBOARD_COLOUR", "DISPLAY_NAME"]
                         }
        }
    }
    # Derived columns: EFFECTIVE_FROM is aliased from the load date in each stage.
    context.derived_columns = {
        "STG_CUSTOMER_DETAILS": {
            "EFFECTIVE_FROM": "LOAD_DATE"
        },
        "STG_CUSTOMER_LOGIN": {
            "EFFECTIVE_FROM": "LOAD_DATE"
        },
        "STG_CUSTOMER_PROFILE": {
            "EFFECTIVE_FROM": "LOAD_DATE"
        }
    }
    # dbtvault metadata for the hub, the three satellites, and the PIT table
    # that snapshots all three satellites against the AS_OF_DATE table.
    context.vault_structure_columns = {
        "HUB_CUSTOMER": {
            "source_model": ["STG_CUSTOMER_DETAILS",
                             "STG_CUSTOMER_LOGIN",
                             "STG_CUSTOMER_PROFILE"],
            "src_pk": "CUSTOMER_PK",
            "src_nk": "CUSTOMER_ID",
            "src_ldts": "LOAD_DATE",
            "src_source": "SOURCE"
        },
        "SAT_CUSTOMER_DETAILS": {
            "source_model": "STG_CUSTOMER_DETAILS",
            "src_pk": "CUSTOMER_PK",
            "src_hashdiff": "HASHDIFF",
            "src_payload": ["CUSTOMER_NAME", "CUSTOMER_ADDRESS", "CUSTOMER_DOB"],
            "src_eff": "EFFECTIVE_FROM",
            "src_ldts": "LOAD_DATE",
            "src_source": "SOURCE"
        },
        "SAT_CUSTOMER_LOGIN": {
            "source_model": "STG_CUSTOMER_LOGIN",
            "src_pk": "CUSTOMER_PK",
            "src_hashdiff": "HASHDIFF",
            "src_payload": ["LAST_LOGIN_DATE", "DEVICE_USED"],
            "src_eff": "EFFECTIVE_FROM",
            "src_ldts": "LOAD_DATE",
            "src_source": "SOURCE"
        },
        "SAT_CUSTOMER_PROFILE": {
            "source_model": "STG_CUSTOMER_PROFILE",
            "src_pk": "CUSTOMER_PK",
            "src_hashdiff": "HASHDIFF",
            "src_payload": ["DASHBOARD_COLOUR", "DISPLAY_NAME"],
            "src_eff": "EFFECTIVE_FROM",
            "src_ldts": "LOAD_DATE",
            "src_source": "SOURCE"
        },
        "PIT_CUSTOMER": {
            "source_model": "HUB_CUSTOMER",
            "src_pk": "CUSTOMER_PK",
            "as_of_dates_table": "AS_OF_DATE",
            # For each satellite, the PK and LDTS columns the PIT table tracks.
            "satellites":
                {
                    "SAT_CUSTOMER_DETAILS": {
                        "pk":
                            {"PK": "CUSTOMER_PK"},
                        "ldts":
                            {"LDTS": "LOAD_DATE"}
                    },
                    "SAT_CUSTOMER_LOGIN": {
                        "pk":
                            {"PK": "CUSTOMER_PK"},
                        "ldts":
                            {"LDTS": "LOAD_DATE"}
                    },
                    "SAT_CUSTOMER_PROFILE": {
                        "pk":
                            {"PK": "CUSTOMER_PK"},
                        "ldts":
                            {"LDTS": "LOAD_DATE"}
                    }
                },
            "stage_tables":
                {
                    "STG_CUSTOMER_DETAILS": "LOAD_DATE",
                    "STG_CUSTOMER_LOGIN": "LOAD_DATE",
                    "STG_CUSTOMER_PROFILE": "LOAD_DATE"
                },
            "src_ldts": "LOAD_DATE"
        }
    }
    # Column order of the raw staging seed files consumed by the stages above.
    context.stage_columns = {
        "RAW_STAGE_DETAILS":
            ["CUSTOMER_ID",
             "CUSTOMER_NAME",
             "CUSTOMER_ADDRESS",
             "CUSTOMER_DOB",
             "LOAD_DATE",
             "SOURCE"]
        ,
        "RAW_STAGE_LOGIN":
            ["CUSTOMER_ID",
             "LAST_LOGIN_DATE",
             "DEVICE_USED",
             "LOAD_DATE",
             "SOURCE"]
        ,
        "RAW_STAGE_PROFILE":
            ["CUSTOMER_ID",
             "DASHBOARD_COLOUR",
             "DISPLAY_NAME",
             "LOAD_DATE",
             "SOURCE"]
    }
    # dbt seed "+column_types" overrides so seeds load with explicit types.
    context.seed_config = {
        "RAW_STAGE_DETAILS": {
            "+column_types": {
                "CUSTOMER_ID": "STRING",
                "CUSTOMER_NAME": "STRING",
                "CUSTOMER_ADDRESS": "STRING",
                "CUSTOMER_DOB": "DATE",
                "LOAD_DATE": "DATETIME",
                "SOURCE": "STRING"
            }
        },
        "RAW_STAGE_LOGIN": {
            "+column_types": {
                "CUSTOMER_ID": "STRING",
                "LAST_LOGIN_DATE": "DATETIME",
                "DEVICE_USED": "STRING",
                "LOAD_DATE": "DATETIME",
                "SOURCE": "STRING"
            }
        },
        "RAW_STAGE_PROFILE": {
            "+column_types": {
                "CUSTOMER_ID": "STRING",
                "DASHBOARD_COLOUR": "STRING",
                "DISPLAY_NAME": "STRING",
                "LOAD_DATE": "DATETIME",
                "SOURCE": "STRING"
            }
        },
        "HUB_CUSTOMER": {
            "+column_types": {
                "CUSTOMER_PK": "STRING",
                "CUSTOMER_ID": "STRING",
                "LOAD_DATE": "DATETIME",
                "SOURCE": "STRING"
            }
        },
        "SAT_CUSTOMER_DETAILS": {
            "+column_types": {
                "CUSTOMER_PK": "STRING",
                "HASHDIFF": "STRING",
                "CUSTOMER_NAME": "STRING",
                "CUSTOMER_ADDRESS": "STRING",
                "CUSTOMER_DOB": "DATE",
                "EFFECTIVE_FROM": "DATETIME",
                "LOAD_DATE": "DATETIME",
                "SOURCE": "STRING"
            }
        },
        "SAT_CUSTOMER_LOGIN": {
            "+column_types": {
                "CUSTOMER_PK": "STRING",
                "HASHDIFF": "STRING",
                "DEVICE_USED": "STRING",
                "LAST_LOGIN_DATE": "DATETIME",
                "EFFECTIVE_FROM": "DATETIME",
                "LOAD_DATE": "DATETIME",
                "SOURCE": "STRING"
            }
        },
        "SAT_CUSTOMER_PROFILE": {
            "+column_types": {
                "CUSTOMER_PK": "STRING",
                "HASHDIFF": "STRING",
                "DASHBOARD_COLOUR": "STRING",
                "DISPLAY_NAME": "STRING",
                "EFFECTIVE_FROM": "DATETIME",
                "LOAD_DATE": "DATETIME",
                "SOURCE": "STRING"
            }
        },
        "AS_OF_DATE": {
            "+column_types": {
                "AS_OF_DATE": "DATETIME"
            }
        },
        "PIT_CUSTOMER": {
            "+column_types": {
                "AS_OF_DATE": "DATETIME",
                "CUSTOMER_PK": "STRING",
                "SAT_CUSTOMER_DETAILS_PK": "STRING",
                "SAT_CUSTOMER_DETAILS_LDTS": "DATETIME",
                "SAT_CUSTOMER_LOGIN_PK": "STRING",
                "SAT_CUSTOMER_LOGIN_LDTS": "DATETIME",
                "SAT_CUSTOMER_PROFILE_PK": "STRING",
                "SAT_CUSTOMER_PROFILE_LDTS": "DATETIME"
            }
        }
    }
@fixture
def pit_one_sat_bigquery(context):
    """
    Define the structures and metadata to perform a PIT load with a single
    satellite, in two variants: DATE-grained (LOAD_DATE) and timestamp-grained
    (``_TS`` structures using LOAD_DATETIME).

    Seed column types use STRING/DATETIME, i.e. BigQuery-style types.
    Four PIT tables are defined so different scenarios (base, TS, LG, HG)
    can be exercised over the same hub/satellite pairs.
    """
    context.vault_structure_type = "pit"
    # Hash-key definitions for the DATE-grained and TS-grained stage variants.
    context.hashed_columns = {
        "STG_CUSTOMER_DETAILS": {
            "CUSTOMER_PK": "CUSTOMER_ID",
            "HASHDIFF": {"is_hashdiff": True,
                         "columns": ["CUSTOMER_ADDRESS", "CUSTOMER_DOB", "CUSTOMER_NAME"]
                         }
        },
        "STG_CUSTOMER_DETAILS_TS": {
            "CUSTOMER_PK": "CUSTOMER_ID",
            "HASHDIFF": {"is_hashdiff": True,
                         "columns": ["CUSTOMER_ADDRESS", "CUSTOMER_DOB", "CUSTOMER_NAME"]
                         }
        }
    }
    # EFFECTIVE_FROM derives from LOAD_DATE (base) or LOAD_DATETIME (_TS).
    context.derived_columns = {
        "STG_CUSTOMER_DETAILS": {
            "EFFECTIVE_FROM": "LOAD_DATE"
        },
        "STG_CUSTOMER_DETAILS_TS": {
            "EFFECTIVE_FROM": "LOAD_DATETIME"
        }
    }
    # Hub/satellite/PIT metadata; *_TS structures mirror the base ones but
    # key on LOAD_DATETIME instead of LOAD_DATE.
    context.vault_structure_columns = {
        "HUB_CUSTOMER": {
            "source_model": ["STG_CUSTOMER_DETAILS",
                             ],
            "src_pk": "CUSTOMER_PK",
            "src_nk": "CUSTOMER_ID",
            "src_ldts": "LOAD_DATE",
            "src_source": "SOURCE"
        },
        "HUB_CUSTOMER_TS": {
            "source_model": ["STG_CUSTOMER_DETAILS_TS",
                             ],
            "src_pk": "CUSTOMER_PK",
            "src_nk": "CUSTOMER_ID",
            "src_ldts": "LOAD_DATETIME",
            "src_source": "SOURCE"
        },
        "SAT_CUSTOMER_DETAILS": {
            "source_model": "STG_CUSTOMER_DETAILS",
            "src_pk": "CUSTOMER_PK",
            "src_hashdiff": "HASHDIFF",
            "src_payload": ["CUSTOMER_NAME", "CUSTOMER_ADDRESS", "CUSTOMER_DOB"],
            "src_eff": "EFFECTIVE_FROM",
            "src_ldts": "LOAD_DATE",
            "src_source": "SOURCE"
        },
        "SAT_CUSTOMER_DETAILS_TS": {
            "source_model": "STG_CUSTOMER_DETAILS_TS",
            "src_pk": "CUSTOMER_PK",
            "src_hashdiff": "HASHDIFF",
            "src_payload": ["CUSTOMER_NAME", "CUSTOMER_ADDRESS", "CUSTOMER_DOB"],
            "src_eff": "EFFECTIVE_FROM",
            "src_ldts": "LOAD_DATETIME",
            "src_source": "SOURCE"
        },
        "PIT_CUSTOMER": {
            "source_model": "HUB_CUSTOMER",
            "src_pk": "CUSTOMER_PK",
            "as_of_dates_table": "AS_OF_DATE",
            "satellites":
                {
                    "SAT_CUSTOMER_DETAILS": {
                        "pk":
                            {"PK": "CUSTOMER_PK"},
                        "ldts":
                            {"LDTS": "LOAD_DATE"}
                    }
                },
            "stage_tables":
                {
                    "STG_CUSTOMER_DETAILS": "LOAD_DATE",
                },
            "src_ldts": "LOAD_DATE"
        },
        "PIT_CUSTOMER_TS": {
            "source_model": "HUB_CUSTOMER_TS",
            "src_pk": "CUSTOMER_PK",
            "as_of_dates_table": "AS_OF_DATE",
            "satellites":
                {
                    "SAT_CUSTOMER_DETAILS_TS": {
                        "pk":
                            {"PK": "CUSTOMER_PK"},
                        "ldts":
                            {"LDTS": "LOAD_DATETIME"}
                    }
                },
            "stage_tables":
                {
                    "STG_CUSTOMER_DETAILS_TS": "LOAD_DATETIME",
                },
            "src_ldts": "LOAD_DATETIME"
        },
        # _LG mirrors _TS metadata under a different PIT table name.
        "PIT_CUSTOMER_LG": {
            "source_model": "HUB_CUSTOMER_TS",
            "src_pk": "CUSTOMER_PK",
            "as_of_dates_table": "AS_OF_DATE",
            "satellites":
                {
                    "SAT_CUSTOMER_DETAILS_TS": {
                        "pk":
                            {"PK": "CUSTOMER_PK"},
                        "ldts":
                            {"LDTS": "LOAD_DATETIME"}
                    }
                },
            "stage_tables":
                {
                    "STG_CUSTOMER_DETAILS_TS": "LOAD_DATETIME",
                },
            "src_ldts": "LOAD_DATETIME"
        },
        # _HG mirrors the DATE-grained PIT_CUSTOMER metadata under another name.
        "PIT_CUSTOMER_HG": {
            "source_model": "HUB_CUSTOMER",
            "src_pk": "CUSTOMER_PK",
            "as_of_dates_table": "AS_OF_DATE",
            "satellites":
                {
                    "SAT_CUSTOMER_DETAILS": {
                        "pk":
                            {"PK": "CUSTOMER_PK"},
                        "ldts":
                            {"LDTS": "LOAD_DATE"}
                    }
                },
            "stage_tables":
                {
                    "STG_CUSTOMER_DETAILS": "LOAD_DATE",
                },
            "src_ldts": "LOAD_DATE"
        }
    }
    # Column order of the raw staging seed files.
    context.stage_columns = {
        "RAW_STAGE_DETAILS":
            ["CUSTOMER_ID",
             "CUSTOMER_NAME",
             "CUSTOMER_ADDRESS",
             "CUSTOMER_DOB",
             "LOAD_DATE",
             "SOURCE"],
        "RAW_STAGE_DETAILS_TS":
            ["CUSTOMER_ID",
             "CUSTOMER_NAME",
             "CUSTOMER_ADDRESS",
             "CUSTOMER_DOB",
             "LOAD_DATETIME",
             "SOURCE"]
    }
    # dbt seed "+column_types" overrides so seeds load with explicit types.
    context.seed_config = {
        "RAW_STAGE_DETAILS": {
            "+column_types": {
                "CUSTOMER_ID": "STRING",
                "CUSTOMER_NAME": "STRING",
                "CUSTOMER_ADDRESS": "STRING",
                "CUSTOMER_DOB": "DATE",
                "LOAD_DATE": "DATE",
                "SOURCE": "STRING"
            }
        },
        "RAW_STAGE_DETAILS_TS": {
            "+column_types": {
                "CUSTOMER_ID": "STRING",
                "CUSTOMER_NAME": "STRING",
                "CUSTOMER_ADDRESS": "STRING",
                "CUSTOMER_DOB": "DATE",
                "LOAD_DATETIME": "DATETIME",
                "SOURCE": "STRING"
            }
        },
        "HUB_CUSTOMER": {
            "+column_types": {
                "CUSTOMER_PK": "STRING",
                "CUSTOMER_ID": "STRING",
                "LOAD_DATE": "DATE",
                "SOURCE": "STRING"
            }
        },
        "HUB_CUSTOMER_TS": {
            "+column_types": {
                "CUSTOMER_PK": "STRING",
                "CUSTOMER_ID": "STRING",
                "LOAD_DATETIME": "DATETIME",
                "SOURCE": "STRING"
            }
        },
        "SAT_CUSTOMER_DETAILS": {
            "+column_types": {
                "CUSTOMER_PK": "STRING",
                "HASHDIFF": "STRING",
                "CUSTOMER_NAME": "STRING",
                "CUSTOMER_ADDRESS": "STRING",
                "CUSTOMER_DOB": "DATE",
                "EFFECTIVE_FROM": "DATE",
                "LOAD_DATE": "DATE",
                "SOURCE": "STRING"
            }
        },
        "SAT_CUSTOMER_DETAILS_TS": {
            "+column_types": {
                "CUSTOMER_PK": "STRING",
                "HASHDIFF": "STRING",
                "CUSTOMER_NAME": "STRING",
                "CUSTOMER_ADDRESS": "STRING",
                "CUSTOMER_DOB": "DATE",
                "EFFECTIVE_FROM": "DATETIME",
                "LOAD_DATETIME": "DATETIME",
                "SOURCE": "STRING"
            }
        },
        "AS_OF_DATE": {
            "+column_types": {
                "AS_OF_DATE": "DATETIME"
            }
        },
        "PIT_CUSTOMER": {
            "+column_types": {
                "AS_OF_DATE": "DATETIME",
                "CUSTOMER_PK": "STRING",
                "SAT_CUSTOMER_DETAILS_PK": "STRING",
                "SAT_CUSTOMER_DETAILS_LDTS": "DATETIME"
            }
        },
        "PIT_CUSTOMER_TS": {
            "+column_types": {
                "AS_OF_DATE": "DATETIME",
                "CUSTOMER_PK": "STRING",
                "SAT_CUSTOMER_DETAILS_TS_PK": "STRING",
                "SAT_CUSTOMER_DETAILS_TS_LDTS": "DATETIME"
            }
        },
        "PIT_CUSTOMER_LG": {
            "+column_types": {
                "AS_OF_DATE": "DATETIME",
                "CUSTOMER_PK": "STRING",
                "SAT_CUSTOMER_DETAILS_TS_PK": "STRING",
                "SAT_CUSTOMER_DETAILS_TS_LDTS": "DATETIME"
            }
        },
        "PIT_CUSTOMER_HG": {
            "+column_types": {
                "AS_OF_DATE": "DATETIME",
                "CUSTOMER_PK": "STRING",
                "SAT_CUSTOMER_DETAILS_PK": "STRING",
                "SAT_CUSTOMER_DETAILS_LDTS": "DATETIME"
            }
        }
    }
@fixture
def pit_two_sats_bigquery(context):
    """
    Define the structures and metadata to perform a PIT load with two
    satellites (details, login), in DATE-grained and timestamp-grained
    (``_TS``, LOAD_DATETIME) variants.

    Seed column types use STRING/DATETIME, i.e. BigQuery-style types.
    Four PIT tables are defined (base, TS, LG, HG) over the same
    hub/satellite pairs.
    """
    context.vault_structure_type = "pit"
    # Hash-key definitions for both grains of both staging models.
    context.hashed_columns = {
        "STG_CUSTOMER_DETAILS": {
            "CUSTOMER_PK": "CUSTOMER_ID",
            "HASHDIFF": {"is_hashdiff": True,
                         "columns": ["CUSTOMER_ADDRESS", "CUSTOMER_DOB", "CUSTOMER_NAME"]
                         }
        },
        "STG_CUSTOMER_DETAILS_TS": {
            "CUSTOMER_PK": "CUSTOMER_ID",
            "HASHDIFF": {"is_hashdiff": True,
                         "columns": ["CUSTOMER_ADDRESS", "CUSTOMER_DOB", "CUSTOMER_NAME"]
                         }
        },
        "STG_CUSTOMER_LOGIN": {
            "CUSTOMER_PK": "CUSTOMER_ID",
            "HASHDIFF": {"is_hashdiff": True,
                         "columns": ["DEVICE_USED", "LAST_LOGIN_DATE"]
                         }
        },
        "STG_CUSTOMER_LOGIN_TS": {
            "CUSTOMER_PK": "CUSTOMER_ID",
            "HASHDIFF": {"is_hashdiff": True,
                         "columns": ["DEVICE_USED", "LAST_LOGIN_DATE"]
                         }
        }
    }
    # EFFECTIVE_FROM derives from LOAD_DATE (base) or LOAD_DATETIME (_TS).
    context.derived_columns = {
        "STG_CUSTOMER_DETAILS": {
            "EFFECTIVE_FROM": "LOAD_DATE"
        },
        "STG_CUSTOMER_DETAILS_TS": {
            "EFFECTIVE_FROM": "LOAD_DATETIME"
        },
        "STG_CUSTOMER_LOGIN": {
            "EFFECTIVE_FROM": "LOAD_DATE"
        },
        "STG_CUSTOMER_LOGIN_TS": {
            "EFFECTIVE_FROM": "LOAD_DATETIME"
        }
    }
    # Hub/satellite/PIT metadata; *_TS structures mirror the base ones but
    # key on LOAD_DATETIME instead of LOAD_DATE.
    context.vault_structure_columns = {
        "HUB_CUSTOMER": {
            "source_model": ["STG_CUSTOMER_DETAILS",
                             ],
            "src_pk": "CUSTOMER_PK",
            "src_nk": "CUSTOMER_ID",
            "src_ldts": "LOAD_DATE",
            "src_source": "SOURCE"
        },
        "HUB_CUSTOMER_TS": {
            "source_model": ["STG_CUSTOMER_DETAILS_TS",
                             ],
            "src_pk": "CUSTOMER_PK",
            "src_nk": "CUSTOMER_ID",
            "src_ldts": "LOAD_DATETIME",
            "src_source": "SOURCE"
        },
        "SAT_CUSTOMER_DETAILS": {
            "source_model": "STG_CUSTOMER_DETAILS",
            "src_pk": "CUSTOMER_PK",
            "src_hashdiff": "HASHDIFF",
            "src_payload": ["CUSTOMER_NAME", "CUSTOMER_ADDRESS", "CUSTOMER_DOB"],
            "src_eff": "EFFECTIVE_FROM",
            "src_ldts": "LOAD_DATE",
            "src_source": "SOURCE"
        },
        "SAT_CUSTOMER_DETAILS_TS": {
            "source_model": "STG_CUSTOMER_DETAILS_TS",
            "src_pk": "CUSTOMER_PK",
            "src_hashdiff": "HASHDIFF",
            "src_payload": ["CUSTOMER_NAME", "CUSTOMER_ADDRESS", "CUSTOMER_DOB"],
            "src_eff": "EFFECTIVE_FROM",
            "src_ldts": "LOAD_DATETIME",
            "src_source": "SOURCE"
        },
        "SAT_CUSTOMER_LOGIN": {
            "source_model": "STG_CUSTOMER_LOGIN",
            "src_pk": "CUSTOMER_PK",
            "src_hashdiff": "HASHDIFF",
            "src_payload": ["DEVICE_USED", "LAST_LOGIN_DATE"],
            "src_eff": "EFFECTIVE_FROM",
            "src_ldts": "LOAD_DATE",
            "src_source": "SOURCE"
        },
        "SAT_CUSTOMER_LOGIN_TS": {
            "source_model": "STG_CUSTOMER_LOGIN_TS",
            "src_pk": "CUSTOMER_PK",
            "src_hashdiff": "HASHDIFF",
            "src_payload": ["DEVICE_USED", "LAST_LOGIN_DATE"],
            "src_eff": "EFFECTIVE_FROM",
            "src_ldts": "LOAD_DATETIME",
            "src_source": "SOURCE"
        },
        "PIT_CUSTOMER": {
            "source_model": "HUB_CUSTOMER",
            "src_pk": "CUSTOMER_PK",
            "as_of_dates_table": "AS_OF_DATE",
            "satellites":
                {
                    "SAT_CUSTOMER_DETAILS": {
                        "pk":
                            {"PK": "CUSTOMER_PK"},
                        "ldts":
                            {"LDTS": "LOAD_DATE"}
                    },
                    "SAT_CUSTOMER_LOGIN": {
                        "pk":
                            {"PK": "CUSTOMER_PK"},
                        "ldts":
                            {"LDTS": "LOAD_DATE"}
                    }
                },
            "stage_tables":
                {
                    "STG_CUSTOMER_DETAILS": "LOAD_DATE",
                    "STG_CUSTOMER_LOGIN": "LOAD_DATE"
                },
            "src_ldts": "LOAD_DATE"
        },
        "PIT_CUSTOMER_TS": {
            "source_model": "HUB_CUSTOMER_TS",
            "src_pk": "CUSTOMER_PK",
            "as_of_dates_table": "AS_OF_DATE",
            "satellites":
                {
                    "SAT_CUSTOMER_DETAILS_TS": {
                        "pk":
                            {"PK": "CUSTOMER_PK"},
                        "ldts":
                            {"LDTS": "LOAD_DATETIME"}
                    },
                    "SAT_CUSTOMER_LOGIN_TS": {
                        "pk":
                            {"PK": "CUSTOMER_PK"},
                        "ldts":
                            {"LDTS": "LOAD_DATETIME"}
                    }
                },
            "stage_tables":
                {
                    "STG_CUSTOMER_DETAILS_TS": "LOAD_DATETIME",
                    "STG_CUSTOMER_LOGIN_TS": "LOAD_DATETIME",
                },
            "src_ldts": "LOAD_DATETIME"
        },
        # _LG mirrors _TS metadata under a different PIT table name.
        "PIT_CUSTOMER_LG": {
            "source_model": "HUB_CUSTOMER_TS",
            "src_pk": "CUSTOMER_PK",
            "as_of_dates_table": "AS_OF_DATE",
            "satellites":
                {
                    "SAT_CUSTOMER_DETAILS_TS": {
                        "pk":
                            {"PK": "CUSTOMER_PK"},
                        "ldts":
                            {"LDTS": "LOAD_DATETIME"}
                    },
                    "SAT_CUSTOMER_LOGIN_TS": {
                        "pk":
                            {"PK": "CUSTOMER_PK"},
                        "ldts":
                            {"LDTS": "LOAD_DATETIME"}
                    }
                },
            "stage_tables":
                {
                    "STG_CUSTOMER_DETAILS_TS": "LOAD_DATETIME",
                    "STG_CUSTOMER_LOGIN_TS": "LOAD_DATETIME",
                },
            "src_ldts": "LOAD_DATETIME"
        },
        # _HG mirrors the DATE-grained PIT_CUSTOMER metadata under another name.
        "PIT_CUSTOMER_HG": {
            "source_model": "HUB_CUSTOMER",
            "src_pk": "CUSTOMER_PK",
            "as_of_dates_table": "AS_OF_DATE",
            "satellites":
                {
                    "SAT_CUSTOMER_DETAILS": {
                        "pk":
                            {"PK": "CUSTOMER_PK"},
                        "ldts":
                            {"LDTS": "LOAD_DATE"}
                    },
                    "SAT_CUSTOMER_LOGIN": {
                        "pk":
                            {"PK": "CUSTOMER_PK"},
                        "ldts":
                            {"LDTS": "LOAD_DATE"}
                    }
                },
            "stage_tables":
                {
                    "STG_CUSTOMER_DETAILS": "LOAD_DATE",
                    "STG_CUSTOMER_LOGIN": "LOAD_DATE",
                },
            "src_ldts": "LOAD_DATE"
        }
    }
    # Column order of the raw staging seed files.
    context.stage_columns = {
        "RAW_STAGE_DETAILS":
            ["CUSTOMER_ID",
             "CUSTOMER_NAME",
             "CUSTOMER_ADDRESS",
             "CUSTOMER_DOB",
             "LOAD_DATE",
             "SOURCE"],
        "RAW_STAGE_DETAILS_TS":
            ["CUSTOMER_ID",
             "CUSTOMER_NAME",
             "CUSTOMER_ADDRESS",
             "CUSTOMER_DOB",
             "LOAD_DATETIME",
             "SOURCE"],
        "RAW_STAGE_LOGIN":
            ["CUSTOMER_ID",
             "LAST_LOGIN_DATE",
             "DEVICE_USED",
             "LOAD_DATE",
             "SOURCE"],
        "RAW_STAGE_LOGIN_TS":
            ["CUSTOMER_ID",
             "LAST_LOGIN_DATE",
             "DEVICE_USED",
             "LOAD_DATETIME",
             "SOURCE"]
    }
    # dbt seed "+column_types" overrides so seeds load with explicit types.
    context.seed_config = {
        "RAW_STAGE_DETAILS": {
            "+column_types": {
                "CUSTOMER_ID": "STRING",
                "CUSTOMER_NAME": "STRING",
                "CUSTOMER_ADDRESS": "STRING",
                "CUSTOMER_DOB": "DATE",
                "LOAD_DATE": "DATE",
                "SOURCE": "STRING"
            }
        },
        "RAW_STAGE_DETAILS_TS": {
            "+column_types": {
                "CUSTOMER_ID": "STRING",
                "CUSTOMER_NAME": "STRING",
                "CUSTOMER_ADDRESS": "STRING",
                "CUSTOMER_DOB": "DATE",
                "LOAD_DATETIME": "DATETIME",
                "SOURCE": "STRING"
            }
        },
        "RAW_STAGE_LOGIN": {
            "+column_types": {
                "CUSTOMER_ID": "STRING",
                "LAST_LOGIN_DATE": "DATETIME",
                "DEVICE_USED": "STRING",
                "LOAD_DATE": "DATE",
                "SOURCE": "STRING"
            }
        },
        "RAW_STAGE_LOGIN_TS": {
            "+column_types": {
                "CUSTOMER_ID": "STRING",
                "LAST_LOGIN_DATE": "DATETIME",
                "DEVICE_USED": "STRING",
                "LOAD_DATETIME": "DATETIME",
                "SOURCE": "STRING"
            }
        },
        "HUB_CUSTOMER": {
            "+column_types": {
                "CUSTOMER_PK": "STRING",
                "CUSTOMER_ID": "STRING",
                "LOAD_DATE": "DATE",
                "SOURCE": "STRING"
            }
        },
        "HUB_CUSTOMER_TS": {
            "+column_types": {
                "CUSTOMER_PK": "STRING",
                "CUSTOMER_ID": "STRING",
                "LOAD_DATETIME": "DATETIME",
                "SOURCE": "STRING"
            }
        },
        "SAT_CUSTOMER_DETAILS": {
            "+column_types": {
                "CUSTOMER_PK": "STRING",
                "HASHDIFF": "STRING",
                "CUSTOMER_NAME": "STRING",
                "CUSTOMER_ADDRESS": "STRING",
                "CUSTOMER_DOB": "DATE",
                "EFFECTIVE_FROM": "DATE",
                "LOAD_DATE": "DATE",
                "SOURCE": "STRING"
            }
        },
        "SAT_CUSTOMER_DETAILS_TS": {
            "+column_types": {
                "CUSTOMER_PK": "STRING",
                "HASHDIFF": "STRING",
                "CUSTOMER_NAME": "STRING",
                "CUSTOMER_ADDRESS": "STRING",
                "CUSTOMER_DOB": "DATE",
                "EFFECTIVE_FROM": "DATETIME",
                "LOAD_DATETIME": "DATETIME",
                "SOURCE": "STRING"
            }
        },
        "SAT_CUSTOMER_LOGIN": {
            "+column_types": {
                "CUSTOMER_PK": "STRING",
                "HASHDIFF": "STRING",
                "DEVICE_USED": "STRING",
                "LAST_LOGIN_DATE": "DATETIME",
                "EFFECTIVE_FROM": "DATE",
                "LOAD_DATE": "DATE",
                "SOURCE": "STRING"
            }
        },
        "SAT_CUSTOMER_LOGIN_TS": {
            "+column_types": {
                "CUSTOMER_PK": "STRING",
                "HASHDIFF": "STRING",
                "DEVICE_USED": "STRING",
                "LAST_LOGIN_DATE": "DATETIME",
                "EFFECTIVE_FROM": "DATETIME",
                "LOAD_DATETIME": "DATETIME",
                "SOURCE": "STRING"
            }
        },
        "AS_OF_DATE": {
            "+column_types": {
                "AS_OF_DATE": "DATETIME"
            }
        },
        "PIT_CUSTOMER": {
            "+column_types": {
                "AS_OF_DATE": "DATETIME",
                "CUSTOMER_PK": "STRING",
                "SAT_CUSTOMER_DETAILS_PK": "STRING",
                "SAT_CUSTOMER_DETAILS_LDTS": "DATETIME",
                "SAT_CUSTOMER_LOGIN_PK": "STRING",
                "SAT_CUSTOMER_LOGIN_LDTS": "DATETIME"
            }
        },
        "PIT_CUSTOMER_TS": {
            "+column_types": {
                "AS_OF_DATE": "DATETIME",
                "CUSTOMER_PK": "STRING",
                "SAT_CUSTOMER_DETAILS_TS_PK": "STRING",
                "SAT_CUSTOMER_DETAILS_TS_LDTS": "DATETIME",
                "SAT_CUSTOMER_LOGIN_TS_PK": "STRING",
                "SAT_CUSTOMER_LOGIN_TS_LDTS": "DATETIME"
            }
        },
        "PIT_CUSTOMER_LG": {
            "+column_types": {
                "AS_OF_DATE": "DATETIME",
                "CUSTOMER_PK": "STRING",
                "SAT_CUSTOMER_DETAILS_TS_PK": "STRING",
                "SAT_CUSTOMER_DETAILS_TS_LDTS": "DATETIME",
                "SAT_CUSTOMER_LOGIN_TS_PK": "STRING",
                "SAT_CUSTOMER_LOGIN_TS_LDTS": "DATETIME"
            }
        },
        "PIT_CUSTOMER_HG": {
            "+column_types": {
                "AS_OF_DATE": "DATETIME",
                "CUSTOMER_PK": "STRING",
                "SAT_CUSTOMER_DETAILS_PK": "STRING",
                "SAT_CUSTOMER_DETAILS_LDTS": "DATETIME",
                "SAT_CUSTOMER_LOGIN_PK": "STRING",
                "SAT_CUSTOMER_LOGIN_LDTS": "DATETIME"
            }
        }
    }
@fixture
def pit_sqlserver(context):
    """
    Define the structures and metadata to perform a PIT load with three
    satellites (details, login, profile) hanging off one hub.

    Seed column types use VARCHAR(n)/BINARY(16)/DATETIME, i.e. SQL Server
    style types. All metadata is attached to the behave ``context`` object.
    """
    context.vault_structure_type = "pit"
    # Hash-key definitions per staging model: CUSTOMER_PK is hashed from the
    # natural key, HASHDIFF from the payload columns of each satellite.
    context.hashed_columns = {
        "STG_CUSTOMER_DETAILS": {
            "CUSTOMER_PK": "CUSTOMER_ID",
            "HASHDIFF": {"is_hashdiff": True,
                         "columns": ["CUSTOMER_ADDRESS", "CUSTOMER_DOB", "CUSTOMER_NAME"]
                         }
        },
        "STG_CUSTOMER_LOGIN": {
            "CUSTOMER_PK": "CUSTOMER_ID",
            "HASHDIFF": {"is_hashdiff": True,
                         "columns": ["DEVICE_USED", "LAST_LOGIN_DATE"]
                         }
        },
        "STG_CUSTOMER_PROFILE": {
            "CUSTOMER_PK": "CUSTOMER_ID",
            "HASHDIFF": {"is_hashdiff": True,
                         "columns": ["DASHBOARD_COLOUR", "DISPLAY_NAME"]
                         }
        }
    }
    # Derived columns: EFFECTIVE_FROM is aliased from the load date in each stage.
    context.derived_columns = {
        "STG_CUSTOMER_DETAILS": {
            "EFFECTIVE_FROM": "LOAD_DATE"
        },
        "STG_CUSTOMER_LOGIN": {
            "EFFECTIVE_FROM": "LOAD_DATE"
        },
        "STG_CUSTOMER_PROFILE": {
            "EFFECTIVE_FROM": "LOAD_DATE"
        }
    }
    # dbtvault metadata for the hub, the three satellites, and the PIT table
    # that snapshots all three satellites against the AS_OF_DATE table.
    context.vault_structure_columns = {
        "HUB_CUSTOMER": {
            "source_model": ["STG_CUSTOMER_DETAILS",
                             "STG_CUSTOMER_LOGIN",
                             "STG_CUSTOMER_PROFILE"],
            "src_pk": "CUSTOMER_PK",
            "src_nk": "CUSTOMER_ID",
            "src_ldts": "LOAD_DATE",
            "src_source": "SOURCE"
        },
        "SAT_CUSTOMER_DETAILS": {
            "source_model": "STG_CUSTOMER_DETAILS",
            "src_pk": "CUSTOMER_PK",
            "src_hashdiff": "HASHDIFF",
            "src_payload": ["CUSTOMER_NAME", "CUSTOMER_ADDRESS", "CUSTOMER_DOB"],
            "src_eff": "EFFECTIVE_FROM",
            "src_ldts": "LOAD_DATE",
            "src_source": "SOURCE"
        },
        "SAT_CUSTOMER_LOGIN": {
            "source_model": "STG_CUSTOMER_LOGIN",
            "src_pk": "CUSTOMER_PK",
            "src_hashdiff": "HASHDIFF",
            "src_payload": ["LAST_LOGIN_DATE", "DEVICE_USED"],
            "src_eff": "EFFECTIVE_FROM",
            "src_ldts": "LOAD_DATE",
            "src_source": "SOURCE"
        },
        "SAT_CUSTOMER_PROFILE": {
            "source_model": "STG_CUSTOMER_PROFILE",
            "src_pk": "CUSTOMER_PK",
            "src_hashdiff": "HASHDIFF",
            "src_payload": ["DASHBOARD_COLOUR", "DISPLAY_NAME"],
            "src_eff": "EFFECTIVE_FROM",
            "src_ldts": "LOAD_DATE",
            "src_source": "SOURCE"
        },
        "PIT_CUSTOMER": {
            "source_model": "HUB_CUSTOMER",
            "src_pk": "CUSTOMER_PK",
            "as_of_dates_table": "AS_OF_DATE",
            # For each satellite, the PK and LDTS columns the PIT table tracks.
            "satellites":
                {
                    "SAT_CUSTOMER_DETAILS": {
                        "pk":
                            {"PK": "CUSTOMER_PK"},
                        "ldts":
                            {"LDTS": "LOAD_DATE"}
                    },
                    "SAT_CUSTOMER_LOGIN": {
                        "pk":
                            {"PK": "CUSTOMER_PK"},
                        "ldts":
                            {"LDTS": "LOAD_DATE"}
                    },
                    "SAT_CUSTOMER_PROFILE": {
                        "pk":
                            {"PK": "CUSTOMER_PK"},
                        "ldts":
                            {"LDTS": "LOAD_DATE"}
                    }
                },
            "stage_tables":
                {
                    "STG_CUSTOMER_DETAILS": "LOAD_DATE",
                    "STG_CUSTOMER_LOGIN": "LOAD_DATE",
                    "STG_CUSTOMER_PROFILE": "LOAD_DATE"
                },
            "src_ldts": "LOAD_DATE"
        }
    }
    # Column order of the raw staging seed files consumed by the stages above.
    context.stage_columns = {
        "RAW_STAGE_DETAILS":
            ["CUSTOMER_ID",
             "CUSTOMER_NAME",
             "CUSTOMER_ADDRESS",
             "CUSTOMER_DOB",
             "LOAD_DATE",
             "SOURCE"]
        ,
        "RAW_STAGE_LOGIN":
            ["CUSTOMER_ID",
             "LAST_LOGIN_DATE",
             "DEVICE_USED",
             "LOAD_DATE",
             "SOURCE"]
        ,
        "RAW_STAGE_PROFILE":
            ["CUSTOMER_ID",
             "DASHBOARD_COLOUR",
             "DISPLAY_NAME",
             "LOAD_DATE",
             "SOURCE"]
    }
    # dbt seed "+column_types" overrides so seeds load with explicit types.
    context.seed_config = {
        "RAW_STAGE_DETAILS": {
            "+column_types": {
                "CUSTOMER_ID": "VARCHAR(5)",
                "CUSTOMER_NAME": "VARCHAR(10)",
                "CUSTOMER_ADDRESS": "VARCHAR(30)",
                "CUSTOMER_DOB": "DATE",
                "LOAD_DATE": "DATETIME",
                "SOURCE": "VARCHAR(10)"
            }
        },
        "RAW_STAGE_LOGIN": {
            "+column_types": {
                "CUSTOMER_ID": "VARCHAR(5)",
                "LAST_LOGIN_DATE": "DATETIME",
                "DEVICE_USED": "VARCHAR(10)",
                "LOAD_DATE": "DATETIME",
                "SOURCE": "VARCHAR(10)"
            }
        },
        "RAW_STAGE_PROFILE": {
            "+column_types": {
                "CUSTOMER_ID": "VARCHAR(5)",
                "DASHBOARD_COLOUR": "VARCHAR(10)",
                "DISPLAY_NAME": "VARCHAR(10)",
                "LOAD_DATE": "DATETIME",
                "SOURCE": "VARCHAR(10)"
            }
        },
        "HUB_CUSTOMER": {
            "+column_types": {
                "CUSTOMER_PK": "BINARY(16)",
                "CUSTOMER_ID": "VARCHAR(5)",
                "LOAD_DATE": "DATETIME",
                "SOURCE": "VARCHAR(10)"
            }
        },
        "SAT_CUSTOMER_DETAILS": {
            "+column_types": {
                "CUSTOMER_PK": "BINARY(16)",
                "HASHDIFF": "BINARY(16)",
                "CUSTOMER_NAME": "VARCHAR(10)",
                "CUSTOMER_ADDRESS": "VARCHAR(30)",
                "CUSTOMER_DOB": "DATE",
                "EFFECTIVE_FROM": "DATETIME",
                "LOAD_DATE": "DATETIME",
                "SOURCE": "VARCHAR(10)"
            }
        },
        "SAT_CUSTOMER_LOGIN": {
            "+column_types": {
                "CUSTOMER_PK": "BINARY(16)",
                "HASHDIFF": "BINARY(16)",
                "DEVICE_USED": "VARCHAR(10)",
                "LAST_LOGIN_DATE": "DATETIME",
                "EFFECTIVE_FROM": "DATETIME",
                "LOAD_DATE": "DATETIME",
                "SOURCE": "VARCHAR(10)"
            }
        },
        "SAT_CUSTOMER_PROFILE": {
            "+column_types": {
                "CUSTOMER_PK": "BINARY(16)",
                "HASHDIFF": "BINARY(16)",
                "DASHBOARD_COLOUR": "VARCHAR(10)",
                "DISPLAY_NAME": "VARCHAR(10)",
                "EFFECTIVE_FROM": "DATETIME",
                "LOAD_DATE": "DATETIME",
                "SOURCE": "VARCHAR(10)"
            }
        },
        "AS_OF_DATE": {
            "+column_types": {
                "AS_OF_DATE": "DATETIME"
            }
        },
        "PIT_CUSTOMER": {
            "+column_types": {
                "AS_OF_DATE": "DATETIME",
                "CUSTOMER_PK": "BINARY(16)",
                "SAT_CUSTOMER_DETAILS_PK": "BINARY(16)",
                "SAT_CUSTOMER_DETAILS_LDTS": "DATETIME",
                "SAT_CUSTOMER_LOGIN_PK": "BINARY(16)",
                "SAT_CUSTOMER_LOGIN_LDTS": "DATETIME",
                "SAT_CUSTOMER_PROFILE_PK": "BINARY(16)",
                "SAT_CUSTOMER_PROFILE_LDTS": "DATETIME"
            }
        }
    }
@fixture
def pit_one_sat_sqlserver(context):
    """
    Define the structures and metadata to perform a PIT load with a single
    satellite, in two variants: DATE-grained (LOAD_DATE) and timestamp-grained
    (``_TS`` structures using LOAD_DATETIME).

    Seed column types use VARCHAR(50)/BINARY(16)/DATETIME, i.e. SQL Server
    style types. Four PIT tables are defined (base, TS, LG, HG) over the
    same hub/satellite pairs.
    """
    context.vault_structure_type = "pit"
    # Hash-key definitions for the DATE-grained and TS-grained stage variants.
    context.hashed_columns = {
        "STG_CUSTOMER_DETAILS": {
            "CUSTOMER_PK": "CUSTOMER_ID",
            "HASHDIFF": {"is_hashdiff": True,
                         "columns": ["CUSTOMER_ADDRESS", "CUSTOMER_DOB", "CUSTOMER_NAME"]
                         }
        },
        "STG_CUSTOMER_DETAILS_TS": {
            "CUSTOMER_PK": "CUSTOMER_ID",
            "HASHDIFF": {"is_hashdiff": True,
                         "columns": ["CUSTOMER_ADDRESS", "CUSTOMER_DOB", "CUSTOMER_NAME"]
                         }
        }
    }
    # EFFECTIVE_FROM derives from LOAD_DATE (base) or LOAD_DATETIME (_TS).
    context.derived_columns = {
        "STG_CUSTOMER_DETAILS": {
            "EFFECTIVE_FROM": "LOAD_DATE"
        },
        "STG_CUSTOMER_DETAILS_TS": {
            "EFFECTIVE_FROM": "LOAD_DATETIME"
        }
    }
    # Hub/satellite/PIT metadata; *_TS structures mirror the base ones but
    # key on LOAD_DATETIME instead of LOAD_DATE.
    context.vault_structure_columns = {
        "HUB_CUSTOMER": {
            "source_model": ["STG_CUSTOMER_DETAILS",
                             ],
            "src_pk": "CUSTOMER_PK",
            "src_nk": "CUSTOMER_ID",
            "src_ldts": "LOAD_DATE",
            "src_source": "SOURCE"
        },
        "HUB_CUSTOMER_TS": {
            "source_model": ["STG_CUSTOMER_DETAILS_TS",
                             ],
            "src_pk": "CUSTOMER_PK",
            "src_nk": "CUSTOMER_ID",
            "src_ldts": "LOAD_DATETIME",
            "src_source": "SOURCE"
        },
        "SAT_CUSTOMER_DETAILS": {
            "source_model": "STG_CUSTOMER_DETAILS",
            "src_pk": "CUSTOMER_PK",
            "src_hashdiff": "HASHDIFF",
            "src_payload": ["CUSTOMER_NAME", "CUSTOMER_ADDRESS", "CUSTOMER_DOB"],
            "src_eff": "EFFECTIVE_FROM",
            "src_ldts": "LOAD_DATE",
            "src_source": "SOURCE"
        },
        "SAT_CUSTOMER_DETAILS_TS": {
            "source_model": "STG_CUSTOMER_DETAILS_TS",
            "src_pk": "CUSTOMER_PK",
            "src_hashdiff": "HASHDIFF",
            "src_payload": ["CUSTOMER_NAME", "CUSTOMER_ADDRESS", "CUSTOMER_DOB"],
            "src_eff": "EFFECTIVE_FROM",
            "src_ldts": "LOAD_DATETIME",
            "src_source": "SOURCE"
        },
        "PIT_CUSTOMER": {
            "source_model": "HUB_CUSTOMER",
            "src_pk": "CUSTOMER_PK",
            "as_of_dates_table": "AS_OF_DATE",
            "satellites":
                {
                    "SAT_CUSTOMER_DETAILS": {
                        "pk":
                            {"PK": "CUSTOMER_PK"},
                        "ldts":
                            {"LDTS": "LOAD_DATE"}
                    }
                },
            "stage_tables":
                {
                    "STG_CUSTOMER_DETAILS": "LOAD_DATE",
                },
            "src_ldts": "LOAD_DATE"
        },
        "PIT_CUSTOMER_TS": {
            "source_model": "HUB_CUSTOMER_TS",
            "src_pk": "CUSTOMER_PK",
            "as_of_dates_table": "AS_OF_DATE",
            "satellites":
                {
                    "SAT_CUSTOMER_DETAILS_TS": {
                        "pk":
                            {"PK": "CUSTOMER_PK"},
                        "ldts":
                            {"LDTS": "LOAD_DATETIME"}
                    }
                },
            "stage_tables":
                {
                    "STG_CUSTOMER_DETAILS_TS": "LOAD_DATETIME",
                },
            "src_ldts": "LOAD_DATETIME"
        },
        # _LG mirrors _TS metadata under a different PIT table name.
        "PIT_CUSTOMER_LG": {
            "source_model": "HUB_CUSTOMER_TS",
            "src_pk": "CUSTOMER_PK",
            "as_of_dates_table": "AS_OF_DATE",
            "satellites":
                {
                    "SAT_CUSTOMER_DETAILS_TS": {
                        "pk":
                            {"PK": "CUSTOMER_PK"},
                        "ldts":
                            {"LDTS": "LOAD_DATETIME"}
                    }
                },
            "stage_tables":
                {
                    "STG_CUSTOMER_DETAILS_TS": "LOAD_DATETIME",
                },
            "src_ldts": "LOAD_DATETIME"
        },
        # _HG mirrors the DATE-grained PIT_CUSTOMER metadata under another name.
        "PIT_CUSTOMER_HG": {
            "source_model": "HUB_CUSTOMER",
            "src_pk": "CUSTOMER_PK",
            "as_of_dates_table": "AS_OF_DATE",
            "satellites":
                {
                    "SAT_CUSTOMER_DETAILS": {
                        "pk":
                            {"PK": "CUSTOMER_PK"},
                        "ldts":
                            {"LDTS": "LOAD_DATE"}
                    }
                },
            "stage_tables":
                {
                    "STG_CUSTOMER_DETAILS": "LOAD_DATE",
                },
            "src_ldts": "LOAD_DATE"
        }
    }
    # Column order of the raw staging seed files.
    context.stage_columns = {
        "RAW_STAGE_DETAILS":
            ["CUSTOMER_ID",
             "CUSTOMER_NAME",
             "CUSTOMER_ADDRESS",
             "CUSTOMER_DOB",
             "LOAD_DATE",
             "SOURCE"],
        "RAW_STAGE_DETAILS_TS":
            ["CUSTOMER_ID",
             "CUSTOMER_NAME",
             "CUSTOMER_ADDRESS",
             "CUSTOMER_DOB",
             "LOAD_DATETIME",
             "SOURCE"]
    }
    # dbt seed "+column_types" overrides so seeds load with explicit types.
    context.seed_config = {
        "RAW_STAGE_DETAILS": {
            "+column_types": {
                "CUSTOMER_ID": "VARCHAR(50)",
                "CUSTOMER_NAME": "VARCHAR(50)",
                "CUSTOMER_ADDRESS": "VARCHAR(50)",
                "CUSTOMER_DOB": "DATE",
                "LOAD_DATE": "DATE",
                "SOURCE": "VARCHAR(50)"
            }
        },
        "RAW_STAGE_DETAILS_TS": {
            "+column_types": {
                "CUSTOMER_ID": "VARCHAR(50)",
                "CUSTOMER_NAME": "VARCHAR(50)",
                "CUSTOMER_ADDRESS": "VARCHAR(50)",
                "CUSTOMER_DOB": "DATE",
                "LOAD_DATETIME": "DATETIME",
                "SOURCE": "VARCHAR(50)"
            }
        },
        "HUB_CUSTOMER": {
            "+column_types": {
                "CUSTOMER_PK": "BINARY(16)",
                "CUSTOMER_ID": "VARCHAR(50)",
                "LOAD_DATE": "DATE",
                "SOURCE": "VARCHAR(50)"
            }
        },
        "HUB_CUSTOMER_TS": {
            "+column_types": {
                "CUSTOMER_PK": "BINARY(16)",
                "CUSTOMER_ID": "VARCHAR(50)",
                "LOAD_DATETIME": "DATETIME",
                "SOURCE": "VARCHAR(50)"
            }
        },
        "SAT_CUSTOMER_DETAILS": {
            "+column_types": {
                "CUSTOMER_PK": "BINARY(16)",
                "HASHDIFF": "BINARY(16)",
                "CUSTOMER_NAME": "VARCHAR(50)",
                "CUSTOMER_ADDRESS": "VARCHAR(50)",
                "CUSTOMER_DOB": "DATE",
                "EFFECTIVE_FROM": "DATE",
                "LOAD_DATE": "DATE",
                "SOURCE": "VARCHAR(50)"
            }
        },
        "SAT_CUSTOMER_DETAILS_TS": {
            "+column_types": {
                "CUSTOMER_PK": "BINARY(16)",
                "HASHDIFF": "BINARY(16)",
                "CUSTOMER_NAME": "VARCHAR(50)",
                "CUSTOMER_ADDRESS": "VARCHAR(50)",
                "CUSTOMER_DOB": "DATE",
                "EFFECTIVE_FROM": "DATETIME",
                "LOAD_DATETIME": "DATETIME",
                "SOURCE": "VARCHAR(50)"
            }
        },
        "AS_OF_DATE": {
            "+column_types": {
                "AS_OF_DATE": "DATETIME"
            }
        },
        "PIT_CUSTOMER": {
            "+column_types": {
                "AS_OF_DATE": "DATETIME",
                "CUSTOMER_PK": "BINARY(16)",
                "SAT_CUSTOMER_DETAILS_PK": "BINARY(16)",
                "SAT_CUSTOMER_DETAILS_LDTS": "DATETIME"
            }
        },
        "PIT_CUSTOMER_TS": {
            "+column_types": {
                "AS_OF_DATE": "DATETIME",
                "CUSTOMER_PK": "BINARY(16)",
                "SAT_CUSTOMER_DETAILS_TS_PK": "BINARY(16)",
                "SAT_CUSTOMER_DETAILS_TS_LDTS": "DATETIME"
            }
        },
        "PIT_CUSTOMER_LG": {
            "+column_types": {
                "AS_OF_DATE": "DATETIME",
                "CUSTOMER_PK": "BINARY(16)",
                "SAT_CUSTOMER_DETAILS_TS_PK": "BINARY(16)",
                "SAT_CUSTOMER_DETAILS_TS_LDTS": "DATETIME"
            }
        },
        "PIT_CUSTOMER_HG": {
            "+column_types": {
                "AS_OF_DATE": "DATETIME",
                "CUSTOMER_PK": "BINARY(16)",
                "SAT_CUSTOMER_DETAILS_PK": "BINARY(16)",
                "SAT_CUSTOMER_DETAILS_LDTS": "DATETIME"
            }
        }
    }
@fixture
def pit_two_sats_sqlserver(context):
"""
Define the structures and metadata to perform PIT load
"""
context.vault_structure_type = "pit"
context.hashed_columns = {
"STG_CUSTOMER_DETAILS": {
"CUSTOMER_PK": "CUSTOMER_ID",
"HASHDIFF": {"is_hashdiff": True,
"columns": ["CUSTOMER_ADDRESS", "CUSTOMER_DOB", "CUSTOMER_NAME"]
}
},
"STG_CUSTOMER_DETAILS_TS": {
"CUSTOMER_PK": "CUSTOMER_ID",
"HASHDIFF": {"is_hashdiff": True,
"columns": ["CUSTOMER_ADDRESS", "CUSTOMER_DOB", "CUSTOMER_NAME"]
}
},
"STG_CUSTOMER_LOGIN": {
"CUSTOMER_PK": "CUSTOMER_ID",
"HASHDIFF": {"is_hashdiff": True,
"columns": ["DEVICE_USED", "LAST_LOGIN_DATE"]
}
},
"STG_CUSTOMER_LOGIN_TS": {
"CUSTOMER_PK": "CUSTOMER_ID",
"HASHDIFF": {"is_hashdiff": True,
"columns": ["DEVICE_USED", "LAST_LOGIN_DATE"]
}
}
}
context.derived_columns = {
"STG_CUSTOMER_DETAILS": {
"EFFECTIVE_FROM": "LOAD_DATE"
},
"STG_CUSTOMER_DETAILS_TS": {
"EFFECTIVE_FROM": "LOAD_DATETIME"
},
"STG_CUSTOMER_LOGIN": {
"EFFECTIVE_FROM": "LOAD_DATE"
},
"STG_CUSTOMER_LOGIN_TS": {
"EFFECTIVE_FROM": "LOAD_DATETIME"
}
}
context.vault_structure_columns = {
"HUB_CUSTOMER": {
"source_model": ["STG_CUSTOMER_DETAILS",
],
"src_pk": "CUSTOMER_PK",
"src_nk": "CUSTOMER_ID",
"src_ldts": "LOAD_DATE",
"src_source": "SOURCE"
},
"HUB_CUSTOMER_TS": {
"source_model": ["STG_CUSTOMER_DETAILS_TS",
],
"src_pk": "CUSTOMER_PK",
"src_nk": "CUSTOMER_ID",
"src_ldts": "LOAD_DATETIME",
"src_source": "SOURCE"
},
"SAT_CUSTOMER_DETAILS": {
"source_model": "STG_CUSTOMER_DETAILS",
"src_pk": "CUSTOMER_PK",
"src_hashdiff": "HASHDIFF",
"src_payload": ["CUSTOMER_NAME", "CUSTOMER_ADDRESS", "CUSTOMER_DOB"],
"src_eff": "EFFECTIVE_FROM",
"src_ldts": "LOAD_DATE",
"src_source": "SOURCE"
},
"SAT_CUSTOMER_DETAILS_TS": {
"source_model": "STG_CUSTOMER_DETAILS_TS",
"src_pk": "CUSTOMER_PK",
"src_hashdiff": "HASHDIFF",
"src_payload": ["CUSTOMER_NAME", "CUSTOMER_ADDRESS", "CUSTOMER_DOB"],
"src_eff": "EFFECTIVE_FROM",
"src_ldts": "LOAD_DATETIME",
"src_source": "SOURCE"
},
"SAT_CUSTOMER_LOGIN": {
"source_model": "STG_CUSTOMER_LOGIN",
"src_pk": "CUSTOMER_PK",
"src_hashdiff": "HASHDIFF",
"src_payload": ["DEVICE_USED", "LAST_LOGIN_DATE"],
"src_eff": "EFFECTIVE_FROM",
"src_ldts": "LOAD_DATE",
"src_source": "SOURCE"
},
"SAT_CUSTOMER_LOGIN_TS": {
"source_model": "STG_CUSTOMER_LOGIN_TS",
"src_pk": "CUSTOMER_PK",
"src_hashdiff": "HASHDIFF",
"src_payload": ["DEVICE_USED", "LAST_LOGIN_DATE"],
"src_eff": "EFFECTIVE_FROM",
"src_ldts": "LOAD_DATETIME",
"src_source": "SOURCE"
},
"PIT_CUSTOMER": {
"source_model": "HUB_CUSTOMER",
"src_pk": "CUSTOMER_PK",
"as_of_dates_table": "AS_OF_DATE",
"satellites":
{
"SAT_CUSTOMER_DETAILS": {
"pk":
{"PK": "CUSTOMER_PK"},
"ldts":
{"LDTS": "LOAD_DATE"}
},
"SAT_CUSTOMER_LOGIN": {
"pk":
{"PK": "CUSTOMER_PK"},
"ldts":
{"LDTS": "LOAD_DATE"}
}
},
"stage_tables":
{
"STG_CUSTOMER_DETAILS": "LOAD_DATE",
"STG_CUSTOMER_LOGIN": "LOAD_DATE"
},
"src_ldts": "LOAD_DATE"
},
"PIT_CUSTOMER_TS": {
"source_model": "HUB_CUSTOMER_TS",
"src_pk": "CUSTOMER_PK",
"as_of_dates_table": "AS_OF_DATE",
"satellites":
{
"SAT_CUSTOMER_DETAILS_TS": {
"pk":
{"PK": "CUSTOMER_PK"},
"ldts":
{"LDTS": "LOAD_DATETIME"}
},
"SAT_CUSTOMER_LOGIN_TS": {
"pk":
{"PK": "CUSTOMER_PK"},
"ldts":
{"LDTS": "LOAD_DATETIME"}
}
},
"stage_tables":
{
"STG_CUSTOMER_DETAILS_TS": "LOAD_DATETIME",
"STG_CUSTOMER_LOGIN_TS": "LOAD_DATETIME",
},
"src_ldts": "LOAD_DATETIME"
},
"PIT_CUSTOMER_LG": {
"source_model": "HUB_CUSTOMER_TS",
"src_pk": "CUSTOMER_PK",
"as_of_dates_table": "AS_OF_DATE",
"satellites":
{
"SAT_CUSTOMER_DETAILS_TS": {
"pk":
{"PK": "CUSTOMER_PK"},
"ldts":
{"LDTS": "LOAD_DATETIME"}
},
"SAT_CUSTOMER_LOGIN_TS": {
"pk":
{"PK": "CUSTOMER_PK"},
"ldts":
{"LDTS": "LOAD_DATETIME"}
}
},
"stage_tables":
{
"STG_CUSTOMER_DETAILS_TS": "LOAD_DATETIME",
"STG_CUSTOMER_LOGIN_TS": "LOAD_DATETIME",
},
"src_ldts": "LOAD_DATETIME"
},
"PIT_CUSTOMER_HG": {
"source_model": "HUB_CUSTOMER",
"src_pk": "CUSTOMER_PK",
"as_of_dates_table": "AS_OF_DATE",
"satellites":
{
"SAT_CUSTOMER_DETAILS": {
"pk":
{"PK": "CUSTOMER_PK"},
"ldts":
{"LDTS": "LOAD_DATE"}
},
"SAT_CUSTOMER_LOGIN": {
"pk":
{"PK": "CUSTOMER_PK"},
"ldts":
{"LDTS": "LOAD_DATE"}
}
},
"stage_tables":
{
"STG_CUSTOMER_DETAILS": "LOAD_DATE",
"STG_CUSTOMER_LOGIN": "LOAD_DATE",
},
"src_ldts": "LOAD_DATE"
}
}
context.stage_columns = {
"RAW_STAGE_DETAILS":
["CUSTOMER_ID",
"CUSTOMER_NAME",
"CUSTOMER_ADDRESS",
"CUSTOMER_DOB",
"LOAD_DATE",
"SOURCE"],
"RAW_STAGE_DETAILS_TS":
["CUSTOMER_ID",
"CUSTOMER_NAME",
"CUSTOMER_ADDRESS",
"CUSTOMER_DOB",
"LOAD_DATETIME",
"SOURCE"],
"RAW_STAGE_LOGIN":
["CUSTOMER_ID",
"LAST_LOGIN_DATE",
"DEVICE_USED",
"LOAD_DATE",
"SOURCE"],
"RAW_STAGE_LOGIN_TS":
["CUSTOMER_ID",
"LAST_LOGIN_DATE",
"DEVICE_USED",
"LOAD_DATETIME",
"SOURCE"]
}
context.seed_config = {
"RAW_STAGE_DETAILS": {
"+column_types": {
"CUSTOMER_ID": "VARCHAR(50)",
"CUSTOMER_NAME": "VARCHAR(50)",
"CUSTOMER_ADDRESS": "VARCHAR(50)",
"CUSTOMER_DOB": "DATE",
"LOAD_DATE": "DATE",
"SOURCE": "VARCHAR(50)"
}
},
"RAW_STAGE_DETAILS_TS": {
"+column_types": {
"CUSTOMER_ID": "VARCHAR(50)",
"CUSTOMER_NAME": "VARCHAR(50)",
"CUSTOMER_ADDRESS": "VARCHAR(50)",
"CUSTOMER_DOB": "DATE",
"LOAD_DATETIME": "DATETIME",
"SOURCE": "VARCHAR(50)"
}
},
"RAW_STAGE_LOGIN": {
"+column_types": {
"CUSTOMER_ID": "VARCHAR(50)",
"LAST_LOGIN_DATE": "DATETIME",
"DEVICE_USED": "VARCHAR(50)",
"LOAD_DATE": "DATE",
"SOURCE": "VARCHAR(50)"
}
},
"RAW_STAGE_LOGIN_TS": {
"+column_types": {
"CUSTOMER_ID": "VARCHAR(50)",
"LAST_LOGIN_DATE": "DATETIME",
"DEVICE_USED": "VARCHAR(50)",
"LOAD_DATETIME": "DATETIME",
"SOURCE": "VARCHAR(50)"
}
},
"HUB_CUSTOMER": {
"+column_types": {
"CUSTOMER_PK": "BINARY(16)",
"CUSTOMER_ID": "VARCHAR(50)",
"LOAD_DATE": "DATE",
"SOURCE": "VARCHAR(50)"
}
},
"HUB_CUSTOMER_TS": {
"+column_types": {
"CUSTOMER_PK": "BINARY(16)",
"CUSTOMER_ID": "VARCHAR(50)",
"LOAD_DATETIME": "DATETIME",
"SOURCE": "VARCHAR(50)"
}
},
"SAT_CUSTOMER_DETAILS": {
"+column_types": {
"CUSTOMER_PK": "BINARY(16)",
"HASHDIFF": "BINARY(16)",
"CUSTOMER_NAME": "VARCHAR(50)",
"CUSTOMER_ADDRESS": "VARCHAR(50)",
"CUSTOMER_DOB": "DATE",
"EFFECTIVE_FROM": "DATE",
"LOAD_DATE": "DATE",
"SOURCE": "VARCHAR(50)"
}
},
"SAT_CUSTOMER_DETAILS_TS": {
"+column_types": {
"CUSTOMER_PK": "BINARY(16)",
"HASHDIFF": "BINARY(16)",
"CUSTOMER_NAME": "VARCHAR(50)",
"CUSTOMER_ADDRESS": "VARCHAR(50)",
"CUSTOMER_DOB": "DATE",
"EFFECTIVE_FROM": "DATETIME",
"LOAD_DATETIME": "DATETIME",
"SOURCE": "VARCHAR(50)"
}
},
"SAT_CUSTOMER_LOGIN": {
"+column_types": {
"CUSTOMER_PK": "BINARY(16)",
"HASHDIFF": "BINARY(16)",
"DEVICE_USED": "VARCHAR(50)",
"LAST_LOGIN_DATE": "DATETIME",
"EFFECTIVE_FROM": "DATE",
"LOAD_DATE": "DATE",
"SOURCE": "VARCHAR(50)"
}
},
"SAT_CUSTOMER_LOGIN_TS": {
"+column_types": {
"CUSTOMER_PK": "BINARY(16)",
"HASHDIFF": "BINARY(16)",
"DEVICE_USED": "VARCHAR(50)",
"LAST_LOGIN_DATE": "DATETIME",
"EFFECTIVE_FROM": "DATETIME",
"LOAD_DATETIME": "DATETIME",
"SOURCE": "VARCHAR(50)"
}
},
"AS_OF_DATE": {
"+column_types": {
"AS_OF_DATE": "DATETIME"
}
},
"PIT_CUSTOMER": {
"+column_types": {
"AS_OF_DATE": "DATETIME",
"CUSTOMER_PK": "BINARY(16)",
"SAT_CUSTOMER_DETAILS_PK": "BINARY(16)",
"SAT_CUSTOMER_DETAILS_LDTS": "DATETIME",
"SAT_CUSTOMER_LOGIN_PK": "BINARY(16)",
"SAT_CUSTOMER_LOGIN_LDTS": "DATETIME"
}
},
"PIT_CUSTOMER_TS": {
"+column_types": {
"AS_OF_DATE": "DATETIME",
"CUSTOMER_PK": "BINARY(16)",
"SAT_CUSTOMER_DETAILS_TS_PK": "BINARY(16)",
"SAT_CUSTOMER_DETAILS_TS_LDTS": "DATETIME",
"SAT_CUSTOMER_LOGIN_TS_PK": "BINARY(16)",
"SAT_CUSTOMER_LOGIN_TS_LDTS": "DATETIME"
}
},
"PIT_CUSTOMER_LG": {
"+column_types": {
"AS_OF_DATE": "DATETIME",
"CUSTOMER_PK": "BINARY(16)",
"SAT_CUSTOMER_DETAILS_TS_PK": "BINARY(16)",
"SAT_CUSTOMER_DETAILS_TS_LDTS": "DATETIME",
"SAT_CUSTOMER_LOGIN_TS_PK": "BINARY(16)",
"SAT_CUSTOMER_LOGIN_TS_LDTS": "DATETIME"
}
},
"PIT_CUSTOMER_HG": {
"+column_types": {
"AS_OF_DATE": "DATETIME",
"CUSTOMER_PK": "BINARY(16)",
"SAT_CUSTOMER_DETAILS_PK": "BINARY(16)",
"SAT_CUSTOMER_DETAILS_LDTS": "DATETIME",
"SAT_CUSTOMER_LOGIN_PK": "BINARY(16)",
"SAT_CUSTOMER_LOGIN_LDTS": "DATETIME"
}
}
}
| 33.293627
| 89
| 0.418997
| 6,654
| 87,762
| 5.064172
| 0.012023
| 0.06232
| 0.040597
| 0.030715
| 0.998665
| 0.998665
| 0.99638
| 0.988337
| 0.974449
| 0.971422
| 0
| 0.006588
| 0.450001
| 87,762
| 2,635
| 90
| 33.306262
| 0.69152
| 0.005629
| 0
| 0.773265
| 0
| 0
| 0.403113
| 0.047686
| 0
| 0
| 0
| 0
| 0
| 1
| 0.003549
| false
| 0
| 0.000394
| 0
| 0.003943
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
776e3a1ebfc475a08f1ca5cdf7ac7816f0fc1b69
| 12,441
|
py
|
Python
|
mainapp/models.py
|
mmiyaji/Dansible
|
06d5ea1fce1345388e7101db33f23e6460e3c3cf
|
[
"MIT"
] | null | null | null |
mainapp/models.py
|
mmiyaji/Dansible
|
06d5ea1fce1345388e7101db33f23e6460e3c3cf
|
[
"MIT"
] | null | null | null |
mainapp/models.py
|
mmiyaji/Dansible
|
06d5ea1fce1345388e7101db33f23e6460e3c3cf
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
# encoding: utf-8
"""
models.py
Created by mmiyaji on 2016-07-11.
Copyright (c) 2016 ruhenheim.org. All rights reserved.
"""
from __future__ import unicode_literals
import datetime, time, uuid
from django.db import models
from django.contrib.auth import models as auth_models
class ServerAttribute(models.Model):
    """
    Attribute model: a named, soft-deletable server attribute.

    Rows are soft-deleted via ``isvalid``; ``get_all``/``get_items`` only
    return valid rows, while ``get_list``/``get_by_id``/``get_by_name``
    ignore the flag.
    """
    name = models.CharField(max_length = 100, default="", blank=False, null=False, db_index=True)
    comment = models.TextField(default="")
    # NOTE(review): uuid.uuid4().hex is evaluated ONCE at import time, so every
    # new row shares the same default and the unique constraint rejects the
    # second insert.  The default should be a module-level callable Django can
    # invoke per row -- TODO fix with a shared helper (left unchanged here to
    # avoid a migration surprise in a single-block edit).
    uuid = models.CharField(max_length = 32, default=uuid.uuid4().hex, editable=False, unique=True)
    isvalid = models.BooleanField(default=True, db_index=True)
    updated_at = models.DateTimeField(auto_now = True, db_index=True)
    created_at = models.DateTimeField(auto_now_add = True, db_index=True)

    @staticmethod
    def get_all():
        """Return a queryset of all valid (non-deleted) attributes."""
        return ServerAttribute.objects.filter(isvalid__exact=True)

    @staticmethod
    def get_items(page=0, span=10):
        """Return (page slice of valid rows, total valid count).

        ``page`` is 1-based; 0 (the default) is treated as the first page.
        Bug fix: the original assigned ``endpage`` only inside the
        ``if page != 0`` branch, so the default call raised NameError.
        """
        result = ServerAttribute.objects.filter(isvalid__exact=True)
        if page != 0:
            offset = page * span - span
        else:
            offset = 0
        endpage = offset + span
        return result[offset:endpage], result.count()

    @staticmethod
    def get_list():
        """Return every row, including soft-deleted ones."""
        return ServerAttribute.objects.all()

    @staticmethod
    def get_by_id(id):
        """Return the row with primary key *id*, or None if missing/invalid."""
        try:
            return ServerAttribute.objects.get(id=int(id))
        except (ValueError, TypeError, ServerAttribute.DoesNotExist):
            # Bad id value or no such row -- preserve the None contract,
            # but no longer swallow unrelated errors (was a bare except).
            return None

    @staticmethod
    def get_by_name(name=""):
        """Return the unique row named *name*, or None if absent/ambiguous."""
        try:
            return ServerAttribute.objects.filter(name=name).get()
        except (ServerAttribute.DoesNotExist,
                ServerAttribute.MultipleObjectsReturned):
            return None

    def __unicode__(self):
        return self.name

    def get_absolute_url(self):
        return "/server_attribute/%s" % self.id
class Server(models.Model):
    """
    Server model: a named, soft-deletable server record.

    Rows are soft-deleted via ``isvalid``; ``get_all``/``get_items`` only
    return valid rows, while the other lookup helpers ignore the flag.
    """
    name = models.CharField(max_length = 100, default="", blank=False, null=False, db_index=True)
    comment = models.TextField(default="")
    # NOTE(review): uuid.uuid4().hex runs ONCE at import, so every new row gets
    # the same default and the unique constraint rejects the second insert.
    # Should be a module-level callable -- TODO fix with a shared helper.
    uuid = models.CharField(max_length = 32, default=uuid.uuid4().hex, editable=False, unique=True)
    isvalid = models.BooleanField(default=True, db_index=True)
    updated_at = models.DateTimeField(auto_now = True, db_index=True)
    created_at = models.DateTimeField(auto_now_add = True, db_index=True)

    @staticmethod
    def get_all():
        """Return a queryset of all valid (non-deleted) servers."""
        return Server.objects.filter(isvalid__exact=True)

    @staticmethod
    def get_items(page=0, span=10):
        """Return (page slice of valid rows, total valid count).

        ``page`` is 1-based; 0 (the default) is treated as the first page.
        Bug fix: ``endpage`` was only assigned inside the ``if page != 0``
        branch, so the default call raised NameError.
        """
        result = Server.objects.filter(isvalid__exact=True)
        if page != 0:
            offset = page * span - span
        else:
            offset = 0
        endpage = offset + span
        return result[offset:endpage], result.count()

    @staticmethod
    def get_list():
        """Return every row, including soft-deleted ones."""
        return Server.objects.all()

    @staticmethod
    def get_by_id(id):
        """Return the row with primary key *id*, or None if missing/invalid."""
        try:
            return Server.objects.get(id=int(id))
        except (ValueError, TypeError, Server.DoesNotExist):
            # Bad id value or no such row (was a bare except).
            return None

    @staticmethod
    def get_by_uuid(target_uuid):
        """Return the first row whose uuid matches *target_uuid*, or None."""
        try:
            return Server.objects.filter(uuid__exact=target_uuid)[0]
        except IndexError:
            # Empty queryset -- indexing [0] raises IndexError.
            return None

    @staticmethod
    def get_by_name(name=""):
        """Return the unique row named *name*, or None if absent/ambiguous."""
        try:
            return Server.objects.filter(name=name).get()
        except (Server.DoesNotExist, Server.MultipleObjectsReturned):
            return None

    def __unicode__(self):
        return self.name

    def get_absolute_url(self):
        return "/server/%s" % self.id
class OSTemplate(models.Model):
    """
    OS template model: a named OS template tied to a ServerAttribute.

    Rows are soft-deleted via ``isvalid``; ``get_all``/``get_items`` only
    return valid rows, while the other lookup helpers ignore the flag.
    """
    name = models.CharField(max_length = 100, default="", blank=False, null=False, db_index=True)
    comment = models.TextField(default="")
    server_attribute = models.ForeignKey(ServerAttribute, db_index=True)
    # NOTE(review): uuid.uuid4().hex runs ONCE at import, so every new row gets
    # the same default and the unique constraint rejects the second insert.
    # Should be a module-level callable -- TODO fix with a shared helper.
    uuid = models.CharField(max_length = 32, default=uuid.uuid4().hex, editable=False, unique=True)
    isvalid = models.BooleanField(default=True, db_index=True)
    updated_at = models.DateTimeField(auto_now = True, db_index=True)
    created_at = models.DateTimeField(auto_now_add = True, db_index=True)

    @staticmethod
    def get_all():
        """Return a queryset of all valid (non-deleted) templates."""
        return OSTemplate.objects.filter(isvalid__exact=True)

    @staticmethod
    def get_items(page=0, span=10):
        """Return (page slice of valid rows, total valid count).

        ``page`` is 1-based; 0 (the default) is treated as the first page.
        Bug fix: ``endpage`` was only assigned inside the ``if page != 0``
        branch, so the default call raised NameError.
        """
        result = OSTemplate.objects.filter(isvalid__exact=True)
        if page != 0:
            offset = page * span - span
        else:
            offset = 0
        endpage = offset + span
        return result[offset:endpage], result.count()

    @staticmethod
    def get_list():
        """Return every row, including soft-deleted ones."""
        return OSTemplate.objects.all()

    @staticmethod
    def get_by_id(id):
        """Return the row with primary key *id*, or None if missing/invalid."""
        try:
            return OSTemplate.objects.get(id=int(id))
        except (ValueError, TypeError, OSTemplate.DoesNotExist):
            # Bad id value or no such row (was a bare except).
            return None

    @staticmethod
    def get_by_name(name=""):
        """Return the unique row named *name*, or None if absent/ambiguous."""
        try:
            return OSTemplate.objects.filter(name=name).get()
        except (OSTemplate.DoesNotExist, OSTemplate.MultipleObjectsReturned):
            return None

    def __unicode__(self):
        return self.name

    def get_absolute_url(self):
        return "/os_template/%s" % self.id
class ConfigFile(models.Model):
    """
    Configuration-file model: a managed file with path/permission/ownership.

    Rows are soft-deleted via ``isvalid``; ``get_all``/``get_items`` only
    return valid rows, while the other lookup helpers ignore the flag.
    """
    name = models.CharField(max_length = 100, default="", blank=False, null=False, db_index=True)
    comment = models.TextField(default="")
    file_path = models.CharField(max_length = 100, default="", blank=False, null=False, db_index=True)
    # NOTE(review): "permittion" is a typo for "permission", but renaming the
    # field would change the DB column -- left as-is.
    file_permittion = models.CharField(max_length = 100, default="", blank=False, null=False, db_index=True)
    file_owner = models.CharField(max_length = 100, default="", blank=False, null=False, db_index=True)
    file_group = models.CharField(max_length = 100, default="", blank=False, null=False, db_index=True)
    # NOTE(review): uuid.uuid4().hex runs ONCE at import, so every new row gets
    # the same default and the unique constraint rejects the second insert.
    # Should be a module-level callable -- TODO fix with a shared helper.
    uuid = models.CharField(max_length = 32, default=uuid.uuid4().hex, editable=False, unique=True)
    isvalid = models.BooleanField(default=True, db_index=True)
    updated_at = models.DateTimeField(auto_now = True, db_index=True)
    created_at = models.DateTimeField(auto_now_add = True, db_index=True)

    @staticmethod
    def get_all():
        """Return a queryset of all valid (non-deleted) config files."""
        return ConfigFile.objects.filter(isvalid__exact=True)

    @staticmethod
    def get_items(page=0, span=10):
        """Return (page slice of valid rows, total valid count).

        ``page`` is 1-based; 0 (the default) is treated as the first page.
        Bug fix: ``endpage`` was only assigned inside the ``if page != 0``
        branch, so the default call raised NameError.
        """
        result = ConfigFile.objects.filter(isvalid__exact=True)
        if page != 0:
            offset = page * span - span
        else:
            offset = 0
        endpage = offset + span
        return result[offset:endpage], result.count()

    @staticmethod
    def get_list():
        """Return every row, including soft-deleted ones."""
        return ConfigFile.objects.all()

    @staticmethod
    def get_by_id(id):
        """Return the row with primary key *id*, or None if missing/invalid."""
        try:
            return ConfigFile.objects.get(id=int(id))
        except (ValueError, TypeError, ConfigFile.DoesNotExist):
            # Bad id value or no such row (was a bare except).
            return None

    @staticmethod
    def get_by_name(name=""):
        """Return the unique row named *name*, or None if absent/ambiguous."""
        try:
            return ConfigFile.objects.filter(name=name).get()
        except (ConfigFile.DoesNotExist, ConfigFile.MultipleObjectsReturned):
            return None

    def __unicode__(self):
        return self.name

    def get_absolute_url(self):
        return "/config_file/%s" % self.id
class ConfigData(models.Model):
    """
    Configuration-data model: inline configuration content with file metadata.

    Rows are soft-deleted via ``isvalid``; ``get_all``/``get_items`` only
    return valid rows, while the other lookup helpers ignore the flag.
    """
    name = models.CharField(max_length = 100, default="", blank=False, null=False, db_index=True)
    comment = models.TextField(default="")
    file_path = models.CharField(max_length = 100, default="", blank=False, null=False, db_index=True)
    # NOTE(review): "permittion" is a typo for "permission", but renaming the
    # field would change the DB column -- left as-is.
    file_permittion = models.CharField(max_length = 100, default="", blank=False, null=False, db_index=True)
    file_owner = models.CharField(max_length = 100, default="", blank=False, null=False, db_index=True)
    file_group = models.CharField(max_length = 100, default="", blank=False, null=False, db_index=True)
    # NOTE(review): uuid.uuid4().hex runs ONCE at import, so every new row gets
    # the same default and the unique constraint rejects the second insert.
    # Should be a module-level callable -- TODO fix with a shared helper.
    uuid = models.CharField(max_length = 32, default=uuid.uuid4().hex, editable=False, unique=True)
    isvalid = models.BooleanField(default=True, db_index=True)
    updated_at = models.DateTimeField(auto_now = True, db_index=True)
    created_at = models.DateTimeField(auto_now_add = True, db_index=True)

    @staticmethod
    def get_all():
        """Return a queryset of all valid (non-deleted) config data rows."""
        return ConfigData.objects.filter(isvalid__exact=True)

    @staticmethod
    def get_items(page=0, span=10):
        """Return (page slice of valid rows, total valid count).

        ``page`` is 1-based; 0 (the default) is treated as the first page.
        Bug fix: ``endpage`` was only assigned inside the ``if page != 0``
        branch, so the default call raised NameError.
        """
        result = ConfigData.objects.filter(isvalid__exact=True)
        if page != 0:
            offset = page * span - span
        else:
            offset = 0
        endpage = offset + span
        return result[offset:endpage], result.count()

    @staticmethod
    def get_list():
        """Return every row, including soft-deleted ones."""
        return ConfigData.objects.all()

    @staticmethod
    def get_by_id(id):
        """Return the row with primary key *id*, or None if missing/invalid."""
        try:
            return ConfigData.objects.get(id=int(id))
        except (ValueError, TypeError, ConfigData.DoesNotExist):
            # Bad id value or no such row (was a bare except).
            return None

    @staticmethod
    def get_by_name(name=""):
        """Return the unique row named *name*, or None if absent/ambiguous."""
        try:
            return ConfigData.objects.filter(name=name).get()
        except (ConfigData.DoesNotExist, ConfigData.MultipleObjectsReturned):
            return None

    def __unicode__(self):
        return self.name

    def get_absolute_url(self):
        return "/config_data/%s" % self.id
class ConfigCommand(models.Model):
    """
    Configuration-command model: a shell command to run as a given user.

    Rows are soft-deleted via ``isvalid``; ``get_all``/``get_items`` only
    return valid rows, while the other lookup helpers ignore the flag.
    """
    name = models.CharField(max_length = 100, default="", blank=False, null=False, db_index=True)
    comment = models.TextField(default="")
    command = models.CharField(max_length = 100, default="", blank=False, null=False, db_index=True)
    command_user = models.CharField(max_length = 100, default="", blank=False, null=False, db_index=True)
    # NOTE(review): uuid.uuid4().hex runs ONCE at import, so every new row gets
    # the same default and the unique constraint rejects the second insert.
    # Should be a module-level callable -- TODO fix with a shared helper.
    uuid = models.CharField(max_length = 32, default=uuid.uuid4().hex, editable=False, unique=True)
    isvalid = models.BooleanField(default=True, db_index=True)
    updated_at = models.DateTimeField(auto_now = True, db_index=True)
    created_at = models.DateTimeField(auto_now_add = True, db_index=True)

    @staticmethod
    def get_all():
        """Return a queryset of all valid (non-deleted) commands."""
        return ConfigCommand.objects.filter(isvalid__exact=True)

    @staticmethod
    def get_items(page=0, span=10):
        """Return (page slice of valid rows, total valid count).

        ``page`` is 1-based; 0 (the default) is treated as the first page.
        Bug fix: ``endpage`` was only assigned inside the ``if page != 0``
        branch, so the default call raised NameError.
        """
        result = ConfigCommand.objects.filter(isvalid__exact=True)
        if page != 0:
            offset = page * span - span
        else:
            offset = 0
        endpage = offset + span
        return result[offset:endpage], result.count()

    @staticmethod
    def get_list():
        """Return every row, including soft-deleted ones."""
        return ConfigCommand.objects.all()

    @staticmethod
    def get_by_id(id):
        """Return the row with primary key *id*, or None if missing/invalid."""
        try:
            return ConfigCommand.objects.get(id=int(id))
        except (ValueError, TypeError, ConfigCommand.DoesNotExist):
            # Bad id value or no such row (was a bare except).
            return None

    @staticmethod
    def get_by_name(name=""):
        """Return the unique row named *name*, or None if absent/ambiguous."""
        try:
            return ConfigCommand.objects.filter(name=name).get()
        except (ConfigCommand.DoesNotExist,
                ConfigCommand.MultipleObjectsReturned):
            return None

    def __unicode__(self):
        return self.name

    def get_absolute_url(self):
        return "/config_command/%s" % self.id
class Config(models.Model):
    """
    Configuration model: ties a file, data, or command entry together,
    discriminated by ``config_type``.

    Rows are soft-deleted via ``isvalid``; ``get_all``/``get_items`` only
    return valid rows, while the other lookup helpers ignore the flag.
    """
    # Discriminator values for config_type.
    CONFIG_CHOICES = (
        ('f', 'File'),
        ('d', 'Data'),
        ('c', 'Command'),
    )
    name = models.CharField(max_length = 100, default="", blank=False, null=False, db_index=True)
    comment = models.TextField(default="")
    config_type = models.CharField(max_length = 10, choices=CONFIG_CHOICES, default="c", blank=False, null=False, db_index=True)
    # NOTE(review): all three FKs are non-nullable even though config_type
    # implies only one is relevant at a time -- verify against callers.
    config_file = models.ForeignKey(ConfigFile, db_index=True)
    config_data = models.ForeignKey(ConfigData, db_index=True)
    config_command = models.ForeignKey(ConfigCommand, db_index=True)
    # NOTE(review): uuid.uuid4().hex runs ONCE at import, so every new row gets
    # the same default and the unique constraint rejects the second insert.
    # Should be a module-level callable -- TODO fix with a shared helper.
    uuid = models.CharField(max_length = 32, default=uuid.uuid4().hex, editable=False, unique=True)
    isvalid = models.BooleanField(default=True, db_index=True)
    updated_at = models.DateTimeField(auto_now = True, db_index=True)
    created_at = models.DateTimeField(auto_now_add = True, db_index=True)

    @staticmethod
    def get_all():
        """Return a queryset of all valid (non-deleted) configs."""
        return Config.objects.filter(isvalid__exact=True)

    @staticmethod
    def get_items(page=0, span=10):
        """Return (page slice of valid rows, total valid count).

        ``page`` is 1-based; 0 (the default) is treated as the first page.
        Bug fix: ``endpage`` was only assigned inside the ``if page != 0``
        branch, so the default call raised NameError.
        """
        result = Config.objects.filter(isvalid__exact=True)
        if page != 0:
            offset = page * span - span
        else:
            offset = 0
        endpage = offset + span
        return result[offset:endpage], result.count()

    @staticmethod
    def get_list():
        """Return every row, including soft-deleted ones."""
        return Config.objects.all()

    @staticmethod
    def get_by_id(id):
        """Return the row with primary key *id*, or None if missing/invalid."""
        try:
            return Config.objects.get(id=int(id))
        except (ValueError, TypeError, Config.DoesNotExist):
            # Bad id value or no such row (was a bare except).
            return None

    @staticmethod
    def get_by_name(name=""):
        """Return the unique row named *name*, or None if absent/ambiguous."""
        try:
            return Config.objects.filter(name=name).get()
        except (Config.DoesNotExist, Config.MultipleObjectsReturned):
            return None

    def __unicode__(self):
        return self.name

    def get_absolute_url(self):
        return "/config/%s" % self.id

    class Meta:
        # Newest configs first by default.
        ordering = ['-created_at']
| 33.991803
| 128
| 0.63982
| 1,507
| 12,441
| 5.112143
| 0.079628
| 0.039071
| 0.061397
| 0.077882
| 0.88188
| 0.88188
| 0.846314
| 0.836708
| 0.836708
| 0.836708
| 0
| 0.01238
| 0.246845
| 12,441
| 365
| 129
| 34.084932
| 0.809819
| 0.014951
| 0
| 0.780255
| 0
| 0
| 0.010955
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.159236
| false
| 0
| 0.012739
| 0.089172
| 0.544586
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 8
|
778fbcef49997f0ef563feb1bdf7e1bde1261eac
| 62,860
|
py
|
Python
|
sdk/python/pulumi_gcp/compute/route.py
|
sisisin/pulumi-gcp
|
af6681d70ea457843409110c1324817fe55f68ad
|
[
"ECL-2.0",
"Apache-2.0"
] | 121
|
2018-06-18T19:16:42.000Z
|
2022-03-31T06:06:48.000Z
|
sdk/python/pulumi_gcp/compute/route.py
|
sisisin/pulumi-gcp
|
af6681d70ea457843409110c1324817fe55f68ad
|
[
"ECL-2.0",
"Apache-2.0"
] | 492
|
2018-06-22T19:41:03.000Z
|
2022-03-31T15:33:53.000Z
|
sdk/python/pulumi_gcp/compute/route.py
|
sisisin/pulumi-gcp
|
af6681d70ea457843409110c1324817fe55f68ad
|
[
"ECL-2.0",
"Apache-2.0"
] | 43
|
2018-06-19T01:43:13.000Z
|
2022-03-23T22:43:37.000Z
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
__all__ = ['RouteArgs', 'Route']
@pulumi.input_type
class RouteArgs:
    # Machine-generated input type (Pulumi Terraform Bridge / tfgen): the
    # property accessors below simply mirror the constructor arguments, each
    # backed by pulumi.get/pulumi.set on the same key.  Do not edit by hand.
    def __init__(__self__, *,
                 dest_range: pulumi.Input[str],
                 network: pulumi.Input[str],
                 description: Optional[pulumi.Input[str]] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 next_hop_gateway: Optional[pulumi.Input[str]] = None,
                 next_hop_ilb: Optional[pulumi.Input[str]] = None,
                 next_hop_instance: Optional[pulumi.Input[str]] = None,
                 next_hop_instance_zone: Optional[pulumi.Input[str]] = None,
                 next_hop_ip: Optional[pulumi.Input[str]] = None,
                 next_hop_vpn_tunnel: Optional[pulumi.Input[str]] = None,
                 priority: Optional[pulumi.Input[int]] = None,
                 project: Optional[pulumi.Input[str]] = None,
                 tags: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None):
        """
        The set of arguments for constructing a Route resource.
        :param pulumi.Input[str] dest_range: The destination range of outgoing packets that this route applies to.
               Only IPv4 is supported.
        :param pulumi.Input[str] network: The network that this route applies to.
        :param pulumi.Input[str] description: An optional description of this resource. Provide this property
               when you create the resource.
        :param pulumi.Input[str] name: Name of the resource. Provided by the client when the resource is
               created. The name must be 1-63 characters long, and comply with
               RFC1035. Specifically, the name must be 1-63 characters long and
               match the regular expression `a-z?` which means
               the first character must be a lowercase letter, and all following
               characters must be a dash, lowercase letter, or digit, except the
               last character, which cannot be a dash.
        :param pulumi.Input[str] next_hop_gateway: URL to a gateway that should handle matching packets.
               Currently, you can only specify the internet gateway, using a full or
               partial valid URL:
               * `https://www.googleapis.com/compute/v1/projects/project/global/gateways/default-internet-gateway`
               * `projects/project/global/gateways/default-internet-gateway`
               * `global/gateways/default-internet-gateway`
               * The string `default-internet-gateway`.
        :param pulumi.Input[str] next_hop_ilb: The IP address or URL to a forwarding rule of type
               loadBalancingScheme=INTERNAL that should handle matching
               packets.
               With the GA provider you can only specify the forwarding
               rule as a partial or full URL. For example, the following
               are all valid values:
               * 10.128.0.56
               * https://www.googleapis.com/compute/v1/projects/project/regions/region/forwardingRules/forwardingRule
               * regions/region/forwardingRules/forwardingRule
               When the beta provider, you can also specify the IP address
               of a forwarding rule from the same VPC or any peered VPC.
               Note that this can only be used when the destinationRange is
               a public (non-RFC 1918) IP CIDR range.
        :param pulumi.Input[str] next_hop_instance: URL to an instance that should handle matching packets.
               You can specify this as a full or partial URL. For example:
               * `https://www.googleapis.com/compute/v1/projects/project/zones/zone/instances/instance`
               * `projects/project/zones/zone/instances/instance`
               * `zones/zone/instances/instance`
               * Just the instance name, with the zone in `next_hop_instance_zone`.
        :param pulumi.Input[str] next_hop_instance_zone: (Optional when `next_hop_instance` is
               specified) The zone of the instance specified in
               `next_hop_instance`. Omit if `next_hop_instance` is specified as
               a URL.
        :param pulumi.Input[str] next_hop_ip: Network IP address of an instance that should handle matching packets.
        :param pulumi.Input[str] next_hop_vpn_tunnel: URL to a VpnTunnel that should handle matching packets.
        :param pulumi.Input[int] priority: The priority of this route. Priority is used to break ties in cases
               where there is more than one matching route of equal prefix length.
               In the case of two routes with equal prefix length, the one with the
               lowest-numbered priority value wins.
               Default value is 1000. Valid range is 0 through 65535.
        :param pulumi.Input[str] project: The ID of the project in which the resource belongs.
               If it is not provided, the provider project is used.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] tags: A list of instance tags to which this route applies.
        """
        # Required arguments are always stored; optional ones only when given,
        # so absent keys stay unset rather than becoming explicit Nones.
        pulumi.set(__self__, "dest_range", dest_range)
        pulumi.set(__self__, "network", network)
        if description is not None:
            pulumi.set(__self__, "description", description)
        if name is not None:
            pulumi.set(__self__, "name", name)
        if next_hop_gateway is not None:
            pulumi.set(__self__, "next_hop_gateway", next_hop_gateway)
        if next_hop_ilb is not None:
            pulumi.set(__self__, "next_hop_ilb", next_hop_ilb)
        if next_hop_instance is not None:
            pulumi.set(__self__, "next_hop_instance", next_hop_instance)
        if next_hop_instance_zone is not None:
            pulumi.set(__self__, "next_hop_instance_zone", next_hop_instance_zone)
        if next_hop_ip is not None:
            pulumi.set(__self__, "next_hop_ip", next_hop_ip)
        if next_hop_vpn_tunnel is not None:
            pulumi.set(__self__, "next_hop_vpn_tunnel", next_hop_vpn_tunnel)
        if priority is not None:
            pulumi.set(__self__, "priority", priority)
        if project is not None:
            pulumi.set(__self__, "project", project)
        if tags is not None:
            pulumi.set(__self__, "tags", tags)
    # --- generated property accessors: one getter/setter pair per argument ---
    @property
    @pulumi.getter(name="destRange")
    def dest_range(self) -> pulumi.Input[str]:
        """
        The destination range of outgoing packets that this route applies to.
        Only IPv4 is supported.
        """
        return pulumi.get(self, "dest_range")
    @dest_range.setter
    def dest_range(self, value: pulumi.Input[str]):
        pulumi.set(self, "dest_range", value)
    @property
    @pulumi.getter
    def network(self) -> pulumi.Input[str]:
        """
        The network that this route applies to.
        """
        return pulumi.get(self, "network")
    @network.setter
    def network(self, value: pulumi.Input[str]):
        pulumi.set(self, "network", value)
    @property
    @pulumi.getter
    def description(self) -> Optional[pulumi.Input[str]]:
        """
        An optional description of this resource. Provide this property
        when you create the resource.
        """
        return pulumi.get(self, "description")
    @description.setter
    def description(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "description", value)
    @property
    @pulumi.getter
    def name(self) -> Optional[pulumi.Input[str]]:
        """
        Name of the resource. Provided by the client when the resource is
        created. The name must be 1-63 characters long, and comply with
        RFC1035. Specifically, the name must be 1-63 characters long and
        match the regular expression `a-z?` which means
        the first character must be a lowercase letter, and all following
        characters must be a dash, lowercase letter, or digit, except the
        last character, which cannot be a dash.
        """
        return pulumi.get(self, "name")
    @name.setter
    def name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "name", value)
    @property
    @pulumi.getter(name="nextHopGateway")
    def next_hop_gateway(self) -> Optional[pulumi.Input[str]]:
        """
        URL to a gateway that should handle matching packets.
        Currently, you can only specify the internet gateway, using a full or
        partial valid URL:
        * `https://www.googleapis.com/compute/v1/projects/project/global/gateways/default-internet-gateway`
        * `projects/project/global/gateways/default-internet-gateway`
        * `global/gateways/default-internet-gateway`
        * The string `default-internet-gateway`.
        """
        return pulumi.get(self, "next_hop_gateway")
    @next_hop_gateway.setter
    def next_hop_gateway(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "next_hop_gateway", value)
    @property
    @pulumi.getter(name="nextHopIlb")
    def next_hop_ilb(self) -> Optional[pulumi.Input[str]]:
        """
        The IP address or URL to a forwarding rule of type
        loadBalancingScheme=INTERNAL that should handle matching
        packets.
        With the GA provider you can only specify the forwarding
        rule as a partial or full URL. For example, the following
        are all valid values:
        * 10.128.0.56
        * https://www.googleapis.com/compute/v1/projects/project/regions/region/forwardingRules/forwardingRule
        * regions/region/forwardingRules/forwardingRule
        When the beta provider, you can also specify the IP address
        of a forwarding rule from the same VPC or any peered VPC.
        Note that this can only be used when the destinationRange is
        a public (non-RFC 1918) IP CIDR range.
        """
        return pulumi.get(self, "next_hop_ilb")
    @next_hop_ilb.setter
    def next_hop_ilb(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "next_hop_ilb", value)
    @property
    @pulumi.getter(name="nextHopInstance")
    def next_hop_instance(self) -> Optional[pulumi.Input[str]]:
        """
        URL to an instance that should handle matching packets.
        You can specify this as a full or partial URL. For example:
        * `https://www.googleapis.com/compute/v1/projects/project/zones/zone/instances/instance`
        * `projects/project/zones/zone/instances/instance`
        * `zones/zone/instances/instance`
        * Just the instance name, with the zone in `next_hop_instance_zone`.
        """
        return pulumi.get(self, "next_hop_instance")
    @next_hop_instance.setter
    def next_hop_instance(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "next_hop_instance", value)
    @property
    @pulumi.getter(name="nextHopInstanceZone")
    def next_hop_instance_zone(self) -> Optional[pulumi.Input[str]]:
        """
        (Optional when `next_hop_instance` is
        specified) The zone of the instance specified in
        `next_hop_instance`. Omit if `next_hop_instance` is specified as
        a URL.
        """
        return pulumi.get(self, "next_hop_instance_zone")
    @next_hop_instance_zone.setter
    def next_hop_instance_zone(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "next_hop_instance_zone", value)
    @property
    @pulumi.getter(name="nextHopIp")
    def next_hop_ip(self) -> Optional[pulumi.Input[str]]:
        """
        Network IP address of an instance that should handle matching packets.
        """
        return pulumi.get(self, "next_hop_ip")
    @next_hop_ip.setter
    def next_hop_ip(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "next_hop_ip", value)
    @property
    @pulumi.getter(name="nextHopVpnTunnel")
    def next_hop_vpn_tunnel(self) -> Optional[pulumi.Input[str]]:
        """
        URL to a VpnTunnel that should handle matching packets.
        """
        return pulumi.get(self, "next_hop_vpn_tunnel")
    @next_hop_vpn_tunnel.setter
    def next_hop_vpn_tunnel(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "next_hop_vpn_tunnel", value)
    @property
    @pulumi.getter
    def priority(self) -> Optional[pulumi.Input[int]]:
        """
        The priority of this route. Priority is used to break ties in cases
        where there is more than one matching route of equal prefix length.
        In the case of two routes with equal prefix length, the one with the
        lowest-numbered priority value wins.
        Default value is 1000. Valid range is 0 through 65535.
        """
        return pulumi.get(self, "priority")
    @priority.setter
    def priority(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "priority", value)
    @property
    @pulumi.getter
    def project(self) -> Optional[pulumi.Input[str]]:
        """
        The ID of the project in which the resource belongs.
        If it is not provided, the provider project is used.
        """
        return pulumi.get(self, "project")
    @project.setter
    def project(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "project", value)
    @property
    @pulumi.getter
    def tags(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
        """
        A list of instance tags to which this route applies.
        """
        return pulumi.get(self, "tags")
    @tags.setter
    def tags(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
        pulumi.set(self, "tags", value)
@pulumi.input_type
class _RouteState:
def __init__(__self__, *,
description: Optional[pulumi.Input[str]] = None,
dest_range: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
network: Optional[pulumi.Input[str]] = None,
next_hop_gateway: Optional[pulumi.Input[str]] = None,
next_hop_ilb: Optional[pulumi.Input[str]] = None,
next_hop_instance: Optional[pulumi.Input[str]] = None,
next_hop_instance_zone: Optional[pulumi.Input[str]] = None,
next_hop_ip: Optional[pulumi.Input[str]] = None,
next_hop_network: Optional[pulumi.Input[str]] = None,
next_hop_vpn_tunnel: Optional[pulumi.Input[str]] = None,
priority: Optional[pulumi.Input[int]] = None,
project: Optional[pulumi.Input[str]] = None,
self_link: Optional[pulumi.Input[str]] = None,
tags: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None):
"""
Input properties used for looking up and filtering Route resources.
:param pulumi.Input[str] description: An optional description of this resource. Provide this property
when you create the resource.
:param pulumi.Input[str] dest_range: The destination range of outgoing packets that this route applies to.
Only IPv4 is supported.
:param pulumi.Input[str] name: Name of the resource. Provided by the client when the resource is
created. The name must be 1-63 characters long, and comply with
RFC1035. Specifically, the name must be 1-63 characters long and
match the regular expression `a-z?` which means
the first character must be a lowercase letter, and all following
characters must be a dash, lowercase letter, or digit, except the
last character, which cannot be a dash.
:param pulumi.Input[str] network: The network that this route applies to.
:param pulumi.Input[str] next_hop_gateway: URL to a gateway that should handle matching packets.
Currently, you can only specify the internet gateway, using a full or
partial valid URL:
* `https://www.googleapis.com/compute/v1/projects/project/global/gateways/default-internet-gateway`
* `projects/project/global/gateways/default-internet-gateway`
* `global/gateways/default-internet-gateway`
* The string `default-internet-gateway`.
:param pulumi.Input[str] next_hop_ilb: The IP address or URL to a forwarding rule of type
loadBalancingScheme=INTERNAL that should handle matching
packets.
With the GA provider you can only specify the forwarding
rule as a partial or full URL. For example, the following
are all valid values:
* 10.128.0.56
* https://www.googleapis.com/compute/v1/projects/project/regions/region/forwardingRules/forwardingRule
* regions/region/forwardingRules/forwardingRule
When the beta provider, you can also specify the IP address
of a forwarding rule from the same VPC or any peered VPC.
Note that this can only be used when the destinationRange is
a public (non-RFC 1918) IP CIDR range.
:param pulumi.Input[str] next_hop_instance: URL to an instance that should handle matching packets.
You can specify this as a full or partial URL. For example:
* `https://www.googleapis.com/compute/v1/projects/project/zones/zone/instances/instance`
* `projects/project/zones/zone/instances/instance`
* `zones/zone/instances/instance`
* Just the instance name, with the zone in `next_hop_instance_zone`.
:param pulumi.Input[str] next_hop_instance_zone: (Optional when `next_hop_instance` is
specified) The zone of the instance specified in
`next_hop_instance`. Omit if `next_hop_instance` is specified as
a URL.
:param pulumi.Input[str] next_hop_ip: Network IP address of an instance that should handle matching packets.
:param pulumi.Input[str] next_hop_network: URL to a Network that should handle matching packets.
:param pulumi.Input[str] next_hop_vpn_tunnel: URL to a VpnTunnel that should handle matching packets.
:param pulumi.Input[int] priority: The priority of this route. Priority is used to break ties in cases
where there is more than one matching route of equal prefix length.
In the case of two routes with equal prefix length, the one with the
lowest-numbered priority value wins.
Default value is 1000. Valid range is 0 through 65535.
:param pulumi.Input[str] project: The ID of the project in which the resource belongs.
If it is not provided, the provider project is used.
:param pulumi.Input[str] self_link: The URI of the created resource.
:param pulumi.Input[Sequence[pulumi.Input[str]]] tags: A list of instance tags to which this route applies.
"""
if description is not None:
pulumi.set(__self__, "description", description)
if dest_range is not None:
pulumi.set(__self__, "dest_range", dest_range)
if name is not None:
pulumi.set(__self__, "name", name)
if network is not None:
pulumi.set(__self__, "network", network)
if next_hop_gateway is not None:
pulumi.set(__self__, "next_hop_gateway", next_hop_gateway)
if next_hop_ilb is not None:
pulumi.set(__self__, "next_hop_ilb", next_hop_ilb)
if next_hop_instance is not None:
pulumi.set(__self__, "next_hop_instance", next_hop_instance)
if next_hop_instance_zone is not None:
pulumi.set(__self__, "next_hop_instance_zone", next_hop_instance_zone)
if next_hop_ip is not None:
pulumi.set(__self__, "next_hop_ip", next_hop_ip)
if next_hop_network is not None:
pulumi.set(__self__, "next_hop_network", next_hop_network)
if next_hop_vpn_tunnel is not None:
pulumi.set(__self__, "next_hop_vpn_tunnel", next_hop_vpn_tunnel)
if priority is not None:
pulumi.set(__self__, "priority", priority)
if project is not None:
pulumi.set(__self__, "project", project)
if self_link is not None:
pulumi.set(__self__, "self_link", self_link)
if tags is not None:
pulumi.set(__self__, "tags", tags)
    @property
    @pulumi.getter
    def description(self) -> Optional[pulumi.Input[str]]:
        """
        An optional description of this resource. Provide this property
        when you create the resource.
        """
        # Values live in Pulumi's internal property bag, not plain attributes.
        return pulumi.get(self, "description")

    @description.setter
    def description(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "description", value)
    @property
    @pulumi.getter(name="destRange")
    def dest_range(self) -> Optional[pulumi.Input[str]]:
        """
        The destination range of outgoing packets that this route applies to.
        Only IPv4 is supported.
        """
        # Exposed to the engine under the camelCase wire name "destRange".
        return pulumi.get(self, "dest_range")

    @dest_range.setter
    def dest_range(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "dest_range", value)
    @property
    @pulumi.getter
    def name(self) -> Optional[pulumi.Input[str]]:
        """
        Name of the resource. Provided by the client when the resource is
        created. The name must be 1-63 characters long, and comply with
        RFC1035. Specifically, the name must be 1-63 characters long and
        match the regular expression `a-z?` which means
        the first character must be a lowercase letter, and all following
        characters must be a dash, lowercase letter, or digit, except the
        last character, which cannot be a dash.
        """
        # Accessor over Pulumi's property bag; no validation happens here.
        return pulumi.get(self, "name")

    @name.setter
    def name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "name", value)
    @property
    @pulumi.getter
    def network(self) -> Optional[pulumi.Input[str]]:
        """
        The network that this route applies to.
        """
        # Plain pass-through accessor into the Pulumi property bag.
        return pulumi.get(self, "network")

    @network.setter
    def network(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "network", value)
    @property
    @pulumi.getter(name="nextHopGateway")
    def next_hop_gateway(self) -> Optional[pulumi.Input[str]]:
        """
        URL to a gateway that should handle matching packets.
        Currently, you can only specify the internet gateway, using a full or
        partial valid URL:
        * `https://www.googleapis.com/compute/v1/projects/project/global/gateways/default-internet-gateway`
        * `projects/project/global/gateways/default-internet-gateway`
        * `global/gateways/default-internet-gateway`
        * The string `default-internet-gateway`.
        """
        # Wire name is camelCase ("nextHopGateway"); Python name stays snake_case.
        return pulumi.get(self, "next_hop_gateway")

    @next_hop_gateway.setter
    def next_hop_gateway(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "next_hop_gateway", value)
    @property
    @pulumi.getter(name="nextHopIlb")
    def next_hop_ilb(self) -> Optional[pulumi.Input[str]]:
        """
        The IP address or URL to a forwarding rule of type
        loadBalancingScheme=INTERNAL that should handle matching
        packets.
        With the GA provider you can only specify the forwarding
        rule as a partial or full URL. For example, the following
        are all valid values:
        * 10.128.0.56
        * https://www.googleapis.com/compute/v1/projects/project/regions/region/forwardingRules/forwardingRule
        * regions/region/forwardingRules/forwardingRule
        When the beta provider, you can also specify the IP address
        of a forwarding rule from the same VPC or any peered VPC.
        Note that this can only be used when the destinationRange is
        a public (non-RFC 1918) IP CIDR range.
        """
        # Pass-through accessor; GA-vs-beta restrictions are enforced server-side.
        return pulumi.get(self, "next_hop_ilb")

    @next_hop_ilb.setter
    def next_hop_ilb(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "next_hop_ilb", value)
    @property
    @pulumi.getter(name="nextHopInstance")
    def next_hop_instance(self) -> Optional[pulumi.Input[str]]:
        """
        URL to an instance that should handle matching packets.
        You can specify this as a full or partial URL. For example:
        * `https://www.googleapis.com/compute/v1/projects/project/zones/zone/instances/instance`
        * `projects/project/zones/zone/instances/instance`
        * `zones/zone/instances/instance`
        * Just the instance name, with the zone in `next_hop_instance_zone`.
        """
        # Pass-through accessor into the Pulumi property bag.
        return pulumi.get(self, "next_hop_instance")

    @next_hop_instance.setter
    def next_hop_instance(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "next_hop_instance", value)
    @property
    @pulumi.getter(name="nextHopInstanceZone")
    def next_hop_instance_zone(self) -> Optional[pulumi.Input[str]]:
        """
        (Optional when `next_hop_instance` is
        specified) The zone of the instance specified in
        `next_hop_instance`. Omit if `next_hop_instance` is specified as
        a URL.
        """
        # Pass-through accessor; the instance/zone pairing is not validated here.
        return pulumi.get(self, "next_hop_instance_zone")

    @next_hop_instance_zone.setter
    def next_hop_instance_zone(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "next_hop_instance_zone", value)
    @property
    @pulumi.getter(name="nextHopIp")
    def next_hop_ip(self) -> Optional[pulumi.Input[str]]:
        """
        Network IP address of an instance that should handle matching packets.
        """
        # Pass-through accessor into the Pulumi property bag.
        return pulumi.get(self, "next_hop_ip")

    @next_hop_ip.setter
    def next_hop_ip(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "next_hop_ip", value)
    @property
    @pulumi.getter(name="nextHopNetwork")
    def next_hop_network(self) -> Optional[pulumi.Input[str]]:
        """
        URL to a Network that should handle matching packets.
        """
        # NOTE: Route._internal_init always initializes this to None (the
        # provider populates it), so in state it behaves as an output field.
        return pulumi.get(self, "next_hop_network")

    @next_hop_network.setter
    def next_hop_network(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "next_hop_network", value)
    @property
    @pulumi.getter(name="nextHopVpnTunnel")
    def next_hop_vpn_tunnel(self) -> Optional[pulumi.Input[str]]:
        """
        URL to a VpnTunnel that should handle matching packets.
        """
        # Pass-through accessor into the Pulumi property bag.
        return pulumi.get(self, "next_hop_vpn_tunnel")

    @next_hop_vpn_tunnel.setter
    def next_hop_vpn_tunnel(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "next_hop_vpn_tunnel", value)
    @property
    @pulumi.getter
    def priority(self) -> Optional[pulumi.Input[int]]:
        """
        The priority of this route. Priority is used to break ties in cases
        where there is more than one matching route of equal prefix length.
        In the case of two routes with equal prefix length, the one with the
        lowest-numbered priority value wins.
        Default value is 1000. Valid range is 0 through 65535.
        """
        # Range/default are enforced by the API, not by this accessor.
        return pulumi.get(self, "priority")

    @priority.setter
    def priority(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "priority", value)
    @property
    @pulumi.getter
    def project(self) -> Optional[pulumi.Input[str]]:
        """
        The ID of the project in which the resource belongs.
        If it is not provided, the provider project is used.
        """
        # Pass-through accessor into the Pulumi property bag.
        return pulumi.get(self, "project")

    @project.setter
    def project(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "project", value)
    @property
    @pulumi.getter(name="selfLink")
    def self_link(self) -> Optional[pulumi.Input[str]]:
        """
        The URI of the created resource.
        """
        # NOTE: Route._internal_init always initializes this to None (the
        # provider populates it), so in state it behaves as an output field.
        return pulumi.get(self, "self_link")

    @self_link.setter
    def self_link(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "self_link", value)
    @property
    @pulumi.getter
    def tags(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
        """
        A list of instance tags to which this route applies.
        """
        # Pass-through accessor into the Pulumi property bag.
        return pulumi.get(self, "tags")

    @tags.setter
    def tags(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
        pulumi.set(self, "tags", value)
class Route(pulumi.CustomResource):
@overload
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
description: Optional[pulumi.Input[str]] = None,
dest_range: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
network: Optional[pulumi.Input[str]] = None,
next_hop_gateway: Optional[pulumi.Input[str]] = None,
next_hop_ilb: Optional[pulumi.Input[str]] = None,
next_hop_instance: Optional[pulumi.Input[str]] = None,
next_hop_instance_zone: Optional[pulumi.Input[str]] = None,
next_hop_ip: Optional[pulumi.Input[str]] = None,
next_hop_vpn_tunnel: Optional[pulumi.Input[str]] = None,
priority: Optional[pulumi.Input[int]] = None,
project: Optional[pulumi.Input[str]] = None,
tags: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
__props__=None):
"""
Represents a Route resource.
A route is a rule that specifies how certain packets should be handled by
the virtual network. Routes are associated with virtual machines by tag,
and the set of routes for a particular virtual machine is called its
routing table. For each packet leaving a virtual machine, the system
searches that virtual machine's routing table for a single best matching
route.
Routes match packets by destination IP address, preferring smaller or more
specific ranges over larger ones. If there is a tie, the system selects
the route with the smallest priority value. If there is still a tie, it
uses the layer three and four packet headers to select just one of the
remaining matching routes. The packet is then forwarded as specified by
the next_hop field of the winning route -- either to another virtual
machine destination, a virtual machine gateway or a Compute
Engine-operated gateway. Packets that do not match any route in the
sending virtual machine's routing table will be dropped.
A Route resource must have exactly one specification of either
nextHopGateway, nextHopInstance, nextHopIp, nextHopVpnTunnel, or
nextHopIlb.
To get more information about Route, see:
* [API documentation](https://cloud.google.com/compute/docs/reference/rest/v1/routes)
* How-to Guides
* [Using Routes](https://cloud.google.com/vpc/docs/using-routes)
## Example Usage
### Route Basic
```python
import pulumi
import pulumi_gcp as gcp
default_network = gcp.compute.Network("defaultNetwork")
default_route = gcp.compute.Route("defaultRoute",
dest_range="15.0.0.0/24",
network=default_network.name,
next_hop_ip="10.132.1.5",
priority=100)
```
### Route Ilb
```python
import pulumi
import pulumi_gcp as gcp
default_network = gcp.compute.Network("defaultNetwork", auto_create_subnetworks=False)
default_subnetwork = gcp.compute.Subnetwork("defaultSubnetwork",
ip_cidr_range="10.0.1.0/24",
region="us-central1",
network=default_network.id)
hc = gcp.compute.HealthCheck("hc",
check_interval_sec=1,
timeout_sec=1,
tcp_health_check=gcp.compute.HealthCheckTcpHealthCheckArgs(
port=80,
))
backend = gcp.compute.RegionBackendService("backend",
region="us-central1",
health_checks=[hc.id])
default_forwarding_rule = gcp.compute.ForwardingRule("defaultForwardingRule",
region="us-central1",
load_balancing_scheme="INTERNAL",
backend_service=backend.id,
all_ports=True,
network=default_network.name,
subnetwork=default_subnetwork.name)
route_ilb = gcp.compute.Route("route-ilb",
dest_range="0.0.0.0/0",
network=default_network.name,
next_hop_ilb=default_forwarding_rule.id,
priority=2000)
```
### Route Ilb Vip
```python
import pulumi
import pulumi_gcp as gcp
producer_network = gcp.compute.Network("producerNetwork", auto_create_subnetworks=False,
opts=pulumi.ResourceOptions(provider=google_beta))
producer_subnetwork = gcp.compute.Subnetwork("producerSubnetwork",
ip_cidr_range="10.0.1.0/24",
region="us-central1",
network=producer_network.id,
opts=pulumi.ResourceOptions(provider=google_beta))
consumer_network = gcp.compute.Network("consumerNetwork", auto_create_subnetworks=False,
opts=pulumi.ResourceOptions(provider=google_beta))
consumer_subnetwork = gcp.compute.Subnetwork("consumerSubnetwork",
ip_cidr_range="10.0.2.0/24",
region="us-central1",
network=consumer_network.id,
opts=pulumi.ResourceOptions(provider=google_beta))
peering1 = gcp.compute.NetworkPeering("peering1",
network=consumer_network.id,
peer_network=producer_network.id,
opts=pulumi.ResourceOptions(provider=google_beta))
peering2 = gcp.compute.NetworkPeering("peering2",
network=producer_network.id,
peer_network=consumer_network.id,
opts=pulumi.ResourceOptions(provider=google_beta))
hc = gcp.compute.HealthCheck("hc",
check_interval_sec=1,
timeout_sec=1,
tcp_health_check=gcp.compute.HealthCheckTcpHealthCheckArgs(
port=80,
),
opts=pulumi.ResourceOptions(provider=google_beta))
backend = gcp.compute.RegionBackendService("backend",
region="us-central1",
health_checks=[hc.id],
opts=pulumi.ResourceOptions(provider=google_beta))
default = gcp.compute.ForwardingRule("default",
region="us-central1",
load_balancing_scheme="INTERNAL",
backend_service=backend.id,
all_ports=True,
network=producer_network.name,
subnetwork=producer_subnetwork.name,
opts=pulumi.ResourceOptions(provider=google_beta))
route_ilb = gcp.compute.Route("route-ilb",
dest_range="0.0.0.0/0",
network=consumer_network.name,
next_hop_ilb=default.ip_address,
priority=2000,
tags=[
"tag1",
"tag2",
],
opts=pulumi.ResourceOptions(provider=google_beta,
depends_on=[
peering1,
peering2,
]))
```
## Import
Route can be imported using any of these accepted formats
```sh
$ pulumi import gcp:compute/route:Route default projects/{{project}}/global/routes/{{name}}
```
```sh
$ pulumi import gcp:compute/route:Route default {{project}}/{{name}}
```
```sh
$ pulumi import gcp:compute/route:Route default {{name}}
```
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] description: An optional description of this resource. Provide this property
when you create the resource.
:param pulumi.Input[str] dest_range: The destination range of outgoing packets that this route applies to.
Only IPv4 is supported.
:param pulumi.Input[str] name: Name of the resource. Provided by the client when the resource is
created. The name must be 1-63 characters long, and comply with
RFC1035. Specifically, the name must be 1-63 characters long and
match the regular expression `a-z?` which means
the first character must be a lowercase letter, and all following
characters must be a dash, lowercase letter, or digit, except the
last character, which cannot be a dash.
:param pulumi.Input[str] network: The network that this route applies to.
:param pulumi.Input[str] next_hop_gateway: URL to a gateway that should handle matching packets.
Currently, you can only specify the internet gateway, using a full or
partial valid URL:
* `https://www.googleapis.com/compute/v1/projects/project/global/gateways/default-internet-gateway`
* `projects/project/global/gateways/default-internet-gateway`
* `global/gateways/default-internet-gateway`
* The string `default-internet-gateway`.
:param pulumi.Input[str] next_hop_ilb: The IP address or URL to a forwarding rule of type
loadBalancingScheme=INTERNAL that should handle matching
packets.
With the GA provider you can only specify the forwarding
rule as a partial or full URL. For example, the following
are all valid values:
* 10.128.0.56
* https://www.googleapis.com/compute/v1/projects/project/regions/region/forwardingRules/forwardingRule
* regions/region/forwardingRules/forwardingRule
When the beta provider, you can also specify the IP address
of a forwarding rule from the same VPC or any peered VPC.
Note that this can only be used when the destinationRange is
a public (non-RFC 1918) IP CIDR range.
:param pulumi.Input[str] next_hop_instance: URL to an instance that should handle matching packets.
You can specify this as a full or partial URL. For example:
* `https://www.googleapis.com/compute/v1/projects/project/zones/zone/instances/instance`
* `projects/project/zones/zone/instances/instance`
* `zones/zone/instances/instance`
* Just the instance name, with the zone in `next_hop_instance_zone`.
:param pulumi.Input[str] next_hop_instance_zone: (Optional when `next_hop_instance` is
specified) The zone of the instance specified in
`next_hop_instance`. Omit if `next_hop_instance` is specified as
a URL.
:param pulumi.Input[str] next_hop_ip: Network IP address of an instance that should handle matching packets.
:param pulumi.Input[str] next_hop_vpn_tunnel: URL to a VpnTunnel that should handle matching packets.
:param pulumi.Input[int] priority: The priority of this route. Priority is used to break ties in cases
where there is more than one matching route of equal prefix length.
In the case of two routes with equal prefix length, the one with the
lowest-numbered priority value wins.
Default value is 1000. Valid range is 0 through 65535.
:param pulumi.Input[str] project: The ID of the project in which the resource belongs.
If it is not provided, the provider project is used.
:param pulumi.Input[Sequence[pulumi.Input[str]]] tags: A list of instance tags to which this route applies.
"""
...
@overload
def __init__(__self__,
resource_name: str,
args: RouteArgs,
opts: Optional[pulumi.ResourceOptions] = None):
"""
Represents a Route resource.
A route is a rule that specifies how certain packets should be handled by
the virtual network. Routes are associated with virtual machines by tag,
and the set of routes for a particular virtual machine is called its
routing table. For each packet leaving a virtual machine, the system
searches that virtual machine's routing table for a single best matching
route.
Routes match packets by destination IP address, preferring smaller or more
specific ranges over larger ones. If there is a tie, the system selects
the route with the smallest priority value. If there is still a tie, it
uses the layer three and four packet headers to select just one of the
remaining matching routes. The packet is then forwarded as specified by
the next_hop field of the winning route -- either to another virtual
machine destination, a virtual machine gateway or a Compute
Engine-operated gateway. Packets that do not match any route in the
sending virtual machine's routing table will be dropped.
A Route resource must have exactly one specification of either
nextHopGateway, nextHopInstance, nextHopIp, nextHopVpnTunnel, or
nextHopIlb.
To get more information about Route, see:
* [API documentation](https://cloud.google.com/compute/docs/reference/rest/v1/routes)
* How-to Guides
* [Using Routes](https://cloud.google.com/vpc/docs/using-routes)
## Example Usage
### Route Basic
```python
import pulumi
import pulumi_gcp as gcp
default_network = gcp.compute.Network("defaultNetwork")
default_route = gcp.compute.Route("defaultRoute",
dest_range="15.0.0.0/24",
network=default_network.name,
next_hop_ip="10.132.1.5",
priority=100)
```
### Route Ilb
```python
import pulumi
import pulumi_gcp as gcp
default_network = gcp.compute.Network("defaultNetwork", auto_create_subnetworks=False)
default_subnetwork = gcp.compute.Subnetwork("defaultSubnetwork",
ip_cidr_range="10.0.1.0/24",
region="us-central1",
network=default_network.id)
hc = gcp.compute.HealthCheck("hc",
check_interval_sec=1,
timeout_sec=1,
tcp_health_check=gcp.compute.HealthCheckTcpHealthCheckArgs(
port=80,
))
backend = gcp.compute.RegionBackendService("backend",
region="us-central1",
health_checks=[hc.id])
default_forwarding_rule = gcp.compute.ForwardingRule("defaultForwardingRule",
region="us-central1",
load_balancing_scheme="INTERNAL",
backend_service=backend.id,
all_ports=True,
network=default_network.name,
subnetwork=default_subnetwork.name)
route_ilb = gcp.compute.Route("route-ilb",
dest_range="0.0.0.0/0",
network=default_network.name,
next_hop_ilb=default_forwarding_rule.id,
priority=2000)
```
### Route Ilb Vip
```python
import pulumi
import pulumi_gcp as gcp
producer_network = gcp.compute.Network("producerNetwork", auto_create_subnetworks=False,
opts=pulumi.ResourceOptions(provider=google_beta))
producer_subnetwork = gcp.compute.Subnetwork("producerSubnetwork",
ip_cidr_range="10.0.1.0/24",
region="us-central1",
network=producer_network.id,
opts=pulumi.ResourceOptions(provider=google_beta))
consumer_network = gcp.compute.Network("consumerNetwork", auto_create_subnetworks=False,
opts=pulumi.ResourceOptions(provider=google_beta))
consumer_subnetwork = gcp.compute.Subnetwork("consumerSubnetwork",
ip_cidr_range="10.0.2.0/24",
region="us-central1",
network=consumer_network.id,
opts=pulumi.ResourceOptions(provider=google_beta))
peering1 = gcp.compute.NetworkPeering("peering1",
network=consumer_network.id,
peer_network=producer_network.id,
opts=pulumi.ResourceOptions(provider=google_beta))
peering2 = gcp.compute.NetworkPeering("peering2",
network=producer_network.id,
peer_network=consumer_network.id,
opts=pulumi.ResourceOptions(provider=google_beta))
hc = gcp.compute.HealthCheck("hc",
check_interval_sec=1,
timeout_sec=1,
tcp_health_check=gcp.compute.HealthCheckTcpHealthCheckArgs(
port=80,
),
opts=pulumi.ResourceOptions(provider=google_beta))
backend = gcp.compute.RegionBackendService("backend",
region="us-central1",
health_checks=[hc.id],
opts=pulumi.ResourceOptions(provider=google_beta))
default = gcp.compute.ForwardingRule("default",
region="us-central1",
load_balancing_scheme="INTERNAL",
backend_service=backend.id,
all_ports=True,
network=producer_network.name,
subnetwork=producer_subnetwork.name,
opts=pulumi.ResourceOptions(provider=google_beta))
route_ilb = gcp.compute.Route("route-ilb",
dest_range="0.0.0.0/0",
network=consumer_network.name,
next_hop_ilb=default.ip_address,
priority=2000,
tags=[
"tag1",
"tag2",
],
opts=pulumi.ResourceOptions(provider=google_beta,
depends_on=[
peering1,
peering2,
]))
```
## Import
Route can be imported using any of these accepted formats
```sh
$ pulumi import gcp:compute/route:Route default projects/{{project}}/global/routes/{{name}}
```
```sh
$ pulumi import gcp:compute/route:Route default {{project}}/{{name}}
```
```sh
$ pulumi import gcp:compute/route:Route default {{name}}
```
:param str resource_name: The name of the resource.
:param RouteArgs args: The arguments to use to populate this resource's properties.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
...
def __init__(__self__, resource_name: str, *args, **kwargs):
resource_args, opts = _utilities.get_resource_args_opts(RouteArgs, pulumi.ResourceOptions, *args, **kwargs)
if resource_args is not None:
__self__._internal_init(resource_name, opts, **resource_args.__dict__)
else:
__self__._internal_init(resource_name, *args, **kwargs)
def _internal_init(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
description: Optional[pulumi.Input[str]] = None,
dest_range: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
network: Optional[pulumi.Input[str]] = None,
next_hop_gateway: Optional[pulumi.Input[str]] = None,
next_hop_ilb: Optional[pulumi.Input[str]] = None,
next_hop_instance: Optional[pulumi.Input[str]] = None,
next_hop_instance_zone: Optional[pulumi.Input[str]] = None,
next_hop_ip: Optional[pulumi.Input[str]] = None,
next_hop_vpn_tunnel: Optional[pulumi.Input[str]] = None,
priority: Optional[pulumi.Input[int]] = None,
project: Optional[pulumi.Input[str]] = None,
tags: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
__props__=None):
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = RouteArgs.__new__(RouteArgs)
__props__.__dict__["description"] = description
if dest_range is None and not opts.urn:
raise TypeError("Missing required property 'dest_range'")
__props__.__dict__["dest_range"] = dest_range
__props__.__dict__["name"] = name
if network is None and not opts.urn:
raise TypeError("Missing required property 'network'")
__props__.__dict__["network"] = network
__props__.__dict__["next_hop_gateway"] = next_hop_gateway
__props__.__dict__["next_hop_ilb"] = next_hop_ilb
__props__.__dict__["next_hop_instance"] = next_hop_instance
__props__.__dict__["next_hop_instance_zone"] = next_hop_instance_zone
__props__.__dict__["next_hop_ip"] = next_hop_ip
__props__.__dict__["next_hop_vpn_tunnel"] = next_hop_vpn_tunnel
__props__.__dict__["priority"] = priority
__props__.__dict__["project"] = project
__props__.__dict__["tags"] = tags
__props__.__dict__["next_hop_network"] = None
__props__.__dict__["self_link"] = None
super(Route, __self__).__init__(
'gcp:compute/route:Route',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None,
description: Optional[pulumi.Input[str]] = None,
dest_range: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
network: Optional[pulumi.Input[str]] = None,
next_hop_gateway: Optional[pulumi.Input[str]] = None,
next_hop_ilb: Optional[pulumi.Input[str]] = None,
next_hop_instance: Optional[pulumi.Input[str]] = None,
next_hop_instance_zone: Optional[pulumi.Input[str]] = None,
next_hop_ip: Optional[pulumi.Input[str]] = None,
next_hop_network: Optional[pulumi.Input[str]] = None,
next_hop_vpn_tunnel: Optional[pulumi.Input[str]] = None,
priority: Optional[pulumi.Input[int]] = None,
project: Optional[pulumi.Input[str]] = None,
self_link: Optional[pulumi.Input[str]] = None,
tags: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None) -> 'Route':
"""
Get an existing Route resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] description: An optional description of this resource. Provide this property
when you create the resource.
:param pulumi.Input[str] dest_range: The destination range of outgoing packets that this route applies to.
Only IPv4 is supported.
:param pulumi.Input[str] name: Name of the resource. Provided by the client when the resource is
created. The name must be 1-63 characters long, and comply with
RFC1035. Specifically, the name must be 1-63 characters long and
match the regular expression `a-z?` which means
the first character must be a lowercase letter, and all following
characters must be a dash, lowercase letter, or digit, except the
last character, which cannot be a dash.
:param pulumi.Input[str] network: The network that this route applies to.
:param pulumi.Input[str] next_hop_gateway: URL to a gateway that should handle matching packets.
Currently, you can only specify the internet gateway, using a full or
partial valid URL:
* `https://www.googleapis.com/compute/v1/projects/project/global/gateways/default-internet-gateway`
* `projects/project/global/gateways/default-internet-gateway`
* `global/gateways/default-internet-gateway`
* The string `default-internet-gateway`.
:param pulumi.Input[str] next_hop_ilb: The IP address or URL to a forwarding rule of type
loadBalancingScheme=INTERNAL that should handle matching
packets.
With the GA provider you can only specify the forwarding
rule as a partial or full URL. For example, the following
are all valid values:
* 10.128.0.56
* https://www.googleapis.com/compute/v1/projects/project/regions/region/forwardingRules/forwardingRule
* regions/region/forwardingRules/forwardingRule
When the beta provider, you can also specify the IP address
of a forwarding rule from the same VPC or any peered VPC.
Note that this can only be used when the destinationRange is
a public (non-RFC 1918) IP CIDR range.
:param pulumi.Input[str] next_hop_instance: URL to an instance that should handle matching packets.
You can specify this as a full or partial URL. For example:
* `https://www.googleapis.com/compute/v1/projects/project/zones/zone/instances/instance`
* `projects/project/zones/zone/instances/instance`
* `zones/zone/instances/instance`
* Just the instance name, with the zone in `next_hop_instance_zone`.
:param pulumi.Input[str] next_hop_instance_zone: (Optional when `next_hop_instance` is
specified) The zone of the instance specified in
`next_hop_instance`. Omit if `next_hop_instance` is specified as
a URL.
:param pulumi.Input[str] next_hop_ip: Network IP address of an instance that should handle matching packets.
:param pulumi.Input[str] next_hop_network: URL to a Network that should handle matching packets.
:param pulumi.Input[str] next_hop_vpn_tunnel: URL to a VpnTunnel that should handle matching packets.
:param pulumi.Input[int] priority: The priority of this route. Priority is used to break ties in cases
where there is more than one matching route of equal prefix length.
In the case of two routes with equal prefix length, the one with the
lowest-numbered priority value wins.
Default value is 1000. Valid range is 0 through 65535.
:param pulumi.Input[str] project: The ID of the project in which the resource belongs.
If it is not provided, the provider project is used.
:param pulumi.Input[str] self_link: The URI of the created resource.
:param pulumi.Input[Sequence[pulumi.Input[str]]] tags: A list of instance tags to which this route applies.
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = _RouteState.__new__(_RouteState)
__props__.__dict__["description"] = description
__props__.__dict__["dest_range"] = dest_range
__props__.__dict__["name"] = name
__props__.__dict__["network"] = network
__props__.__dict__["next_hop_gateway"] = next_hop_gateway
__props__.__dict__["next_hop_ilb"] = next_hop_ilb
__props__.__dict__["next_hop_instance"] = next_hop_instance
__props__.__dict__["next_hop_instance_zone"] = next_hop_instance_zone
__props__.__dict__["next_hop_ip"] = next_hop_ip
__props__.__dict__["next_hop_network"] = next_hop_network
__props__.__dict__["next_hop_vpn_tunnel"] = next_hop_vpn_tunnel
__props__.__dict__["priority"] = priority
__props__.__dict__["project"] = project
__props__.__dict__["self_link"] = self_link
__props__.__dict__["tags"] = tags
return Route(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter
def description(self) -> pulumi.Output[Optional[str]]:
"""
An optional description of this resource. Provide this property
when you create the resource.
"""
return pulumi.get(self, "description")
@property
@pulumi.getter(name="destRange")
def dest_range(self) -> pulumi.Output[str]:
"""
The destination range of outgoing packets that this route applies to.
Only IPv4 is supported.
"""
return pulumi.get(self, "dest_range")
@property
@pulumi.getter
def name(self) -> pulumi.Output[str]:
"""
Name of the resource. Provided by the client when the resource is
created. The name must be 1-63 characters long, and comply with
RFC1035. Specifically, the name must be 1-63 characters long and
match the regular expression `a-z?` which means
the first character must be a lowercase letter, and all following
characters must be a dash, lowercase letter, or digit, except the
last character, which cannot be a dash.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter
def network(self) -> pulumi.Output[str]:
"""
The network that this route applies to.
"""
return pulumi.get(self, "network")
@property
@pulumi.getter(name="nextHopGateway")
def next_hop_gateway(self) -> pulumi.Output[Optional[str]]:
"""
URL to a gateway that should handle matching packets.
Currently, you can only specify the internet gateway, using a full or
partial valid URL:
* `https://www.googleapis.com/compute/v1/projects/project/global/gateways/default-internet-gateway`
* `projects/project/global/gateways/default-internet-gateway`
* `global/gateways/default-internet-gateway`
* The string `default-internet-gateway`.
"""
return pulumi.get(self, "next_hop_gateway")
@property
@pulumi.getter(name="nextHopIlb")
def next_hop_ilb(self) -> pulumi.Output[Optional[str]]:
"""
The IP address or URL to a forwarding rule of type
loadBalancingScheme=INTERNAL that should handle matching
packets.
With the GA provider you can only specify the forwarding
rule as a partial or full URL. For example, the following
are all valid values:
* 10.128.0.56
* https://www.googleapis.com/compute/v1/projects/project/regions/region/forwardingRules/forwardingRule
* regions/region/forwardingRules/forwardingRule
When the beta provider, you can also specify the IP address
of a forwarding rule from the same VPC or any peered VPC.
Note that this can only be used when the destinationRange is
a public (non-RFC 1918) IP CIDR range.
"""
return pulumi.get(self, "next_hop_ilb")
@property
@pulumi.getter(name="nextHopInstance")
def next_hop_instance(self) -> pulumi.Output[Optional[str]]:
"""
URL to an instance that should handle matching packets.
You can specify this as a full or partial URL. For example:
* `https://www.googleapis.com/compute/v1/projects/project/zones/zone/instances/instance`
* `projects/project/zones/zone/instances/instance`
* `zones/zone/instances/instance`
* Just the instance name, with the zone in `next_hop_instance_zone`.
"""
return pulumi.get(self, "next_hop_instance")
@property
@pulumi.getter(name="nextHopInstanceZone")
def next_hop_instance_zone(self) -> pulumi.Output[str]:
    """Zone of the instance named in `next_hop_instance`.

    (Optional when `next_hop_instance` is specified.) Omit when
    `next_hop_instance` is given as a URL.
    """
    return pulumi.get(self, "next_hop_instance_zone")
@property
@pulumi.getter(name="nextHopIp")
def next_hop_ip(self) -> pulumi.Output[str]:
    """Network IP address of an instance that handles matching packets."""
    return pulumi.get(self, "next_hop_ip")
@property
@pulumi.getter(name="nextHopNetwork")
def next_hop_network(self) -> pulumi.Output[str]:
    """URL of a Network that handles matching packets."""
    return pulumi.get(self, "next_hop_network")
@property
@pulumi.getter(name="nextHopVpnTunnel")
def next_hop_vpn_tunnel(self) -> pulumi.Output[Optional[str]]:
    """URL of a VpnTunnel that handles matching packets."""
    return pulumi.get(self, "next_hop_vpn_tunnel")
@property
@pulumi.getter
def priority(self) -> pulumi.Output[Optional[int]]:
    """Priority of this route, used to break ties between matching routes
    of equal prefix length — the lowest-numbered priority wins.

    Defaults to 1000; valid range is 0 through 65535.
    """
    return pulumi.get(self, "priority")
@property
@pulumi.getter
def project(self) -> pulumi.Output[str]:
    """ID of the project the resource belongs to; falls back to the
    provider project when not set explicitly.
    """
    return pulumi.get(self, "project")
@property
@pulumi.getter(name="selfLink")
def self_link(self) -> pulumi.Output[str]:
    """URI of the created resource."""
    return pulumi.get(self, "self_link")
@property
@pulumi.getter
def tags(self) -> pulumi.Output[Optional[Sequence[str]]]:
    """Instance tags this route applies to."""
    return pulumi.get(self, "tags")
| 46.59748
| 134
| 0.64071
| 7,769
| 62,860
| 5.02304
| 0.0502
| 0.042871
| 0.060988
| 0.056939
| 0.955899
| 0.948186
| 0.939038
| 0.931427
| 0.927865
| 0.923893
| 0
| 0.009014
| 0.271126
| 62,860
| 1,348
| 135
| 46.632047
| 0.842722
| 0.547598
| 0
| 0.814894
| 1
| 0
| 0.092383
| 0.009844
| 0
| 0
| 0
| 0
| 0
| 1
| 0.165957
| false
| 0.002128
| 0.010638
| 0
| 0.276596
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
77abe08317bc71eea3c52df6cb5e00aa0db46e67
| 8,183
|
py
|
Python
|
venv/lib/python3.8/site-packages/spaceone/api/identity/v1/domain_owner_pb2_grpc.py
|
choonho/plugin-prometheus-mon-webhook
|
afa7d65d12715fd0480fb4f92a9c62da2d6128e0
|
[
"Apache-2.0"
] | null | null | null |
venv/lib/python3.8/site-packages/spaceone/api/identity/v1/domain_owner_pb2_grpc.py
|
choonho/plugin-prometheus-mon-webhook
|
afa7d65d12715fd0480fb4f92a9c62da2d6128e0
|
[
"Apache-2.0"
] | null | null | null |
venv/lib/python3.8/site-packages/spaceone/api/identity/v1/domain_owner_pb2_grpc.py
|
choonho/plugin-prometheus-mon-webhook
|
afa7d65d12715fd0480fb4f92a9c62da2d6128e0
|
[
"Apache-2.0"
] | null | null | null |
# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
"""Client and server classes corresponding to protobuf-defined services."""
import grpc
from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2
from spaceone.api.identity.v1 import domain_owner_pb2 as spaceone_dot_api_dot_identity_dot_v1_dot_domain__owner__pb2
class DomainOwnerStub(object):
    """Client-side stub for the spaceone.api.identity.v1.DomainOwner service."""

    def __init__(self, channel):
        """Build one unary-unary callable per RPC exposed by the service.

        Args:
            channel: A grpc.Channel.
        """
        # Short aliases for the generated protobuf modules keep the
        # serializer expressions readable.
        pb2 = spaceone_dot_api_dot_identity_dot_v1_dot_domain__owner__pb2
        empty_pb2 = google_dot_protobuf_dot_empty__pb2
        self.create = channel.unary_unary(
            '/spaceone.api.identity.v1.DomainOwner/create',
            request_serializer=pb2.CreateDomainOwner.SerializeToString,
            response_deserializer=pb2.DomainOwnerInfo.FromString,
        )
        self.update = channel.unary_unary(
            '/spaceone.api.identity.v1.DomainOwner/update',
            request_serializer=pb2.UpdateDomainOwner.SerializeToString,
            response_deserializer=pb2.DomainOwnerInfo.FromString,
        )
        self.delete = channel.unary_unary(
            '/spaceone.api.identity.v1.DomainOwner/delete',
            request_serializer=pb2.DomainOwnerRequest.SerializeToString,
            response_deserializer=empty_pb2.Empty.FromString,
        )
        self.get = channel.unary_unary(
            '/spaceone.api.identity.v1.DomainOwner/get',
            request_serializer=pb2.GetDomainOwnerRequest.SerializeToString,
            response_deserializer=pb2.DomainOwnerInfo.FromString,
        )
class DomainOwnerServicer(object):
    """Server-side interface for the DomainOwner service.

    Subclass and override the RPC methods; the defaults report
    UNIMPLEMENTED to the caller.
    """

    def _unimplemented(self, context):
        # Common body for every RPC the subclass has not overridden.
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def create(self, request, context):
        """Handle the ``create`` RPC; override in a concrete servicer."""
        self._unimplemented(context)

    def update(self, request, context):
        """Handle the ``update`` RPC; override in a concrete servicer."""
        self._unimplemented(context)

    def delete(self, request, context):
        """Handle the ``delete`` RPC; override in a concrete servicer."""
        self._unimplemented(context)

    def get(self, request, context):
        """Handle the ``get`` RPC; override in a concrete servicer."""
        self._unimplemented(context)
def add_DomainOwnerServicer_to_server(servicer, server):
    """Register *servicer*'s DomainOwner RPC handlers on *server*."""
    pb2 = spaceone_dot_api_dot_identity_dot_v1_dot_domain__owner__pb2
    # (rpc name, behaviour, request message class, response message class)
    rpc_specs = [
        ('create', servicer.create, pb2.CreateDomainOwner, pb2.DomainOwnerInfo),
        ('update', servicer.update, pb2.UpdateDomainOwner, pb2.DomainOwnerInfo),
        ('delete', servicer.delete, pb2.DomainOwnerRequest,
         google_dot_protobuf_dot_empty__pb2.Empty),
        ('get', servicer.get, pb2.GetDomainOwnerRequest, pb2.DomainOwnerInfo),
    ]
    rpc_method_handlers = {
        name: grpc.unary_unary_rpc_method_handler(
            behaviour,
            request_deserializer=request_cls.FromString,
            response_serializer=response_cls.SerializeToString,
        )
        for name, behaviour, request_cls, response_cls in rpc_specs
    }
    generic_handler = grpc.method_handlers_generic_handler(
        'spaceone.api.identity.v1.DomainOwner', rpc_method_handlers)
    server.add_generic_rpc_handlers((generic_handler,))
# This class is part of an EXPERIMENTAL API.
class DomainOwner(object):
    """Stand-alone invocation helpers for the DomainOwner service.

    Each static method performs a single unary-unary call via
    grpc.experimental without requiring the caller to manage a channel.
    """

    @staticmethod
    def create(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        """Invoke the ``create`` RPC once against *target*."""
        pb2 = spaceone_dot_api_dot_identity_dot_v1_dot_domain__owner__pb2
        return grpc.experimental.unary_unary(
            request, target, '/spaceone.api.identity.v1.DomainOwner/create',
            pb2.CreateDomainOwner.SerializeToString,
            pb2.DomainOwnerInfo.FromString,
            options, channel_credentials, insecure, call_credentials,
            compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def update(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        """Invoke the ``update`` RPC once against *target*."""
        pb2 = spaceone_dot_api_dot_identity_dot_v1_dot_domain__owner__pb2
        return grpc.experimental.unary_unary(
            request, target, '/spaceone.api.identity.v1.DomainOwner/update',
            pb2.UpdateDomainOwner.SerializeToString,
            pb2.DomainOwnerInfo.FromString,
            options, channel_credentials, insecure, call_credentials,
            compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def delete(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        """Invoke the ``delete`` RPC once against *target*."""
        pb2 = spaceone_dot_api_dot_identity_dot_v1_dot_domain__owner__pb2
        return grpc.experimental.unary_unary(
            request, target, '/spaceone.api.identity.v1.DomainOwner/delete',
            pb2.DomainOwnerRequest.SerializeToString,
            google_dot_protobuf_dot_empty__pb2.Empty.FromString,
            options, channel_credentials, insecure, call_credentials,
            compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def get(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        """Invoke the ``get`` RPC once against *target*."""
        pb2 = spaceone_dot_api_dot_identity_dot_v1_dot_domain__owner__pb2
        return grpc.experimental.unary_unary(
            request, target, '/spaceone.api.identity.v1.DomainOwner/get',
            pb2.GetDomainOwnerRequest.SerializeToString,
            pb2.DomainOwnerInfo.FromString,
            options, channel_credentials, insecure, call_credentials,
            compression, wait_for_ready, timeout, metadata)
| 49
| 135
| 0.70121
| 857
| 8,183
| 6.240373
| 0.12252
| 0.047307
| 0.060209
| 0.069933
| 0.867801
| 0.861818
| 0.848729
| 0.814136
| 0.753179
| 0.731862
| 0
| 0.009519
| 0.229745
| 8,183
| 166
| 136
| 49.295181
| 0.838966
| 0.076867
| 0
| 0.548872
| 1
| 0
| 0.078455
| 0.051056
| 0
| 0
| 0
| 0
| 0
| 1
| 0.075188
| false
| 0
| 0.022556
| 0.030075
| 0.150376
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
77b37d9171a1365ad576494257206d89ba25adf1
| 111
|
py
|
Python
|
session_05/dictionaries.py
|
dravate/spark_python_course
|
519389fdb21d78cd6d19e1ad2f7c782bc1449a83
|
[
"MIT"
] | null | null | null |
session_05/dictionaries.py
|
dravate/spark_python_course
|
519389fdb21d78cd6d19e1ad2f7c782bc1449a83
|
[
"MIT"
] | null | null | null |
session_05/dictionaries.py
|
dravate/spark_python_course
|
519389fdb21d78cd6d19e1ad2f7c782bc1449a83
|
[
"MIT"
] | 1
|
2021-07-27T14:16:39.000Z
|
2021-07-27T14:16:39.000Z
|
# Several equivalent ways to construct a dict.
d1 = {}                          # empty dict literal
d2 = {'one': 1, 'two': 2}        # dict literal with string keys
d3 = dict(one=1, two=2)          # keyword arguments (string keys only)
# BUG FIX: dict() accepts at most ONE positional argument, so the
# original dict((1, 2), (3, 4)) raised TypeError. Pass a single
# iterable of key/value pairs instead.
d4 = dict([(1, 2), (3, 4)])
d5 = dict({1: 2, 3: 4})          # shallow copy of another mapping
| 11.1
| 26
| 0.414414
| 24
| 111
| 1.916667
| 0.5
| 0.173913
| 0.304348
| 0.347826
| 0.347826
| 0
| 0
| 0
| 0
| 0
| 0
| 0.204819
| 0.252252
| 111
| 9
| 27
| 12.333333
| 0.349398
| 0
| 0
| 0
| 0
| 0
| 0.054054
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
77bdb223eed0186fb8d9de25b3c3b87e0f4d5217
| 4,013
|
py
|
Python
|
file_manager/migrations/0059_auto_20220208_1641.py
|
xiaofengxie128/Proteomic-Data-Manager
|
79756c7021b1d5e4cc4cdb26d741f6ea18846a02
|
[
"Apache-2.0"
] | null | null | null |
file_manager/migrations/0059_auto_20220208_1641.py
|
xiaofengxie128/Proteomic-Data-Manager
|
79756c7021b1d5e4cc4cdb26d741f6ea18846a02
|
[
"Apache-2.0"
] | null | null | null |
file_manager/migrations/0059_auto_20220208_1641.py
|
xiaofengxie128/Proteomic-Data-Manager
|
79756c7021b1d5e4cc4cdb26d741f6ea18846a02
|
[
"Apache-2.0"
] | null | null | null |
# Generated by Django 3.2.7 on 2022-02-08 23:41
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated migration: adds serial-number fields to RawFile and
    refreshes date-based ``upload_to`` paths on several queue models.

    NOTE: the dated upload paths below are a snapshot of the model state
    at generation time (2022-02-08); do not edit them by hand.
    """

    dependencies = [
        ('file_manager', '0058_auto_20220118_1418'),
    ]
    operations = [
        # New free-text serial-number fields on RawFile.
        migrations.AddField(
            model_name='rawfile',
            name='column_sn',
            field=models.TextField(blank=True, max_length=100, null=True),
        ),
        migrations.AddField(
            model_name='rawfile',
            name='spe_sn',
            field=models.TextField(blank=True, max_length=100, null=True),
        ),
        migrations.AlterField(
            model_name='maxquantqueue',
            name='evidence_file',
            field=models.FileField(blank=True, null=True, upload_to='hdstorage/maxquant/2022/2/8'),
        ),
        migrations.AlterField(
            model_name='maxquantqueue',
            name='other_file',
            field=models.FileField(blank=True, null=True, upload_to='hdstorage/maxquant/2022/2/8'),
        ),
        migrations.AlterField(
            model_name='maxquantqueue',
            name='peptide_file',
            field=models.FileField(blank=True, null=True, upload_to='hdstorage/maxquant/2022/2/8'),
        ),
        migrations.AlterField(
            model_name='maxquantqueue',
            name='protein_file',
            field=models.FileField(blank=True, null=True, upload_to='hdstorage/maxquant/2022/2/8'),
        ),
        migrations.AlterField(
            model_name='maxquantqueue',
            name='setting_xml',
            field=models.FileField(blank=True, null=True, upload_to='maxquant_xml/2022/2/8'),
        ),
        migrations.AlterField(
            model_name='msfraggerqueue',
            name='ion_file',
            field=models.FileField(blank=True, null=True, upload_to='hdstorage/msfragger/2022/2/8'),
        ),
        migrations.AlterField(
            model_name='msfraggerqueue',
            name='peptide_file',
            field=models.FileField(blank=True, null=True, upload_to='hdstorage/msfragger/2022/2/8'),
        ),
        migrations.AlterField(
            model_name='msfraggerqueue',
            name='protein_file',
            field=models.FileField(blank=True, null=True, upload_to='hdstorage/msfragger/2022/2/8'),
        ),
        migrations.AlterField(
            model_name='msfraggerqueue',
            name='psm_file',
            field=models.FileField(blank=True, null=True, upload_to='hdstorage/msfragger/2022/2/8'),
        ),
        # NOTE(review): '2022/ 2/8' contains a space, unlike every other
        # dated path here — presumably a space-padded month in the model's
        # upload_to; confirm against the NoteFile model definition.
        migrations.AlterField(
            model_name='notefile',
            name='notefile',
            field=models.FileField(blank=True, null=True, upload_to='notefiles/2022/ 2/8'),
        ),
        migrations.AlterField(
            model_name='pdqueue',
            name='consensus_method',
            field=models.FileField(blank=True, null=True, upload_to='hdstorage/proteindiscoverer/2022/2/8'),
        ),
        migrations.AlterField(
            model_name='pdqueue',
            name='export_file',
            field=models.FileField(blank=True, null=True, upload_to='hdstorage/proteindiscoverer/2022/2/8'),
        ),
        migrations.AlterField(
            model_name='pdqueue',
            name='processing_method',
            field=models.FileField(blank=True, null=True, upload_to='hdstorage/proteindiscoverer/2022/2/8'),
        ),
        migrations.AlterField(
            model_name='pdqueue',
            name='result_file',
            field=models.FileField(blank=True, null=True, upload_to='hdstorage/proteindiscoverer/2022/2/8'),
        ),
        migrations.AlterField(
            model_name='rawfile',
            name='note_file',
            field=models.ManyToManyField(blank=True, to='file_manager.NoteFile'),
        ),
        migrations.AlterField(
            model_name='spectrominequeue',
            name='result_file',
            field=models.FileField(blank=True, null=True, upload_to='hdstorage/spectromine/2022/2/8'),
        ),
    ]
| 38.586538
| 108
| 0.592574
| 411
| 4,013
| 5.644769
| 0.177616
| 0.069828
| 0.172414
| 0.2
| 0.830172
| 0.830172
| 0.786207
| 0.786207
| 0.786207
| 0.704741
| 0
| 0.04396
| 0.28009
| 4,013
| 103
| 109
| 38.961165
| 0.759086
| 0.011214
| 0
| 0.742268
| 1
| 0
| 0.223903
| 0.115734
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.010309
| 0
| 0.041237
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
77be2f55131b3349b56faffc31d3bd578f487785
| 9,334
|
py
|
Python
|
tests/tests_user_demo_orders_add.py
|
Bitsgap/Bitsgap-api-py
|
4102e578c7958dfabb945eb17fdff4ef6cab8fdd
|
[
"MIT"
] | 2
|
2021-05-12T01:14:24.000Z
|
2021-12-08T14:36:00.000Z
|
tests/tests_user_demo_orders_add.py
|
Bitsgap/Bitsgap-rest-api-py
|
4102e578c7958dfabb945eb17fdff4ef6cab8fdd
|
[
"MIT"
] | null | null | null |
tests/tests_user_demo_orders_add.py
|
Bitsgap/Bitsgap-rest-api-py
|
4102e578c7958dfabb945eb17fdff4ef6cab8fdd
|
[
"MIT"
] | null | null | null |
import asyncio
import logging
from unittest import TestCase
from rest_api_py_lib import BitsgapClient
from tests.keys import public_key,private_key
class TestRestUserDemoOrdersAdd(TestCase):
    """Integration tests for placing orders on the demo market.

    Each test overrides a single field of a known-good order and checks
    the API's response status. Fixes over the original version:
    * ``asyncio.coroutine`` (removed in Python 3.11) is no longer used —
      the ``async def`` coroutine is awaited directly;
    * the event loop is closed in a ``finally`` block so a failing
      assertion no longer leaks the loop;
    * the nine copy-pasted bodies share private helpers.
    Test method names and behavior are unchanged.
    """

    # Known-good order parameters; individual tests override one field.
    _BASE_ORDER = {
        'market': 'okex',
        'pair': 'ETH_BTC',
        'price': '0.015',
        'amount': '0.1',
        'side': 'buy',
        'ord_type': 'limit',
    }

    def _place_order(self, **overrides):
        """Place a demo order built from _BASE_ORDER plus *overrides*.

        Returns:
            The decoded API response dict.
        """
        params = dict(self._BASE_ORDER, **overrides)
        lib = BitsgapClient(public_key, private_key)
        result = lib.demo_orders_add(params['market'], params['pair'],
                                     params['amount'], params['price'],
                                     params['side'], params['ord_type'])
        logging.debug(result)
        return result

    def _run_async(self, test_coro):
        """Run coroutine function *test_coro* on a fresh event loop."""
        event_loop = asyncio.new_event_loop()
        try:
            asyncio.set_event_loop(event_loop)
            # Await the async def directly; asyncio.coroutine was removed
            # in Python 3.11 and was a no-op on coroutine functions anyway.
            event_loop.run_until_complete(test_coro())
        finally:
            event_loop.close()

    def _assert_order_opened(self, result):
        """Assert a successful response carrying a well-formed order."""
        self.assertIn('status', result)
        self.assertTrue(result['status'] == 'ok')
        self.assertIn('time', result)
        self.assertIn('data', result)
        data = result['data']
        self.assertIsNotNone(data)
        # Fields the API returns for a newly placed order.
        for field in ('id', 'price', 'amount', 'state', 'pair',
                      'type', 'side', 'uts'):
            self.assertIn(field, data)
        if 'state' in data:
            # Kept from the original: substring membership in 'opened'.
            self.assertIn(data['state'], 'opened')

    def _assert_order_rejected(self, result, check_message=True):
        """Assert an error response; optionally require a 'message' field."""
        self.assertIn('time', result)
        self.assertIn('status', result)
        self.assertTrue(result['status'] == 'error')
        if check_message:
            self.assertIn('message', result)

    def test_user_demo_orders_add_valid_data(self):
        """Place order on demo market."""
        async def run_test():
            result = self._place_order(amount='1')
            self._assert_order_opened(result)
            await asyncio.sleep(1)  # throttle between live API calls
        self._run_async(run_test)

    def test_user_demo_orders_add_invalid_market(self):
        """Invalid market."""
        async def run_test():
            result = self._place_order(market='no_market')
            self._assert_order_rejected(result)
            await asyncio.sleep(1)
        self._run_async(run_test)

    def test_user_demo_orders_add_invalid_pair(self):
        """Invalid pair."""
        async def run_test():
            result = self._place_order(pair='no_pair')
            self._assert_order_rejected(result)
            await asyncio.sleep(1)
        self._run_async(run_test)

    def test_user_demo_orders_add_invalid_side(self):
        """Invalid side."""
        async def run_test():
            result = self._place_order(side='invalid')
            self._assert_order_rejected(result)
            await asyncio.sleep(1)
        self._run_async(run_test)

    def test_user_demo_orders_add_invalid_type(self):
        """Invalid type."""
        async def run_test():
            result = self._place_order(ord_type='invalid')
            self._assert_order_rejected(result)
            await asyncio.sleep(1)
        self._run_async(run_test)

    def test_user_demo_orders_add_invalid_price_format(self):
        """Invalid price format."""
        async def run_test():
            result = self._place_order(price='0*250')
            self._assert_order_rejected(result)
            await asyncio.sleep(1)
        self._run_async(run_test)

    def test_user_demo_orders_add_invalid_price_zero(self):
        """Invalid zero price.

        NOTE(review): despite the name, the original test asserts a
        SUCCESSFUL order for price '0'; that expectation is preserved.
        """
        async def run_test():
            result = self._place_order(price='0')
            self._assert_order_opened(result)
            await asyncio.sleep(1)
        self._run_async(run_test)

    def test_user_demo_orders_add_invalid_amount_format(self):
        """Invalid amount format."""
        async def run_test():
            result = self._place_order(amount='100*')
            self._assert_order_rejected(result)
            await asyncio.sleep(1)
        self._run_async(run_test)

    def test_user_demo_orders_add_invalid_amount_zero(self):
        """Invalid amount zero."""
        async def run_test():
            result = self._place_order(amount='0')
            # The original did not require a 'message' field here.
            self._assert_order_rejected(result, check_message=False)
            await asyncio.sleep(1)
        self._run_async(run_test)
| 29.352201
| 85
| 0.561174
| 1,027
| 9,334
| 4.87147
| 0.079844
| 0.097142
| 0.046772
| 0.037977
| 0.946032
| 0.946032
| 0.941035
| 0.909055
| 0.870877
| 0.870877
| 0
| 0.009399
| 0.327512
| 9,334
| 317
| 86
| 29.444795
| 0.787637
| 0.005785
| 0
| 0.890909
| 0
| 0
| 0.065624
| 0
| 0
| 0
| 0
| 0
| 0.259091
| 1
| 0.040909
| false
| 0
| 0.022727
| 0
| 0.068182
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
77f172359238ee4f9aa64dd9f0ca2c3d158a0bf1
| 1,989
|
py
|
Python
|
tests/feed/test_refresh_user_feeds.py
|
sslavov93/rss_scraper
|
4621e01b295de341d0921cade026b406f2b389e3
|
[
"MIT"
] | 1
|
2020-09-17T12:40:34.000Z
|
2020-09-17T12:40:34.000Z
|
tests/feed/test_refresh_user_feeds.py
|
sslavov93/rss_scraper
|
4621e01b295de341d0921cade026b406f2b389e3
|
[
"MIT"
] | null | null | null |
tests/feed/test_refresh_user_feeds.py
|
sslavov93/rss_scraper
|
4621e01b295de341d0921cade026b406f2b389e3
|
[
"MIT"
] | null | null | null |
from unittest.mock import patch
from feed.models import FeedItem
from tests import BaseTestFixture, basic_auth_headers
class TestRefreshUserFeeds(BaseTestFixture):
    """Tests for the feed-refresh endpoints (/api/my-feeds/.../update)."""

    def test_refresh_single_feed_when_not_authenticated(self):
        """Refreshing one feed without credentials returns 401."""
        response = self.client.post('/api/my-feeds/5/update')
        self.assertEqual(401, response.status_code)

    def test_refresh_all_user_feeds_when_not_authenticated(self):
        """Refreshing all feeds without credentials returns 401."""
        response = self.client.post('/api/my-feeds/update')
        self.assertEqual(401, response.status_code)

    def test_refresh_single_feed_when_feed_not_exist(self):
        """Refreshing a nonexistent feed returns 404."""
        response = self.client.post(
            '/api/my-feeds/5/update',
            headers=basic_auth_headers("user", "pass")
        )
        self.assertEqual(404, response.status_code)

    @patch("feed.routes.Scraper")
    @patch("feed.routes.scrape_single")
    def test_refresh_single_feed_no_error(self, scrape_single_task, scraper):
        """A successful single-feed refresh persists items and queues a task."""
        scraper.parse.return_value = [FeedItem(id=5), FeedItem(id=6)]
        scrape_single_task.return_value = {}
        response = self.client.post(
            '/api/my-feeds/1/update',
            headers=basic_auth_headers("user", "pass")
        )
        self.assertEqual(200, response.status_code)
        # BUG FIX: the original used assertTrue(mock.called_with(...)).
        # `called_with` is not part of the Mock API — attribute access
        # auto-creates a truthy child mock, so the assertion always
        # passed. Use the real assertion method instead.
        scraper.persist.assert_called_with([FeedItem(id=5), FeedItem(id=6)])
        self.assertTrue(scrape_single_task.delay.called)

    @patch("feed.routes.Scraper")
    @patch("feed.routes.scrape_single")
    def test_refresh_all_user_feeds_no_error(self, scrape_single_task, scraper):
        """A successful all-feeds refresh persists items and queues tasks."""
        scraper.parse.return_value = [FeedItem(id=5), FeedItem(id=6)]
        scrape_single_task.return_value = {}
        response = self.client.post(
            '/api/my-feeds/update',
            headers=basic_auth_headers("user", "pass")
        )
        self.assertEqual(200, response.status_code)
        # Same no-op-assertion fix as in the single-feed test above.
        scraper.persist.assert_called_with([FeedItem(id=5), FeedItem(id=6)])
        self.assertTrue(scrape_single_task.delay.called)
| 38.25
| 86
| 0.691302
| 251
| 1,989
| 5.223108
| 0.231076
| 0.073227
| 0.073227
| 0.083905
| 0.866514
| 0.858886
| 0.811594
| 0.811594
| 0.811594
| 0.776506
| 0
| 0.016129
| 0.189542
| 1,989
| 51
| 87
| 39
| 0.797146
| 0
| 0
| 0.55
| 0
| 0
| 0.109603
| 0.058321
| 0
| 0
| 0
| 0
| 0.225
| 1
| 0.125
| false
| 0.075
| 0.075
| 0
| 0.225
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
7aeb3ca58882967e00e1a0f51dac95fb61f266aa
| 384
|
py
|
Python
|
s04_variaveis_tipos_de_dados/s04_exercicios/s04_exercicio_07.py
|
adeogliari/GeekUniversity_Python
|
1b6badc45ca1dfbaa2f42196fb2dedac417b866e
|
[
"MIT"
] | null | null | null |
s04_variaveis_tipos_de_dados/s04_exercicios/s04_exercicio_07.py
|
adeogliari/GeekUniversity_Python
|
1b6badc45ca1dfbaa2f42196fb2dedac417b866e
|
[
"MIT"
] | null | null | null |
s04_variaveis_tipos_de_dados/s04_exercicios/s04_exercicio_07.py
|
adeogliari/GeekUniversity_Python
|
1b6badc45ca1dfbaa2f42196fb2dedac417b866e
|
[
"MIT"
] | null | null | null |
"""
7) Leia uma temperatura em graus Fahrenheit e apresente-a convertida em graus Celsius. A fórmula de conversão é: C = (F-32.0)*5.0/9.0, sendo C a temperatura em Celsius e F a temperatura em Fahrenheit.
"""
fahrenheit = float(input('Digite a temperatura em graus Fahrenheit F: \n'))
print(f'A temperatura em graus Celsius de {fahrenheit}F é: {(fahrenheit - 32.0) * 5.0 / 9.0}ºC')
| 48
| 200
| 0.716146
| 69
| 384
| 3.985507
| 0.42029
| 0.236364
| 0.203636
| 0.203636
| 0.050909
| 0.050909
| 0
| 0
| 0
| 0
| 0
| 0.046584
| 0.161458
| 384
| 7
| 201
| 54.857143
| 0.807453
| 0.520833
| 0
| 0
| 0
| 0.5
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.5
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 7
|
bb1819c9bd2f8222a4a5fb0e41cee2222fe0b742
| 12,923
|
py
|
Python
|
abstract_matrix.py
|
sredroboto/math-matrix
|
71b8d53fb2b3bd7f8a27dde6a5ee2a4847b135ad
|
[
"BSD-3-Clause"
] | 2
|
2019-11-21T14:28:50.000Z
|
2019-11-21T20:58:46.000Z
|
abstract_matrix.py
|
sredroboto/SistemaSolar
|
4fa946adab7e5308639954feb00a2b3bc7153f91
|
[
"BSD-3-Clause"
] | null | null | null |
abstract_matrix.py
|
sredroboto/SistemaSolar
|
4fa946adab7e5308639954feb00a2b3bc7153f91
|
[
"BSD-3-Clause"
] | null | null | null |
from abc import ABC, abstractmethod
class AbstractMatrix(ABC):
"""
Classe utilizada para representar uma matriz
"""
def __init__(self, rows, cols, data = []):
"""
Args:
rows: Quantidade de linhas da matriz
cols: Quantidade de colunas da matriz
data: Lista com os valores da matriz,
caso a lista seja vazia deve-se preencher com zeros
"""
self.rows = rows
self.cols = cols
self._init_data(data)
    @abstractmethod
    def __getitem__(self, key):
        """Return the value stored at position (i, j).

        Matrix indexing is 1-based.

        Args:
            key: Tuple holding the indices i and j.

        Returns:
            The value stored at position i, j.
        """
        pass
    @abstractmethod
    def __setitem__(self, key, value):
        """Store a value at position (i, j).

        Matrix indexing is 1-based.

        Args:
            key: Tuple holding the indices i and j.
            value: Value to store in the matrix.
        """
        pass
    @abstractmethod
    def __repr__(self):
        """Return the matrix formatted as a string for the console.

        Used when the object is echoed without an explicit method call.

        Returns:
            The matrix data formatted for display, for example:
            #> a = Matrix(2,2,[1, 2, 3, 4])
            #> a
            1.0000 2.0000
            3.0000 4.0000
        """
        pass
    @abstractmethod
    def __str__(self):
        """Return the matrix formatted as a string during conversion.

        Used when the object is converted to ``str``.

        Returns:
            The matrix data formatted for display, for example:
            #> a = Matrix(2,2,[1, 2, 3, 4])
            #> a
            1.0000 2.0000
            3.0000 4.0000
        """
        pass
    @abstractmethod
    def __radd__(self, other):
        """Add with this matrix as the right-hand operand.

        Adds another matrix (both must have the same dimensions) or a
        scalar (the scalar is added to every element).

        Args:
            other: Matrix or scalar to add to this object.

        Returns:
            The matrix resulting from the operation, for example:
            #> a = Matrix(2,2,[1, 2, 3, 4])
            #> b = Matrix(2,2,[2, 4, 6, 8])
            #> c = b + a
            #> c
            3.0000 6.0000
            9.0000 12.0000
            #> c = 2 + a
            #> c
            3.0000 4.0000
            5.0000 6.0000
        """
        pass
    @abstractmethod
    def __add__(self, other):
        """Add with this matrix as the left-hand operand.

        Adds another matrix (both must have the same dimensions) or a
        scalar (the scalar is added to every element).

        Args:
            other: Matrix or scalar to add to this object.

        Returns:
            The matrix resulting from the operation, for example:
            #> a = Matrix(2,2,[1, 2, 3, 4])
            #> b = Matrix(2,2,[2, 4, 6, 8])
            #> c = a + b
            #> c
            3.0000 6.0000
            9.0000 12.0000
            #> c = a + 2
            #> c
            3.0000 4.0000
            5.0000 6.0000
        """
        pass
    @abstractmethod
    def __rsub__(self, other):
        """Subtract with this matrix as the right-hand operand.

        Subtracts another matrix (both must have the same dimensions) or
        a scalar (applied to every element).

        Args:
            other: Matrix or scalar to subtract with this object.

        Returns:
            The matrix resulting from the operation, for example:
            #> a = Matrix(2,2,[1, 2, 3, 4])
            #> b = Matrix(2,2,[2, 4, 6, 8])
            #> c = b - a
            #> c
            1.0000 2.0000
            3.0000 4.0000
            #> c = 1 - a
            #> c
            0.0000 1.0000
            2.0000 3.0000
        """
        pass
    @abstractmethod
    def __sub__(self, other):
        """Subtract with this matrix as the left-hand operand.

        Subtracts another matrix (both must have the same dimensions) or
        a scalar (applied to every element).

        Returns:
            The matrix resulting from the operation, for example:
            #> a = Matrix(2,2,[1, 2, 3, 4])
            #> b = Matrix(2,2,[2, 4, 6, 8])
            #> c = a - b
            #> c
            -1.0000 -2.0000
            -3.0000 -4.0000
            #> c = a - 1
            #> c
            0.0000 1.0000
            2.0000 3.0000
        """
        pass
@abstractmethod
def __rmul__(self, other):
    """Element-wise multiply with this matrix as the right-hand operand.

    Multiplies element by element with another matrix or a scalar.
    Matrix * matrix requires both operands to have the same dimensions;
    scalar * matrix multiplies every element of the matrix by the
    scalar. (This is NOT matrix multiplication — see ``dot``.)

    Args:
        other: Matrix or scalar to multiply with the current object.

    Returns:
        The matrix resulting from the operation, for example:
        #> a = Matrix(2,2,[1, 2, 3, 4])
        #> b = Matrix(2,2,[2, 4, 6, 8])
        #> c = b * a
        #> c
        2.0000 8.0000
        18.0000 32.0000
        #> a = Matrix(2,2,[1, 2, 3, 4])
        #> c = 2 * a
        #> c
        2.0000 4.0000
        6.0000 8.0000
    """
    pass
@abstractmethod
def __mul__(self, other):
    """Element-wise multiply with this matrix as the left-hand operand.

    Multiplies element by element with another matrix or a scalar.
    Matrix * matrix requires both operands to have the same dimensions;
    matrix * scalar multiplies every element of the matrix by the
    scalar. (This is NOT matrix multiplication — see ``dot``.)

    Args:
        other: Matrix or scalar to multiply with the current object.

    Returns:
        The matrix resulting from the operation, for example:
        #> a = Matrix(2,2,[1, 2, 3, 4])
        #> b = Matrix(2,2,[2, 4, 6, 8])
        #> c = a * b
        #> c
        2.0000 8.0000
        18.0000 32.0000
        #> a = Matrix(2,2,[1, 2, 3, 4])
        #> c = a * 2
        #> c
        2.0000 4.0000
        6.0000 8.0000
    """
    pass
@abstractmethod
def __rtruediv__(self, other):
    """Element-wise divide with this matrix as the right-hand operand.

    Divides element by element using another matrix or a scalar.
    Matrix / matrix requires both operands to have the same dimensions;
    with a scalar, the operation is applied element-wise.

    NOTE(review): the second example below shows ``2 / a`` producing
    ``a / 2`` (element divided by scalar), mirroring the original docs —
    confirm against the concrete implementation whether scalar
    ``__rtruediv__`` really computes element / scalar rather than
    scalar / element.

    Args:
        other: Matrix or scalar to divide with the current object.

    Returns:
        The matrix resulting from the operation, for example:
        #> a = Matrix(2,2,[1, 2, 3, 4])
        #> b = Matrix(2,2,[2, 4, 6, 8])
        #> c = b / a
        #> c
        2.0000 2.0000
        2.0000 2.0000
        #> a = Matrix(2,2,[1, 2, 3, 4])
        #> c = 2 / a
        #> c
        0.5000 1.0000
        1.5000 2.0000
    """
    pass
@abstractmethod
def __truediv__(self, other):
    """Element-wise divide with this matrix as the left-hand operand.

    Divides element by element by another matrix or a scalar.
    Matrix / matrix requires both operands to have the same dimensions;
    matrix / scalar divides every element of the matrix by the scalar.

    Args:
        other: Matrix or scalar to divide the current object by.

    Returns:
        The matrix resulting from the operation, for example:
        #> a = Matrix(2,2,[1, 2, 3, 4])
        #> b = Matrix(2,2,[2, 4, 6, 8])
        #> c = a / b
        #> c
        0.5000 0.5000
        0.5000 0.5000
        #> a = Matrix(2,2,[1, 2, 3, 4])
        #> c = a / 2
        #> c
        0.5000 1.0000
        1.5000 2.0000
    """
    pass
@abstractmethod
def dot(self, other):
    """Perform true matrix multiplication.

    For matrix multiplication, the first matrix must have a number of
    columns equal to the number of rows of the second matrix.

    Args:
        other: Matrix to multiply with the current object.

    Returns:
        The matrix resulting from the operation, for example:
        #> a = Matrix(2,2,[1, 2, 3, 4])
        #> b = Matrix(2,2,[2, 4, 6, 8])
        #> c = a.dot(b)
        #> c
        14.0000 20.0000
        30.0000 44.0000
    """
    pass
@abstractmethod
def transpose(self):
    """Transpose the matrix.

    Transposition moves the element at position (i, j) to position
    (j, i), so an m x n matrix becomes an n x m matrix.

    Returns:
        The matrix resulting from the operation, for example:
        #> a = Matrix(2,3,[1, 2, 3, 4, 5, 6])
        #> a
        1.0000 2.0000 3.0000
        4.0000 5.0000 6.0000
        #> c = a.transpose()
        #> c
        1.0000 4.0000
        2.0000 5.0000
        3.0000 6.0000
    """
    pass
@abstractmethod
def gauss_jordan(self):
    """Apply the Gauss-Jordan elimination algorithm to the matrix.

    Applies Gauss-Jordan elimination to the current matrix, reducing it
    to reduced row echelon form. Can be used to solve a system of
    linear equations, compute the inverse matrix, etc.

    Returns:
        The matrix resulting from the operation, for example:
        #> a = Matrix(3,4,[1, -2, 1, 0, 0, 2, -8, 8, 5, 0, -5, 10])
        #> a
        1.0000 -2.0000 1.0000 0.0000
        0.0000 2.0000 -8.0000 8.0000
        5.0000 0.0000 -5.0000 10.0000
        #> c = a.gauss_jordan()
        #> c
        1.0000 0.0000 0.0000 1.0000
        0.0000 1.0000 0.0000 0.0000
        0.0000 0.0000 1.0000 -1.0000
    """
    pass
@abstractmethod
def inverse(self):
    """Compute the inverse of the current matrix.

    Computes the matrix inverse using the Gauss-Jordan algorithm.

    Returns:
        The matrix resulting from the operation, for example:
        #> a = Matrix(2,2,[1, 2, 3, 4])
        #> a
        1.0000 2.0000
        3.0000 4.0000
        #> c = a.inverse()
        #> c
        -2.0000 1.0000
        1.5000 -0.5000
    """
    pass
def _init_data(self, data):
if data:
try:
if len(data) == self.rows * self.cols:
self.data = data
else:
raise Exception('Init error', 'The data is incompatible with matrix size')
except Exception as e:
print(e)
else:
self.data = [0] * (self.rows * self.cols)
| 30.550827
| 102
| 0.514973
| 1,684
| 12,923
| 3.917458
| 0.114608
| 0.01152
| 0.035168
| 0.012733
| 0.787176
| 0.758375
| 0.742762
| 0.72245
| 0.707594
| 0.678035
| 0
| 0.111457
| 0.402925
| 12,923
| 423
| 103
| 30.550827
| 0.74352
| 0.702159
| 0
| 0.523077
| 0
| 0
| 0.028814
| 0
| 0
| 0
| 0
| 0.004728
| 0
| 1
| 0.276923
| false
| 0.246154
| 0.015385
| 0
| 0.307692
| 0.015385
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
bb55579c6f96ecbc21cea483d25ad02b38e7aa01
| 92
|
py
|
Python
|
plot_bokeh/__init__.py
|
amarkpayne/ampayne_tools
|
a47e9baa70edaedfb655ab51f79bf5035eaacd83
|
[
"MIT"
] | null | null | null |
plot_bokeh/__init__.py
|
amarkpayne/ampayne_tools
|
a47e9baa70edaedfb655ab51f79bf5035eaacd83
|
[
"MIT"
] | null | null | null |
plot_bokeh/__init__.py
|
amarkpayne/ampayne_tools
|
a47e9baa70edaedfb655ab51f79bf5035eaacd83
|
[
"MIT"
] | null | null | null |
from __future__ import absolute_import, print_function
from plot_bokeh.plot_bokeh import *
| 23
| 54
| 0.858696
| 13
| 92
| 5.461538
| 0.615385
| 0.253521
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.108696
| 92
| 3
| 55
| 30.666667
| 0.865854
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0.5
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 1
|
0
| 7
|
24f6fd0967e1c029cb8eacd64a5cb40e6175ef0b
| 13,553
|
py
|
Python
|
afs/tests/lla/VolServerLLATest.py
|
chanke/afspy
|
525e7b3b53e58be515f11b83cc59ddb0765ef8e5
|
[
"BSD-2-Clause"
] | null | null | null |
afs/tests/lla/VolServerLLATest.py
|
chanke/afspy
|
525e7b3b53e58be515f11b83cc59ddb0765ef8e5
|
[
"BSD-2-Clause"
] | null | null | null |
afs/tests/lla/VolServerLLATest.py
|
chanke/afspy
|
525e7b3b53e58be515f11b83cc59ddb0765ef8e5
|
[
"BSD-2-Clause"
] | null | null | null |
#!/usr/bin/env python
"""
Unittest for the LLA module VolServerLLA
"""
from ConfigParser import ConfigParser
import datetime
import os
import sys
import time
import unittest
from afs.tests.BaseTest import parse_commandline
import afs.lla.VolServerLLA
import afs.lla.VLDBLLA
import afs
class EvaluateTestResults(unittest.TestCase):
    """Shared assertion helpers for the VolServerLLA test classes.

    Each ``eval_vos_*`` method checks the result of one low-level
    volume-server operation against the fixtures stored on the class
    (``self.volume``, ``self.tmp_volume``, ...).
    """

    def eval_vos_examine(self, res):
        """The examined volume must match the configured test volume."""
        self.assertEqual(res.vid, self.volume.vid)
        self.assertEqual(res.servername, self.volume.servername)
        self.assertEqual(res.partition, self.volume.partition)

    def eval_vos_move(self, res):
        """A move must hand back the temporary volume."""
        self.assertEqual(res, self.tmp_volume)

    def eval_vos_release(self, res, res_ro, before_date):
        """A release must succeed and refresh the RO creation date."""
        self.assertEqual(res, True)
        self.assertTrue(res_ro.creation_date >= before_date)

    def eval_vos_set_blockquota(self, res, expected_quota):
        """The returned volume must carry the requested block quota."""
        self.assertEqual(res.maxquota, expected_quota)

    def eval_vos_dump(self, res):
        """A dump must report success."""
        self.assertEqual(res, True)

    def eval_vos_restore(self, res):
        """A restore must hand back the temporary volume."""
        self.assertEqual(res, self.tmp_volume)

    def eval_vos_convert(self, res):
        """A convert must hand back the temporary volume."""
        self.assertEqual(res, self.tmp_volume)

    def eval_vos_create(self, res):
        """A create must report success."""
        self.assertEqual(res, True)

    def eval_vos_remove(self, res):
        """A remove must report success."""
        self.assertEqual(res, True)
class TestVolServerLLAMethods(EvaluateTestResults) :
    """
    Tests VolServerLLA methods with direct (synchronous) calls.

    Fixtures (fileserver, partition, volume IDs, dump file path, move
    destination) are read from the "VolServerLLA" section of the afs
    setup config. Tests that modify state are skipped unless
    afs.CONFIG.enable_modifying_tests is set.
    """
    @classmethod
    def setUpClass(self) :
        """
        Set up the test environment (called automatically by unittest
        once per class).
        """
        # NOTE(review): first parameter is named "self" but this is a
        # classmethod, so it actually receives the class ("cls").
        self.lla = afs.lla.VolServerLLA.VolServerLLA()
        self.vldb_lla = afs.lla.VLDBLLA.VLDBLLA()
        # All test parameters come from the [VolServerLLA] section of
        # the setup config file.
        self.test_config = ConfigParser()
        self.test_config.read(afs.CONFIG.setup)
        self.fileserver = self.test_config.get("VolServerLLA", "FS")
        self.part = self.test_config.get("VolServerLLA", "Part")
        # Read/write volume under test.
        self.volume = afs.model.Volume.Volume()
        self.volume.vid = int(self.test_config.get("VolServerLLA", "VolID"))
        self.volume.servername = self.test_config.get("VolServerLLA", "FS")
        self.volume.partition = self.test_config.get("VolServerLLA", "Part")
        # Read-only counterpart of the volume, used by the release test.
        self.ro_volume = afs.model.Volume.Volume()
        self.ro_volume.vid = int(self.test_config.get("VolServerLLA", "ROVolID"))
        self.ro_volume.servername = self.test_config.get("VolServerLLA", "FS")
        self.ro_volume.partition = self.test_config.get("VolServerLLA", "Part")
        self.dump_file = self.test_config.get("VolServerLLA", "DumpFile")
        # Temporary volume created and removed by the modifying tests.
        self.tmp_volume = afs.model.Volume.Volume()
        self.tmp_volume.name = self.test_config.get("VolServerLLA", "TmpVolName")
        self.tmp_volume.servername = self.volume.servername
        self.tmp_volume.partition = self.volume.partition
        # Destination server/partition for the move test.
        self.dst_server = self.test_config.get("VolServerLLA", "DST_FS")
        self.dst_partition = self.test_config.get("VolServerLLA", "DST_Part")
        return
    def test_vos_examine(self) :
        # Examine the configured volume and compare it to the fixture.
        res = self.lla.examine(self.volume)
        self.eval_vos_examine(res)
        return
    def test_vos_dump_restore_remove(self) :
        # Dump the volume to a file, restore it as the temporary volume,
        # then clean up both the dump file and the restored volume.
        if not afs.CONFIG.enable_modifying_tests :
            raise unittest.SkipTest("modifying tests disabled.")
        res = self.lla.dump(self.volume, self.dump_file)
        self.eval_vos_dump(res)
        res = self.lla.restore(self.tmp_volume, self.dump_file)
        self.eval_vos_restore(res)
        os.unlink(self.dump_file)
        res = self.lla.remove(self.tmp_volume)
        self.eval_vos_remove(res)
        return
    def test_vos_create_remove(self) :
        # Create the temporary volume, then remove it again.
        if not afs.CONFIG.enable_modifying_tests :
            raise unittest.SkipTest("modifying tests disabled.")
        res = self.lla.create(self.tmp_volume)
        self.eval_vos_create(res)
        res = self.lla.remove(self.tmp_volume)
        self.eval_vos_remove(res)
        return
    def test_vos_release(self) :
        # Release the RW volume and verify the RO copy was refreshed.
        if not afs.CONFIG.enable_modifying_tests :
            raise unittest.SkipTest("modifying tests disabled.")
        before_date = datetime.datetime.now()
        # before_date has a higher resolution than creation_date of the ro.
        # thus we need to wait to make sure before_Dat is smaller.
        time.sleep(1)
        res = self.lla.release(self.volume)
        res_ro = self.lla.examine(self.ro_volume)
        self.eval_vos_release(res, res_ro, before_date)
        return
    def test_vos_set_blockquota(self) :
        # Change the block quota, verify it, then restore the original
        # quota so the test leaves the volume unchanged.
        if not afs.CONFIG.enable_modifying_tests :
            raise unittest.SkipTest("modifying tests disabled.")
        saved_vol = self.lla.examine(self.volume)
        res = self.lla.set_blockquota(self.volume, 1000)
        self.eval_vos_set_blockquota(res, 1000)
        res = self.lla.set_blockquota(self.volume, saved_vol.maxquota)
        self.eval_vos_set_blockquota(res, saved_vol.maxquota)
        return
    def test_vos_convert(self) :
        # Create a temporary volume with an RO site, release it, remove
        # the RW copy, then convert and clean up.
        # NOTE(review): the RW volume is removed before convert —
        # presumably convert promotes the remaining RO copy back to RW;
        # confirm against the VolServerLLA implementation.
        if not afs.CONFIG.enable_modifying_tests :
            raise unittest.SkipTest("modifying tests disabled.")
        res = self.lla.create(self.tmp_volume)
        res = self.vldb_lla.addsite(self.tmp_volume)
        res = self.lla.release(self.tmp_volume)
        res = self.lla.remove(self.tmp_volume)
        res = self.lla.convert(self.tmp_volume)
        self.eval_vos_convert(res)
        res = self.lla.remove(self.tmp_volume)
        self.eval_vos_remove(res)
        return
    def test_vos_move(self) :
        # Create a temporary volume, move it to the destination
        # server/partition, then remove the moved volume (the result of
        # the move is passed to remove).
        if not afs.CONFIG.enable_modifying_tests :
            raise unittest.SkipTest("modifying tests disabled.")
        res = self.lla.create(self.tmp_volume)
        res = self.lla.move(self.tmp_volume, self.dst_server, self.dst_partition)
        self.eval_vos_move(res)
        res = self.lla.remove(res)
        return
class TestVolServerLLAMethods_async(EvaluateTestResults):
    """
    Tests VolServerLLA methods through the asynchronous (detached
    subprocess) interface.

    Every call follows the same pattern: submit with async=True (which
    returns a subprocess identifier), wait_for_subprocess(), then fetch
    the result with get_subprocess_result().

    NOTE(review): ``async=True`` is only valid in Python 2 — ``async``
    became a reserved keyword in Python 3.7, so this module cannot be
    ported without renaming that keyword argument.
    """
    @classmethod
    def setUpClass(self) :
        """
        Set up the test environment (called automatically by unittest
        once per class).
        """
        # NOTE(review): first parameter is named "self" but this is a
        # classmethod, so it actually receives the class ("cls").
        self.lla = afs.lla.VolServerLLA.VolServerLLA()
        self.vldb_lla = afs.lla.VLDBLLA.VLDBLLA()
        # All test parameters come from the [VolServerLLA] section of
        # the setup config file.
        self.test_config = ConfigParser()
        self.test_config.read(afs.CONFIG.setup)
        self.fileserver = self.test_config.get("VolServerLLA", "FS")
        self.part = self.test_config.get("VolServerLLA", "Part")
        self.dump_file = self.test_config.get("VolServerLLA", "DumpFile")
        # Read/write volume under test.
        self.volume = afs.model.Volume.Volume()
        self.volume.vid = int(self.test_config.get("VolServerLLA", "VolID"))
        self.volume.servername = self.test_config.get("VolServerLLA", "FS")
        self.volume.partition = self.test_config.get("VolServerLLA", "Part")
        # Read-only counterpart of the volume, used by the release test.
        self.ro_volume = afs.model.Volume.Volume()
        self.ro_volume.vid = int(self.test_config.get("VolServerLLA", "ROVolID"))
        self.ro_volume.servername = self.test_config.get("VolServerLLA", "FS")
        self.ro_volume.partition = self.test_config.get("VolServerLLA", "Part")
        # Temporary volume created and removed by the modifying tests.
        self.tmp_volume = afs.model.Volume.Volume()
        self.tmp_volume.name = self.test_config.get("VolServerLLA", "TmpVolName")
        self.tmp_volume.servername = self.volume.servername
        self.tmp_volume.partition = self.volume.partition
        # Destination server/partition for the move test.
        self.dst_server = self.test_config.get("VolServerLLA", "DST_FS")
        self.dst_partition = self.test_config.get("VolServerLLA", "DST_Part")
        return
    def test_vos_examine(self) :
        # Examine the configured volume asynchronously and compare the
        # result to the fixture.
        sp_ident = self.lla.examine(self.volume, async=True)
        self.lla.wait_for_subprocess(sp_ident)
        res = self.lla.get_subprocess_result(sp_ident)
        self.eval_vos_examine(res)
        return
    def test_vos_dump_restore_remove(self) :
        # Dump the volume to a file, restore it as the temporary volume,
        # then clean up both the dump file and the restored volume.
        if not afs.CONFIG.enable_modifying_tests :
            raise unittest.SkipTest("modifying tests disabled.")
        sp_ident = self.lla.dump(self.volume, self.dump_file, async=True)
        self.lla.wait_for_subprocess(sp_ident)
        res = self.lla.get_subprocess_result(sp_ident)
        self.eval_vos_dump(res)
        sp_ident = self.lla.restore(self.tmp_volume, self.dump_file, async=True)
        self.lla.wait_for_subprocess(sp_ident)
        res = self.lla.get_subprocess_result(sp_ident)
        self.eval_vos_restore(res)
        os.unlink(self.dump_file)
        sp_ident = self.lla.remove(self.tmp_volume, async=True)
        self.lla.wait_for_subprocess(sp_ident)
        res = self.lla.get_subprocess_result(sp_ident)
        self.eval_vos_remove(res)
        return
    def test_vos_create_remove(self) :
        # Create the temporary volume, then remove it again.
        if not afs.CONFIG.enable_modifying_tests :
            raise unittest.SkipTest("modifying tests disabled.")
        sp_ident = self.lla.create(self.tmp_volume, async=True)
        self.lla.wait_for_subprocess(sp_ident)
        res = self.lla.get_subprocess_result(sp_ident)
        self.eval_vos_create(res)
        sp_ident = self.lla.remove(self.tmp_volume, async=True)
        self.lla.wait_for_subprocess(sp_ident)
        res = self.lla.get_subprocess_result(sp_ident)
        self.eval_vos_remove(res)
        return
    def test_vos_release(self) :
        # Release the RW volume and verify the RO copy was refreshed.
        if not afs.CONFIG.enable_modifying_tests :
            raise unittest.SkipTest("modifying tests disabled.")
        before_date = datetime.datetime.now()
        # before_date has a higher resolution than creation_date of the ro.
        # thus we need to wait to make sure before_Dat is smaller.
        time.sleep(1)
        sp_ident = self.lla.release(self.volume, async=True)
        self.lla.wait_for_subprocess(sp_ident)
        res = self.lla.get_subprocess_result(sp_ident)
        sp_ident = self.lla.examine(self.ro_volume, async=True)
        self.lla.wait_for_subprocess(sp_ident)
        res_ro = self.lla.get_subprocess_result(sp_ident)
        self.eval_vos_release(res, res_ro, before_date)
        return
    def test_vos_set_blockquota(self) :
        # Change the block quota, verify it, then restore the original
        # quota so the test leaves the volume unchanged.
        if not afs.CONFIG.enable_modifying_tests :
            raise unittest.SkipTest("modifying tests disabled.")
        sp_ident = self.lla.examine(self.volume, async=True)
        self.lla.wait_for_subprocess(sp_ident)
        saved_vol = self.lla.get_subprocess_result(sp_ident)
        sp_ident = self.lla.set_blockquota(self.volume, 1000, async=True)
        self.lla.wait_for_subprocess(sp_ident)
        res = self.lla.get_subprocess_result(sp_ident)
        self.eval_vos_set_blockquota(res, 1000)
        sp_ident = self.lla.set_blockquota(self.volume, saved_vol.maxquota, async=True)
        self.lla.wait_for_subprocess(sp_ident)
        res = self.lla.get_subprocess_result(sp_ident)
        self.eval_vos_set_blockquota(res, saved_vol.maxquota)
        return
    def test_vos_convert(self) :
        # Create a temporary volume with an RO site, release it, remove
        # the RW copy, then convert and clean up. Each intermediate
        # subprocess result is checked for non-None before proceeding.
        # NOTE(review): the RW volume is removed before convert —
        # presumably convert promotes the remaining RO copy back to RW;
        # confirm against the VolServerLLA implementation.
        if not afs.CONFIG.enable_modifying_tests :
            raise unittest.SkipTest("modifying tests disabled.")
        sp_ident = self.lla.create(self.tmp_volume, async=True)
        self.lla.wait_for_subprocess(sp_ident)
        res = self.lla.get_subprocess_result(sp_ident)
        self.assertTrue(res != None)
        # note the different lla for addsite!
        sp_ident = self.vldb_lla.addsite(self.tmp_volume, async=True)
        self.vldb_lla.wait_for_subprocess(sp_ident)
        res = self.vldb_lla.get_subprocess_result(sp_ident)
        self.assertTrue(res != None)
        sp_ident = self.lla.release(self.tmp_volume, async=True)
        self.lla.wait_for_subprocess(sp_ident)
        res = self.lla.get_subprocess_result(sp_ident)
        self.assertTrue(res != None)
        sp_ident = self.lla.remove(self.tmp_volume, async=True)
        self.lla.wait_for_subprocess(sp_ident)
        res = self.lla.get_subprocess_result(sp_ident)
        self.assertTrue(res != None)
        sp_ident = self.lla.convert(self.tmp_volume, async=True)
        self.lla.wait_for_subprocess(sp_ident)
        res = self.lla.get_subprocess_result(sp_ident)
        self.assertTrue(res != None)
        self.eval_vos_convert(res)
        sp_ident = self.lla.remove(self.tmp_volume, async=True)
        self.lla.wait_for_subprocess(sp_ident)
        res = self.lla.get_subprocess_result(sp_ident)
        self.assertTrue(res != None)
        self.eval_vos_remove(res)
        return
    def test_vos_move(self) :
        # Create a temporary volume, move it to the destination
        # server/partition, then remove the moved volume (the result of
        # the move is passed to remove).
        if not afs.CONFIG.enable_modifying_tests :
            raise unittest.SkipTest("modifying tests disabled.")
        sp_ident = self.lla.create(self.tmp_volume, async=True)
        self.lla.wait_for_subprocess(sp_ident)
        res = self.lla.get_subprocess_result(sp_ident)
        self.assertTrue(res != None)
        sp_ident = self.lla.move(self.tmp_volume, self.dst_server, self.dst_partition, async=True)
        self.lla.wait_for_subprocess(sp_ident)
        res = self.lla.get_subprocess_result(sp_ident)
        self.assertTrue(res != None)
        self.eval_vos_move(res)
        sp_ident = self.lla.remove(res, async=True)
        self.lla.wait_for_subprocess(sp_ident)
        res = self.lla.get_subprocess_result(sp_ident)
        return
if __name__ == '__main__':
    # Run the synchronous suite first, then the detached (async) suite,
    # announcing each on stderr.
    parse_commandline()
    for banner, case in (
            ("testing direct fork", TestVolServerLLAMethods),
            ("testing detached execution", TestVolServerLLAMethods_async)):
        sys.stderr.write("\n===\n=== %s ===\n===\n\n" % banner)
        suite = unittest.TestLoader().loadTestsFromTestCase(case)
        unittest.TextTestRunner(verbosity = 2).run(suite)
| 40.822289
| 98
| 0.675422
| 1,781
| 13,553
| 4.907917
| 0.079169
| 0.062464
| 0.046562
| 0.046677
| 0.90024
| 0.871639
| 0.835259
| 0.811006
| 0.783778
| 0.766388
| 0
| 0.00189
| 0.219066
| 13,553
| 331
| 99
| 40.945619
| 0.82398
| 0.022283
| 0
| 0.773234
| 0
| 0
| 0.062843
| 0
| 0
| 0
| 0
| 0
| 0.074349
| 0
| null | null | 0
| 0.037175
| null | null | 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
702d9230949781af5e800d83e6baff01ebc0e462
| 85
|
py
|
Python
|
app/snyk/config.py
|
snyk-schmidtty/snyk-watcher
|
e9dc94389e0aaaba3d2b6b3a03b23d5224aabfa9
|
[
"MIT"
] | 18
|
2021-02-18T04:31:20.000Z
|
2022-03-26T02:22:47.000Z
|
app/snyk/config.py
|
snyk-schmidtty/snyk-watcher
|
e9dc94389e0aaaba3d2b6b3a03b23d5224aabfa9
|
[
"MIT"
] | 1
|
2022-02-16T22:43:49.000Z
|
2022-02-16T22:43:49.000Z
|
app/snyk/config.py
|
snyk-schmidtty/snyk-watcher
|
e9dc94389e0aaaba3d2b6b3a03b23d5224aabfa9
|
[
"MIT"
] | 6
|
2021-02-25T18:10:38.000Z
|
2022-02-16T22:52:41.000Z
|
import os
def get_snyk_token():
    """Return the Snyk API token from the SECRET_SNYK_API_TOKEN
    environment variable, or None if it is not set."""
    return os.getenv('SECRET_SNYK_API_TOKEN')
| 14.166667
| 50
| 0.752941
| 14
| 85
| 4.214286
| 0.714286
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.141176
| 85
| 5
| 51
| 17
| 0.808219
| 0
| 0
| 0
| 0
| 0
| 0.247059
| 0.247059
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
702deac53a6f7eec5736db9cd85faf43d09eb46d
| 117
|
py
|
Python
|
tests/m3u/__init__.py
|
kyleheyne/mopidy
|
3b1b0dd2e9052cadb7bd1a29e724498f9fba7bca
|
[
"Apache-2.0"
] | 2
|
2019-02-13T15:16:55.000Z
|
2019-02-18T08:47:29.000Z
|
tests/m3u/__init__.py
|
kyleheyne/mopidy
|
3b1b0dd2e9052cadb7bd1a29e724498f9fba7bca
|
[
"Apache-2.0"
] | 40
|
2019-02-13T09:33:00.000Z
|
2019-02-19T13:21:12.000Z
|
tests/m3u/__init__.py
|
kyleheyne/mopidy
|
3b1b0dd2e9052cadb7bd1a29e724498f9fba7bca
|
[
"Apache-2.0"
] | 1
|
2021-10-01T17:26:30.000Z
|
2021-10-01T17:26:30.000Z
|
from __future__ import absolute_import, unicode_literals
def generate_song(i):
    """Return the dummy track URI for song number *i*."""
    uri = 'dummy:track:song%s' % i
    return uri
| 19.5
| 56
| 0.769231
| 17
| 117
| 4.882353
| 0.823529
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.145299
| 117
| 5
| 57
| 23.4
| 0.83
| 0
| 0
| 0
| 1
| 0
| 0.153846
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
706edcd839b842521a152f5c40bd0ec18890c49e
| 35,466
|
py
|
Python
|
post_optimization_studies/mad_analyses/four_cuts_eff_flow_chart/Output/Histos/MadAnalysis5job_0/selection_7.py
|
sheride/axion_pheno
|
7d3fc08f5ae5b17a3500eba19a2e43f87f076ce5
|
[
"MIT"
] | null | null | null |
post_optimization_studies/mad_analyses/four_cuts_eff_flow_chart/Output/Histos/MadAnalysis5job_0/selection_7.py
|
sheride/axion_pheno
|
7d3fc08f5ae5b17a3500eba19a2e43f87f076ce5
|
[
"MIT"
] | null | null | null |
post_optimization_studies/mad_analyses/four_cuts_eff_flow_chart/Output/Histos/MadAnalysis5job_0/selection_7.py
|
sheride/axion_pheno
|
7d3fc08f5ae5b17a3500eba19a2e43f87f076ce5
|
[
"MIT"
] | null | null | null |
def selection_7():
    """Draw and save the stacked M[j1, j2] invariant-mass histogram.

    Apparently auto-generated by MadAnalysis 5 (see the ``MadAnalysis5job_0``
    output paths at the bottom): bin centres and the per-sample bin weights
    are hard-coded, 17 step histograms (signal plus 16 background slices) are
    drawn as a cumulative stack, and the figure is written into the
    HTML/PDF/DVI report directories.

    Takes no arguments and returns None; its only side effects are the three
    files written by ``plt.savefig``.
    """
    # Library import — kept local so the heavy plotting stack is only
    # loaded when this selection is actually rendered.
    import numpy
    import matplotlib
    import matplotlib.pyplot as plt
    import matplotlib.gridspec as gridspec
    # Library version — recorded but not used below (generator boilerplate).
    matplotlib_version = matplotlib.__version__
    numpy_version = numpy.__version__
    # Histo binning: 160 equal bins spanning [120, 2000] GeV.
    xBinning = numpy.linspace(120.0,2000.0,161,endpoint=True)
    # Creating data sequence: middle of each bin
    xData = numpy.array([125.875,137.625,149.375,161.125,172.875,184.625,196.375,208.125,219.875,231.625,243.375,255.125,266.875,278.625,290.375,302.125,313.875,325.625,337.375,349.125,360.875,372.625,384.375,396.125,407.875,419.625,431.375,443.125,454.875,466.625,478.375,490.125,501.875,513.625,525.375,537.125,548.875,560.625,572.375,584.125,595.875,607.625,619.375,631.125,642.875,654.625,666.375,678.125,689.875,701.625,713.375,725.125,736.875,748.625,760.375,772.125,783.875,795.625,807.375,819.125,830.875,842.625,854.375,866.125,877.875,889.625,901.375,913.125,924.875,936.625,948.375,960.125,971.875,983.625,995.375,1007.125,1018.875,1030.625,1042.375,1054.125,1065.875,1077.625,1089.375,1101.125,1112.875,1124.625,1136.375,1148.125,1159.875,1171.625,1183.375,1195.125,1206.875,1218.625,1230.375,1242.125,1253.875,1265.625,1277.375,1289.125,1300.875,1312.625,1324.375,1336.125,1347.875,1359.625,1371.375,1383.125,1394.875,1406.625,1418.375,1430.125,1441.875,1453.625,1465.375,1477.125,1488.875,1500.625,1512.375,1524.125,1535.875,1547.625,1559.375,1571.125,1582.875,1594.625,1606.375,1618.125,1629.875,1641.625,1653.375,1665.125,1676.875,1688.625,1700.375,1712.125,1723.875,1735.625,1747.375,1759.125,1770.875,1782.625,1794.375,1806.125,1817.875,1829.625,1841.375,1853.125,1864.875,1876.625,1888.375,1900.125,1911.875,1923.625,1935.375,1947.125,1958.875,1970.625,1982.375,1994.125])
    # Per-sample bin weights (one array per dataset slice, same length as
    # xData).  Indices 0..16 correspond to the legend entries below, from
    # "$signal$" (index 0) up to "$bg\_dip\_1600\_inf$" (all 17 summed).
    # Creating weights for histo: y8_M_0
    y8_M_0_weights = numpy.array([0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.78092644877,4.19643634479,4.34791621285,4.6672559347,4.63450396322,4.58946800245,4.58946800245,4.72047988834,4.83920778492,5.04391160662,4.98249966011,5.07666357809,4.90061973143,5.29774338553,5.28136739979,5.13397952817,5.10532355313,5.01115963515,5.07666357809,5.39600329994,5.51063520009,5.44513125715,5.54339117156,5.21176746041,5.38371931064,5.33459135343,5.64574308241,5.56795515016,5.73171900752,5.21586345684,5.3427793463,5.50654320365,5.53520317869,5.42465927498,5.39190730351,5.39600329994,5.76037498256,5.37143932133,5.44513125715,5.38781530707,5.39600329994,5.62527110024,5.66211906814,5.73990700039,5.53929517513,5.28955539266,5.51473119652,5.17082749607,5.55157916443,5.72762301109,5.5843311359,5.28955539266,5.13397952817,5.36325132847,4.98659565654,5.22405144971,5.10941554956,5.20357946754,5.10941554956,5.05619559592,5.16673549964,5.20767546398,5.33459135343,4.85967576709,4.99068765298,5.00706363871,4.72047988834,4.96612367437,4.63859595966,4.78598383128,4.80645581345,4.79826782058,4.88424374569,4.79417182415,4.83101979205,4.52805605594,4.4871160916,4.63040796679,4.58946800245,4.6549719454,4.45845611657,4.43798813439,4.31926023781,4.42980014153,4.56899602028,4.44208013083,4.26603628417,4.2332843127,4.16368437332,4.44208013083,4.27831627347,4.33154022711,4.02448449457,4.01220050527,3.99992011596,3.92213258372,3.97535533736,3.75836912636,3.90575619798,3.88528581581,3.94260296589,3.72152235846,3.64782882264,3.75427512993,3.7993098907,3.40627783304,3.49225375815])
    # Creating weights for histo: y8_M_1
    y8_M_1_weights = numpy.array([0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0121240822392,0.0,0.0,0.0,0.0,0.0121313846429,0.012170493784,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0121753353338,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0])
    # Creating weights for histo: y8_M_2
    y8_M_2_weights = numpy.array([0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0201069897442,0.0200520625826,0.0200902467322,0.0301386860576,0.0,0.0401901996181,0.0301534291639,0.0401373219466,0.0401354749263,0.0,0.0,0.0100187051905,0.0,0.0200525088425,0.0,0.0100340928946,0.010029956726,0.0100340928946,0.0201069236316,0.010030973207,0.0,0.0100187051905,0.0,0.020059913452,0.0100546539083,0.0,0.0200717021522,0.0100355638996,0.0301035100306,0.0301212943158,0.0,0.020081296741,0.0100329193962,0.0100459435753,0.0100702895346,0.0200800364699,0.0200909202541,0.0200976843979,0.0100187051905,0.0,0.0,0.0,0.0,0.0,0.0200856932279,0.0301465658512,0.0200554012681,0.0100262833455,0.0,0.0301023365322,0.0301327565851,0.0200707931042,0.0,0.0,0.0,0.0100602900062,0.0,0.0200628802542,0.0,0.0,0.0,0.0100568562838,0.0,0.010018415948,0.0200878294908,0.0100609841883,0.0100584801742,0.010036972924,0.0100407289452,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0200899492255,0.0100546539083,0.0,0.0100329193962,0.0,0.0,0.0100696697291,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0200968290664,0.0,0.0,0.0,0.0200913995704,0.0100546539083,0.0100407289452,0.0,0.0,0.0,0.0100229529241,0.0100271593373,0.0200494552675])
    # Creating weights for histo: y8_M_3
    y8_M_3_weights = numpy.array([0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.016467068046,0.033020819304,0.0660267939506,0.022005470001,0.0605417458234,0.0549595614727,0.0330115119394,0.0385177017189,0.0495202173402,0.0274719724628,0.0439881245678,0.0329905937288,0.0550531226281,0.0330729624838,0.0384914493443,0.0329847558053,0.0770149280482,0.0769908775905,0.0550048185839,0.0494478222126,0.0384611993937,0.0550147718983,0.0495028701351,0.0660128999362,0.0494861323166,0.0494600911961,0.0604524097483,0.0549908839438,0.054978127451,0.0440215595791,0.044014409443,0.0440062842884,0.0220022768152,0.0495135140877,0.0605064014008,0.0495056326877,0.0549466424769,0.0605267142873,0.0880243500509,0.0769583363463,0.0384865458135,0.0825579491536,0.0714909198046,0.0494663881909,0.0385217805465,0.093498672974,0.0494654131723,0.027476477861,0.0385032308185,0.0275048671512,0.0440315535192,0.0605509678738,0.0385348864209,0.0440292378502,0.0660262251897,0.0109989608662,0.0495139609712,0.033022789654,0.0440291565986,0.0660339034608,0.0495150578671,0.0274811335746,0.0440349254584,0.0330179226864,0.0494890167465,0.0440275721935,0.0660108280217,0.0220213424905,0.0165016040157,0.0274971766924,0.0440586509099,0.0329953266314,0.0274735812434,0.0604587473689,0.0659462736683,0.0330062224637,0.0385075696511,0.0385122172395,0.0549511113119,0.0384953697315,0.0330256943968,0.0385095725017,0.0550025435406,0.0329908252958,0.0439449799968,0.0440170501182,0.0164954248356,0.0164961114112,0.00551630153443,0.0384828529308,0.0275246153395,0.0110027024999,0.0495168047753,0.0110260298188,0.0109935170126,0.0329819810649,0.0219834630197,0.0219787341797,0.0165017908942,0.021995618251,0.0165183743348,0.0110160968173,0.027530579203,0.0110268870226,0.0110078538479,0.0275157670462,0.00549901933056])
    # Creating weights for histo: y8_M_4
    y8_M_4_weights = numpy.array([0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.00197027550081,0.00394952315682,0.00394805331706,0.00197208323937,0.00394731619262,0.00591465602221,0.0049318955883,0.00789053831836,0.00295908284024,0.00690804643985,0.00492704555802,0.00789948081221,0.00887485797688,0.00690768569379,0.00592003915499,0.00690746924616,0.00887773592873,0.00987371970967,0.0118414852196,0.00691111278131,0.00986770326714,0.0118350198487,0.0138150146498,0.00887579992491,0.0167695168168,0.010855918983,0.00690918479406,0.0108539148383,0.0177644183596,0.020733422518,0.0167638170292,0.0138102888765,0.0138146498955,0.0167698214468,0.0118430484525,0.0236884617958,0.0167671719675,0.0128264021133,0.0236846138379,0.0207369417962,0.0384858120008,0.0246552251464,0.019740096233,0.0226995726873,0.0286270632524,0.0227092967976,0.0315806274797,0.0295947725593,0.0187597087065,0.0404452843598,0.0345472547224,0.0375002056814,0.0246604319144,0.0217110002336,0.0286303580664,0.0315931173097,0.025667294193,0.035510599002,0.037488389244,0.0236896803158,0.0296010094577,0.0355199463331,0.0365179222339,0.0355182428101,0.0305913575837,0.0236793028543,0.0315771522927,0.0325532148749,0.0463906599127,0.0305928967668,0.0246773148297,0.0296191028765,0.0463764705679,0.0305930891647,0.0414399413969,0.0256571492123,0.0345520326035,0.0404506955506,0.031575536952,0.0424380054801,0.0404769899297,0.0434100557645,0.0286233796344,0.0266487839992,0.0296034024066,0.0375073364284,0.0355257543446,0.0246621955618,0.0325592032594,0.0305981436178,0.0315898305124,0.0345392982678,0.0236820525409,0.0286232834354,0.0365091601131,0.0355226399037,0.0305958508762,0.0246805214613,0.0345343841049,0.0325702020057,0.0404531806901,0.0236774911075,0.0325773407693,0.0197404609873,0.0266355245775,0.0226978972223,0.0365152527131])
    # Creating weights for histo: y8_M_5
    y8_M_5_weights = numpy.array([0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.000251938705986,0.0,0.000252305648909,0.0002524496493,0.000252082346275,0.000505256118684,0.000251550317047,0.0,0.00100821879408,0.000252565441778,0.000504063784247,0.00100804954661,0.00176381272972,0.0012601130476,0.000504233431831,0.000503640065399,0.000503908940815,0.0010087493429,0.0005055806097,0.000757268405308,0.00176452332904,0.000504453493556,0.000504129402652,0.00277454102769,0.00176529874653,0.00100816998039,0.00151229698237,0.00226877236524,0.00151225977193,0.00125991859306,0.00176589811465,0.0017634390249,0.00226803415819,0.0030252505489,0.00227000351057,0.00100852648038,0.00126088086296,0.00151354973375,0.00151175403008,0.00403113468814,0.00251986759464,0.00353011215693,0.00302553582892,0.00302505929526,0.00378131901764,0.00226734436471,0.00151308280278,0.0015113947293,0.00353056508397,0.00579873448009,0.00403280715725,0.00352975885784,0.00403183088342,0.00428482584567,0.00504358666959,0.0035289166216,0.00428863091295,0.00327555475643,0.00428580612063,0.00554881237974,0.00579692597282,0.0025199840273,0.00504172214661,0.00453653644768,0.00428524196239,0.00605145336491,0.00630244776603,0.00302364129753,0.00378220246544,0.00378109975614,0.00655375025356,0.00403377942996,0.00579619776857,0.00554580753691,0.00655393030406,0.00604913671511,0.00529495317508,0.0047889152371,0.00479140393515,0.00579921061364,0.00857274455886,0.00806881121119,0.00756044061894,0.00781866104687,0.00806541425839,0.00831917743612,0.00755932830694,0.00881756522831,0.00781524408846,0.00831736892885,0.00957568187154,0.00781375567097,0.00604803240537,0.0100857049273,0.00831910941704,0.0133673373885,0.0131095930941,0.0141151511418,0.00957541379635,0.0146184042987,0.00882085415082,0.00957963498034,0.0108400685178,0.0103367473419,0.00932927675767
    ,0.0136076367897])
    # Creating weights for histo: y8_M_6
    y8_M_6_weights = numpy.array([0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.000571674400031,0.0,0.000286439425776,0.0,0.00114685418586,0.0,0.000570006782515,0.0,0.0002863152617,0.000284909568403,0.000574595954751,0.000286450322624,0.000286764731657,0.0,0.000574212165791,0.000284852384948,0.0002863152617,0.000287128726344,0.0,0.000286439425776,0.000287128726344,0.0,0.000570397169566,0.0,0.0,0.0,0.0002863152617,0.000573312626051,0.00114767194926,0.000855710321202,0.0,0.00114175666071,0.0,0.000287276383623,0.000573726606275,0.000284468995872,0.000284540475191,0.00114194260691,0.0002863152617,0.00085747571043,0.000286183799735,0.000860014575872,0.00114628535044,0.000287772740014,0.000574099498388,0.00171888568258,0.0,0.000573215054281,0.000859751651942,0.000855365820875,0.000287128726344,0.00114753198976,0.000861860741729,0.00114346816553,0.000858949384059,0.00143494383419,0.000858809024669,0.00143481187237,0.00114442988728,0.000859505523154,0.000859800337856,0.00114721908029,0.000287772740014,0.000286823114765,0.0017229295126,0.000572128768571,0.00171794695418,0.00114786489344,0.000574257552659,0.00057395194108,0.000571234027442,0.000287772740014,0.00142779090371,0.000858079435757,0.00315403945603,0.00200631252267,0.00143411307455,0.00171937953969,0.00114571851444,0.00171905863254,0.00114306128325,0.00142758196417,0.00114167868327,0.00257856595517,0.00257476605458,0.00171679028883,0.00286293168092,0.000573893557972,0.00171545767439,0.00171991238553,0.00171669531631,0.00171834183993,0.00401011677105,0.00171878471179])
    # Creating weights for histo: y8_M_7
    y8_M_7_weights = numpy.array([0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,2.16142075517e-05,0.0,0.0,0.0,0.0,0.0,0.0,2.15983699309e-05,0.0,0.0,2.16751901451e-05,0.0,6.48229754557e-05,2.15259544109e-05,2.1608621008e-05,0.0,0.0,4.31871677194e-05,4.31500777633e-05,0.0,6.48842053155e-05,0.0,4.32972641202e-05,4.32010816917e-05,2.1608621008e-05,0.0,4.31777380696e-05,0.0,4.32343997878e-05,2.16142075517e-05,2.1655333398e-05,4.32154985786e-05,0.0,6.48049962567e-05,6.4709107195e-05,2.15933994577e-05,0.000108047903684,6.47796409759e-05,0.0,2.15259544109e-05,0.0,4.32198571723e-05,8.64498145473e-05,0.0,6.47386953407e-05,4.32177616945e-05,6.47944350488e-05,0.000129524958026,4.32275685304e-05,4.31157957474e-05,4.31698590732e-05,2.15789867617e-05,6.48095224886e-05,2.15259544109e-05,2.15774067715e-05,4.31813003817e-05,6.47836642932e-05,4.32141155632e-05,0.0,4.32269817966e-05,6.47637991641e-05,2.15912704523e-05,4.3124303387e-05,6.48629571711e-05,4.31932026953e-05,6.48075108299e-05,6.48050381662e-05,6.47414194618e-05,8.64146943402e-05,4.31736728427e-05,6.47325765457e-05,4.31823481206e-05,4.32195218958e-05,8.63922308188e-05,8.64066477057e-05,8.63133570364e-05,0.000108139434152,0.000151167974206,4.31120238874e-05,8.64006965489e-05,0.000107929760649,0.00015134743092,0.000108003270008,0.000108111270931,6.48057087191e-05,6.48629571711e-05])
    # Creating weights for histo: y8_M_8
    y8_M_8_weights = numpy.array([0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,2.83974019316e-05,0.0,0.0,0.0,0.0,0.0,0.0,2.83974019316e-05,0.0,0.0,2.83974019316e-05,0.0,0.0,0.0,0.0,0.0,0.0,2.84292668315e-05,0.0,0.0,0.0,2.83684918849e-05,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,2.84489114087e-05,0.0,0.0,2.83974019316e-05,0.0,2.83498718548e-05,0.0,0.0,2.67506280594e-05,0.0,0.0,0.0,2.82532080623e-05,0.0,0.0,2.84292668315e-05,0.0,0.0,2.84080928579e-05,5.67658938165e-05,2.84080928579e-05])
    # Creating weights for histo: y8_M_9 (this slice is empty — all zeros)
    y8_M_9_weights = numpy.array([0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0])
    # Creating weights for histo: y8_M_10
    y8_M_10_weights = numpy.array([0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0521138287,0.0,0.0,0.0,0.0,0.0,1.0529581672,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.05462838872,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0])
    # Creating weights for histo: y8_M_11
    y8_M_11_weights = numpy.array([0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.230020174047,0.230597155343,0.230551273514,0.0,0.0,0.23018625551,0.460704673945,0.461024770794,0.690790519723,0.229982515595,0.92256484756,0.691531007872,0.459685974368,0.229952465686,0.921662581772,0.230020174047,0.23018625551,0.461233045094,0.0,0.0,0.0,0.691049133384,0.229982515595,0.229952465686,0.0,0.230128538167,0.230465312331,0.461073957345,0.460063327437,0.460617444671,0.690580324073,0.229703305066,0.0,0.229479006709,0.690811654569,0.459964954335,0.230673163934,0.230114128045,0.0,0.229932022526,0.23042826871,0.230619673561,0.0,0.0,0.230673163934,0.460930624662,0.230673163934,0.0,0.0,0.0,0.230752246685,0.0,0.230645227511,0.0,0.230020174047,0.460488714246,0.460933698821,0.23058774073,0.230551273514,0.0,0.0,0.229952465686,0.0,0.0,0.230764043772,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.23018625551,0.0,0.0,0.0,0.0,0.230645227511,0.0,0.230645227511,0.0,0.0,0.0,0.0,0.0,0.0,0.23042826871,0.0,0.229952465686,0.0,0.0,0.0,0.0,0.230020174047,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0])
    # Creating weights for histo: y8_M_12
    y8_M_12_weights = numpy.array([0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.1384016717,0.138443216656,0.193985782999,0.0830623678981,0.193914271821,0.0553889654108,0.249327202516,0.0553995439874,0.0,0.110761389775,0.0830144373293,0.166014641665,0.13852719132,0.138417289526,0.0554480900371,0.276708559354,0.0830847175454,0.13863259241,0.110806050602,0.193944199576,0.221602676653,0.138488839171,0.166332229768,0.110769506428,0.0276929401566,0.0831032973727,0.221554361409,0.0553778482885,0.138307541602,0.138455103129,0.138503572244,0.304717206551,0.166303263702,0.165999177709,0.0830418646933,0.249127940601,0.138598702498,0.138450871698,0.138508919233,0.22168095812,0.0554508597007,0.0830207844753,0.13835108687,0.166090538143,0.110701880473,0.166112079972,0.138369743633,0.166070535017,0.0,0.138488146755,0.138542616808,0.16611227231,0.0829216535957,0.0554893657195,0.0831335713355,0.166173281846,0.16598894534,0.0831337636732,0.0553688468815,0.0554876731473,0.138561735181,0.138537539091,0.166128851824,0.0552076293743,0.0830163607069,0.193842375968,0.166046800537,0.083064522081,0.0277373240172,0.0830550590634,0.0830507506977,0.193846491996,0.0,0.055405237185,0.138359819005,0.0830155913558,0.138498263721,0.0277244643148,0.0553541522769,0.055427009819,0.027640512731,0.027640512731,0.0276622122767,0.0554012365597,0.0553334567344,0.0553302254601,0.0,0.0,0.027683115544,0.0552821025535,0.055481095196,0.0277205483181,0.110737001347,0.055328263615,0.0277631934454,0.0,0.0554069297572,0.0,0.11093630173,0.0,0.0554069297572,0.110733231527,0.0,0.083088948976,0.0,0.0])
    # Creating weights for histo: y8_M_13
    y8_M_13_weights = numpy.array([0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0100450214151,0.0202018191175,0.020154633721,0.0201706251705,0.0201565514812,0.0100975170653,0.0100829700048,0.0201646655487,0.0302947299171,0.0100796988885,0.0100533236174,0.0503771563265,0.0202008723751,0.0403626188016,0.0,0.0100953565507,0.0100671484834,0.0302972606322,0.0705522117657,0.0302903906813,0.0403309818277,0.0,0.0,0.0201751586099,0.0302794424555,0.02018709606,0.0,0.0504437620791,0.0302832415627,0.0100671484834,0.020144614031,0.0,0.0605195101389,0.0302102088859,0.0201372160891,0.0302955856265,0.0504043630316,0.0504252520522,0.0504063475493,0.0201157626644,0.0201972128518,0.0202033970214,0.0100921886051,0.0504656221175,0.0302368026361,0.0100996957865,0.0403580307424,0.0100796988885,0.0302258301349,0.0201121759674,0.0302450805629,0.0201806145162,0.0403226614188,0.0302305820533,0.0201734168467,0.0403646943521,0.0201509438533,0.0201718874936,0.0201687620301,0.0502923379211,0.0302623282667,0.0503909447793,0.0302358741003,0.0100712388959,0.0100953565507,0.0201602656243,0.0100533236174,0.0201530679548,0.0100671484834,0.0302263702635,0.0,0.0201717114967,0.0201792186781,0.0,0.0100796988885,0.0403124111121,0.0201975648457,0.0403432834095,0.0201530679548,0.0100712388959,0.0,0.0201997435669,0.0100450214151,0.0,0.0302386839831,0.0201514657754,0.0504735844635,0.0302405653301,0.0201426537888,0.0,0.0403559794673,0.0302626923983,0.0504655614288,0.0503821024484,0.0201326462365,0.0100592771703,0.0201847413418,0.010103349241,0.0302577523452,0.0302652049069,0.0201010760201,0.0,0.0100733690662,0.0201624868275,0.0100953565507,0.0302478176193])
    # Creating weights for histo: y8_M_14
    y8_M_14_weights = numpy.array([0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.00283355094161,0.0,0.0,0.0028258606674,0.0,0.0,0.00849404406756,0.0,0.0,0.00283301960937,0.00566026189448,0.00848628376984,0.00566853005501,0.00849465581141,0.00282513619339,0.00565291712087,0.00848269410312,0.0,0.00849725668462,0.0112963041191,0.00566429016999,0.011320377586,0.00848864225398,0.00849404406756,0.00566748354982,0.00283250443641,0.00848682241222,0.0113195465378,0.00848361364261,0.0056608043843,0.022631921437,0.00565841896805,0.00283102971052,0.011334166831,0.00848161297091,0.00849407099968,0.0141481000323,0.0113287688648,0.0282809334006,0.0,0.0113106320064,0.00283301960937,0.0112983047908,0.00565440992976,0.0084840176244,0.00847707683258,0.0113231939162,0.0113078195237,0.0169835481493,0.00566144306027,0.00283201542608,0.0169662769662,0.00282930990235,0.0113075271178,0.00565759946214,0.0113145871804,0.0084839753025,0.0113278685626,0.0169713017301,0.0113231708315,0.00848760729112,0.0113201275021,0.0056624587859,0.00566242031144,0.00283198118381,0.00566600997816,0.0141461686146,0.0056529363581,0.00849297063025,0.005657891868,0.00283622837898,0.0113069038316,0.00565444840421,0.0141587343719,0.0141420480004,0.0113181922369,0.00566055430034,0.00849807234308,0.00566200863477,0.005657891868,0.00849126621186,0.0113077156426,0.00283250443641,0.00282930990235,0.0113202198408,0.0,0.00564705746127,0.0084891001,0.0,0.00565548336707,0.00565157051492,0.0169864991401,0.0028258606674,0.00282544052634,0.00282513619339,0.00848646459978,0.00565943084623,0.00283102971052,0.00282347986807,0.00564901581106,0.00848026636496,0.0084916663462,0.0028309785395,0.0169567814705])
    # Creating weights for histo: y8_M_15
    y8_M_15_weights = numpy.array([0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.00151881882101,0.0,0.0,0.0,0.0,0.00152260679112,0.0,0.00152162936482,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.00303299398069,0.0,0.0,0.00151115660254,0.0,0.0,0.0,0.0,0.00151448717969,0.00152495994197,0.0,0.00151265406217,0.00150849615926,0.00153629548684,0.0,0.0,0.0,0.0,0.00151115660254,0.0,0.0030566070399,0.0,0.0,0.0,0.0,0.00153821488261,0.0,0.0,0.0,0.00152162936482,0.00152162936482,0.0,0.0,0.0,0.0,0.0,0.0,0.00304287815979,0.00150849615926,0.0,0.0030419007335,0.0,0.0,0.0,0.0,0.0,0.00152449663954,0.0,0.0015356572641,0.0,0.0,0.00304599008656,0.0,0.0,0.0,0.00153153127233,0.0])
    # Creating weights for histo: y8_M_16
    y8_M_16_weights = numpy.array([0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.000180734341243,0.0,0.0,0.0,0.000180766987918,0.0,0.0,0.0,0.0,0.000180657151876,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.000180003640899,0.0,0.0,0.0,0.000361281541581,0.0,0.0,0.0,0.000180970259668,0.0,0.0,0.0,0.0,0.000180154593273,0.0,0.0,0.0,0.0,0.0,0.000180970259668,0.0,0.0,0.000180003640899,0.0,0.0,0.0,0.000361441117605,0.0,0.0,0.0,0.000180003640899,0.0,0.0,0.000180657151876,0.0,0.000180612801676,0.0,0.0,0.000180503389116,0.0,0.0,0.0,0.0,0.0,0.000180766987918,0.0,0.0,0.0,0.0,0.0,0.000361110724014,0.00018054716184])
    # Creating a new Canvas; GridSpec leaves the right 30% of the figure
    # free for the legend placed outside the axes below.
    fig = plt.figure(figsize=(12,6),dpi=80)
    frame = gridspec.GridSpec(1,1,right=0.7)
    pad = fig.add_subplot(frame[0])
    # Creating a new Stack: each hist() call draws the running sum of the
    # remaining weight arrays, so the step outlines form a cumulative stack
    # from "all samples" (first call) down to the signal alone (last call).
    # NOTE(review): the `normed=` keyword was removed from matplotlib's
    # hist() in 3.1 — this code requires an older matplotlib (the modern
    # equivalent is `density=`); confirm the pinned matplotlib version.
    pad.hist(x=xData, bins=xBinning, weights=y8_M_0_weights+y8_M_1_weights+y8_M_2_weights+y8_M_3_weights+y8_M_4_weights+y8_M_5_weights+y8_M_6_weights+y8_M_7_weights+y8_M_8_weights+y8_M_9_weights+y8_M_10_weights+y8_M_11_weights+y8_M_12_weights+y8_M_13_weights+y8_M_14_weights+y8_M_15_weights+y8_M_16_weights,\
        label="$bg\_dip\_1600\_inf$", histtype="step", rwidth=1.0,\
        color=None, edgecolor="#e5e5e5", linewidth=1, linestyle="solid",\
        bottom=None, cumulative=False, normed=False, align="mid", orientation="vertical")
    pad.hist(x=xData, bins=xBinning, weights=y8_M_0_weights+y8_M_1_weights+y8_M_2_weights+y8_M_3_weights+y8_M_4_weights+y8_M_5_weights+y8_M_6_weights+y8_M_7_weights+y8_M_8_weights+y8_M_9_weights+y8_M_10_weights+y8_M_11_weights+y8_M_12_weights+y8_M_13_weights+y8_M_14_weights+y8_M_15_weights,\
        label="$bg\_dip\_1200\_1600$", histtype="step", rwidth=1.0,\
        color=None, edgecolor="#f2f2f2", linewidth=1, linestyle="solid",\
        bottom=None, cumulative=False, normed=False, align="mid", orientation="vertical")
    pad.hist(x=xData, bins=xBinning, weights=y8_M_0_weights+y8_M_1_weights+y8_M_2_weights+y8_M_3_weights+y8_M_4_weights+y8_M_5_weights+y8_M_6_weights+y8_M_7_weights+y8_M_8_weights+y8_M_9_weights+y8_M_10_weights+y8_M_11_weights+y8_M_12_weights+y8_M_13_weights+y8_M_14_weights,\
        label="$bg\_dip\_800\_1200$", histtype="step", rwidth=1.0,\
        color=None, edgecolor="#ccc6aa", linewidth=1, linestyle="solid",\
        bottom=None, cumulative=False, normed=False, align="mid", orientation="vertical")
    pad.hist(x=xData, bins=xBinning, weights=y8_M_0_weights+y8_M_1_weights+y8_M_2_weights+y8_M_3_weights+y8_M_4_weights+y8_M_5_weights+y8_M_6_weights+y8_M_7_weights+y8_M_8_weights+y8_M_9_weights+y8_M_10_weights+y8_M_11_weights+y8_M_12_weights+y8_M_13_weights,\
        label="$bg\_dip\_600\_800$", histtype="step", rwidth=1.0,\
        color=None, edgecolor="#ccc6aa", linewidth=1, linestyle="solid",\
        bottom=None, cumulative=False, normed=False, align="mid", orientation="vertical")
    pad.hist(x=xData, bins=xBinning, weights=y8_M_0_weights+y8_M_1_weights+y8_M_2_weights+y8_M_3_weights+y8_M_4_weights+y8_M_5_weights+y8_M_6_weights+y8_M_7_weights+y8_M_8_weights+y8_M_9_weights+y8_M_10_weights+y8_M_11_weights+y8_M_12_weights,\
        label="$bg\_dip\_400\_600$", histtype="step", rwidth=1.0,\
        color=None, edgecolor="#c1bfa8", linewidth=1, linestyle="solid",\
        bottom=None, cumulative=False, normed=False, align="mid", orientation="vertical")
    pad.hist(x=xData, bins=xBinning, weights=y8_M_0_weights+y8_M_1_weights+y8_M_2_weights+y8_M_3_weights+y8_M_4_weights+y8_M_5_weights+y8_M_6_weights+y8_M_7_weights+y8_M_8_weights+y8_M_9_weights+y8_M_10_weights+y8_M_11_weights,\
        label="$bg\_dip\_200\_400$", histtype="step", rwidth=1.0,\
        color=None, edgecolor="#bab5a3", linewidth=1, linestyle="solid",\
        bottom=None, cumulative=False, normed=False, align="mid", orientation="vertical")
    pad.hist(x=xData, bins=xBinning, weights=y8_M_0_weights+y8_M_1_weights+y8_M_2_weights+y8_M_3_weights+y8_M_4_weights+y8_M_5_weights+y8_M_6_weights+y8_M_7_weights+y8_M_8_weights+y8_M_9_weights+y8_M_10_weights,\
        label="$bg\_dip\_100\_200$", histtype="step", rwidth=1.0,\
        color=None, edgecolor="#b2a596", linewidth=1, linestyle="solid",\
        bottom=None, cumulative=False, normed=False, align="mid", orientation="vertical")
    pad.hist(x=xData, bins=xBinning, weights=y8_M_0_weights+y8_M_1_weights+y8_M_2_weights+y8_M_3_weights+y8_M_4_weights+y8_M_5_weights+y8_M_6_weights+y8_M_7_weights+y8_M_8_weights+y8_M_9_weights,\
        label="$bg\_dip\_0\_100$", histtype="step", rwidth=1.0,\
        color=None, edgecolor="#b7a39b", linewidth=1, linestyle="solid",\
        bottom=None, cumulative=False, normed=False, align="mid", orientation="vertical")
    pad.hist(x=xData, bins=xBinning, weights=y8_M_0_weights+y8_M_1_weights+y8_M_2_weights+y8_M_3_weights+y8_M_4_weights+y8_M_5_weights+y8_M_6_weights+y8_M_7_weights+y8_M_8_weights,\
        label="$bg\_vbf\_1600\_inf$", histtype="step", rwidth=1.0,\
        color=None, edgecolor="#ad998c", linewidth=1, linestyle="solid",\
        bottom=None, cumulative=False, normed=False, align="mid", orientation="vertical")
    pad.hist(x=xData, bins=xBinning, weights=y8_M_0_weights+y8_M_1_weights+y8_M_2_weights+y8_M_3_weights+y8_M_4_weights+y8_M_5_weights+y8_M_6_weights+y8_M_7_weights,\
        label="$bg\_vbf\_1200\_1600$", histtype="step", rwidth=1.0,\
        color=None, edgecolor="#9b8e82", linewidth=1, linestyle="solid",\
        bottom=None, cumulative=False, normed=False, align="mid", orientation="vertical")
    pad.hist(x=xData, bins=xBinning, weights=y8_M_0_weights+y8_M_1_weights+y8_M_2_weights+y8_M_3_weights+y8_M_4_weights+y8_M_5_weights+y8_M_6_weights,\
        label="$bg\_vbf\_800\_1200$", histtype="step", rwidth=1.0,\
        color=None, edgecolor="#876656", linewidth=1, linestyle="solid",\
        bottom=None, cumulative=False, normed=False, align="mid", orientation="vertical")
    pad.hist(x=xData, bins=xBinning, weights=y8_M_0_weights+y8_M_1_weights+y8_M_2_weights+y8_M_3_weights+y8_M_4_weights+y8_M_5_weights,\
        label="$bg\_vbf\_600\_800$", histtype="step", rwidth=1.0,\
        color=None, edgecolor="#afcec6", linewidth=1, linestyle="solid",\
        bottom=None, cumulative=False, normed=False, align="mid", orientation="vertical")
    pad.hist(x=xData, bins=xBinning, weights=y8_M_0_weights+y8_M_1_weights+y8_M_2_weights+y8_M_3_weights+y8_M_4_weights,\
        label="$bg\_vbf\_400\_600$", histtype="step", rwidth=1.0,\
        color=None, edgecolor="#84c1a3", linewidth=1, linestyle="solid",\
        bottom=None, cumulative=False, normed=False, align="mid", orientation="vertical")
    pad.hist(x=xData, bins=xBinning, weights=y8_M_0_weights+y8_M_1_weights+y8_M_2_weights+y8_M_3_weights,\
        label="$bg\_vbf\_200\_400$", histtype="step", rwidth=1.0,\
        color=None, edgecolor="#89a8a0", linewidth=1, linestyle="solid",\
        bottom=None, cumulative=False, normed=False, align="mid", orientation="vertical")
    pad.hist(x=xData, bins=xBinning, weights=y8_M_0_weights+y8_M_1_weights+y8_M_2_weights,\
        label="$bg\_vbf\_100\_200$", histtype="step", rwidth=1.0,\
        color=None, edgecolor="#829e8c", linewidth=1, linestyle="solid",\
        bottom=None, cumulative=False, normed=False, align="mid", orientation="vertical")
    pad.hist(x=xData, bins=xBinning, weights=y8_M_0_weights+y8_M_1_weights,\
        label="$bg\_vbf\_0\_100$", histtype="step", rwidth=1.0,\
        color=None, edgecolor="#adbcc6", linewidth=1, linestyle="solid",\
        bottom=None, cumulative=False, normed=False, align="mid", orientation="vertical")
    pad.hist(x=xData, bins=xBinning, weights=y8_M_0_weights,\
        label="$signal$", histtype="step", rwidth=1.0,\
        color=None, edgecolor="#7a8e99", linewidth=1, linestyle="solid",\
        bottom=None, cumulative=False, normed=False, align="mid", orientation="vertical")
    # Axis labels (TeX rendering disabled; mathtext still handles the $...$)
    plt.rc('text',usetex=False)
    plt.xlabel(r"M [ j_{1} , j_{2} ] ( GeV ) ",\
        fontsize=16,color="black")
    plt.ylabel(r"$\mathrm{Events}$ $(\mathcal{L}_{\mathrm{int}} = 40.0\ \mathrm{fb}^{-1})$ ",\
        fontsize=16,color="black")
    # Boundary of y-axis: 10% headroom above the tallest stacked bin.
    ymax=(y8_M_0_weights+y8_M_1_weights+y8_M_2_weights+y8_M_3_weights+y8_M_4_weights+y8_M_5_weights+y8_M_6_weights+y8_M_7_weights+y8_M_8_weights+y8_M_9_weights+y8_M_10_weights+y8_M_11_weights+y8_M_12_weights+y8_M_13_weights+y8_M_14_weights+y8_M_15_weights+y8_M_16_weights).max()*1.1
    ymin=0 # linear scale
    #ymin=min([x for x in (y8_M_0_weights+y8_M_1_weights+y8_M_2_weights+y8_M_3_weights+y8_M_4_weights+y8_M_5_weights+y8_M_6_weights+y8_M_7_weights+y8_M_8_weights+y8_M_9_weights+y8_M_10_weights+y8_M_11_weights+y8_M_12_weights+y8_M_13_weights+y8_M_14_weights+y8_M_15_weights+y8_M_16_weights) if x])/100. # log scale
    plt.gca().set_ylim(ymin,ymax)
    # Log/Linear scale for X-axis (log variant left commented by generator)
    plt.gca().set_xscale("linear")
    #plt.gca().set_xscale("log",nonposx="clip")
    # Log/Linear scale for Y-axis (log variant left commented by generator)
    plt.gca().set_yscale("linear")
    #plt.gca().set_yscale("log",nonposy="clip")
    # Legend — anchored just outside the top-right of the axes, in the
    # margin reserved by GridSpec(right=0.7).
    plt.legend(bbox_to_anchor=(1.05,1), loc=2, borderaxespad=0.)
    # Saving the image into the report tree (paths are relative to the
    # Histos/MadAnalysis5job_0 directory this script lives in).
    plt.savefig('../../HTML/MadAnalysis5job_0/selection_7.png')
    # NOTE(review): the PDF directory receives a .png file — presumably a
    # generator quirk; confirm whether a real .pdf is expected here.
    plt.savefig('../../PDF/MadAnalysis5job_0/selection_7.png')
    plt.savefig('../../DVI/MadAnalysis5job_0/selection_7.eps')
# Running!
# Script entry point: build and save the selection_7 histogram images.
if __name__ == '__main__':
    selection_7()
| 182.814433
| 2,022
| 0.74742
| 7,622
| 35,466
| 3.381921
| 0.16741
| 0.260232
| 0.372192
| 0.473911
| 0.38158
| 0.369477
| 0.358343
| 0.345579
| 0.342553
| 0.334911
| 0
| 0.576533
| 0.046721
| 35,466
| 193
| 2,023
| 183.761658
| 0.185897
| 0.035217
| 0
| 0.185841
| 0
| 0.00885
| 0.030514
| 0.005851
| 0
| 0
| 0
| 0
| 0
| 1
| 0.00885
| false
| 0
| 0.035398
| 0
| 0.044248
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
7071f4925611f43a47a55fea91ffd98ac1480e78
| 22,743
|
py
|
Python
|
spect.py
|
philips-software/random_forest
|
5cbc95aa57ac33260720afd3fc779e7d71b5658c
|
[
"MIT"
] | 2
|
2020-01-09T23:26:30.000Z
|
2021-01-27T18:34:15.000Z
|
spect.py
|
Charterhouse/random_forest
|
b842f08fee1054dbff78b6fb3afd4006a7f14a6d
|
[
"MIT"
] | null | null | null |
spect.py
|
Charterhouse/random_forest
|
b842f08fee1054dbff78b6fb3afd4006a7f14a6d
|
[
"MIT"
] | 2
|
2020-03-03T18:30:14.000Z
|
2021-09-06T13:55:06.000Z
|
from mpyc.runtime import mpc
from src.dataset import ObliviousDataset, Sample
from src.output import output
from src.secint import secint as s
from src.forest import train_forest
def sample(ins, out):
    """Wrap one training row as a Sample of secret-shared integers.

    Each feature in *ins* and the label *out* are lifted to secure
    integers with ``s`` before being handed to ``Sample``.
    """
    secret_features = [s(feature) for feature in ins]
    return Sample(secret_features, s(out))
# SPECT heart-imaging training set held as an oblivious (secret-shared)
# dataset: every row is 22 binary features plus a binary class label,
# each lifted to a secure integer by sample() above.
spect_samples = ObliviousDataset.create(
    sample([1, 0, 0, 0, 1, 0, 0, 0, 1, 1, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0], 0),
    sample([1, 0, 0, 1, 1, 0, 0, 0, 1, 1, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0], 1),
    sample([1, 1, 0, 1, 0, 1, 0, 0, 1, 0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0], 0),
    sample([1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1], 1),
    sample([1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 1, 1, 0, 0, 0, 0, 0], 0),
    sample([1, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 1, 1, 0, 1, 0, 0, 0, 1, 0], 1),
    sample([1, 1, 0, 1, 1, 0, 0, 0, 1, 0, 1, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 1], 1),
    sample([1, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0], 1),
    sample([1, 0, 0, 1, 0, 0, 0, 1, 1, 0, 0, 0, 0, 1, 0, 1, 0, 0, 0, 0, 0, 1], 1),
    sample([1, 0, 1, 0, 0, 0, 0, 1, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], 0),
    sample([1, 1, 1, 0, 0, 1, 0, 1, 0, 0, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 0], 1),
    sample([1, 1, 1, 0, 0, 1, 1, 1, 0, 1, 1, 1, 1, 0, 1, 0, 0, 1, 0, 1, 1, 0], 0),
    sample([1, 1, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1], 1),
    sample([1, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0, 1, 0, 0, 1], 1),
    sample([1, 1, 0, 1, 1, 0, 0, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1], 1),
    sample([1, 0, 1, 1, 0, 0, 1, 1, 1, 0, 0, 0, 1, 1, 0, 0, 1, 1, 1, 0, 1, 1], 1),
    sample([1, 0, 0, 1, 1, 0, 0, 0, 1, 1, 0, 0, 0, 1, 1, 0, 1, 0, 0, 0, 0, 1], 0),
    sample([1, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], 1),
    sample([1, 1, 0, 1, 0, 1, 0, 1, 1, 0, 1, 0, 1, 1, 0, 0, 0, 1, 0, 0, 1, 1], 0),
    sample([1, 1, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 1, 0], 0),
    sample([1, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 1], 1),
    sample([1, 1, 0, 0, 0, 1, 1, 0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0], 0),
    sample([1, 1, 1, 0, 0, 1, 1, 1, 0, 0, 1, 1, 1, 0, 0, 0, 0, 0, 0, 1, 0, 0], 0),
    sample([1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0], 0),
    sample([1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0, 1, 0, 0], 0),
    sample([1, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0], 0),
    sample([1, 0, 0, 0, 1, 0, 0, 1, 0, 1, 0, 0, 1, 0, 1, 0, 0, 0, 0, 0, 0, 1], 0),
    sample([1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0], 0),
    sample([1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], 1),
    sample([1, 1, 0, 1, 1, 1, 0, 0, 0, 0, 1, 0, 0, 1, 1, 0, 1, 0, 0, 0, 1, 1], 1),
    sample([1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], 0),
    sample([1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], 0),
    sample([1, 1, 1, 0, 0, 1, 1, 1, 0, 0, 1, 1, 1, 0, 0, 0, 0, 1, 0, 1, 0, 0], 1),
    sample([1, 0, 1, 1, 1, 0, 0, 1, 1, 1, 0, 1, 1, 1, 0, 0, 1, 1, 1, 0, 0, 1], 1),
    sample([1, 1, 0, 1, 1, 1, 0, 0, 1, 1, 1, 0, 0, 1, 1, 1, 0, 0, 0, 0, 0, 1], 0),
    sample([1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 0], 1),
    sample([1, 1, 1, 1, 0, 1, 0, 1, 1, 1, 1, 0, 1, 1, 1, 0, 1, 0, 0, 0, 1, 1], 1),
    sample([1, 1, 0, 0, 1, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], 0),
    sample([1, 1, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0], 0),
    sample([1, 1, 0, 1, 1, 0, 0, 0, 1, 1, 1, 0, 0, 1, 1, 1, 1, 0, 0, 1, 1, 0], 0),
    sample([0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], 1),
    sample([0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0], 0),
    sample([0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0], 0),
    sample([0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], 0),
    sample([0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 1], 0),
    sample([0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], 1),
    sample([0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 1, 0], 0),
    sample([0, 0, 0, 1, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], 0),
    sample([0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], 0),
    sample([0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0], 0),
    sample([0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0], 1),
    sample([0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], 0),
    sample([0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], 0),
    sample([0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0], 0),
    sample([0, 1, 0, 0, 0, 1, 0, 1, 0, 0, 1, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0], 1),
    sample([0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], 0),
    sample([0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0], 0),
    sample([0, 1, 1, 1, 0, 1, 0, 1, 1, 1, 1, 1, 0, 0, 1, 0, 1, 0, 0, 1, 0, 1], 0),
    sample([0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], 0),
    sample([0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], 0),
    sample([0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], 0),
    sample([0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], 0),
    sample([0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], 1),
    sample([0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], 0),
    sample([0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], 0),
    sample([0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], 0),
    sample([0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], 1),
    sample([0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], 0),
    sample([0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], 0),
    sample([0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], 0),
    sample([0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], 0),
    sample([0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], 0),
    sample([0, 1, 0, 0, 0, 1, 0, 1, 0, 0, 1, 0, 1, 0, 0, 0, 0, 0, 0, 1, 0, 0], 0),
    sample([0, 1, 0, 1, 0, 0, 0, 0, 1, 0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1], 0),
    sample([0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], 0),
    sample([0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], 0),
    sample([0, 1, 0, 0, 0, 1, 1, 0, 0, 1, 1, 0, 0, 0, 1, 0, 0, 0, 0, 1, 1, 0], 0),
    sample([0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0], 0),
    sample([0, 0, 0, 1, 1, 0, 0, 1, 0, 0, 0, 0, 1, 1, 1, 0, 0, 0, 0, 0, 0, 1], 1),
    sample([0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], 0),
    sample([1, 1, 0, 0, 1, 1, 0, 0, 0, 1, 1, 0, 0, 0, 1, 1, 1, 0, 0, 1, 1, 0], 0),
    sample([1, 1, 0, 0, 1, 1, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0], 0),
    sample([1, 0, 0, 0, 1, 0, 1, 0, 0, 1, 0, 1, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0], 1),
    sample([1, 0, 1, 1, 1, 0, 0, 1, 0, 1, 0, 0, 1, 1, 1, 0, 1, 0, 0, 0, 0, 1], 0),
    sample([1, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 1, 0, 1, 1, 0, 1, 0, 0, 0, 0, 0], 1),
    sample([1, 0, 0, 1, 1, 0, 1, 0, 0, 1, 0, 1, 0, 1, 0, 0, 1, 0, 0, 0, 0, 1], 1),
    sample([1, 1, 0, 0, 1, 0, 0, 1, 1, 1, 1, 0, 1, 1, 1, 0, 1, 0, 0, 0, 1, 0], 1),
    sample([1, 1, 0, 0, 1, 0, 0, 0, 0, 1, 1, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0], 0),
    sample([1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0], 0),
    sample([1, 1, 0, 0, 1, 1, 1, 0, 0, 1, 1, 1, 0, 0, 1, 0, 1, 1, 0, 1, 0, 0], 0),
    sample([1, 1, 0, 0, 0, 1, 0, 0, 0, 1, 1, 0, 0, 1, 1, 1, 0, 0, 0, 1, 0, 0], 0),
    sample([1, 1, 0, 0, 0, 1, 1, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0], 0),
    sample([1, 0, 0, 0, 0, 0, 1, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0], 1),
    sample([1, 1, 0, 0, 0, 1, 0, 0, 0, 1, 1, 0, 0, 0, 1, 0, 0, 0, 1, 1, 0, 0], 0),
    sample([1, 1, 0, 0, 1, 1, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 1, 0, 0, 1, 1, 0], 0),
    sample([1, 1, 0, 1, 0, 1, 0, 0, 1, 0, 1, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 1], 0),
    sample([1, 1, 1, 0, 0, 1, 1, 1, 1, 0, 1, 1, 1, 1, 0, 0, 0, 1, 0, 0, 0, 1], 1),
    sample([1, 1, 0, 0, 0, 0, 1, 1, 0, 0, 1, 1, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0], 1),
    sample([1, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0], 0),
    sample([1, 1, 1, 1, 0, 1, 0, 1, 1, 0, 1, 0, 1, 1, 0, 0, 1, 0, 0, 0, 1, 1], 0),
    sample([1, 1, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0, 1, 0, 1, 0], 0),
    sample([1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 0, 1, 1], 1),
    sample([1, 0, 0, 1, 1, 1, 0, 0, 1, 1, 1, 0, 0, 1, 1, 1, 1, 0, 1, 0, 1, 1], 0),
    sample([1, 1, 1, 0, 1, 1, 1, 1, 0, 0, 0, 1, 1, 0, 0, 0, 1, 1, 0, 0, 1, 0], 0),
    sample([1, 1, 1, 0, 0, 1, 0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], 0),
    sample([1, 1, 1, 0, 0, 1, 1, 1, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0], 0),
    sample([1, 1, 0, 1, 0, 1, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1], 0),
    sample([1, 1, 1, 1, 1, 0, 1, 1, 1, 0, 1, 0, 0, 1, 1, 1, 1, 0, 0, 1, 1, 0], 0),
    sample([1, 1, 1, 0, 0, 1, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0], 0),
    sample([1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0], 0),
    sample([1, 1, 1, 0, 0, 1, 0, 0, 1, 1, 1, 1, 0, 1, 1, 1, 1, 0, 0, 1, 1, 0], 0),
    sample([1, 1, 1, 0, 0, 0, 1, 1, 0, 0, 1, 1, 0, 0, 0, 1, 1, 0, 0, 1, 1, 1], 0),
    sample([1, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 1, 1, 0, 0, 1, 0, 0, 0, 0, 1], 1),
    sample([1, 1, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1], 1),
    sample([1, 0, 0, 1, 1, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0], 0),
    sample([1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1], 1),
    sample([1, 0, 1, 1, 0, 0, 0, 1, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0], 0),
    sample([1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 1, 0, 0], 0),
    sample([1, 1, 0, 1, 1, 1, 0, 0, 1, 1, 1, 0, 0, 1, 0, 1, 1, 0, 0, 0, 1, 1], 1),
    sample([1, 0, 1, 1, 1, 0, 0, 1, 1, 1, 0, 0, 1, 1, 1, 0, 0, 0, 0, 0, 0, 1], 1),
    sample([1, 0, 1, 1, 0, 0, 0, 1, 1, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 1], 1),
    sample([1, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 1, 1, 1, 0, 0, 0, 1, 1, 0, 0, 0], 0),
    sample([1, 0, 0, 1, 1, 0, 1, 1, 1, 0, 0, 1, 1, 1, 0, 0, 1, 0, 0, 0, 1, 1], 1),
    sample([1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], 1),
    sample([1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1], 0),
    sample([1, 0, 0, 1, 0, 0, 1, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0], 1),
    sample([1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0], 1),
    sample([1, 0, 0, 1, 0, 1, 0, 0, 1, 1, 1, 0, 0, 1, 1, 0, 0, 1, 1, 0, 0, 1], 1),
    sample([1, 1, 0, 0, 0, 1, 1, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], 0),
    sample([1, 1, 0, 1, 1, 1, 0, 0, 1, 1, 0, 0, 0, 0, 1, 1, 1, 0, 0, 0, 1, 1], 0),
    sample([1, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0], 1),
    sample([1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0], 1),
    sample([1, 0, 1, 1, 0, 0, 0, 1, 1, 0, 0, 0, 1, 1, 0, 0, 1, 1, 1, 0, 0, 1], 1),
    sample([1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0], 1),
    sample([1, 1, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0], 1),
    sample([1, 1, 0, 1, 0, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 0, 0, 1, 0, 1], 1),
    sample([1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1], 1),
    sample([1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], 1),
    sample([1, 0, 1, 1, 0, 0, 0, 0, 1, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 1, 0], 0),
    sample([1, 0, 1, 1, 0, 0, 0, 1, 0, 0, 0, 0, 1, 1, 0, 0, 0, 1, 0, 0, 0, 0], 1),
    sample([1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 0, 1, 0, 1, 0, 1, 1, 0, 0, 1, 1, 0], 1),
    sample([1, 0, 1, 0, 0, 0, 1, 1, 0, 0, 0, 1, 1, 0, 0, 0, 0, 1, 1, 0, 0, 0], 0),
    sample([1, 0, 1, 0, 0, 1, 1, 0, 0, 0, 1, 1, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1], 1),
    sample([1, 1, 0, 1, 1, 1, 0, 0, 1, 1, 0, 0, 0, 1, 1, 1, 1, 0, 0, 0, 1, 1], 1),
    sample([1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0], 0),
    sample([1, 1, 1, 1, 0, 1, 1, 1, 0, 0, 1, 1, 1, 1, 0, 0, 0, 0, 1, 1, 1, 1], 0),
    sample([1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 1, 0, 1, 0, 0, 0, 1, 1], 1),
    sample([1, 1, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 1, 1, 0], 0),
    sample([1, 1, 1, 0, 1, 1, 1, 0, 0, 1, 0, 0, 1, 0, 1, 0, 1, 0, 0, 1, 1, 0], 0),
    sample([1, 1, 1, 1, 0, 1, 0, 1, 1, 0, 1, 0, 1, 1, 0, 0, 0, 0, 1, 1, 0, 1], 1),
    sample([1, 1, 1, 0, 0, 1, 1, 1, 1, 0, 0, 0, 1, 1, 0, 0, 1, 1, 0, 0, 1, 1], 1),
    sample([1, 0, 1, 1, 0, 0, 1, 1, 1, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 1], 1),
    sample([1, 1, 0, 1, 1, 1, 0, 0, 1, 1, 1, 0, 0, 1, 1, 1, 1, 0, 1, 1, 1, 1], 1),
    sample([1, 0, 0, 1, 0, 1, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1], 1),
    sample([1, 0, 0, 0, 1, 0, 0, 1, 1, 1, 0, 0, 1, 1, 1, 0, 0, 1, 0, 0, 0, 0], 0),
    sample([1, 0, 0, 1, 0, 0, 0, 1, 1, 0, 0, 0, 1, 1, 0, 0, 0, 0, 1, 1, 0, 1], 1),
    sample([1, 0, 1, 1, 0, 0, 0, 1, 1, 0, 0, 0, 1, 1, 0, 0, 1, 1, 1, 0, 1, 1], 1),
    sample([1, 1, 0, 1, 1, 1, 0, 0, 1, 1, 1, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 1], 0),
    sample([1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 0, 0, 0, 1, 0, 1], 1),
    sample([1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 1, 1], 1),
    sample([1, 1, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 1, 1, 1, 1, 1, 0, 0, 1, 1], 1),
    sample([1, 0, 1, 1, 1, 0, 1, 0, 0, 1, 0, 1, 1, 1, 1, 0, 1, 1, 1, 0, 0, 1], 1),
    sample([1, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1], 1),
    sample([1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1], 1),
    sample([1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 0, 1, 0, 1, 1, 1, 0, 0, 1, 1, 0], 0),
    sample([1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1], 1),
    sample([1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 0, 1, 1, 1, 1, 0, 0, 0, 0, 1, 0], 0),
    sample([1, 0, 1, 0, 0, 1, 1, 1, 0, 0, 0, 1, 1, 0, 0, 0, 1, 1, 0, 1, 1, 0], 1),
    sample([1, 1, 0, 1, 0, 1, 0, 0, 1, 0, 0, 0, 0, 1, 1, 0, 1, 0, 0, 0, 1, 1], 1),
    sample([1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 1, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1], 0),
    sample([1, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 1, 1, 1, 1, 0, 0, 1, 1, 0], 0),
    sample([1, 1, 0, 1, 1, 0, 0, 0, 1, 1, 0, 0, 0, 1, 1, 1, 1, 0, 0, 1, 1, 1], 1),
    sample([1, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0], 0),
    sample([1, 0, 0, 1, 0, 0, 1, 0, 1, 1, 0, 1, 0, 1, 1, 1, 0, 1, 0, 0, 0, 0], 0),
    sample([1, 1, 0, 0, 0, 1, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 1, 1, 1, 0, 0], 1),
    sample([1, 0, 0, 0, 1, 0, 0, 1, 1, 1, 0, 1, 1, 1, 1, 0, 0, 0, 0, 0, 1, 0], 0),
    sample([1, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0], 1),
    sample([1, 1, 1, 1, 0, 1, 0, 1, 0, 0, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 1, 1], 1),
    sample([1, 0, 0, 1, 1, 0, 0, 1, 1, 1, 0, 0, 1, 1, 0, 0, 0, 0, 1, 0, 0, 1], 1),
    sample([1, 1, 0, 1, 0, 1, 1, 1, 1, 0, 1, 1, 1, 1, 0, 0, 1, 0, 1, 0, 1, 1], 1),
    sample([1, 1, 1, 0, 0, 1, 1, 1, 1, 0, 1, 1, 1, 1, 0, 0, 0, 0, 0, 1, 0, 0], 0),
    sample([1, 0, 0, 1, 1, 0, 0, 0, 1, 1, 0, 0, 0, 1, 1, 1, 0, 0, 0, 0, 0, 1], 1),
    sample([1, 1, 0, 0, 1, 0, 0, 0, 0, 0, 1, 1, 0, 0, 1, 0, 0, 1, 1, 1, 1, 1], 0),
    sample([1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0], 0),
    sample([1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0], 0),
    sample([1, 1, 0, 0, 0, 0, 0, 0, 1, 0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0], 1),
    sample([1, 1, 0, 0, 0, 1, 1, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0], 0),
    sample([1, 1, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0], 0),
    sample([1, 1, 0, 0, 1, 1, 0, 0, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0], 0),
    sample([1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], 0),
    sample([1, 1, 0, 0, 0, 1, 0, 0, 0, 1, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0], 0),
    sample([1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], 1),
    sample([1, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 1, 0, 0, 0, 0, 0], 0),
    sample([1, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0], 0),
    sample([1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], 0),
    sample([1, 0, 0, 1, 0, 1, 0, 0, 1, 1, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0], 0),
    sample([1, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0], 1),
    sample([1, 1, 1, 0, 0, 1, 1, 1, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0], 0),
    sample([1, 1, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0], 0),
    sample([1, 1, 1, 0, 0, 1, 0, 1, 0, 0, 1, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 1], 1),
    sample([1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], 0),
    sample([1, 1, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], 0),
    sample([1, 1, 0, 0, 0, 1, 0, 1, 0, 0, 1, 0, 1, 0, 0, 0, 0, 0, 0, 1, 0, 0], 0),
    sample([1, 1, 0, 1, 1, 1, 0, 0, 1, 1, 1, 0, 0, 1, 1, 1, 1, 0, 0, 1, 1, 1], 1),
    sample([1, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1], 1),
    sample([1, 1, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], 0),
    sample([1, 0, 0, 0, 1, 0, 0, 1, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 1], 0),
    sample([1, 0, 0, 1, 1, 0, 1, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 1, 1, 1, 0], 0),
    sample([1, 1, 0, 1, 0, 0, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0], 0),
    sample([1, 1, 1, 1, 0, 1, 1, 1, 1, 0, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0], 0),
    sample([1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0], 0),
    sample([1, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0], 1),
    sample([1, 0, 0, 1, 1, 0, 0, 1, 1, 1, 0, 0, 1, 0, 0, 1, 1, 0, 0, 0, 0, 1], 0),
    sample([1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], 1),
    sample([1, 1, 0, 0, 0, 1, 1, 0, 0, 1, 1, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0], 0),
    sample([1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 1, 1, 1, 0, 0, 1], 1),
    sample([1, 1, 0, 1, 0, 1, 0, 0, 1, 0, 1, 0, 0, 1, 0, 0, 0, 1, 0, 1, 1, 1], 0),
    sample([1, 1, 1, 0, 1, 1, 0, 0, 1, 1, 1, 0, 1, 0, 0, 1, 1, 0, 0, 0, 1, 1], 0),
    sample([1, 0, 0, 1, 1, 0, 0, 0, 1, 0, 0, 0, 0, 1, 1, 0, 1, 0, 0, 0, 1, 1], 1),
    sample([1, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 1], 0),
    sample([1, 0, 1, 1, 1, 0, 1, 1, 1, 1, 0, 1, 0, 1, 1, 1, 1, 0, 0, 0, 1, 1], 1),
    sample([1, 0, 1, 1, 0, 0, 1, 0, 1, 0, 0, 1, 1, 1, 0, 0, 1, 0, 0, 1, 1, 0], 1),
    sample([1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 0, 1, 1, 1], 1),
    sample([1, 1, 0, 1, 0, 1, 0, 1, 0, 1, 1, 0, 0, 1, 1, 1, 1, 0, 0, 1, 1, 0], 1),
    sample([1, 1, 0, 0, 1, 1, 0, 0, 0, 1, 1, 0, 0, 0, 1, 1, 1, 0, 0, 1, 1, 0], 1),
    sample([1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0], 1),
    sample([1, 1, 1, 1, 0, 0, 1, 1, 0, 1, 0, 0, 1, 0, 1, 0, 1, 0, 0, 0, 1, 1], 1),
    sample([1, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 1, 0, 1, 0, 0, 1, 0, 0, 0, 1, 1], 1),
    sample([1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 0, 1, 0, 1, 1, 0, 0, 1, 1, 1], 1),
    sample([1, 1, 0, 0, 0, 1, 0, 0, 1, 1, 1, 0, 0, 1, 1, 0, 1, 0, 0, 1, 1, 0], 0),
    sample([1, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1], 1),
    sample([1, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], 0),
    sample([1, 1, 1, 1, 0, 1, 0, 1, 1, 0, 1, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 1], 0),
    sample([1, 1, 1, 1, 0, 0, 1, 1, 1, 0, 1, 1, 1, 1, 0, 0, 0, 1, 1, 1, 1, 0], 0),
    sample([1, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 1, 0, 1], 1),
    sample([1, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 1], 1),
    sample([1, 0, 0, 0, 1, 0, 0, 0, 1, 1, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 1], 0),
    sample([1, 1, 1, 0, 0, 1, 1, 0, 0, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0], 0),
    sample([1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1], 1),
    sample([1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1], 1),
    sample([1, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 1, 0, 0, 1, 0], 0),
    sample([1, 1, 0, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 0, 1, 0, 0, 0, 1], 1),
    sample([1, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 1], 0),
    sample([1, 0, 0, 1, 1, 0, 0, 0, 1, 1, 1, 0, 1, 1, 1, 1, 0, 0, 0, 0, 0, 1], 1),
    sample([1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 1, 0, 0, 0, 0, 0], 0),
    sample([1, 0, 0, 1, 0, 1, 0, 0, 1, 0, 1, 0, 0, 1, 0, 1, 0, 0, 1, 0, 0, 1], 0),
    sample([1, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 1, 0, 0, 0, 1, 1, 0, 0, 1], 1),
    sample([1, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0], 1),
    sample([1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], 0),
    sample([1, 1, 1, 1, 0, 1, 1, 0, 0, 0, 1, 1, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0], 0),
    sample([1, 1, 0, 1, 1, 1, 0, 0, 1, 1, 1, 0, 0, 1, 1, 0, 1, 0, 1, 1, 1, 0], 0),
    sample([1, 1, 0, 0, 0, 1, 0, 0, 0, 1, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 1, 1], 0),
    sample([0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], 0),
    sample([0, 0, 0, 1, 1, 0, 0, 0, 1, 1, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 1], 0),
    sample([0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], 0),
    sample([0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0], 1),
    sample([0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0], 0),
    sample([0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], 0),
    sample([0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], 0),
    sample([0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0], 0),
    sample([0, 0, 0, 1, 1, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0], 0),
    sample([0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], 0),
    sample([0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], 0),
    sample([0, 1, 1, 0, 0, 0, 1, 0, 0, 0, 1, 1, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0], 0),
    sample([0, 1, 0, 1, 0, 1, 0, 0, 1, 0, 0, 0, 0, 1, 0, 1, 1, 0, 0, 0, 0, 0], 0),
    sample([0, 1, 0, 1, 0, 1, 0, 0, 1, 1, 0, 0, 0, 0, 1, 0, 1, 0, 0, 0, 0, 0], 0),
    sample([0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], 0)
)
async def main():
    """Train a two-tree oblivious forest (depth 4) on the SPECT data,
    open the result, and pretty-print every trained tree."""
    async with mpc:
        secure_forest = await train_forest(spect_samples, amount=2, depth=4)
        forest = await output(secure_forest)
        for index, tree in enumerate(forest):
            print(f"Tree #{index}")
            tree.pretty_print()
# Script entry point: drive the async training under the MPC event loop.
if __name__ == '__main__':
    mpc.run(main())
| 77.357143
| 83
| 0.358
| 6,492
| 22,743
| 1.252157
| 0.006778
| 0.741297
| 0.818182
| 0.836019
| 0.952516
| 0.952516
| 0.952516
| 0.952516
| 0.952516
| 0.951655
| 0
| 0.398637
| 0.322429
| 22,743
| 293
| 84
| 77.62116
| 0.128877
| 0
| 0
| 0.183099
| 0
| 0
| 0.000923
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.003521
| false
| 0
| 0.017606
| 0.003521
| 0.024648
| 0.007042
| 0
| 0
| 1
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 12
|
7087885e36c0266307dc1789a9c00444a7c21a73
| 49,994
|
py
|
Python
|
tests/test_make_table.py
|
kellyjonbrazil/jtbl
|
9bfc755bc964fbed59a4884bc4be605a5065f3d8
|
[
"MIT"
] | 108
|
2020-03-10T13:22:03.000Z
|
2022-03-30T03:09:38.000Z
|
tests/test_make_table.py
|
kellyjonbrazil/jtbl
|
9bfc755bc964fbed59a4884bc4be605a5065f3d8
|
[
"MIT"
] | 9
|
2020-03-08T00:44:38.000Z
|
2022-02-15T19:36:04.000Z
|
tests/test_make_table.py
|
kellyjonbrazil/jtbl
|
9bfc755bc964fbed59a4884bc4be605a5065f3d8
|
[
"MIT"
] | 5
|
2020-03-10T11:34:18.000Z
|
2021-08-02T10:57:43.000Z
|
import unittest
import textwrap
import jtbl.cli
class MyTests(unittest.TestCase):
def setUp(self):
self.SUCCESS, self.ERROR = True, False
def test_no_piped_data(self):
stdin = None
expected = textwrap.dedent('''\
jtbl: Missing piped data
''')
self.assertEqual(jtbl.cli.make_table(input_data=stdin), (self.ERROR, expected))
def test_null_data(self):
stdin = ''
expected = ''
self.assertEqual(jtbl.cli.make_table(input_data=stdin), (self.ERROR, expected))
def test_simple_key_value(self):
stdin = '[{"key": "value"}]'
expected = textwrap.dedent('''\
key
-----
value''')
self.assertEqual(jtbl.cli.make_table(input_data=stdin), (self.SUCCESS, expected))
def test_multi_key_value(self):
stdin = '[{"key1": "value1", "key2": "value1"}, {"key1": "value2", "key2": "value2"}]'
expected = textwrap.dedent('''\
key1 key2
------ ------
value1 value1
value2 value2''')
self.assertEqual(jtbl.cli.make_table(input_data=stdin), (self.SUCCESS, expected))
def test_null_string(self):
stdin = 'null'
expected = textwrap.dedent('''\
jtbl: Cannot represent this part of the JSON Object as a table.
(Could be an Element, an Array, or Null data instead of an Object):
[null]
''')
self.assertEqual(jtbl.cli.make_table(input_data=stdin), (self.ERROR, expected))
def test_hello_string(self):
stdin = 'hello'
expected = textwrap.dedent('''\
jtbl: Exception - Expecting value: line 1 column 1 (char 0)
Cannot parse line 1 (Not JSON or JSON Lines data):
hello
''')
self.assertEqual(jtbl.cli.make_table(input_data=stdin), (self.ERROR, expected))
def test_array_input(self):
stdin = '["value1", "value2", "value3"]'
expected = textwrap.dedent('''\
jtbl: Cannot represent this part of the JSON Object as a table.
(Could be an Element, an Array, or Null data instead of an Object):
["value1", "value2", "value3"]
''')
self.assertEqual(jtbl.cli.make_table(input_data=stdin), (self.ERROR, expected))
def test_deep_nest(self):
stdin = '{"this":{"is":{"a":{"deeply":{"nested":{"structure":"value1","item2":"value2"}}}}}}'
expected = textwrap.dedent('''\
this
---------------------------------------------------------------------------------
{'is': {'a': {'deeply': {'nested': {'structure': 'value1', 'item2': 'value2'}}}}}''')
self.assertEqual(jtbl.cli.make_table(input_data=stdin, columns=100), (self.SUCCESS, expected))
    def test_jc_dig(self):
        """Render parsed `jc dig` output constrained to columns=80: long
        nested values (answer list, question dict) are wrapped across many
        continuation rows of the grid table."""
        stdin = '[{"id": 55658, "opcode": "QUERY", "status": "NOERROR", "flags": ["qr", "rd", "ra"], "query_num": 1, "answer_num": 5, "authority_num": 0, "additional_num": 1, "question": {"name": "www.cnn.com.", "class": "IN", "type": "A"}, "answer": [{"name": "www.cnn.com.", "class": "IN", "type": "CNAME", "ttl": 147, "data": "turner-tls.map.fastly.net."}, {"name": "turner-tls.map.fastly.net.", "class": "IN", "type": "A", "ttl": 5, "data": "151.101.1.67"}, {"name": "turner-tls.map.fastly.net.", "class": "IN", "type": "A", "ttl": 5, "data": "151.101.65.67"}, {"name": "turner-tls.map.fastly.net.", "class": "IN", "type": "A", "ttl": 5, "data": "151.101.129.67"}, {"name": "turner-tls.map.fastly.net.", "class": "IN", "type": "A", "ttl": 5, "data": "151.101.193.67"}], "query_time": 44, "server": "2600", "when": "Wed Mar 18 12:20:59 PDT 2020", "rcvd": 143}]'
        expected = textwrap.dedent('''\
            +------+--------+--------+--------+--------+--------+--------+--------+--------+--------+--------+--------+--------+--------+
            | id   | opco   | stat   | flag   | quer   | answ   | auth   | addi   | ques   | answ   | quer   | serv   | when   | rcvd   |
            |      | de     | us     | s      | y_nu   | er_n   | orit   | tion   | tion   | er     | y_ti   | er     |        |        |
            |      |        |        |        | m      | um     | y_nu   | al_n   |        |        | me     |        |        |        |
            |      |        |        |        |        |        | m      | um     |        |        |        |        |        |        |
            +======+========+========+========+========+========+========+========+========+========+========+========+========+========+
            | 5565 | QUER   | NOER   | ['qr   | 1      | 5      | 0      | 1      | {'na   | [{'n   | 44     | 2600   | Wed    | 143    |
            | 8    | Y      | ROR    | ', '   |        |        |        |        | me':   | ame'   |        |        | Mar    |        |
            |      |        |        | rd',   |        |        |        |        |  'ww   | : 'w   |        |        | 18 1   |        |
            |      |        |        |  'ra   |        |        |        |        | w.cn   | ww.c   |        |        | 2:20   |        |
            |      |        |        | ']     |        |        |        |        | n.co   | nn.c   |        |        | :59    |        |
            |      |        |        |        |        |        |        |        | m.',   | om.'   |        |        | PDT    |        |
            |      |        |        |        |        |        |        |        |  'cl   | , 'c   |        |        | 2020   |        |
            |      |        |        |        |        |        |        |        | ass'   | lass   |        |        |        |        |
            |      |        |        |        |        |        |        |        | : 'I   | ': '   |        |        |        |        |
            |      |        |        |        |        |        |        |        | N',    | IN',   |        |        |        |        |
            |      |        |        |        |        |        |        |        | 'typ   |  'ty   |        |        |        |        |
            |      |        |        |        |        |        |        |        | e':    | pe':   |        |        |        |        |
            |      |        |        |        |        |        |        |        | 'A'}   |  'CN   |        |        |        |        |
            |      |        |        |        |        |        |        |        |        | AME'   |        |        |        |        |
            |      |        |        |        |        |        |        |        |        | , 't   |        |        |        |        |
            |      |        |        |        |        |        |        |        |        | tl':   |        |        |        |        |
            |      |        |        |        |        |        |        |        |        |  147   |        |        |        |        |
            |      |        |        |        |        |        |        |        |        | , 'd   |        |        |        |        |
            |      |        |        |        |        |        |        |        |        | ata'   |        |        |        |        |
            |      |        |        |        |        |        |        |        |        | : 't   |        |        |        |        |
            |      |        |        |        |        |        |        |        |        | urne   |        |        |        |        |
            |      |        |        |        |        |        |        |        |        | r-tl   |        |        |        |        |
            |      |        |        |        |        |        |        |        |        | s.ma   |        |        |        |        |
            |      |        |        |        |        |        |        |        |        | p.fa   |        |        |        |        |
            |      |        |        |        |        |        |        |        |        | stly   |        |        |        |        |
            |      |        |        |        |        |        |        |        |        | .net   |        |        |        |        |
            |      |        |        |        |        |        |        |        |        | .'},   |        |        |        |        |
            |      |        |        |        |        |        |        |        |        |  {'n   |        |        |        |        |
            |      |        |        |        |        |        |        |        |        | ame'   |        |        |        |        |
            |      |        |        |        |        |        |        |        |        | : 't   |        |        |        |        |
            |      |        |        |        |        |        |        |        |        | urne   |        |        |        |        |
            |      |        |        |        |        |        |        |        |        | r-tl   |        |        |        |        |
            |      |        |        |        |        |        |        |        |        | s.ma   |        |        |        |        |
            |      |        |        |        |        |        |        |        |        | p.fa   |        |        |        |        |
            |      |        |        |        |        |        |        |        |        | stly   |        |        |        |        |
            |      |        |        |        |        |        |        |        |        | .net   |        |        |        |        |
            |      |        |        |        |        |        |        |        |        | .',    |        |        |        |        |
            |      |        |        |        |        |        |        |        |        | 'cla   |        |        |        |        |
            |      |        |        |        |        |        |        |        |        | ss':   |        |        |        |        |
            |      |        |        |        |        |        |        |        |        |  'IN   |        |        |        |        |
            |      |        |        |        |        |        |        |        |        | ', '   |        |        |        |        |
            |      |        |        |        |        |        |        |        |        | type   |        |        |        |        |
            |      |        |        |        |        |        |        |        |        | ': '   |        |        |        |        |
            |      |        |        |        |        |        |        |        |        | A',    |        |        |        |        |
            |      |        |        |        |        |        |        |        |        | 'ttl   |        |        |        |        |
            |      |        |        |        |        |        |        |        |        | ': 5   |        |        |        |        |
            |      |        |        |        |        |        |        |        |        | , 'd   |        |        |        |        |
            |      |        |        |        |        |        |        |        |        | ata'   |        |        |        |        |
            |      |        |        |        |        |        |        |        |        | : '1   |        |        |        |        |
            |      |        |        |        |        |        |        |        |        | 51.1   |        |        |        |        |
            |      |        |        |        |        |        |        |        |        | 01.1   |        |        |        |        |
            |      |        |        |        |        |        |        |        |        | .67'   |        |        |        |        |
            |      |        |        |        |        |        |        |        |        | }, {   |        |        |        |        |
            |      |        |        |        |        |        |        |        |        | 'nam   |        |        |        |        |
            |      |        |        |        |        |        |        |        |        | e':    |        |        |        |        |
            |      |        |        |        |        |        |        |        |        | 'tur   |        |        |        |        |
            |      |        |        |        |        |        |        |        |        | ner-   |        |        |        |        |
            |      |        |        |        |        |        |        |        |        | tls.   |        |        |        |        |
            |      |        |        |        |        |        |        |        |        | map.   |        |        |        |        |
            |      |        |        |        |        |        |        |        |        | fast   |        |        |        |        |
            |      |        |        |        |        |        |        |        |        | ly.n   |        |        |        |        |
            |      |        |        |        |        |        |        |        |        | et.'   |        |        |        |        |
            |      |        |        |        |        |        |        |        |        | , 'c   |        |        |        |        |
            |      |        |        |        |        |        |        |        |        | lass   |        |        |        |        |
            |      |        |        |        |        |        |        |        |        | ': '   |        |        |        |        |
            |      |        |        |        |        |        |        |        |        | IN',   |        |        |        |        |
            |      |        |        |        |        |        |        |        |        |  'ty   |        |        |        |        |
            |      |        |        |        |        |        |        |        |        | pe':   |        |        |        |        |
            |      |        |        |        |        |        |        |        |        |  'A'   |        |        |        |        |
            |      |        |        |        |        |        |        |        |        | , 't   |        |        |        |        |
            |      |        |        |        |        |        |        |        |        | tl':   |        |        |        |        |
            |      |        |        |        |        |        |        |        |        |  5,    |        |        |        |        |
            |      |        |        |        |        |        |        |        |        | 'dat   |        |        |        |        |
            |      |        |        |        |        |        |        |        |        | a':    |        |        |        |        |
            |      |        |        |        |        |        |        |        |        | '151   |        |        |        |        |
            |      |        |        |        |        |        |        |        |        | .101   |        |        |        |        |
            |      |        |        |        |        |        |        |        |        | .65.   |        |        |        |        |
            |      |        |        |        |        |        |        |        |        | 67'}   |        |        |        |        |
            |      |        |        |        |        |        |        |        |        | , {'   |        |        |        |        |
            |      |        |        |        |        |        |        |        |        | name   |        |        |        |        |
            |      |        |        |        |        |        |        |        |        | ': '   |        |        |        |        |
            |      |        |        |        |        |        |        |        |        | turn   |        |        |        |        |
            |      |        |        |        |        |        |        |        |        | er-t   |        |        |        |        |
            |      |        |        |        |        |        |        |        |        | ls.m   |        |        |        |        |
            |      |        |        |        |        |        |        |        |        | ap.f   |        |        |        |        |
            |      |        |        |        |        |        |        |        |        | astl   |        |        |        |        |
            |      |        |        |        |        |        |        |        |        | y.ne   |        |        |        |        |
            |      |        |        |        |        |        |        |        |        | t.',   |        |        |        |        |
            |      |        |        |        |        |        |        |        |        |  'cl   |        |        |        |        |
            |      |        |        |        |        |        |        |        |        | ass'   |        |        |        |        |
            |      |        |        |        |        |        |        |        |        | : 'I   |        |        |        |        |
            |      |        |        |        |        |        |        |        |        | N',    |        |        |        |        |
            |      |        |        |        |        |        |        |        |        | 'typ   |        |        |        |        |
            |      |        |        |        |        |        |        |        |        | e':    |        |        |        |        |
            |      |        |        |        |        |        |        |        |        | 'A',   |        |        |        |        |
            |      |        |        |        |        |        |        |        |        |  'tt   |        |        |        |        |
            |      |        |        |        |        |        |        |        |        | l':    |        |        |        |        |
            |      |        |        |        |        |        |        |        |        | 5, '   |        |        |        |        |
            |      |        |        |        |        |        |        |        |        | data   |        |        |        |        |
            |      |        |        |        |        |        |        |        |        | ': '   |        |        |        |        |
            |      |        |        |        |        |        |        |        |        | 151.   |        |        |        |        |
            |      |        |        |        |        |        |        |        |        | 101.   |        |        |        |        |
            |      |        |        |        |        |        |        |        |        | 129.   |        |        |        |        |
            |      |        |        |        |        |        |        |        |        | 67'}   |        |        |        |        |
            |      |        |        |        |        |        |        |        |        | , {'   |        |        |        |        |
            |      |        |        |        |        |        |        |        |        | name   |        |        |        |        |
            |      |        |        |        |        |        |        |        |        | ': '   |        |        |        |        |
            |      |        |        |        |        |        |        |        |        | turn   |        |        |        |        |
            |      |        |        |        |        |        |        |        |        | er-t   |        |        |        |        |
            |      |        |        |        |        |        |        |        |        | ls.m   |        |        |        |        |
            |      |        |        |        |        |        |        |        |        | ap.f   |        |        |        |        |
            |      |        |        |        |        |        |        |        |        | astl   |        |        |        |        |
            |      |        |        |        |        |        |        |        |        | y.ne   |        |        |        |        |
            |      |        |        |        |        |        |        |        |        | t.',   |        |        |        |        |
            |      |        |        |        |        |        |        |        |        |  'cl   |        |        |        |        |
            |      |        |        |        |        |        |        |        |        | ass'   |        |        |        |        |
            |      |        |        |        |        |        |        |        |        | : 'I   |        |        |        |        |
            |      |        |        |        |        |        |        |        |        | N',    |        |        |        |        |
            |      |        |        |        |        |        |        |        |        | 'typ   |        |        |        |        |
            |      |        |        |        |        |        |        |        |        | e':    |        |        |        |        |
            |      |        |        |        |        |        |        |        |        | 'A',   |        |        |        |        |
            |      |        |        |        |        |        |        |        |        |  'tt   |        |        |        |        |
            |      |        |        |        |        |        |        |        |        | l':    |        |        |        |        |
            |      |        |        |        |        |        |        |        |        | 5, '   |        |        |        |        |
            |      |        |        |        |        |        |        |        |        | data   |        |        |        |        |
            |      |        |        |        |        |        |        |        |        | ': '   |        |        |        |        |
            |      |        |        |        |        |        |        |        |        | 151.   |        |        |        |        |
            |      |        |        |        |        |        |        |        |        | 101.   |        |        |        |        |
            |      |        |        |        |        |        |        |        |        | 193.   |        |        |        |        |
            |      |        |        |        |        |        |        |        |        | 67'}   |        |        |        |        |
            |      |        |        |        |        |        |        |        |        | ]      |        |        |        |        |
            +------+--------+--------+--------+--------+--------+--------+--------+--------+--------+--------+--------+--------+--------+''')
        self.assertEqual(jtbl.cli.make_table(input_data=stdin, columns=80), (self.SUCCESS, expected))
def test_jc_dig_150cols(self):
stdin = '[{"id": 55658, "opcode": "QUERY", "status": "NOERROR", "flags": ["qr", "rd", "ra"], "query_num": 1, "answer_num": 5, "authority_num": 0, "additional_num": 1, "question": {"name": "www.cnn.com.", "class": "IN", "type": "A"}, "answer": [{"name": "www.cnn.com.", "class": "IN", "type": "CNAME", "ttl": 147, "data": "turner-tls.map.fastly.net."}, {"name": "turner-tls.map.fastly.net.", "class": "IN", "type": "A", "ttl": 5, "data": "151.101.1.67"}, {"name": "turner-tls.map.fastly.net.", "class": "IN", "type": "A", "ttl": 5, "data": "151.101.65.67"}, {"name": "turner-tls.map.fastly.net.", "class": "IN", "type": "A", "ttl": 5, "data": "151.101.129.67"}, {"name": "turner-tls.map.fastly.net.", "class": "IN", "type": "A", "ttl": 5, "data": "151.101.193.67"}], "query_time": 44, "server": "2600", "when": "Wed Mar 18 12:20:59 PDT 2020", "rcvd": 143}]'
expected = textwrap.dedent('''\
+----------+----------+-------+----------+---------+----------+----------+----------+----------+----------+----------+----------+--------+--------+
| opcode | server | id | status | flags | query_ | answer | author | additi | questi | answer | query_ | when | rcvd |
| | | | | | num | _num | ity_nu | onal_n | on | | time | | |
| | | | | | | | m | um | | | | | |
+==========+==========+=======+==========+=========+==========+==========+==========+==========+==========+==========+==========+========+========+
| QUERY | 2600 | 55658 | NOERRO | ['qr', | 1 | 5 | 0 | 1 | {'name | [{'nam | 44 | Wed Ma | 143 |
| | | | R | 'rd', | | | | | ': 'ww | e': 'w | | r 18 1 | |
| | | | | 'ra'] | | | | | w.cnn. | ww.cnn | | 2:20:5 | |
| | | | | | | | | | com.', | .com.' | | 9 PDT | |
| | | | | | | | | | 'clas | , 'cla | | 2020 | |
| | | | | | | | | | s': 'I | ss': ' | | | |
| | | | | | | | | | N', 't | IN', ' | | | |
| | | | | | | | | | ype': | type': | | | |
| | | | | | | | | | 'A'} | 'CNAM | | | |
| | | | | | | | | | | E', 't | | | |
| | | | | | | | | | | tl': 1 | | | |
| | | | | | | | | | | 47, 'd | | | |
| | | | | | | | | | | ata': | | | |
| | | | | | | | | | | 'turne | | | |
| | | | | | | | | | | r-tls. | | | |
| | | | | | | | | | | map.fa | | | |
| | | | | | | | | | | stly.n | | | |
| | | | | | | | | | | et.'}, | | | |
| | | | | | | | | | | {'nam | | | |
| | | | | | | | | | | e': 't | | | |
| | | | | | | | | | | urner- | | | |
| | | | | | | | | | | tls.ma | | | |
| | | | | | | | | | | p.fast | | | |
| | | | | | | | | | | ly.net | | | |
| | | | | | | | | | | .', 'c | | | |
| | | | | | | | | | | lass': | | | |
| | | | | | | | | | | 'IN', | | | |
| | | | | | | | | | | 'type | | | |
| | | | | | | | | | | ': 'A' | | | |
| | | | | | | | | | | , 'ttl | | | |
| | | | | | | | | | | ': 5, | | | |
| | | | | | | | | | | 'data' | | | |
| | | | | | | | | | | : '151 | | | |
| | | | | | | | | | | .101.1 | | | |
| | | | | | | | | | | .67'}, | | | |
| | | | | | | | | | | {'nam | | | |
| | | | | | | | | | | e': 't | | | |
| | | | | | | | | | | urner- | | | |
| | | | | | | | | | | tls.ma | | | |
| | | | | | | | | | | p.fast | | | |
| | | | | | | | | | | ly.net | | | |
| | | | | | | | | | | .', 'c | | | |
| | | | | | | | | | | lass': | | | |
| | | | | | | | | | | 'IN', | | | |
| | | | | | | | | | | 'type | | | |
| | | | | | | | | | | ': 'A' | | | |
| | | | | | | | | | | , 'ttl | | | |
| | | | | | | | | | | ': 5, | | | |
| | | | | | | | | | | 'data' | | | |
| | | | | | | | | | | : '151 | | | |
| | | | | | | | | | | .101.6 | | | |
| | | | | | | | | | | 5.67'} | | | |
| | | | | | | | | | | , {'na | | | |
| | | | | | | | | | | me': ' | | | |
| | | | | | | | | | | turner | | | |
| | | | | | | | | | | -tls.m | | | |
| | | | | | | | | | | ap.fas | | | |
| | | | | | | | | | | tly.ne | | | |
| | | | | | | | | | | t.', ' | | | |
| | | | | | | | | | | class' | | | |
| | | | | | | | | | | : 'IN' | | | |
| | | | | | | | | | | , 'typ | | | |
| | | | | | | | | | | e': 'A | | | |
| | | | | | | | | | | ', 'tt | | | |
| | | | | | | | | | | l': 5, | | | |
| | | | | | | | | | | 'data | | | |
| | | | | | | | | | | ': '15 | | | |
| | | | | | | | | | | 1.101. | | | |
| | | | | | | | | | | 129.67 | | | |
| | | | | | | | | | | '}, {' | | | |
| | | | | | | | | | | name': | | | |
| | | | | | | | | | | 'turn | | | |
| | | | | | | | | | | er-tls | | | |
| | | | | | | | | | | .map.f | | | |
| | | | | | | | | | | astly. | | | |
| | | | | | | | | | | net.', | | | |
| | | | | | | | | | | 'clas | | | |
| | | | | | | | | | | s': 'I | | | |
| | | | | | | | | | | N', 't | | | |
| | | | | | | | | | | ype': | | | |
| | | | | | | | | | | 'A', ' | | | |
| | | | | | | | | | | ttl': | | | |
| | | | | | | | | | | 5, 'da | | | |
| | | | | | | | | | | ta': ' | | | |
| | | | | | | | | | | 151.10 | | | |
| | | | | | | | | | | 1.193. | | | |
| | | | | | | | | | | 67'}] | | | |
+----------+----------+-------+----------+---------+----------+----------+----------+----------+----------+----------+----------+--------+--------+''')
self.assertEqual(jtbl.cli.make_table(input_data=stdin, columns=150), (self.SUCCESS, expected))
def test_jc_dig_150cols_t(self):
stdin = '[{"id": 55658, "opcode": "QUERY", "status": "NOERROR", "flags": ["qr", "rd", "ra"], "query_num": 1, "answer_num": 5, "authority_num": 0, "additional_num": 1, "question": {"name": "www.cnn.com.", "class": "IN", "type": "A"}, "answer": [{"name": "www.cnn.com.", "class": "IN", "type": "CNAME", "ttl": 147, "data": "turner-tls.map.fastly.net."}, {"name": "turner-tls.map.fastly.net.", "class": "IN", "type": "A", "ttl": 5, "data": "151.101.1.67"}, {"name": "turner-tls.map.fastly.net.", "class": "IN", "type": "A", "ttl": 5, "data": "151.101.65.67"}, {"name": "turner-tls.map.fastly.net.", "class": "IN", "type": "A", "ttl": 5, "data": "151.101.129.67"}, {"name": "turner-tls.map.fastly.net.", "class": "IN", "type": "A", "ttl": 5, "data": "151.101.193.67"}], "query_time": 44, "server": "2600", "when": "Wed Mar 18 12:20:59 PDT 2020", "rcvd": 143}]'
expected = textwrap.dedent('''\
opcode status server id flags query_nu answer_n authorit addition question answer query_ti when rcvd
-------- -------- -------- ----- -------- ---------- ---------- ---------- ---------- ---------- -------- ---------- ------- ------
QUERY NOERROR 2600 55658 ['qr', ' 1 5 0 1 {'name': [{'name' 44 Wed Mar 143''')
self.assertEqual(jtbl.cli.make_table(input_data=stdin, truncate=True, columns=150), (self.SUCCESS, expected))
def test_jc_dig_nowrap(self):
stdin = '[{"id": 55658, "opcode": "QUERY", "status": "NOERROR", "flags": ["qr", "rd", "ra"], "query_num": 1, "answer_num": 5, "authority_num": 0, "additional_num": 1, "question": {"name": "www.cnn.com.", "class": "IN", "type": "A"}, "answer": [{"name": "www.cnn.com.", "class": "IN", "type": "CNAME", "ttl": 147, "data": "turner-tls.map.fastly.net."}, {"name": "turner-tls.map.fastly.net.", "class": "IN", "type": "A", "ttl": 5, "data": "151.101.1.67"}, {"name": "turner-tls.map.fastly.net.", "class": "IN", "type": "A", "ttl": 5, "data": "151.101.65.67"}, {"name": "turner-tls.map.fastly.net.", "class": "IN", "type": "A", "ttl": 5, "data": "151.101.129.67"}, {"name": "turner-tls.map.fastly.net.", "class": "IN", "type": "A", "ttl": 5, "data": "151.101.193.67"}], "query_time": 44, "server": "2600", "when": "Wed Mar 18 12:20:59 PDT 2020", "rcvd": 143}]'
expected = textwrap.dedent('''\
id opcode status flags query_num answer_num authority_num additional_num question answer query_time server when rcvd
----- -------- -------- ------------------ ----------- ------------ --------------- ---------------- ---------------------------------------------------- ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- ------------ -------- ---------------------------- ------
55658 QUERY NOERROR ['qr', 'rd', 'ra'] 1 5 0 1 {'name': 'www.cnn.com.', 'class': 'IN', 'type': 'A'} [{'name': 'www.cnn.com.', 'class': 'IN', 'type': 'CNAME', 'ttl': 147, 'data': 'turner-tls.map.fastly.net.'}, {'name': 'turner-tls.map.fastly.net.', 'class': 'IN', 'type': 'A', 'ttl': 5, 'data': '151.101.1.67'}, {'name': 'turner-tls.map.fastly.net.', 'class': 'IN', 'type': 'A', 'ttl': 5, 'data': '151.101.65.67'}, {'name': 'turner-tls.map.fastly.net.', 'class': 'IN', 'type': 'A', 'ttl': 5, 'data': '151.101.129.67'}, {'name': 'turner-tls.map.fastly.net.', 'class': 'IN', 'type': 'A', 'ttl': 5, 'data': '151.101.193.67'}] 44 2600 Wed Mar 18 12:20:59 PDT 2020 143''')
self.assertEqual(jtbl.cli.make_table(input_data=stdin, nowrap=True, columns=150), (self.SUCCESS, expected))
def test_jc_dig_nowrap_t_cols_80(self):
"""test that nowrap overrides both truncate and columns"""
stdin = '[{"id": 55658, "opcode": "QUERY", "status": "NOERROR", "flags": ["qr", "rd", "ra"], "query_num": 1, "answer_num": 5, "authority_num": 0, "additional_num": 1, "question": {"name": "www.cnn.com.", "class": "IN", "type": "A"}, "answer": [{"name": "www.cnn.com.", "class": "IN", "type": "CNAME", "ttl": 147, "data": "turner-tls.map.fastly.net."}, {"name": "turner-tls.map.fastly.net.", "class": "IN", "type": "A", "ttl": 5, "data": "151.101.1.67"}, {"name": "turner-tls.map.fastly.net.", "class": "IN", "type": "A", "ttl": 5, "data": "151.101.65.67"}, {"name": "turner-tls.map.fastly.net.", "class": "IN", "type": "A", "ttl": 5, "data": "151.101.129.67"}, {"name": "turner-tls.map.fastly.net.", "class": "IN", "type": "A", "ttl": 5, "data": "151.101.193.67"}], "query_time": 44, "server": "2600", "when": "Wed Mar 18 12:20:59 PDT 2020", "rcvd": 143}]'
expected = textwrap.dedent('''\
id opcode status flags query_num answer_num authority_num additional_num question answer query_time server when rcvd
----- -------- -------- ------------------ ----------- ------------ --------------- ---------------- ---------------------------------------------------- ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- ------------ -------- ---------------------------- ------
55658 QUERY NOERROR ['qr', 'rd', 'ra'] 1 5 0 1 {'name': 'www.cnn.com.', 'class': 'IN', 'type': 'A'} [{'name': 'www.cnn.com.', 'class': 'IN', 'type': 'CNAME', 'ttl': 147, 'data': 'turner-tls.map.fastly.net.'}, {'name': 'turner-tls.map.fastly.net.', 'class': 'IN', 'type': 'A', 'ttl': 5, 'data': '151.101.1.67'}, {'name': 'turner-tls.map.fastly.net.', 'class': 'IN', 'type': 'A', 'ttl': 5, 'data': '151.101.65.67'}, {'name': 'turner-tls.map.fastly.net.', 'class': 'IN', 'type': 'A', 'ttl': 5, 'data': '151.101.129.67'}, {'name': 'turner-tls.map.fastly.net.', 'class': 'IN', 'type': 'A', 'ttl': 5, 'data': '151.101.193.67'}] 44 2600 Wed Mar 18 12:20:59 PDT 2020 143''')
self.assertEqual(jtbl.cli.make_table(input_data=stdin, nowrap=True, columns=80, truncate=True), (self.SUCCESS, expected))
def test_jc_dig_answer(self):
stdin = '[{"name":"www.cnn.com.","class":"IN","type":"CNAME","ttl":147,"data":"turner-tls.map.fastly.net."},{"name":"turner-tls.map.fastly.net.","class":"IN","type":"A","ttl":5,"data":"151.101.1.67"},{"name":"turner-tls.map.fastly.net.","class":"IN","type":"A","ttl":5,"data":"151.101.65.67"},{"name":"turner-tls.map.fastly.net.","class":"IN","type":"A","ttl":5,"data":"151.101.129.67"},{"name":"turner-tls.map.fastly.net.","class":"IN","type":"A","ttl":5,"data":"151.101.193.67"}]'
expected = textwrap.dedent('''\
name class type ttl data
-------------------------- ------- ------ ----- --------------------------
www.cnn.com. IN CNAME 147 turner-tls.map.fastly.net.
turner-tls.map.fastly.net. IN A 5 151.101.1.67
turner-tls.map.fastly.net. IN A 5 151.101.65.67
turner-tls.map.fastly.net. IN A 5 151.101.129.67
turner-tls.map.fastly.net. IN A 5 151.101.193.67''')
self.assertEqual(jtbl.cli.make_table(input_data=stdin, columns=80), (self.SUCCESS, expected))
def test_json_lines(self):
"""test JSON Lines data"""
stdin = textwrap.dedent('''\
{"name":"lo0","type":null,"ipv4_addr":"127.0.0.1","ipv4_mask":"255.0.0.0"}
{"name":"gif0","type":null,"ipv4_addr":null,"ipv4_mask":null}
{"name":"stf0","type":null,"ipv4_addr":null,"ipv4_mask":null}
{"name":"XHC0","type":null,"ipv4_addr":null,"ipv4_mask":null}
{"name":"XHC20","type":null,"ipv4_addr":null,"ipv4_mask":null}
{"name":"VHC128","type":null,"ipv4_addr":null,"ipv4_mask":null}
{"name":"XHC1","type":null,"ipv4_addr":null,"ipv4_mask":null}
{"name":"en5","type":null,"ipv4_addr":null,"ipv4_mask":null}
{"name":"ap1","type":null,"ipv4_addr":null,"ipv4_mask":null}
{"name":"en0","type":null,"ipv4_addr":"192.168.1.221","ipv4_mask":"255.255.255.0"}
{"name":"p2p0","type":null,"ipv4_addr":null,"ipv4_mask":null}
{"name":"awdl0","type":null,"ipv4_addr":null,"ipv4_mask":null}
{"name":"en1","type":null,"ipv4_addr":null,"ipv4_mask":null}
{"name":"en2","type":null,"ipv4_addr":null,"ipv4_mask":null}
{"name":"en3","type":null,"ipv4_addr":null,"ipv4_mask":null}
{"name":"en4","type":null,"ipv4_addr":null,"ipv4_mask":null}
{"name":"bridge0","type":null,"ipv4_addr":null,"ipv4_mask":null}
{"name":"utun0","type":null,"ipv4_addr":null,"ipv4_mask":null}
{"name":"utun1","type":null,"ipv4_addr":null,"ipv4_mask":null}
{"name":"utun2","type":null,"ipv4_addr":null,"ipv4_mask":null}
{"name":"utun3","type":null,"ipv4_addr":null,"ipv4_mask":null}
{"name":"utun4","type":null,"ipv4_addr":null,"ipv4_mask":null}
{"name":"vmnet1","type":null,"ipv4_addr":"192.168.101.1","ipv4_mask":"255.255.255.0"}
{"name":"vmnet8","type":null,"ipv4_addr":"192.168.71.1","ipv4_mask":"255.255.255.0"}''')
expected = textwrap.dedent('''\
name type ipv4_addr ipv4_mask
------- ------ ------------- -------------
lo0 127.0.0.1 255.0.0.0
gif0
stf0
XHC0
XHC20
VHC128
XHC1
en5
ap1
en0 192.168.1.221 255.255.255.0
p2p0
awdl0
en1
en2
en3
en4
bridge0
utun0
utun1
utun2
utun3
utun4
vmnet1 192.168.101.1 255.255.255.0
vmnet8 192.168.71.1 255.255.255.0''')
self.assertEqual(jtbl.cli.make_table(input_data=stdin), (self.SUCCESS, expected))
# Allow running this test module directly (e.g. `python tests.py`);
# under a test runner (pytest / python -m unittest) this guard is skipped.
if __name__ == '__main__':
    unittest.main()
| 116.808411
| 864
| 0.173461
| 2,330
| 49,994
| 3.645494
| 0.116738
| 0.035319
| 0.060866
| 0.095361
| 0.774194
| 0.766188
| 0.753944
| 0.737226
| 0.734989
| 0.636332
| 0
| 0.06084
| 0.631776
| 49,994
| 427
| 865
| 117.081967
| 0.400565
| 0.00146
| 0
| 0.383033
| 0
| 0.082262
| 0.936586
| 0.108375
| 0
| 0
| 0
| 0
| 0.03856
| 1
| 0.041131
| false
| 0
| 0.007712
| 0
| 0.051414
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
70a18e0916fd434d417dcc6ca67ebfa9f0b09899
| 30
|
py
|
Python
|
utils.py
|
andreaskleinl/capacity_transmissionline_simulation
|
023358d03cce5a4c7768eee597d0d6381742e271
|
[
"Apache-2.0"
] | null | null | null |
utils.py
|
andreaskleinl/capacity_transmissionline_simulation
|
023358d03cce5a4c7768eee597d0d6381742e271
|
[
"Apache-2.0"
] | null | null | null |
utils.py
|
andreaskleinl/capacity_transmissionline_simulation
|
023358d03cce5a4c7768eee597d0d6381742e271
|
[
"Apache-2.0"
] | null | null | null |
def funny(x, y):
    """Return ``x + y``.

    Mirrors the semantics of the ``+`` operator, so it works for any
    operand pair that supports it: numbers are summed, while sequences
    (strings, lists, tuples) are concatenated.

    :param x: left operand of ``+``
    :param y: right operand of ``+``
    :returns: the result of ``x + y``
    :raises TypeError: if ``x`` and ``y`` do not support ``+`` together
    """
    return x + y
| 15
| 15
| 0.6
| 7
| 30
| 2.571429
| 0.714286
| 0.222222
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.233333
| 30
| 2
| 16
| 15
| 0.782609
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0
| 0
| 0.5
| 1
| 0
| 1
| 1
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
569f140d78f25e52085c587978a86ff31c34d8bd
| 140,512
|
py
|
Python
|
contrib/qa/bugzilla.py
|
crystalfontz/openembedded
|
8af3377bf132113baa0766fd9021e0ce162a0944
|
[
"MIT"
] | 70
|
2015-02-23T04:18:51.000Z
|
2022-03-15T02:01:27.000Z
|
contrib/qa/bugzilla.py
|
buglabs/oe-buglabs
|
b8a4c4b1358214cd3ac1cf6f85154e9c62b16ce7
|
[
"MIT"
] | 1
|
2020-09-07T15:33:56.000Z
|
2020-09-07T15:33:56.000Z
|
contrib/qa/bugzilla.py
|
buglabs/oe-buglabs
|
b8a4c4b1358214cd3ac1cf6f85154e9c62b16ce7
|
[
"MIT"
] | 88
|
2015-02-11T12:03:16.000Z
|
2022-03-30T11:33:42.000Z
|
#
# BugZilla query page scanner to work with ancient
# Debian Stable bugzilla installationss
#
# This includes three test sites
# site contains one bug entry
# all_bugs contains all Openmoko bugs as of \today
# no_bug is a query which showed no bug
#
from HTMLParser import HTMLParser
class BugQueryExtractor(HTMLParser):
    """SAX-style scraper for an (ancient) Bugzilla ``buglist.cgi`` HTML page.

    Python 2 code (note the ``print`` statements and the Python 2
    ``HTMLParser`` base class).  The parser walks the bug-list table as a
    small state machine: a ``<tr>`` whose ``class`` attribute names a known
    severity (bz_normal, bz_blocker, ...) together with a ``bz_P`` priority
    marks the start of a bug row, and every subsequent ``<td>`` advances
    ``self.state`` by one column.  Only the bug-number column and the
    status column are captured; the other STATE_FOUND_* values exist just
    to keep count of the columns in between.  Feed the page via
    ``feed(html)``, then call :meth:`result` for the collected
    ``(bug_number, status)`` tuples.
    """

    # Column-position states.  Each <td> inside a matched row bumps
    # self.state by one, so the numeric order must match the table layout
    # (ID, severity, priority, platform, owner, status, resolution, summary).
    STATE_NONE = 0
    STATE_FOUND_TR = 1
    STATE_FOUND_NUMBER = 2
    STATE_FOUND_PRIO = 3
    STATE_FOUND_PRIO2 = 4
    STATE_FOUND_NAME = 5
    STATE_FOUND_PLATFORM = 6
    STATE_FOUND_STATUS = 7
    STATE_FOUND_WHATEVER = 8 # I don't know this field
    STATE_FOUND_DESCRIPTION =9

    def __init__(self):
        # Python 2 HTMLParser is an old-style class, hence the explicit
        # base-class __init__ call instead of super().
        HTMLParser.__init__(self)
        self.state = self.STATE_NONE
        self.bug = None    # bug number of the row currently being parsed
        self.bugs = []     # accumulated (bug_number, status) tuples

    def handle_starttag(self, tag, attr):
        """Detect the start of a bug row (<tr>) or its first column (<td>)."""
        if self.state == self.STATE_NONE and tag.lower() == "tr":
            # check for bz_normal and bz_P2 as indicator in buglist.cgi
            # use 'all' and 'map' on python2.5
            if len(attr) == 1 and attr[0][0] == 'class' and \
                ('bz_normal' in attr[0][1] or 'bz_blocker' in attr[0][1] or 'bz_enhancement' in attr[0][1] or 'bz_major' in attr[0][1] or 'bz_minor' in attr[0][1] or 'bz_trivial' in attr[0][1] or 'bz_critical' in attr[0][1] or 'bz_wishlist' in attr[0][1]) \
                and 'bz_P' in attr[0][1]:
                print "Found tr %s %s" % (tag, attr)
                self.state = self.STATE_FOUND_TR
        elif self.state == self.STATE_FOUND_TR and tag.lower() == "td":
            # First cell of a matched row; later cells advance in handle_endtag.
            self.state += 1

    def handle_endtag(self, tag):
        """Close out a bug row (</tr>) or step to the next column (</td>)."""
        if tag.lower() == "tr":
            print "Going back"
            if self.state != self.STATE_NONE:
                # Row complete: record what we captured and reset for the
                # next row.
                # NOTE(review): self.status is only assigned in handle_data
                # when the status column held text — presumably every matched
                # row has one; verify against the scraped page.
                self.bugs.append( (self.bug,self.status) )
                self.state = self.STATE_NONE
                self.bug = None
        if self.state > 1 and tag.lower() == "td":
            print "Next TD"
            self.state += 1

    def handle_data(self,data):
        """Capture cell text for the columns we care about (number, status)."""
        data = data.strip()
        # skip garbage
        if len(data) == 0:
            return
        if self.state == self.STATE_FOUND_NUMBER:
            """
            #1995 in bugs.oe.org has [SEC] additionally to the number and we want to ignore it
            """
            print "Bug Number '%s'" % data.strip()
            if self.bug:
                # A number was already captured for this row; ignore extra
                # text chunks (e.g. the "[SEC]" marker mentioned above).
                print "Ignoring bug data"
                return
            self.bug = data
        elif self.state == self.STATE_FOUND_STATUS:
            print "Status Name '%s'" % data.strip()
            self.status = data

    def result(self):
        """Return the list of (bug_number, status) tuples collected so far."""
        print "Found bugs"
        return self.bugs
#
bugs_openmoko = """<!-- 1.0@bugzilla.org -->
<!-- 1.0@bugzilla.org -->
<!DOCTYPE html PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN">
<html>
<head>
<meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
<title>Bug List</title>
<link href="/style/style.css" rel="stylesheet" type="text/css" />
<link href="/bugzilla/css/buglist.css" rel="stylesheet" type="text/css">
</head>
<body bgcolor="#FFFFFF" onload="">
<!-- 1.0@bugzilla.org -->
<div id="header">
<a href="http://bugzilla.openmoko.org/cgi-bin/bugzilla/" id="site_logo"><img src="/style/images/openmoko_logo.png" alt="openmoko.org" /></a>
<div id="main_navigation">
<ul>
<li><a href="http://www.openmoko.org/" class="nav_home"><span>Home</span></a></li>
<li><a href="http://wiki.openmoko.org/" class="nav_wiki"><span>Wiki</span></a></li>
<li><a href="http://bugzilla.openmoko.org/" class="nav_bugzilla selected"><span>Bugzilla</span></a></li>
<li><a href="http://planet.openmoko.org/" class="nav_planet"><span>Planet</span></a></li>
<li><a href="http://projects.openmoko.org/" class="nav_projects"><span>Projects</span></a></li>
<li><a href="http://lists.openmoko.org/" class="nav_lists"><span>Lists</span></a></li>
</ul>
</div>
</div>
<div class="page_title">
<strong>Bug List</strong>
</div>
<div class="container">
<div align="center">
<b>Fri Mar 16 20:51:52 CET 2007</b><br>
<a href="quips.cgi"><i>It was a time of great struggle and heroic deeds
</i></a>
</div>
<hr>
282 bugs found.
<!-- 1.0@bugzilla.org -->
<table class="bz_buglist" cellspacing="0" cellpadding="4" width="100%">
<colgroup>
<col class="bz_id_column">
<col class="bz_severity_column">
<col class="bz_priority_column">
<col class="bz_platform_column">
<col class="bz_owner_column">
<col class="bz_status_column">
<col class="bz_resolution_column">
<col class="bz_summary_column">
</colgroup>
<tr align="left">
<th colspan="1">
<a href="buglist.cgi?short_desc_type=allwordssubstr&short_desc=&long_desc_type=allwordssubstr&long_desc=&bug_file_loc_type=allwordssubstr&bug_file_loc=&bug_status=UNCONFIRMED&bug_status=NEW&bug_status=ASSIGNED&bug_status=REOPENED&bug_status=RESOLVED&bug_status=VERIFIED&bug_status=CLOSED&emailassigned_to1=1&emailtype1=substring&email1=&emailassigned_to2=1&emailreporter2=1&emailcc2=1&emailtype2=substring&email2=&bugidtype=include&bug_id=&votes=&changedin=&chfieldfrom=&chfieldto=Now&chfieldvalue=&field0-0-0=noop&type0-0-0=noop&value0-0-0=&order=bugs.bug_id">ID</a>
</th>
<th colspan="1">
<a href="buglist.cgi?short_desc_type=allwordssubstr&short_desc=&long_desc_type=allwordssubstr&long_desc=&bug_file_loc_type=allwordssubstr&bug_file_loc=&bug_status=UNCONFIRMED&bug_status=NEW&bug_status=ASSIGNED&bug_status=REOPENED&bug_status=RESOLVED&bug_status=VERIFIED&bug_status=CLOSED&emailassigned_to1=1&emailtype1=substring&email1=&emailassigned_to2=1&emailreporter2=1&emailcc2=1&emailtype2=substring&email2=&bugidtype=include&bug_id=&votes=&changedin=&chfieldfrom=&chfieldto=Now&chfieldvalue=&field0-0-0=noop&type0-0-0=noop&value0-0-0=&order=bugs.bug_severity,bugs.bug_id">Sev</a>
</th><th colspan="1">
<a href="buglist.cgi?short_desc_type=allwordssubstr&short_desc=&long_desc_type=allwordssubstr&long_desc=&bug_file_loc_type=allwordssubstr&bug_file_loc=&bug_status=UNCONFIRMED&bug_status=NEW&bug_status=ASSIGNED&bug_status=REOPENED&bug_status=RESOLVED&bug_status=VERIFIED&bug_status=CLOSED&emailassigned_to1=1&emailtype1=substring&email1=&emailassigned_to2=1&emailreporter2=1&emailcc2=1&emailtype2=substring&email2=&bugidtype=include&bug_id=&votes=&changedin=&chfieldfrom=&chfieldto=Now&chfieldvalue=&field0-0-0=noop&type0-0-0=noop&value0-0-0=&order=bugs.priority,bugs.bug_id">Pri</a>
</th><th colspan="1">
<a href="buglist.cgi?short_desc_type=allwordssubstr&short_desc=&long_desc_type=allwordssubstr&long_desc=&bug_file_loc_type=allwordssubstr&bug_file_loc=&bug_status=UNCONFIRMED&bug_status=NEW&bug_status=ASSIGNED&bug_status=REOPENED&bug_status=RESOLVED&bug_status=VERIFIED&bug_status=CLOSED&emailassigned_to1=1&emailtype1=substring&email1=&emailassigned_to2=1&emailreporter2=1&emailcc2=1&emailtype2=substring&email2=&bugidtype=include&bug_id=&votes=&changedin=&chfieldfrom=&chfieldto=Now&chfieldvalue=&field0-0-0=noop&type0-0-0=noop&value0-0-0=&order=bugs.rep_platform,bugs.bug_id">Plt</a>
</th><th colspan="1">
<a href="buglist.cgi?short_desc_type=allwordssubstr&short_desc=&long_desc_type=allwordssubstr&long_desc=&bug_file_loc_type=allwordssubstr&bug_file_loc=&bug_status=UNCONFIRMED&bug_status=NEW&bug_status=ASSIGNED&bug_status=REOPENED&bug_status=RESOLVED&bug_status=VERIFIED&bug_status=CLOSED&emailassigned_to1=1&emailtype1=substring&email1=&emailassigned_to2=1&emailreporter2=1&emailcc2=1&emailtype2=substring&email2=&bugidtype=include&bug_id=&votes=&changedin=&chfieldfrom=&chfieldto=Now&chfieldvalue=&field0-0-0=noop&type0-0-0=noop&value0-0-0=&order=map_assigned_to.login_name,bugs.bug_id">Owner</a>
</th><th colspan="1">
<a href="buglist.cgi?short_desc_type=allwordssubstr&short_desc=&long_desc_type=allwordssubstr&long_desc=&bug_file_loc_type=allwordssubstr&bug_file_loc=&bug_status=UNCONFIRMED&bug_status=NEW&bug_status=ASSIGNED&bug_status=REOPENED&bug_status=RESOLVED&bug_status=VERIFIED&bug_status=CLOSED&emailassigned_to1=1&emailtype1=substring&email1=&emailassigned_to2=1&emailreporter2=1&emailcc2=1&emailtype2=substring&email2=&bugidtype=include&bug_id=&votes=&changedin=&chfieldfrom=&chfieldto=Now&chfieldvalue=&field0-0-0=noop&type0-0-0=noop&value0-0-0=&order=bugs.bug_status,bugs.bug_id">State</a>
</th><th colspan="1">
<a href="buglist.cgi?short_desc_type=allwordssubstr&short_desc=&long_desc_type=allwordssubstr&long_desc=&bug_file_loc_type=allwordssubstr&bug_file_loc=&bug_status=UNCONFIRMED&bug_status=NEW&bug_status=ASSIGNED&bug_status=REOPENED&bug_status=RESOLVED&bug_status=VERIFIED&bug_status=CLOSED&emailassigned_to1=1&emailtype1=substring&email1=&emailassigned_to2=1&emailreporter2=1&emailcc2=1&emailtype2=substring&email2=&bugidtype=include&bug_id=&votes=&changedin=&chfieldfrom=&chfieldto=Now&chfieldvalue=&field0-0-0=noop&type0-0-0=noop&value0-0-0=&order=bugs.resolution,bugs.bug_id">Result</a>
</th><th colspan="1">
<a href="buglist.cgi?short_desc_type=allwordssubstr&short_desc=&long_desc_type=allwordssubstr&long_desc=&bug_file_loc_type=allwordssubstr&bug_file_loc=&bug_status=UNCONFIRMED&bug_status=NEW&bug_status=ASSIGNED&bug_status=REOPENED&bug_status=RESOLVED&bug_status=VERIFIED&bug_status=CLOSED&emailassigned_to1=1&emailtype1=substring&email1=&emailassigned_to2=1&emailreporter2=1&emailcc2=1&emailtype2=substring&email2=&bugidtype=include&bug_id=&votes=&changedin=&chfieldfrom=&chfieldto=Now&chfieldvalue=&field0-0-0=noop&type0-0-0=noop&value0-0-0=&order=bugs.short_desc,bugs.bug_id">Summary</a>
</th>
</tr>
<tr class="bz_critical bz_P2 ">
<td>
<a href="show_bug.cgi?id=1">1</a>
</td>
<td><nobr>cri</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>Neo</nobr>
</td>
<td><nobr>laforge@openmoko.org</nobr>
</td>
<td><nobr>CLOS</nobr>
</td>
<td><nobr>FIXE</nobr>
</td>
<td>kernel is running way too slow
</td>
</tr>
<tr class="bz_normal bz_P2 ">
<td>
<a href="show_bug.cgi?id=2">2</a>
</td>
<td><nobr>nor</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>Neo</nobr>
</td>
<td><nobr>laforge@openmoko.org</nobr>
</td>
<td><nobr>RESO</nobr>
</td>
<td><nobr>FIXE</nobr>
</td>
<td>SD card driver unstable
</td>
</tr>
<tr class="bz_normal bz_P2 ">
<td>
<a href="show_bug.cgi?id=3">3</a>
</td>
<td><nobr>nor</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>Neo</nobr>
</td>
<td><nobr>sean_chiang@fic.com.tw</nobr>
</td>
<td><nobr>CLOS</nobr>
</td>
<td><nobr>FIXE</nobr>
</td>
<td>Debug Board trying to control GSM_EN / FA_19
</td>
</tr>
<tr class="bz_normal bz_P2 ">
<td>
<a href="show_bug.cgi?id=4">4</a>
</td>
<td><nobr>nor</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>Neo</nobr>
</td>
<td><nobr>laforge@openmoko.org</nobr>
</td>
<td><nobr>RESO</nobr>
</td>
<td><nobr>FIXE</nobr>
</td>
<td>random crashes of gsmd
</td>
</tr>
<tr class="bz_normal bz_P2 ">
<td>
<a href="show_bug.cgi?id=5">5</a>
</td>
<td><nobr>nor</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>Neo</nobr>
</td>
<td><nobr>laforge@openmoko.org</nobr>
</td>
<td><nobr>RESO</nobr>
</td>
<td><nobr>FIXE</nobr>
</td>
<td>call progress information is lacking
</td>
</tr>
<tr class="bz_normal bz_P2 ">
<td>
<a href="show_bug.cgi?id=6">6</a>
</td>
<td><nobr>nor</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>Neo</nobr>
</td>
<td><nobr>sean_chiang@fic.com.tw</nobr>
</td>
<td><nobr>NEW</nobr>
</td>
<td><nobr></nobr>
</td>
<td>GSM_EN should be called nGSM_EN
</td>
</tr>
<tr class="bz_normal bz_P2 ">
<td>
<a href="show_bug.cgi?id=7">7</a>
</td>
<td><nobr>nor</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>Neo</nobr>
</td>
<td><nobr>laforge@openmoko.org</nobr>
</td>
<td><nobr>CLOS</nobr>
</td>
<td><nobr>FIXE</nobr>
</td>
<td>PMU RTC driver date/time conversion is erroneous
</td>
</tr>
<tr class="bz_critical bz_P5 ">
<td>
<a href="show_bug.cgi?id=8">8</a>
</td>
<td><nobr>cri</nobr>
</td>
<td><nobr>P5</nobr>
</td>
<td><nobr>Neo</nobr>
</td>
<td><nobr>laforge@openmoko.org</nobr>
</td>
<td><nobr>RESO</nobr>
</td>
<td><nobr>FIXE</nobr>
</td>
<td>SD/MMC: Card sometimes not detected
</td>
</tr>
<tr class="bz_normal bz_P2 ">
<td>
<a href="show_bug.cgi?id=9">9</a>
</td>
<td><nobr>nor</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>Neo</nobr>
</td>
<td><nobr>laforge@openmoko.org</nobr>
</td>
<td><nobr>ASSI</nobr>
</td>
<td><nobr></nobr>
</td>
<td>Boot speed too low (kernel part)
</td>
</tr>
<tr class="bz_enhancement bz_P2 ">
<td>
<a href="show_bug.cgi?id=10">10</a>
</td>
<td><nobr>enh</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>Neo</nobr>
</td>
<td><nobr>laforge@openmoko.org</nobr>
</td>
<td><nobr>CLOS</nobr>
</td>
<td><nobr>FIXE</nobr>
</td>
<td>u-boot support for usb-serial lacking
</td>
</tr>
<tr class="bz_blocker bz_P2 ">
<td>
<a href="show_bug.cgi?id=11">11</a>
</td>
<td><nobr>blo</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>Neo</nobr>
</td>
<td><nobr>ken_zhao@fic-sh.com.cn</nobr>
</td>
<td><nobr>RESO</nobr>
</td>
<td><nobr>FIXE</nobr>
</td>
<td>u-boot lacks USB DFU support
</td>
</tr>
<tr class="bz_normal bz_P2 ">
<td>
<a href="show_bug.cgi?id=12">12</a>
</td>
<td><nobr>nor</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>Neo</nobr>
</td>
<td><nobr>gordon_hsu@fic-sh.com.cn</nobr>
</td>
<td><nobr>NEW</nobr>
</td>
<td><nobr></nobr>
</td>
<td>Boot speed too low (bootloader part)
</td>
</tr>
<tr class="bz_normal bz_P2 ">
<td>
<a href="show_bug.cgi?id=13">13</a>
</td>
<td><nobr>nor</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>Neo</nobr>
</td>
<td><nobr>teddy@fic-sh.com.cn</nobr>
</td>
<td><nobr>RESO</nobr>
</td>
<td><nobr>FIXE</nobr>
</td>
<td>power button should not immediately react
</td>
</tr>
<tr class="bz_minor bz_P2 ">
<td>
<a href="show_bug.cgi?id=14">14</a>
</td>
<td><nobr>min</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>Neo</nobr>
</td>
<td><nobr>werner@openmoko.org</nobr>
</td>
<td><nobr>RESO</nobr>
</td>
<td><nobr>FIXE</nobr>
</td>
<td>bootloader should display startup image before booting th...
</td>
</tr>
<tr class="bz_normal bz_P2 ">
<td>
<a href="show_bug.cgi?id=15">15</a>
</td>
<td><nobr>nor</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>Neo</nobr>
</td>
<td><nobr>laforge@openmoko.org</nobr>
</td>
<td><nobr>NEW</nobr>
</td>
<td><nobr></nobr>
</td>
<td>kernel oops when unloading g_ether
</td>
</tr>
<tr class="bz_blocker bz_P2 ">
<td>
<a href="show_bug.cgi?id=16">16</a>
</td>
<td><nobr>blo</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>Neo</nobr>
</td>
<td><nobr>sean_chiang@fic.com.tw</nobr>
</td>
<td><nobr>RESO</nobr>
</td>
<td><nobr>FIXE</nobr>
</td>
<td>bluetooth pullup / pulldown resistors
</td>
</tr>
<tr class="bz_normal bz_P2 ">
<td>
<a href="show_bug.cgi?id=17">17</a>
</td>
<td><nobr>nor</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>Neo</nobr>
</td>
<td><nobr>sean_chiang@fic.com.tw</nobr>
</td>
<td><nobr>RESO</nobr>
</td>
<td><nobr>FIXE</nobr>
</td>
<td>microSD socket still has mechanical contact problems
</td>
</tr>
<tr class="bz_normal bz_P2 ">
<td>
<a href="show_bug.cgi?id=18">18</a>
</td>
<td><nobr>nor</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>Neo</nobr>
</td>
<td><nobr>mickey@vanille-media.de</nobr>
</td>
<td><nobr>RESO</nobr>
</td>
<td><nobr>FIXE</nobr>
</td>
<td>OE build of u_boot with CVSDATE 20061030 uses latest git ...
</td>
</tr>
<tr class="bz_minor bz_P2 ">
<td>
<a href="show_bug.cgi?id=19">19</a>
</td>
<td><nobr>min</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>Neo</nobr>
</td>
<td><nobr>teddy@fic-sh.com.cn</nobr>
</td>
<td><nobr>RESO</nobr>
</td>
<td><nobr>FIXE</nobr>
</td>
<td>"reboot" doesn't work
</td>
</tr>
<tr class="bz_critical bz_P2 ">
<td>
<a href="show_bug.cgi?id=20">20</a>
</td>
<td><nobr>cri</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>Neo</nobr>
</td>
<td><nobr>laforge@openmoko.org</nobr>
</td>
<td><nobr>RESO</nobr>
</td>
<td><nobr>FIXE</nobr>
</td>
<td>connection status
</td>
</tr>
<tr class="bz_blocker bz_P3 ">
<td>
<a href="show_bug.cgi?id=21">21</a>
</td>
<td><nobr>blo</nobr>
</td>
<td><nobr>P3</nobr>
</td>
<td><nobr>Neo</nobr>
</td>
<td><nobr>sean_chiang@fic.com.tw</nobr>
</td>
<td><nobr>NEW</nobr>
</td>
<td><nobr></nobr>
</td>
<td>sms function missing
</td>
</tr>
<tr class="bz_normal bz_P2 ">
<td>
<a href="show_bug.cgi?id=22">22</a>
</td>
<td><nobr>nor</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>Neo</nobr>
</td>
<td><nobr>laforge@openmoko.org</nobr>
</td>
<td><nobr>RESO</nobr>
</td>
<td><nobr>FIXE</nobr>
</td>
<td>outgoing call generates 'segmentation fault' when the pee...
</td>
</tr>
<tr class="bz_normal bz_P2 ">
<td>
<a href="show_bug.cgi?id=23">23</a>
</td>
<td><nobr>nor</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>Neo</nobr>
</td>
<td><nobr>laforge@openmoko.org</nobr>
</td>
<td><nobr>RESO</nobr>
</td>
<td><nobr>FIXE</nobr>
</td>
<td>dtmf support not available now
</td>
</tr>
<tr class="bz_wishlist bz_P2 ">
<td>
<a href="show_bug.cgi?id=24">24</a>
</td>
<td><nobr>wis</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>Neo</nobr>
</td>
<td><nobr>laforge@openmoko.org</nobr>
</td>
<td><nobr>NEW</nobr>
</td>
<td><nobr></nobr>
</td>
<td>libgsmd/misc.h: lgsm_get_signal_quality()
</td>
</tr>
<tr class="bz_normal bz_P2 ">
<td>
<a href="show_bug.cgi?id=25">25</a>
</td>
<td><nobr>nor</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>Neo</nobr>
</td>
<td><nobr>davewu01@seed.net.tw</nobr>
</td>
<td><nobr>ASSI</nobr>
</td>
<td><nobr></nobr>
</td>
<td>GtkSpinBox unfinished
</td>
</tr>
<tr class="bz_normal bz_P2 ">
<td>
<a href="show_bug.cgi?id=26">26</a>
</td>
<td><nobr>nor</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>Neo</nobr>
</td>
<td><nobr>ken_zhao@fic-sh.com.cn</nobr>
</td>
<td><nobr>NEW</nobr>
</td>
<td><nobr></nobr>
</td>
<td>Pixmap Engine and Shadows
</td>
</tr>
<tr class="bz_normal bz_P2 ">
<td>
<a href="show_bug.cgi?id=27">27</a>
</td>
<td><nobr>nor</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>Neo</nobr>
</td>
<td><nobr>ken_zhao@fic-sh.com.cn</nobr>
</td>
<td><nobr>ASSI</nobr>
</td>
<td><nobr></nobr>
</td>
<td>Labels on GtkButton don't appear centered
</td>
</tr>
<tr class="bz_normal bz_P2 ">
<td>
<a href="show_bug.cgi?id=28">28</a>
</td>
<td><nobr>nor</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>Neo</nobr>
</td>
<td><nobr>ken_zhao@fic-sh.com.cn</nobr>
</td>
<td><nobr>NEW</nobr>
</td>
<td><nobr></nobr>
</td>
<td>GtkComboBox styling woes
</td>
</tr>
<tr class="bz_normal bz_P2 ">
<td>
<a href="show_bug.cgi?id=29">29</a>
</td>
<td><nobr>nor</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>Neo</nobr>
</td>
<td><nobr>ken_zhao@fic-sh.com.cn</nobr>
</td>
<td><nobr>RESO</nobr>
</td>
<td><nobr>FIXE</nobr>
</td>
<td>GtkProgressBar styling woes
</td>
</tr>
<tr class="bz_normal bz_P2 ">
<td>
<a href="show_bug.cgi?id=30">30</a>
</td>
<td><nobr>nor</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>Neo</nobr>
</td>
<td><nobr>mickey@vanille-media.de</nobr>
</td>
<td><nobr>REOP</nobr>
</td>
<td><nobr></nobr>
</td>
<td>Touchscreen emits bogus events under X
</td>
</tr>
<tr class="bz_critical bz_P2 ">
<td>
<a href="show_bug.cgi?id=31">31</a>
</td>
<td><nobr>cri</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>Neo</nobr>
</td>
<td><nobr>laforge@openmoko.org</nobr>
</td>
<td><nobr>NEW</nobr>
</td>
<td><nobr></nobr>
</td>
<td>Display colors are slightly off
</td>
</tr>
<tr class="bz_enhancement bz_P2 ">
<td>
<a href="show_bug.cgi?id=32">32</a>
</td>
<td><nobr>enh</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>Neo</nobr>
</td>
<td><nobr>mickey@vanille-media.de</nobr>
</td>
<td><nobr>NEW</nobr>
</td>
<td><nobr></nobr>
</td>
<td>Common function for loading GdkPixbuf
</td>
</tr>
<tr class="bz_blocker bz_P2 ">
<td>
<a href="show_bug.cgi?id=33">33</a>
</td>
<td><nobr>blo</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>Neo</nobr>
</td>
<td><nobr>laforge@openmoko.org</nobr>
</td>
<td><nobr>RESO</nobr>
</td>
<td><nobr>FIXE</nobr>
</td>
<td>incoming call status report causes gsmd to crash.
</td>
</tr>
<tr class="bz_blocker bz_P2 ">
<td>
<a href="show_bug.cgi?id=34">34</a>
</td>
<td><nobr>blo</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>Neo</nobr>
</td>
<td><nobr>laforge@openmoko.org</nobr>
</td>
<td><nobr>RESO</nobr>
</td>
<td><nobr>WORK</nobr>
</td>
<td>Need to decide if lgsm_handle is still valid.
</td>
</tr>
<tr class="bz_enhancement bz_P5 ">
<td>
<a href="show_bug.cgi?id=35">35</a>
</td>
<td><nobr>enh</nobr>
</td>
<td><nobr>P5</nobr>
</td>
<td><nobr>Neo</nobr>
</td>
<td><nobr>laforge@openmoko.org</nobr>
</td>
<td><nobr>RESO</nobr>
</td>
<td><nobr>WONT</nobr>
</td>
<td>Support debug board from u-boot
</td>
</tr>
<tr class="bz_blocker bz_P2 ">
<td>
<a href="show_bug.cgi?id=36">36</a>
</td>
<td><nobr>blo</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>Neo</nobr>
</td>
<td><nobr>laforge@openmoko.org</nobr>
</td>
<td><nobr>RESO</nobr>
</td>
<td><nobr>FIXE</nobr>
</td>
<td>Implement s3c2410 udc (usb device controller) driver in u...
</td>
</tr>
<tr class="bz_blocker bz_P2 ">
<td>
<a href="show_bug.cgi?id=37">37</a>
</td>
<td><nobr>blo</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>Neo</nobr>
</td>
<td><nobr>laforge@openmoko.org</nobr>
</td>
<td><nobr>RESO</nobr>
</td>
<td><nobr>DUPL</nobr>
</td>
<td>Implement USB Device Firmware Upgrade (DFU)
</td>
</tr>
<tr class="bz_enhancement bz_P2 ">
<td>
<a href="show_bug.cgi?id=38">38</a>
</td>
<td><nobr>enh</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>Neo</nobr>
</td>
<td><nobr>laforge@openmoko.org</nobr>
</td>
<td><nobr>RESO</nobr>
</td>
<td><nobr>DUPL</nobr>
</td>
<td>implement USB serial emulation in u-boot
</td>
</tr>
<tr class="bz_major bz_P2 ">
<td>
<a href="show_bug.cgi?id=39">39</a>
</td>
<td><nobr>maj</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>Neo</nobr>
</td>
<td><nobr>gordon_hsu@fic-sh.com.cn</nobr>
</td>
<td><nobr>RESO</nobr>
</td>
<td><nobr>FIXE</nobr>
</td>
<td>Move LCM initialization into u-boot (currently in kernel ...
</td>
</tr>
<tr class="bz_normal bz_P2 ">
<td>
<a href="show_bug.cgi?id=40">40</a>
</td>
<td><nobr>nor</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>Neo</nobr>
</td>
<td><nobr>werner@openmoko.org</nobr>
</td>
<td><nobr>RESO</nobr>
</td>
<td><nobr>DUPL</nobr>
</td>
<td>test + debug display of image on LCM in u-boot
</td>
</tr>
<tr class="bz_enhancement bz_P2 ">
<td>
<a href="show_bug.cgi?id=41">41</a>
</td>
<td><nobr>enh</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>Neo</nobr>
</td>
<td><nobr>ken_zhao@fic-sh.com.cn</nobr>
</td>
<td><nobr>NEW</nobr>
</td>
<td><nobr></nobr>
</td>
<td>evaluate sapwood theme engine
</td>
</tr>
<tr class="bz_blocker bz_P3 ">
<td>
<a href="show_bug.cgi?id=42">42</a>
</td>
<td><nobr>blo</nobr>
</td>
<td><nobr>P3</nobr>
</td>
<td><nobr>Neo</nobr>
</td>
<td><nobr>laforge@openmoko.org</nobr>
</td>
<td><nobr>RESO</nobr>
</td>
<td><nobr>FIXE</nobr>
</td>
<td>dynamic mtd partition table creation
</td>
</tr>
<tr class="bz_major bz_P2 ">
<td>
<a href="show_bug.cgi?id=43">43</a>
</td>
<td><nobr>maj</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>All</nobr>
</td>
<td><nobr>mickey@vanille-media.de</nobr>
</td>
<td><nobr>NEW</nobr>
</td>
<td><nobr></nobr>
</td>
<td>StatusBar (Footer) API
</td>
</tr>
<tr class="bz_wishlist bz_P2 ">
<td>
<a href="show_bug.cgi?id=44">44</a>
</td>
<td><nobr>wis</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>All</nobr>
</td>
<td><nobr>mickey@vanille-media.de</nobr>
</td>
<td><nobr>NEW</nobr>
</td>
<td><nobr></nobr>
</td>
<td>InputMethod API
</td>
</tr>
<tr class="bz_normal bz_P2 ">
<td>
<a href="show_bug.cgi?id=45">45</a>
</td>
<td><nobr>nor</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>All</nobr>
</td>
<td><nobr>mickey@vanille-media.de</nobr>
</td>
<td><nobr>NEW</nobr>
</td>
<td><nobr></nobr>
</td>
<td>Automatic opening input methods
</td>
</tr>
<tr class="bz_major bz_P2 ">
<td>
<a href="show_bug.cgi?id=46">46</a>
</td>
<td><nobr>maj</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>Neo</nobr>
</td>
<td><nobr>laforge@openmoko.org</nobr>
</td>
<td><nobr>RESO</nobr>
</td>
<td><nobr>FIXE</nobr>
</td>
<td>266MHz initialization of GTA01Bv2
</td>
</tr>
<tr class="bz_minor bz_P2 ">
<td>
<a href="show_bug.cgi?id=47">47</a>
</td>
<td><nobr>min</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>Neo</nobr>
</td>
<td><nobr>ken_zhao@fic-sh.com.cn</nobr>
</td>
<td><nobr>RESO</nobr>
</td>
<td><nobr>DUPL</nobr>
</td>
<td>Evaluate sapwood theming engine
</td>
</tr>
<tr class="bz_major bz_P2 ">
<td>
<a href="show_bug.cgi?id=48">48</a>
</td>
<td><nobr>maj</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>Neo</nobr>
</td>
<td><nobr>laforge@openmoko.org</nobr>
</td>
<td><nobr>RESO</nobr>
</td>
<td><nobr>DUPL</nobr>
</td>
<td>Only power up the phone in case power button was pressed ...
</td>
</tr>
<tr class="bz_minor bz_P2 ">
<td>
<a href="show_bug.cgi?id=49">49</a>
</td>
<td><nobr>min</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>All</nobr>
</td>
<td><nobr>mickey@vanille-media.de</nobr>
</td>
<td><nobr>NEW</nobr>
</td>
<td><nobr></nobr>
</td>
<td>Implement touchscreen &amp; click daemon
</td>
</tr>
<tr class="bz_normal bz_P2 ">
<td>
<a href="show_bug.cgi?id=50">50</a>
</td>
<td><nobr>nor</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>All</nobr>
</td>
<td><nobr>mickey@vanille-media.de</nobr>
</td>
<td><nobr>NEW</nobr>
</td>
<td><nobr></nobr>
</td>
<td>Sound Event API
</td>
</tr>
<tr class="bz_normal bz_P2 ">
<td>
<a href="show_bug.cgi?id=51">51</a>
</td>
<td><nobr>nor</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>Neo</nobr>
</td>
<td><nobr>mickey@vanille-media.de</nobr>
</td>
<td><nobr>NEW</nobr>
</td>
<td><nobr></nobr>
</td>
<td>Preferences API
</td>
</tr>
<tr class="bz_normal bz_P2 ">
<td>
<a href="show_bug.cgi?id=52">52</a>
</td>
<td><nobr>nor</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>Neo</nobr>
</td>
<td><nobr>cj_steven@fic-sh.com.cn</nobr>
</td>
<td><nobr>RESO</nobr>
</td>
<td><nobr>FIXE</nobr>
</td>
<td>Single Instance Startup
</td>
</tr>
<tr class="bz_normal bz_P2 ">
<td>
<a href="show_bug.cgi?id=53">53</a>
</td>
<td><nobr>nor</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>All</nobr>
</td>
<td><nobr>tonyguan@fic-sh.com.cn</nobr>
</td>
<td><nobr>RESO</nobr>
</td>
<td><nobr>FIXE</nobr>
</td>
<td>DTMF tones during call
</td>
</tr>
<tr class="bz_blocker bz_P1 ">
<td>
<a href="show_bug.cgi?id=54">54</a>
</td>
<td><nobr>blo</nobr>
</td>
<td><nobr>P1</nobr>
</td>
<td><nobr>All</nobr>
</td>
<td><nobr>tonyguan@fic-sh.com.cn</nobr>
</td>
<td><nobr>RESO</nobr>
</td>
<td><nobr>FIXE</nobr>
</td>
<td>PIN Entry
</td>
</tr>
<tr class="bz_major bz_P2 ">
<td>
<a href="show_bug.cgi?id=55">55</a>
</td>
<td><nobr>maj</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>All</nobr>
</td>
<td><nobr>tonyguan@fic-sh.com.cn</nobr>
</td>
<td><nobr>RESO</nobr>
</td>
<td><nobr>FIXE</nobr>
</td>
<td>Don't pop up the dialer interface initially
</td>
</tr>
<tr class="bz_blocker bz_P4 ">
<td>
<a href="show_bug.cgi?id=56">56</a>
</td>
<td><nobr>blo</nobr>
</td>
<td><nobr>P4</nobr>
</td>
<td><nobr>All</nobr>
</td>
<td><nobr>tonyguan@fic-sh.com.cn</nobr>
</td>
<td><nobr>RESO</nobr>
</td>
<td><nobr>FIXE</nobr>
</td>
<td>Integrate with contacts database
</td>
</tr>
<tr class="bz_minor bz_P2 ">
<td>
<a href="show_bug.cgi?id=57">57</a>
</td>
<td><nobr>min</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>All</nobr>
</td>
<td><nobr>tonyguan@fic-sh.com.cn</nobr>
</td>
<td><nobr>RESO</nobr>
</td>
<td><nobr>LATE</nobr>
</td>
<td>Recording Calls
</td>
</tr>
<tr class="bz_normal bz_P2 ">
<td>
<a href="show_bug.cgi?id=58">58</a>
</td>
<td><nobr>nor</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>Neo</nobr>
</td>
<td><nobr>mickey@vanille-media.de</nobr>
</td>
<td><nobr>NEW</nobr>
</td>
<td><nobr></nobr>
</td>
<td>API for devmand
</td>
</tr>
<tr class="bz_enhancement bz_P2 ">
<td>
<a href="show_bug.cgi?id=59">59</a>
</td>
<td><nobr>enh</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>Neo</nobr>
</td>
<td><nobr>ken_zhao@fic-sh.com.cn</nobr>
</td>
<td><nobr>NEW</nobr>
</td>
<td><nobr></nobr>
</td>
<td>Real DPI vs. Fake DPI
</td>
</tr>
<tr class="bz_minor bz_P2 ">
<td>
<a href="show_bug.cgi?id=60">60</a>
</td>
<td><nobr>min</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>Neo</nobr>
</td>
<td><nobr>ken_zhao@fic-sh.com.cn</nobr>
</td>
<td><nobr>NEW</nobr>
</td>
<td><nobr></nobr>
</td>
<td>fontconfig antialiasing
</td>
</tr>
<tr class="bz_major bz_P2 ">
<td>
<a href="show_bug.cgi?id=61">61</a>
</td>
<td><nobr>maj</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>Neo</nobr>
</td>
<td><nobr>ken_zhao@fic-sh.com.cn</nobr>
</td>
<td><nobr>NEW</nobr>
</td>
<td><nobr></nobr>
</td>
<td>Theme is very slow
</td>
</tr>
<tr class="bz_wishlist bz_P2 ">
<td>
<a href="show_bug.cgi?id=62">62</a>
</td>
<td><nobr>wis</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>All</nobr>
</td>
<td><nobr>mickey@vanille-media.de</nobr>
</td>
<td><nobr>NEW</nobr>
</td>
<td><nobr></nobr>
</td>
<td>High Level Multi Layer Network Discovery API
</td>
</tr>
<tr class="bz_enhancement bz_P2 ">
<td>
<a href="show_bug.cgi?id=63">63</a>
</td>
<td><nobr>enh</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>Neo</nobr>
</td>
<td><nobr>mickey@vanille-media.de</nobr>
</td>
<td><nobr>ASSI</nobr>
</td>
<td><nobr></nobr>
</td>
<td>matchbox-panel 1 vs. 2
</td>
</tr>
<tr class="bz_major bz_P2 ">
<td>
<a href="show_bug.cgi?id=64">64</a>
</td>
<td><nobr>maj</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>Neo</nobr>
</td>
<td><nobr>mickey@vanille-media.de</nobr>
</td>
<td><nobr>NEW</nobr>
</td>
<td><nobr></nobr>
</td>
<td>Show Cipher Status in GSM-Panel applet
</td>
</tr>
<tr class="bz_major bz_P2 ">
<td>
<a href="show_bug.cgi?id=65">65</a>
</td>
<td><nobr>maj</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>Neo</nobr>
</td>
<td><nobr>mickey@vanille-media.de</nobr>
</td>
<td><nobr>NEW</nobr>
</td>
<td><nobr></nobr>
</td>
<td>Visual indication for SMS overflow
</td>
</tr>
<tr class="bz_critical bz_P2 ">
<td>
<a href="show_bug.cgi?id=66">66</a>
</td>
<td><nobr>cri</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>Neo</nobr>
</td>
<td><nobr>mickey@vanille-media.de</nobr>
</td>
<td><nobr>NEW</nobr>
</td>
<td><nobr></nobr>
</td>
<td>Applet for Missed Events
</td>
</tr>
<tr class="bz_normal bz_P2 ">
<td>
<a href="show_bug.cgi?id=67">67</a>
</td>
<td><nobr>nor</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>All</nobr>
</td>
<td><nobr>mickey@vanille-media.de</nobr>
</td>
<td><nobr>RESO</nobr>
</td>
<td><nobr>WONT</nobr>
</td>
<td>libmokopim not necessary
</td>
</tr>
<tr class="bz_normal bz_P2 ">
<td>
<a href="show_bug.cgi?id=68">68</a>
</td>
<td><nobr>nor</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>Neo</nobr>
</td>
<td><nobr>laforge@openmoko.org</nobr>
</td>
<td><nobr>NEW</nobr>
</td>
<td><nobr></nobr>
</td>
<td>SIM backend for EDS
</td>
</tr>
<tr class="bz_major bz_P2 ">
<td>
<a href="show_bug.cgi?id=69">69</a>
</td>
<td><nobr>maj</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>All</nobr>
</td>
<td><nobr>buglog@lists.openmoko.org</nobr>
</td>
<td><nobr>NEW</nobr>
</td>
<td><nobr></nobr>
</td>
<td>Speed up System Initialization
</td>
</tr>
<tr class="bz_normal bz_P2 ">
<td>
<a href="show_bug.cgi?id=70">70</a>
</td>
<td><nobr>nor</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>Neo</nobr>
</td>
<td><nobr>buglog@lists.openmoko.org</nobr>
</td>
<td><nobr>NEW</nobr>
</td>
<td><nobr></nobr>
</td>
<td>Minimize Services started on Bootup
</td>
</tr>
<tr class="bz_enhancement bz_P2 ">
<td>
<a href="show_bug.cgi?id=71">71</a>
</td>
<td><nobr>enh</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>Neo</nobr>
</td>
<td><nobr>gordon_hsu@fic-sh.com.cn</nobr>
</td>
<td><nobr>RESO</nobr>
</td>
<td><nobr>FIXE</nobr>
</td>
<td>make a short vibration pulse once u-boot is starting
</td>
</tr>
<tr class="bz_wishlist bz_P2 ">
<td>
<a href="show_bug.cgi?id=72">72</a>
</td>
<td><nobr>wis</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>Neo</nobr>
</td>
<td><nobr>gordon_hsu@fic-sh.com.cn</nobr>
</td>
<td><nobr>RESO</nobr>
</td>
<td><nobr>FIXE</nobr>
</td>
<td>Add on-screen boot menu
</td>
</tr>
<tr class="bz_blocker bz_P2 ">
<td>
<a href="show_bug.cgi?id=73">73</a>
</td>
<td><nobr>blo</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>Neo</nobr>
</td>
<td><nobr>laforge@openmoko.org</nobr>
</td>
<td><nobr>RESO</nobr>
</td>
<td><nobr>FIXE</nobr>
</td>
<td>test and verify battery charger control (pcf50606)
</td>
</tr>
<tr class="bz_blocker bz_P1 ">
<td>
<a href="show_bug.cgi?id=74">74</a>
</td>
<td><nobr>blo</nobr>
</td>
<td><nobr>P1</nobr>
</td>
<td><nobr>Neo</nobr>
</td>
<td><nobr>laforge@openmoko.org</nobr>
</td>
<td><nobr>RESO</nobr>
</td>
<td><nobr>WONT</nobr>
</td>
<td>stub audio driver to power up amp and route audio through...
</td>
</tr>
<tr class="bz_major bz_P2 ">
<td>
<a href="show_bug.cgi?id=75">75</a>
</td>
<td><nobr>maj</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>Neo</nobr>
</td>
<td><nobr>laforge@openmoko.org</nobr>
</td>
<td><nobr>RESO</nobr>
</td>
<td><nobr>FIXE</nobr>
</td>
<td>PWM code for display brightness control
</td>
</tr>
<tr class="bz_enhancement bz_P2 ">
<td>
<a href="show_bug.cgi?id=76">76</a>
</td>
<td><nobr>enh</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>Neo</nobr>
</td>
<td><nobr>teddy@fic-sh.com.cn</nobr>
</td>
<td><nobr>NEW</nobr>
</td>
<td><nobr></nobr>
</td>
<td>Implement PWM control for vibrator
</td>
</tr>
<tr class="bz_normal bz_P2 ">
<td>
<a href="show_bug.cgi?id=77">77</a>
</td>
<td><nobr>nor</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>Neo</nobr>
</td>
<td><nobr>songcw@fic-sh.com.cn</nobr>
</td>
<td><nobr>NEW</nobr>
</td>
<td><nobr></nobr>
</td>
<td>Finish, test and verify agpsd implementation
</td>
</tr>
<tr class="bz_major bz_P2 ">
<td>
<a href="show_bug.cgi?id=78">78</a>
</td>
<td><nobr>maj</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>Neo</nobr>
</td>
<td><nobr>laforge@openmoko.org</nobr>
</td>
<td><nobr>RESO</nobr>
</td>
<td><nobr>FIXE</nobr>
</td>
<td>Implement and test ASoC platform driver
</td>
</tr>
<tr class="bz_blocker bz_P1 ">
<td>
<a href="show_bug.cgi?id=79">79</a>
</td>
<td><nobr>blo</nobr>
</td>
<td><nobr>P1</nobr>
</td>
<td><nobr>Neo</nobr>
</td>
<td><nobr>werner@openmoko.org</nobr>
</td>
<td><nobr>RESO</nobr>
</td>
<td><nobr>FIXE</nobr>
</td>
<td>suspend/resume to RAM support
</td>
</tr>
<tr class="bz_major bz_P2 ">
<td>
<a href="show_bug.cgi?id=80">80</a>
</td>
<td><nobr>maj</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>Neo</nobr>
</td>
<td><nobr>laforge@openmoko.org</nobr>
</td>
<td><nobr>RESO</nobr>
</td>
<td><nobr>WONT</nobr>
</td>
<td>Add sysfs entry for PMU wakeup reason
</td>
</tr>
<tr class="bz_major bz_P2 ">
<td>
<a href="show_bug.cgi?id=81">81</a>
</td>
<td><nobr>maj</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>Neo</nobr>
</td>
<td><nobr>werner@openmoko.org</nobr>
</td>
<td><nobr>NEW</nobr>
</td>
<td><nobr></nobr>
</td>
<td>Decide how PMU RTC alarm interrupt is signalled to userspace
</td>
</tr>
<tr class="bz_normal bz_P2 ">
<td>
<a href="show_bug.cgi?id=82">82</a>
</td>
<td><nobr>nor</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>Neo</nobr>
</td>
<td><nobr>laforge@openmoko.org</nobr>
</td>
<td><nobr>NEW</nobr>
</td>
<td><nobr></nobr>
</td>
<td>implement and test cpufreq interface to S3C2410 PLL / SLO...
</td>
</tr>
<tr class="bz_enhancement bz_P2 ">
<td>
<a href="show_bug.cgi?id=83">83</a>
</td>
<td><nobr>enh</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>Neo</nobr>
</td>
<td><nobr>teddy@fic-sh.com.cn</nobr>
</td>
<td><nobr>NEW</nobr>
</td>
<td><nobr></nobr>
</td>
<td>evaluate process and I/O schedulers
</td>
</tr>
<tr class="bz_normal bz_P2 ">
<td>
<a href="show_bug.cgi?id=84">84</a>
</td>
<td><nobr>nor</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>Neo</nobr>
</td>
<td><nobr>laforge@openmoko.org</nobr>
</td>
<td><nobr>RESO</nobr>
</td>
<td><nobr>FIXE</nobr>
</td>
<td>enable voluntary preemption
</td>
</tr>
<tr class="bz_minor bz_P2 ">
<td>
<a href="show_bug.cgi?id=85">85</a>
</td>
<td><nobr>min</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>Neo</nobr>
</td>
<td><nobr>laforge@openmoko.org</nobr>
</td>
<td><nobr>RESO</nobr>
</td>
<td><nobr>FIXE</nobr>
</td>
<td>test NO_IDLE_HZ / tickless idle
</td>
</tr>
<tr class="bz_normal bz_P2 ">
<td>
<a href="show_bug.cgi?id=86">86</a>
</td>
<td><nobr>nor</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>Neo</nobr>
</td>
<td><nobr>laforge@openmoko.org</nobr>
</td>
<td><nobr>RESO</nobr>
</td>
<td><nobr>FIXE</nobr>
</td>
<td>APM emulation for battery / charger / charging and possib...
</td>
</tr>
<tr class="bz_normal bz_P2 ">
<td>
<a href="show_bug.cgi?id=87">87</a>
</td>
<td><nobr>nor</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>Neo</nobr>
</td>
<td><nobr>laforge@openmoko.org</nobr>
</td>
<td><nobr>RESO</nobr>
</td>
<td><nobr>FIXE</nobr>
</td>
<td>define and implement how headphone jack routing/signallin...
</td>
</tr>
<tr class="bz_minor bz_P2 ">
<td>
<a href="show_bug.cgi?id=88">88</a>
</td>
<td><nobr>min</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>Neo</nobr>
</td>
<td><nobr>teddy@fic-sh.com.cn</nobr>
</td>
<td><nobr>NEW</nobr>
</td>
<td><nobr></nobr>
</td>
<td>use and test PMU watchdog driver
</td>
</tr>
<tr class="bz_critical bz_P2 ">
<td>
<a href="show_bug.cgi?id=89">89</a>
</td>
<td><nobr>cri</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>Neo</nobr>
</td>
<td><nobr>teddy@fic-sh.com.cn</nobr>
</td>
<td><nobr>NEW</nobr>
</td>
<td><nobr></nobr>
</td>
<td>determine correct gamma calibration values and put them i...
</td>
</tr>
<tr class="bz_critical bz_P1 ">
<td>
<a href="show_bug.cgi?id=90">90</a>
</td>
<td><nobr>cri</nobr>
</td>
<td><nobr>P1</nobr>
</td>
<td><nobr>Neo</nobr>
</td>
<td><nobr>laforge@openmoko.org</nobr>
</td>
<td><nobr>NEW</nobr>
</td>
<td><nobr></nobr>
</td>
<td>GSM TS07.10 multiplex missing
</td>
</tr>
<tr class="bz_major bz_P2 ">
<td>
<a href="show_bug.cgi?id=91">91</a>
</td>
<td><nobr>maj</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>Neo</nobr>
</td>
<td><nobr>laforge@openmoko.org</nobr>
</td>
<td><nobr>RESO</nobr>
</td>
<td><nobr>DUPL</nobr>
</td>
<td>debug sd card timeout problems
</td>
</tr>
<tr class="bz_normal bz_P2 ">
<td>
<a href="show_bug.cgi?id=92">92</a>
</td>
<td><nobr>nor</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>Neo</nobr>
</td>
<td><nobr>laforge@openmoko.org</nobr>
</td>
<td><nobr>RESO</nobr>
</td>
<td><nobr>FIXE</nobr>
</td>
<td>test multiple microSD card vendors for compatibility with...
</td>
</tr>
<tr class="bz_normal bz_P2 ">
<td>
<a href="show_bug.cgi?id=93">93</a>
</td>
<td><nobr>nor</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>Neo</nobr>
</td>
<td><nobr>laforge@openmoko.org</nobr>
</td>
<td><nobr>NEW</nobr>
</td>
<td><nobr></nobr>
</td>
<td>test 4GB microSD card compatibility
</td>
</tr>
<tr class="bz_major bz_P2 ">
<td>
<a href="show_bug.cgi?id=94">94</a>
</td>
<td><nobr>maj</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>Neo</nobr>
</td>
<td><nobr>tonyguan@fic-sh.com.cn</nobr>
</td>
<td><nobr>RESO</nobr>
</td>
<td><nobr>FIXE</nobr>
</td>
<td>+ symbol support
</td>
</tr>
<tr class="bz_minor bz_P2 ">
<td>
<a href="show_bug.cgi?id=95">95</a>
</td>
<td><nobr>min</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>Neo</nobr>
</td>
<td><nobr>sean_chiang@fic.com.tw</nobr>
</td>
<td><nobr>NEW</nobr>
</td>
<td><nobr></nobr>
</td>
<td>verify charger current and battery temperature reading co...
</td>
</tr>
<tr class="bz_major bz_P2 ">
<td>
<a href="show_bug.cgi?id=96">96</a>
</td>
<td><nobr>maj</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>Neo</nobr>
</td>
<td><nobr>laforge@openmoko.org</nobr>
</td>
<td><nobr>NEW</nobr>
</td>
<td><nobr></nobr>
</td>
<td>make sure PMU alarm (set via rtc interface) is persistent
</td>
</tr>
<tr class="bz_blocker bz_P2 ">
<td>
<a href="show_bug.cgi?id=97">97</a>
</td>
<td><nobr>blo</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>Neo</nobr>
</td>
<td><nobr>laforge@openmoko.org</nobr>
</td>
<td><nobr>RESO</nobr>
</td>
<td><nobr>FIXE</nobr>
</td>
<td>remove static mtd partition table, use u-boot created dyn...
</td>
</tr>
<tr class="bz_enhancement bz_P2 ">
<td>
<a href="show_bug.cgi?id=98">98</a>
</td>
<td><nobr>enh</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>Neo</nobr>
</td>
<td><nobr>mickey@vanille-media.de</nobr>
</td>
<td><nobr>NEW</nobr>
</td>
<td><nobr></nobr>
</td>
<td>how to do touch panel calibration in factory and store va...
</td>
</tr>
<tr class="bz_major bz_P2 ">
<td>
<a href="show_bug.cgi?id=99">99</a>
</td>
<td><nobr>maj</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>All</nobr>
</td>
<td><nobr>laforge@openmoko.org</nobr>
</td>
<td><nobr>RESO</nobr>
</td>
<td><nobr>DUPL</nobr>
</td>
<td>Implement SMS support
</td>
</tr>
<tr class="bz_major bz_P2 ">
<td>
<a href="show_bug.cgi?id=100">100</a>
</td>
<td><nobr>maj</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>All</nobr>
</td>
<td><nobr>laforge@openmoko.org</nobr>
</td>
<td><nobr>NEW</nobr>
</td>
<td><nobr></nobr>
</td>
<td>Implement Cell Broadcast support
</td>
</tr>
</table>
<table class="bz_buglist" cellspacing="0" cellpadding="4" width="100%">
<colgroup>
<col class="bz_id_column">
<col class="bz_severity_column">
<col class="bz_priority_column">
<col class="bz_platform_column">
<col class="bz_owner_column">
<col class="bz_status_column">
<col class="bz_resolution_column">
<col class="bz_summary_column">
</colgroup>
<tr align="left">
<th colspan="1">
<a href="buglist.cgi?short_desc_type=allwordssubstr&short_desc=&long_desc_type=allwordssubstr&long_desc=&bug_file_loc_type=allwordssubstr&bug_file_loc=&bug_status=UNCONFIRMED&bug_status=NEW&bug_status=ASSIGNED&bug_status=REOPENED&bug_status=RESOLVED&bug_status=VERIFIED&bug_status=CLOSED&emailassigned_to1=1&emailtype1=substring&email1=&emailassigned_to2=1&emailreporter2=1&emailcc2=1&emailtype2=substring&email2=&bugidtype=include&bug_id=&votes=&changedin=&chfieldfrom=&chfieldto=Now&chfieldvalue=&field0-0-0=noop&type0-0-0=noop&value0-0-0=&order=bugs.bug_id">ID</a>
</th>
<th colspan="1">
<a href="buglist.cgi?short_desc_type=allwordssubstr&short_desc=&long_desc_type=allwordssubstr&long_desc=&bug_file_loc_type=allwordssubstr&bug_file_loc=&bug_status=UNCONFIRMED&bug_status=NEW&bug_status=ASSIGNED&bug_status=REOPENED&bug_status=RESOLVED&bug_status=VERIFIED&bug_status=CLOSED&emailassigned_to1=1&emailtype1=substring&email1=&emailassigned_to2=1&emailreporter2=1&emailcc2=1&emailtype2=substring&email2=&bugidtype=include&bug_id=&votes=&changedin=&chfieldfrom=&chfieldto=Now&chfieldvalue=&field0-0-0=noop&type0-0-0=noop&value0-0-0=&order=bugs.bug_severity,bugs.bug_id">Sev</a>
</th><th colspan="1">
<a href="buglist.cgi?short_desc_type=allwordssubstr&short_desc=&long_desc_type=allwordssubstr&long_desc=&bug_file_loc_type=allwordssubstr&bug_file_loc=&bug_status=UNCONFIRMED&bug_status=NEW&bug_status=ASSIGNED&bug_status=REOPENED&bug_status=RESOLVED&bug_status=VERIFIED&bug_status=CLOSED&emailassigned_to1=1&emailtype1=substring&email1=&emailassigned_to2=1&emailreporter2=1&emailcc2=1&emailtype2=substring&email2=&bugidtype=include&bug_id=&votes=&changedin=&chfieldfrom=&chfieldto=Now&chfieldvalue=&field0-0-0=noop&type0-0-0=noop&value0-0-0=&order=bugs.priority,bugs.bug_id">Pri</a>
</th><th colspan="1">
<a href="buglist.cgi?short_desc_type=allwordssubstr&short_desc=&long_desc_type=allwordssubstr&long_desc=&bug_file_loc_type=allwordssubstr&bug_file_loc=&bug_status=UNCONFIRMED&bug_status=NEW&bug_status=ASSIGNED&bug_status=REOPENED&bug_status=RESOLVED&bug_status=VERIFIED&bug_status=CLOSED&emailassigned_to1=1&emailtype1=substring&email1=&emailassigned_to2=1&emailreporter2=1&emailcc2=1&emailtype2=substring&email2=&bugidtype=include&bug_id=&votes=&changedin=&chfieldfrom=&chfieldto=Now&chfieldvalue=&field0-0-0=noop&type0-0-0=noop&value0-0-0=&order=bugs.rep_platform,bugs.bug_id">Plt</a>
</th><th colspan="1">
<a href="buglist.cgi?short_desc_type=allwordssubstr&short_desc=&long_desc_type=allwordssubstr&long_desc=&bug_file_loc_type=allwordssubstr&bug_file_loc=&bug_status=UNCONFIRMED&bug_status=NEW&bug_status=ASSIGNED&bug_status=REOPENED&bug_status=RESOLVED&bug_status=VERIFIED&bug_status=CLOSED&emailassigned_to1=1&emailtype1=substring&email1=&emailassigned_to2=1&emailreporter2=1&emailcc2=1&emailtype2=substring&email2=&bugidtype=include&bug_id=&votes=&changedin=&chfieldfrom=&chfieldto=Now&chfieldvalue=&field0-0-0=noop&type0-0-0=noop&value0-0-0=&order=map_assigned_to.login_name,bugs.bug_id">Owner</a>
</th><th colspan="1">
<a href="buglist.cgi?short_desc_type=allwordssubstr&short_desc=&long_desc_type=allwordssubstr&long_desc=&bug_file_loc_type=allwordssubstr&bug_file_loc=&bug_status=UNCONFIRMED&bug_status=NEW&bug_status=ASSIGNED&bug_status=REOPENED&bug_status=RESOLVED&bug_status=VERIFIED&bug_status=CLOSED&emailassigned_to1=1&emailtype1=substring&email1=&emailassigned_to2=1&emailreporter2=1&emailcc2=1&emailtype2=substring&email2=&bugidtype=include&bug_id=&votes=&changedin=&chfieldfrom=&chfieldto=Now&chfieldvalue=&field0-0-0=noop&type0-0-0=noop&value0-0-0=&order=bugs.bug_status,bugs.bug_id">State</a>
</th><th colspan="1">
<a href="buglist.cgi?short_desc_type=allwordssubstr&short_desc=&long_desc_type=allwordssubstr&long_desc=&bug_file_loc_type=allwordssubstr&bug_file_loc=&bug_status=UNCONFIRMED&bug_status=NEW&bug_status=ASSIGNED&bug_status=REOPENED&bug_status=RESOLVED&bug_status=VERIFIED&bug_status=CLOSED&emailassigned_to1=1&emailtype1=substring&email1=&emailassigned_to2=1&emailreporter2=1&emailcc2=1&emailtype2=substring&email2=&bugidtype=include&bug_id=&votes=&changedin=&chfieldfrom=&chfieldto=Now&chfieldvalue=&field0-0-0=noop&type0-0-0=noop&value0-0-0=&order=bugs.resolution,bugs.bug_id">Result</a>
</th><th colspan="1">
<a href="buglist.cgi?short_desc_type=allwordssubstr&short_desc=&long_desc_type=allwordssubstr&long_desc=&bug_file_loc_type=allwordssubstr&bug_file_loc=&bug_status=UNCONFIRMED&bug_status=NEW&bug_status=ASSIGNED&bug_status=REOPENED&bug_status=RESOLVED&bug_status=VERIFIED&bug_status=CLOSED&emailassigned_to1=1&emailtype1=substring&email1=&emailassigned_to2=1&emailreporter2=1&emailcc2=1&emailtype2=substring&email2=&bugidtype=include&bug_id=&votes=&changedin=&chfieldfrom=&chfieldto=Now&chfieldvalue=&field0-0-0=noop&type0-0-0=noop&value0-0-0=&order=bugs.short_desc,bugs.bug_id">Summary</a>
</th>
</tr>
<tr class="bz_major bz_P2 ">
<td>
<a href="show_bug.cgi?id=101">101</a>
</td>
<td><nobr>maj</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>Neo</nobr>
</td>
<td><nobr>laforge@openmoko.org</nobr>
</td>
<td><nobr>NEW</nobr>
</td>
<td><nobr></nobr>
</td>
<td>Implement GPRS setup/teardown support
</td>
</tr>
<tr class="bz_critical bz_P2 ">
<td>
<a href="show_bug.cgi?id=102">102</a>
</td>
<td><nobr>cri</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>Neo</nobr>
</td>
<td><nobr>laforge@openmoko.org</nobr>
</td>
<td><nobr>ASSI</nobr>
</td>
<td><nobr></nobr>
</td>
<td>SIM phonebook access
</td>
</tr>
<tr class="bz_blocker bz_P1 ">
<td>
<a href="show_bug.cgi?id=103">103</a>
</td>
<td><nobr>blo</nobr>
</td>
<td><nobr>P1</nobr>
</td>
<td><nobr>Neo</nobr>
</td>
<td><nobr>laforge@openmoko.org</nobr>
</td>
<td><nobr>RESO</nobr>
</td>
<td><nobr>FIXE</nobr>
</td>
<td>power-up/power-down GSM Modem
</td>
</tr>
<tr class="bz_critical bz_P2 ">
<td>
<a href="show_bug.cgi?id=104">104</a>
</td>
<td><nobr>cri</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>All</nobr>
</td>
<td><nobr>tonyguan@fic-sh.com.cn</nobr>
</td>
<td><nobr>RESO</nobr>
</td>
<td><nobr>LATE</nobr>
</td>
<td>Volume control
</td>
</tr>
<tr class="bz_enhancement bz_P2 ">
<td>
<a href="show_bug.cgi?id=105">105</a>
</td>
<td><nobr>enh</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>Neo</nobr>
</td>
<td><nobr>laforge@openmoko.org</nobr>
</td>
<td><nobr>RESO</nobr>
</td>
<td><nobr>DUPL</nobr>
</td>
<td>add passthrough mode
</td>
</tr>
<tr class="bz_blocker bz_P2 ">
<td>
<a href="show_bug.cgi?id=106">106</a>
</td>
<td><nobr>blo</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>Neo</nobr>
</td>
<td><nobr>tonyguan@fic-sh.com.cn</nobr>
</td>
<td><nobr>RESO</nobr>
</td>
<td><nobr>LATE</nobr>
</td>
<td>Emergency Call Support
</td>
</tr>
<tr class="bz_major bz_P2 ">
<td>
<a href="show_bug.cgi?id=107">107</a>
</td>
<td><nobr>maj</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>Neo</nobr>
</td>
<td><nobr>laforge@openmoko.org</nobr>
</td>
<td><nobr>NEW</nobr>
</td>
<td><nobr></nobr>
</td>
<td>obtain list of operators / control operator selection
</td>
</tr>
<tr class="bz_normal bz_P2 ">
<td>
<a href="show_bug.cgi?id=108">108</a>
</td>
<td><nobr>nor</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>Neo</nobr>
</td>
<td><nobr>tonyguan@fic-sh.com.cn</nobr>
</td>
<td><nobr>REOP</nobr>
</td>
<td><nobr></nobr>
</td>
<td>allow query of manufacturer/model/revision/imei
</td>
</tr>
<tr class="bz_enhancement bz_P2 ">
<td>
<a href="show_bug.cgi?id=109">109</a>
</td>
<td><nobr>enh</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>Neo</nobr>
</td>
<td><nobr>laforge@openmoko.org</nobr>
</td>
<td><nobr>NEW</nobr>
</td>
<td><nobr></nobr>
</td>
<td>add dbus interface, like recent upstream gpsd
</td>
</tr>
<tr class="bz_normal bz_P2 ">
<td>
<a href="show_bug.cgi?id=110">110</a>
</td>
<td><nobr>nor</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>Neo</nobr>
</td>
<td><nobr>laforge@openmoko.org</nobr>
</td>
<td><nobr>NEW</nobr>
</td>
<td><nobr></nobr>
</td>
<td>look into gps / agps integration
</td>
</tr>
<tr class="bz_normal bz_P2 ">
<td>
<a href="show_bug.cgi?id=111">111</a>
</td>
<td><nobr>nor</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>Neo</nobr>
</td>
<td><nobr>laforge@openmoko.org</nobr>
</td>
<td><nobr>NEW</nobr>
</td>
<td><nobr></nobr>
</td>
<td>integrate agpsd in our system power management.
</td>
</tr>
<tr class="bz_normal bz_P2 ">
<td>
<a href="show_bug.cgi?id=112">112</a>
</td>
<td><nobr>nor</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>Neo</nobr>
</td>
<td><nobr>buglog@lists.openmoko.org</nobr>
</td>
<td><nobr>NEW</nobr>
</td>
<td><nobr></nobr>
</td>
<td>How to deliver kernel-level alarm to destination app
</td>
</tr>
<tr class="bz_normal bz_P2 ">
<td>
<a href="show_bug.cgi?id=113">113</a>
</td>
<td><nobr>nor</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>Neo</nobr>
</td>
<td><nobr>marcel@holtmann.org</nobr>
</td>
<td><nobr>ASSI</nobr>
</td>
<td><nobr></nobr>
</td>
<td>bluetooth headset support
</td>
</tr>
<tr class="bz_normal bz_P2 ">
<td>
<a href="show_bug.cgi?id=114">114</a>
</td>
<td><nobr>nor</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>Neo</nobr>
</td>
<td><nobr>buglog@lists.openmoko.org</nobr>
</td>
<td><nobr>NEW</nobr>
</td>
<td><nobr></nobr>
</td>
<td>Who is managing wakeup times?
</td>
</tr>
<tr class="bz_enhancement bz_P2 ">
<td>
<a href="show_bug.cgi?id=115">115</a>
</td>
<td><nobr>enh</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>Neo</nobr>
</td>
<td><nobr>marcel@holtmann.org</nobr>
</td>
<td><nobr>ASSI</nobr>
</td>
<td><nobr></nobr>
</td>
<td>A2DP / alsa integration
</td>
</tr>
<tr class="bz_normal bz_P2 ">
<td>
<a href="show_bug.cgi?id=116">116</a>
</td>
<td><nobr>nor</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>Neo</nobr>
</td>
<td><nobr>marcel@holtmann.org</nobr>
</td>
<td><nobr>ASSI</nobr>
</td>
<td><nobr></nobr>
</td>
<td>bluetooth HID support (host)
</td>
</tr>
<tr class="bz_minor bz_P2 ">
<td>
<a href="show_bug.cgi?id=117">117</a>
</td>
<td><nobr>min</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>Neo</nobr>
</td>
<td><nobr>marcel@holtmann.org</nobr>
</td>
<td><nobr>ASSI</nobr>
</td>
<td><nobr></nobr>
</td>
<td>bluetooth HID support (device)
</td>
</tr>
<tr class="bz_normal bz_P2 ">
<td>
<a href="show_bug.cgi?id=118">118</a>
</td>
<td><nobr>nor</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>Neo</nobr>
</td>
<td><nobr>marcel@holtmann.org</nobr>
</td>
<td><nobr>ASSI</nobr>
</td>
<td><nobr></nobr>
</td>
<td>bluetooth networking support
</td>
</tr>
<tr class="bz_critical bz_P3 ">
<td>
<a href="show_bug.cgi?id=119">119</a>
</td>
<td><nobr>cri</nobr>
</td>
<td><nobr>P3</nobr>
</td>
<td><nobr>All</nobr>
</td>
<td><nobr>mickey@vanille-media.de</nobr>
</td>
<td><nobr>NEW</nobr>
</td>
<td><nobr></nobr>
</td>
<td>merge openmoko-taskmanager into openmoko-footer
</td>
</tr>
<tr class="bz_normal bz_P2 ">
<td>
<a href="show_bug.cgi?id=120">120</a>
</td>
<td><nobr>nor</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>Neo</nobr>
</td>
<td><nobr>marcel@holtmann.org</nobr>
</td>
<td><nobr>ASSI</nobr>
</td>
<td><nobr></nobr>
</td>
<td>bluetooth OBEX
</td>
</tr>
<tr class="bz_critical bz_P3 ">
<td>
<a href="show_bug.cgi?id=121">121</a>
</td>
<td><nobr>cri</nobr>
</td>
<td><nobr>P3</nobr>
</td>
<td><nobr>All</nobr>
</td>
<td><nobr>mickey@vanille-media.de</nobr>
</td>
<td><nobr>NEW</nobr>
</td>
<td><nobr></nobr>
</td>
<td>merge openmoko-mainmenu into openmoko-mainmenu (panel)
</td>
</tr>
<tr class="bz_normal bz_P2 ">
<td>
<a href="show_bug.cgi?id=122">122</a>
</td>
<td><nobr>nor</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>Neo</nobr>
</td>
<td><nobr>mickey@vanille-media.de</nobr>
</td>
<td><nobr>NEW</nobr>
</td>
<td><nobr></nobr>
</td>
<td>rename openmoko-history to openmoko-taskmanager
</td>
</tr>
<tr class="bz_normal bz_P2 ">
<td>
<a href="show_bug.cgi?id=123">123</a>
</td>
<td><nobr>nor</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>Neo</nobr>
</td>
<td><nobr>mickey@vanille-media.de</nobr>
</td>
<td><nobr>RESO</nobr>
</td>
<td><nobr>DUPL</nobr>
</td>
<td>rename openmoko-history to openmoko-taskmanager
</td>
</tr>
<tr class="bz_normal bz_P2 ">
<td>
<a href="show_bug.cgi?id=124">124</a>
</td>
<td><nobr>nor</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>PC</nobr>
</td>
<td><nobr>laforge@openmoko.org</nobr>
</td>
<td><nobr>NEW</nobr>
</td>
<td><nobr></nobr>
</td>
<td>modem volume control on connection
</td>
</tr>
<tr class="bz_normal bz_P2 ">
<td>
<a href="show_bug.cgi?id=125">125</a>
</td>
<td><nobr>nor</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>All</nobr>
</td>
<td><nobr>davewu01@seed.net.tw</nobr>
</td>
<td><nobr>RESO</nobr>
</td>
<td><nobr>FIXE</nobr>
</td>
          <td>Finger UI is not usable on 2.8" screen
</td>
</tr>
<tr class="bz_normal bz_P2 ">
<td>
<a href="show_bug.cgi?id=126">126</a>
</td>
<td><nobr>nor</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>All</nobr>
</td>
<td><nobr>sunzhiyong@fic-sh.com.cn</nobr>
</td>
<td><nobr>ASSI</nobr>
</td>
<td><nobr></nobr>
</td>
<td>Remove back functionality from Main Menu
</td>
</tr>
<tr class="bz_major bz_P2 ">
<td>
<a href="show_bug.cgi?id=127">127</a>
</td>
<td><nobr>maj</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>All</nobr>
</td>
<td><nobr>davewu01@seed.net.tw</nobr>
</td>
<td><nobr>RESO</nobr>
</td>
<td><nobr>FIXE</nobr>
</td>
<td>Power On / Off Images needed
</td>
</tr>
<tr class="bz_normal bz_P2 ">
<td>
<a href="show_bug.cgi?id=128">128</a>
</td>
<td><nobr>nor</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>PC</nobr>
</td>
<td><nobr>cj_steven@fic-sh.com.cn</nobr>
</td>
<td><nobr>NEW</nobr>
</td>
<td><nobr></nobr>
</td>
<td>Tap and hold on panel icon doesn't change to Today applic...
</td>
</tr>
<tr class="bz_normal bz_P2 ">
<td>
<a href="show_bug.cgi?id=129">129</a>
</td>
<td><nobr>nor</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>All</nobr>
</td>
<td><nobr>ken_zhao@fic-sh.com.cn</nobr>
</td>
<td><nobr>NEW</nobr>
</td>
<td><nobr></nobr>
</td>
<td>Create / Find better system fonts
</td>
</tr>
<tr class="bz_normal bz_P2 ">
<td>
<a href="show_bug.cgi?id=130">130</a>
</td>
<td><nobr>nor</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>All</nobr>
</td>
<td><nobr>mickey@vanille-media.de</nobr>
</td>
<td><nobr>NEW</nobr>
</td>
<td><nobr></nobr>
</td>
<td>GTK Popup menus size incorrectly
</td>
</tr>
<tr class="bz_normal bz_P2 ">
<td>
<a href="show_bug.cgi?id=131">131</a>
</td>
<td><nobr>nor</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>All</nobr>
</td>
<td><nobr>davewu01@seed.net.tw</nobr>
</td>
<td><nobr>ASSI</nobr>
</td>
<td><nobr></nobr>
</td>
<td>Move Search Open / Close buttons into same location
</td>
</tr>
<tr class="bz_normal bz_P2 ">
<td>
<a href="show_bug.cgi?id=132">132</a>
</td>
<td><nobr>nor</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>All</nobr>
</td>
<td><nobr>sunzhiyong@fic-sh.com.cn</nobr>
</td>
<td><nobr>RESO</nobr>
</td>
<td><nobr>FIXE</nobr>
</td>
<td>Task Manager is not quick to use
</td>
</tr>
<tr class="bz_normal bz_P2 ">
<td>
<a href="show_bug.cgi?id=133">133</a>
</td>
<td><nobr>nor</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>All</nobr>
</td>
<td><nobr>davewu01@seed.net.tw</nobr>
</td>
<td><nobr>ASSI</nobr>
</td>
<td><nobr></nobr>
</td>
<td>Designer image layouts should have both 4 corners and ful...
</td>
</tr>
<tr class="bz_normal bz_P2 ">
<td>
<a href="show_bug.cgi?id=134">134</a>
</td>
<td><nobr>nor</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>All</nobr>
</td>
<td><nobr>mickey@vanille-media.de</nobr>
</td>
<td><nobr>NEW</nobr>
</td>
<td><nobr></nobr>
</td>
<td>Stylus applications need close function
</td>
</tr>
<tr class="bz_normal bz_P2 ">
<td>
<a href="show_bug.cgi?id=135">135</a>
</td>
<td><nobr>nor</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>All</nobr>
</td>
<td><nobr>mickey@vanille-media.de</nobr>
</td>
<td><nobr>NEW</nobr>
</td>
<td><nobr></nobr>
</td>
<td>Finger applications need close functionality
</td>
</tr>
<tr class="bz_normal bz_P2 ">
<td>
<a href="show_bug.cgi?id=136">136</a>
</td>
<td><nobr>nor</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>Neo</nobr>
</td>
<td><nobr>buglog@lists.openmoko.org</nobr>
</td>
<td><nobr>ASSI</nobr>
</td>
<td><nobr></nobr>
</td>
<td>application manager doesn't build
</td>
</tr>
<tr class="bz_normal bz_P2 ">
<td>
<a href="show_bug.cgi?id=137">137</a>
</td>
<td><nobr>nor</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>All</nobr>
</td>
<td><nobr>buglog@lists.openmoko.org</nobr>
</td>
<td><nobr>NEW</nobr>
</td>
<td><nobr></nobr>
</td>
<td>submit patch against ipkg upstream
</td>
</tr>
<tr class="bz_normal bz_P2 ">
<td>
<a href="show_bug.cgi?id=138">138</a>
</td>
<td><nobr>nor</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>All</nobr>
</td>
<td><nobr>sunzhiyong@fic-sh.com.cn</nobr>
</td>
<td><nobr>REOP</nobr>
</td>
<td><nobr></nobr>
</td>
<td>submit patch against matchbox-window-manager upstream
</td>
</tr>
<tr class="bz_normal bz_P2 ">
<td>
<a href="show_bug.cgi?id=139">139</a>
</td>
<td><nobr>nor</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>PC</nobr>
</td>
<td><nobr>mickey@vanille-media.de</nobr>
</td>
<td><nobr>NEW</nobr>
</td>
<td><nobr></nobr>
</td>
<td>GSM API
</td>
</tr>
<tr class="bz_enhancement bz_P2 ">
<td>
<a href="show_bug.cgi?id=140">140</a>
</td>
<td><nobr>enh</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>Neo</nobr>
</td>
<td><nobr>stefan@openmoko.org</nobr>
</td>
<td><nobr>NEW</nobr>
</td>
<td><nobr></nobr>
</td>
<td>add network-enabled fbgrab from openEZX to openmoko-devel...
</td>
</tr>
<tr class="bz_normal bz_P2 ">
<td>
<a href="show_bug.cgi?id=141">141</a>
</td>
<td><nobr>nor</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>All</nobr>
</td>
<td><nobr>buglog@lists.openmoko.org</nobr>
</td>
<td><nobr>NEW</nobr>
</td>
<td><nobr></nobr>
</td>
          <td>Need support for device under Windows and OS X
</td>
</tr>
<tr class="bz_normal bz_P2 ">
<td>
<a href="show_bug.cgi?id=142">142</a>
</td>
<td><nobr>nor</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>PC</nobr>
</td>
<td><nobr>werner@openmoko.org</nobr>
</td>
<td><nobr>RESO</nobr>
</td>
<td><nobr>FIXE</nobr>
</td>
<td>sjf2410-linux cleanup / help message / NAND read
</td>
</tr>
<tr class="bz_enhancement bz_P2 ">
<td>
<a href="show_bug.cgi?id=143">143</a>
</td>
<td><nobr>enh</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>PC</nobr>
</td>
<td><nobr>buglog@lists.openmoko.org</nobr>
</td>
<td><nobr>REOP</nobr>
</td>
<td><nobr></nobr>
</td>
<td>Implement NAND write/read support in OpenOCD
</td>
</tr>
<tr class="bz_major bz_P2 ">
<td>
<a href="show_bug.cgi?id=144">144</a>
</td>
<td><nobr>maj</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>Neo</nobr>
</td>
<td><nobr>mickey@vanille-media.de</nobr>
</td>
<td><nobr>NEW</nobr>
</td>
<td><nobr></nobr>
</td>
<td>when phone is hard-rebooted, Xfbdev complains about /tmp/...
</td>
</tr>
<tr class="bz_blocker bz_P2 ">
<td>
<a href="show_bug.cgi?id=145">145</a>
</td>
<td><nobr>blo</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>Neo</nobr>
</td>
<td><nobr>laforge@openmoko.org</nobr>
</td>
<td><nobr>RESO</nobr>
</td>
<td><nobr>WONT</nobr>
</td>
<td>battery is not automatically charging
</td>
</tr>
<tr class="bz_normal bz_P2 ">
<td>
<a href="show_bug.cgi?id=146">146</a>
</td>
<td><nobr>nor</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>PC</nobr>
</td>
<td><nobr>mickey@vanille-media.de</nobr>
</td>
<td><nobr>RESO</nobr>
</td>
<td><nobr>FIXE</nobr>
</td>
<td>sjf2410-linux does not contain latest svn code
</td>
</tr>
<tr class="bz_normal bz_P2 ">
<td>
<a href="show_bug.cgi?id=147">147</a>
</td>
<td><nobr>nor</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>PC</nobr>
</td>
<td><nobr>mickey@vanille-media.de</nobr>
</td>
<td><nobr>RESO</nobr>
</td>
<td><nobr>WONT</nobr>
</td>
<td>openmoko-panel-applet could not be resized
</td>
</tr>
<tr class="bz_blocker bz_P1 ">
<td>
<a href="show_bug.cgi?id=148">148</a>
</td>
<td><nobr>blo</nobr>
</td>
<td><nobr>P1</nobr>
</td>
<td><nobr>Neo</nobr>
</td>
<td><nobr>laforge@openmoko.org</nobr>
</td>
<td><nobr>RESO</nobr>
</td>
<td><nobr>FIXE</nobr>
</td>
<td>gsmd not talking to TI modem on GTA01Bv2
</td>
</tr>
<tr class="bz_major bz_P2 ">
<td>
<a href="show_bug.cgi?id=149">149</a>
</td>
<td><nobr>maj</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>Neo</nobr>
</td>
<td><nobr>laforge@openmoko.org</nobr>
</td>
<td><nobr>ASSI</nobr>
</td>
<td><nobr></nobr>
</td>
<td>lm4857 not i2c address compliant
</td>
</tr>
<tr class="bz_normal bz_P2 ">
<td>
<a href="show_bug.cgi?id=150">150</a>
</td>
<td><nobr>nor</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>Neo</nobr>
</td>
<td><nobr>graeme.gregory@wolfsonmicro...</nobr>
</td>
<td><nobr>RESO</nobr>
</td>
<td><nobr>INVA</nobr>
</td>
<td>ASoC patch doesn't compile
</td>
</tr>
<tr class="bz_normal bz_P2 ">
<td>
<a href="show_bug.cgi?id=151">151</a>
</td>
<td><nobr>nor</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>Neo</nobr>
</td>
<td><nobr>cj_steven@fic-sh.com.cn</nobr>
</td>
<td><nobr>NEW</nobr>
</td>
<td><nobr></nobr>
</td>
<td>Does mainmenu need libmatchbox or not?
</td>
</tr>
<tr class="bz_normal bz_P2 ">
<td>
<a href="show_bug.cgi?id=152">152</a>
</td>
<td><nobr>nor</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>PC</nobr>
</td>
<td><nobr>cj_steven@fic-sh.com.cn</nobr>
</td>
<td><nobr>NEW</nobr>
</td>
<td><nobr></nobr>
</td>
<td>VFOLDERDIR is hardcoded
</td>
</tr>
<tr class="bz_normal bz_P2 ">
<td>
<a href="show_bug.cgi?id=153">153</a>
</td>
<td><nobr>nor</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>PC</nobr>
</td>
<td><nobr>sunzhiyong@fic-sh.com.cn</nobr>
</td>
<td><nobr>ASSI</nobr>
</td>
<td><nobr></nobr>
</td>
<td>Rationale for copying GtkIconView instead of deriving?
</td>
</tr>
<tr class="bz_normal bz_P2 ">
<td>
<a href="show_bug.cgi?id=154">154</a>
</td>
<td><nobr>nor</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>PC</nobr>
</td>
<td><nobr>sunzhiyong@fic-sh.com.cn</nobr>
</td>
<td><nobr>RESO</nobr>
</td>
<td><nobr>FIXE</nobr>
</td>
<td>mainmenu crashes when clicking wheel the 2nd time
</td>
</tr>
<tr class="bz_normal bz_P2 ">
<td>
<a href="show_bug.cgi?id=155">155</a>
</td>
<td><nobr>nor</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>PC</nobr>
</td>
<td><nobr>sunzhiyong@fic-sh.com.cn</nobr>
</td>
<td><nobr>RESO</nobr>
</td>
<td><nobr>FIXE</nobr>
</td>
<td>How to get back one level if you are in a subdirectory?
</td>
</tr>
<tr class="bz_normal bz_P2 ">
<td>
<a href="show_bug.cgi?id=156">156</a>
</td>
<td><nobr>nor</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>PC</nobr>
</td>
<td><nobr>sunzhiyong@fic-sh.com.cn</nobr>
</td>
<td><nobr>ASSI</nobr>
</td>
<td><nobr></nobr>
</td>
<td>Where is mainmenu going to look for applications?
</td>
</tr>
<tr class="bz_normal bz_P2 ">
<td>
<a href="show_bug.cgi?id=157">157</a>
</td>
<td><nobr>nor</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>PC</nobr>
</td>
<td><nobr>davewu01@seed.net.tw</nobr>
</td>
<td><nobr>RESO</nobr>
</td>
<td><nobr>DUPL</nobr>
</td>
<td>The sizes of each keys are too small
</td>
</tr>
<tr class="bz_normal bz_P2 ">
<td>
<a href="show_bug.cgi?id=158">158</a>
</td>
<td><nobr>nor</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>PC</nobr>
</td>
<td><nobr>buglog@lists.openmoko.org</nobr>
</td>
<td><nobr>RESO</nobr>
</td>
<td><nobr>FIXE</nobr>
</td>
<td>musicplayer crashes
</td>
</tr>
<tr class="bz_normal bz_P2 ">
<td>
<a href="show_bug.cgi?id=159">159</a>
</td>
<td><nobr>nor</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>PC</nobr>
</td>
<td><nobr>sunzhiyong@fic-sh.com.cn</nobr>
</td>
<td><nobr>ASSI</nobr>
</td>
<td><nobr></nobr>
</td>
<td>display thumbnails of actual applications
</td>
</tr>
<tr class="bz_normal bz_P2 ">
<td>
<a href="show_bug.cgi?id=160">160</a>
</td>
<td><nobr>nor</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>PC</nobr>
</td>
<td><nobr>sunzhiyong@fic-sh.com.cn</nobr>
</td>
<td><nobr>ASSI</nobr>
</td>
<td><nobr></nobr>
</td>
<td>display thumbnails in 3x3 grid
</td>
</tr>
<tr class="bz_normal bz_P2 ">
<td>
<a href="show_bug.cgi?id=161">161</a>
</td>
<td><nobr>nor</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>PC</nobr>
</td>
<td><nobr>davewu01@seed.net.tw</nobr>
</td>
<td><nobr>RESO</nobr>
</td>
<td><nobr>FIXE</nobr>
</td>
<td>Docked Keypad is too small
</td>
</tr>
<tr class="bz_major bz_P2 ">
<td>
<a href="show_bug.cgi?id=162">162</a>
</td>
<td><nobr>maj</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>All</nobr>
</td>
<td><nobr>mickey@vanille-media.de</nobr>
</td>
<td><nobr>RESO</nobr>
</td>
<td><nobr>REMI</nobr>
</td>
<td>libmutil0_svn.bb setup misses libltdl creation
</td>
</tr>
<tr class="bz_normal bz_P2 ">
<td>
<a href="show_bug.cgi?id=163">163</a>
</td>
<td><nobr>nor</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>PC</nobr>
</td>
<td><nobr>mickey@vanille-media.de</nobr>
</td>
<td><nobr>NEW</nobr>
</td>
<td><nobr></nobr>
</td>
<td>Audio Profile Management
</td>
</tr>
<tr class="bz_major bz_P1 ">
<td>
<a href="show_bug.cgi?id=164">164</a>
</td>
<td><nobr>maj</nobr>
</td>
<td><nobr>P1</nobr>
</td>
<td><nobr>Neo</nobr>
</td>
<td><nobr>werner@openmoko.org</nobr>
</td>
<td><nobr>ASSI</nobr>
</td>
<td><nobr></nobr>
</td>
<td>improve non-SanDisk microSD support in u-boot
</td>
</tr>
<tr class="bz_normal bz_P2 ">
<td>
<a href="show_bug.cgi?id=165">165</a>
</td>
<td><nobr>nor</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>PC</nobr>
</td>
<td><nobr>buglog@lists.openmoko.org</nobr>
</td>
<td><nobr>ASSI</nobr>
</td>
<td><nobr></nobr>
</td>
<td>openmoko-simplemediaplayer doesn't build in OE
</td>
</tr>
<tr class="bz_minor bz_P2 ">
<td>
<a href="show_bug.cgi?id=166">166</a>
</td>
<td><nobr>min</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>Neo</nobr>
</td>
<td><nobr>laforge@openmoko.org</nobr>
</td>
<td><nobr>NEW</nobr>
</td>
<td><nobr></nobr>
</td>
<td>u-boot cdc_acm hot un-plug/replug hang
</td>
</tr>
<tr class="bz_enhancement bz_P2 ">
<td>
<a href="show_bug.cgi?id=167">167</a>
</td>
<td><nobr>enh</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>Neo</nobr>
</td>
<td><nobr>stefan@openmoko.org</nobr>
</td>
<td><nobr>NEW</nobr>
</td>
<td><nobr></nobr>
</td>
<td>add LCM QVGA switching support
</td>
</tr>
<tr class="bz_minor bz_P2 ">
<td>
<a href="show_bug.cgi?id=168">168</a>
</td>
<td><nobr>min</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>Neo</nobr>
</td>
<td><nobr>buglog@lists.openmoko.org</nobr>
</td>
<td><nobr>RESO</nobr>
</td>
<td><nobr>FIXE</nobr>
</td>
<td>usb0 is not automatically configured
</td>
</tr>
<tr class="bz_major bz_P2 ">
<td>
<a href="show_bug.cgi?id=169">169</a>
</td>
<td><nobr>maj</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>Neo</nobr>
</td>
<td><nobr>mickey@vanille-media.de</nobr>
</td>
<td><nobr>RESO</nobr>
</td>
<td><nobr>FIXE</nobr>
</td>
<td>gdb currently broken (gdb-6.4-r0)
</td>
</tr>
<tr class="bz_minor bz_P2 ">
<td>
<a href="show_bug.cgi?id=170">170</a>
</td>
<td><nobr>min</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>Neo</nobr>
</td>
<td><nobr>laforge@openmoko.org</nobr>
</td>
<td><nobr>NEW</nobr>
</td>
<td><nobr></nobr>
</td>
<td>usbtty: sometimes bogus characters arrive
</td>
</tr>
<tr class="bz_major bz_P2 ">
<td>
<a href="show_bug.cgi?id=171">171</a>
</td>
<td><nobr>maj</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>All</nobr>
</td>
<td><nobr>laforge@openmoko.org</nobr>
</td>
<td><nobr>NEW</nobr>
</td>
<td><nobr></nobr>
</td>
<td>agpsd source code and bitbake rules not in our svn
</td>
</tr>
<tr class="bz_blocker bz_P1 ">
<td>
<a href="show_bug.cgi?id=172">172</a>
</td>
<td><nobr>blo</nobr>
</td>
<td><nobr>P1</nobr>
</td>
<td><nobr>Oth</nobr>
</td>
<td><nobr>tonyguan@fic-sh.com.cn</nobr>
</td>
<td><nobr>RESO</nobr>
</td>
<td><nobr>FIXE</nobr>
</td>
<td>missing openmoko-dialer-window-pin.o breaks build
</td>
</tr>
<tr class="bz_major bz_P2 ">
<td>
<a href="show_bug.cgi?id=173">173</a>
</td>
<td><nobr>maj</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>Oth</nobr>
</td>
<td><nobr>laforge@openmoko.org</nobr>
</td>
<td><nobr>RESO</nobr>
</td>
<td><nobr>FIXE</nobr>
</td>
<td>no NAND partitions due to ID mismatch if using defaults
</td>
</tr>
<tr class="bz_normal bz_P2 ">
<td>
<a href="show_bug.cgi?id=174">174</a>
</td>
<td><nobr>nor</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>Oth</nobr>
</td>
<td><nobr>laforge@openmoko.org</nobr>
</td>
<td><nobr>RESO</nobr>
</td>
<td><nobr>FIXE</nobr>
</td>
<td>defconfig-om-gta01 could use updating
</td>
</tr>
<tr class="bz_normal bz_P2 ">
<td>
<a href="show_bug.cgi?id=175">175</a>
</td>
<td><nobr>nor</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>PC</nobr>
</td>
<td><nobr>mickey@vanille-media.de</nobr>
</td>
<td><nobr>NEW</nobr>
</td>
<td><nobr></nobr>
</td>
<td>MOKO_FINGER_WINDOW has to show_all and then hide to initi...
</td>
</tr>
<tr class="bz_critical bz_P2 ">
<td>
<a href="show_bug.cgi?id=176">176</a>
</td>
<td><nobr>cri</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>PC</nobr>
</td>
<td><nobr>laforge@openmoko.org</nobr>
</td>
<td><nobr>NEW</nobr>
</td>
<td><nobr></nobr>
</td>
<td>libgsmd need a mechanism to avoid dead waiting.
</td>
</tr>
<tr class="bz_normal bz_P2 ">
<td>
<a href="show_bug.cgi?id=177">177</a>
</td>
<td><nobr>nor</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>PC</nobr>
</td>
<td><nobr>mickey@vanille-media.de</nobr>
</td>
<td><nobr>RESO</nobr>
</td>
<td><nobr>FIXE</nobr>
</td>
<td>libmokoui widget functions should return GtkWidget
</td>
</tr>
<tr class="bz_major bz_P2 ">
<td>
<a href="show_bug.cgi?id=178">178</a>
</td>
<td><nobr>maj</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>Neo</nobr>
</td>
<td><nobr>werner@openmoko.org</nobr>
</td>
<td><nobr>RESO</nobr>
</td>
<td><nobr>FIXE</nobr>
</td>
<td>u-boot 'factory reset' option
</td>
</tr>
<tr class="bz_minor bz_P2 ">
<td>
<a href="show_bug.cgi?id=179">179</a>
</td>
<td><nobr>min</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>Neo</nobr>
</td>
<td><nobr>werner@openmoko.org</nobr>
</td>
<td><nobr>RESO</nobr>
</td>
<td><nobr>FIXE</nobr>
</td>
<td>Implement u-boot power-off timer
</td>
</tr>
<tr class="bz_normal bz_P2 ">
<td>
<a href="show_bug.cgi?id=180">180</a>
</td>
<td><nobr>nor</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>Mac</nobr>
</td>
<td><nobr>mickey@vanille-media.de</nobr>
</td>
<td><nobr>NEW</nobr>
</td>
<td><nobr></nobr>
</td>
<td>uboot build broken for EABI
</td>
</tr>
<tr class="bz_wishlist bz_P2 ">
<td>
<a href="show_bug.cgi?id=181">181</a>
</td>
<td><nobr>wis</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>PC</nobr>
</td>
<td><nobr>buglog@lists.openmoko.org</nobr>
</td>
<td><nobr>NEW</nobr>
</td>
<td><nobr></nobr>
</td>
<td>Password Storage/Retrieval Application
</td>
</tr>
<tr class="bz_normal bz_P2 ">
<td>
<a href="show_bug.cgi?id=182">182</a>
</td>
<td><nobr>nor</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>Mac</nobr>
</td>
<td><nobr>mickey@vanille-media.de</nobr>
</td>
<td><nobr>RESO</nobr>
</td>
<td><nobr>FIXE</nobr>
</td>
<td>openmoko-panel-demo-simple hardcodes -Werror
</td>
</tr>
<tr class="bz_normal bz_P2 ">
<td>
<a href="show_bug.cgi?id=183">183</a>
</td>
<td><nobr>nor</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>Mac</nobr>
</td>
<td><nobr>mickey@vanille-media.de</nobr>
</td>
<td><nobr>NEW</nobr>
</td>
<td><nobr></nobr>
</td>
<td>openmoko-simple-mediaplayer missing mkinstalldirs and has...
</td>
</tr>
<tr class="bz_normal bz_P2 ">
<td>
<a href="show_bug.cgi?id=184">184</a>
</td>
<td><nobr>nor</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>Mac</nobr>
</td>
<td><nobr>cj_steven@fic-sh.com.cn</nobr>
</td>
<td><nobr>NEW</nobr>
</td>
<td><nobr></nobr>
</td>
<td>openmoko-mainmenu should link against libmb
</td>
</tr>
<tr class="bz_normal bz_P2 ">
<td>
<a href="show_bug.cgi?id=185">185</a>
</td>
<td><nobr>nor</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>Mac</nobr>
</td>
<td><nobr>thomas@openedhand.com</nobr>
</td>
<td><nobr>RESO</nobr>
</td>
<td><nobr>FIXE</nobr>
</td>
<td>openmoko-dates lacks intltool-update.in
</td>
</tr>
<tr class="bz_normal bz_P2 ">
<td>
<a href="show_bug.cgi?id=186">186</a>
</td>
<td><nobr>nor</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>Mac</nobr>
</td>
<td><nobr>thomas@openedhand.com</nobr>
</td>
<td><nobr>RESO</nobr>
</td>
<td><nobr>FIXE</nobr>
</td>
<td>Fingerbubbles take endless amount of ram and get OOMed
</td>
</tr>
<tr class="bz_normal bz_P2 ">
<td>
<a href="show_bug.cgi?id=187">187</a>
</td>
<td><nobr>nor</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>Mac</nobr>
</td>
<td><nobr>mickey@vanille-media.de</nobr>
</td>
<td><nobr>RESO</nobr>
</td>
<td><nobr>FIXE</nobr>
</td>
<td>src/target/OM-2007/README doesn't mention ipkg patch
</td>
</tr>
<tr class="bz_normal bz_P2 ">
<td>
<a href="show_bug.cgi?id=188">188</a>
</td>
<td><nobr>nor</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>Mac</nobr>
</td>
<td><nobr>mickey@vanille-media.de</nobr>
</td>
<td><nobr>RESO</nobr>
</td>
<td><nobr>FIXE</nobr>
</td>
<td>openmoko-panel-demo fails to build
</td>
</tr>
<tr class="bz_normal bz_P5 ">
<td>
<a href="show_bug.cgi?id=189">189</a>
</td>
<td><nobr>nor</nobr>
</td>
<td><nobr>P5</nobr>
</td>
<td><nobr>Mac</nobr>
</td>
<td><nobr>thomas@openedhand.com</nobr>
</td>
<td><nobr>RESO</nobr>
</td>
<td><nobr>FIXE</nobr>
</td>
          <td>openmoko-dates tries to include non-existent header
</td>
</tr>
<tr class="bz_normal bz_P5 ">
<td>
<a href="show_bug.cgi?id=190">190</a>
</td>
<td><nobr>nor</nobr>
</td>
<td><nobr>P5</nobr>
</td>
<td><nobr>Mac</nobr>
</td>
<td><nobr>thomas@openedhand.com</nobr>
</td>
<td><nobr>RESO</nobr>
</td>
<td><nobr>FIXE</nobr>
</td>
<td>No rule to build dates.desktop
</td>
</tr>
<tr class="bz_enhancement bz_P2 ">
<td>
<a href="show_bug.cgi?id=191">191</a>
</td>
<td><nobr>enh</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>Neo</nobr>
</td>
<td><nobr>sean_chiang@fic.com.tw</nobr>
</td>
<td><nobr>NEW</nobr>
</td>
<td><nobr></nobr>
</td>
<td>investigate if we can set CPU voltage to 1.8V on 200MHz o...
</td>
</tr>
<tr class="bz_enhancement bz_P2 ">
<td>
<a href="show_bug.cgi?id=192">192</a>
</td>
<td><nobr>enh</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>Mac</nobr>
</td>
<td><nobr>buglog@lists.openmoko.org</nobr>
</td>
<td><nobr>NEW</nobr>
</td>
<td><nobr></nobr>
</td>
<td>Graphic bootsplash during userspace sysinit
</td>
</tr>
<tr class="bz_enhancement bz_P3 ">
<td>
<a href="show_bug.cgi?id=193">193</a>
</td>
<td><nobr>enh</nobr>
</td>
<td><nobr>P3</nobr>
</td>
<td><nobr>Neo</nobr>
</td>
<td><nobr>sean_chiang@fic.com.tw</nobr>
</td>
<td><nobr>NEW</nobr>
</td>
<td><nobr></nobr>
</td>
<td>Information about current charging status when AC is online
</td>
</tr>
<tr class="bz_minor bz_P2 ">
<td>
<a href="show_bug.cgi?id=194">194</a>
</td>
<td><nobr>min</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>Neo</nobr>
</td>
<td><nobr>stefan@openmoko.org</nobr>
</td>
<td><nobr>NEW</nobr>
</td>
<td><nobr></nobr>
</td>
<td>s3c2410fb 8bit mode corrupt
</td>
</tr>
<tr class="bz_normal bz_P2 ">
<td>
<a href="show_bug.cgi?id=195">195</a>
</td>
<td><nobr>nor</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>Neo</nobr>
</td>
<td><nobr>stefan@openmoko.org</nobr>
</td>
<td><nobr>NEW</nobr>
</td>
<td><nobr></nobr>
</td>
          <td>passthrough mode (directly use GSM Modem from PC)
</td>
</tr>
<tr class="bz_enhancement bz_P2 ">
<td>
<a href="show_bug.cgi?id=196">196</a>
</td>
<td><nobr>enh</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>Mac</nobr>
</td>
<td><nobr>mickey@vanille-media.de</nobr>
</td>
<td><nobr>NEW</nobr>
</td>
<td><nobr></nobr>
</td>
<td>Merge back fixes to openmoko recipes from OE
</td>
</tr>
<tr class="bz_normal bz_P2 ">
<td>
<a href="show_bug.cgi?id=197">197</a>
</td>
<td><nobr>nor</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>PC</nobr>
</td>
<td><nobr>mickey@vanille-media.de</nobr>
</td>
<td><nobr>NEW</nobr>
</td>
<td><nobr></nobr>
</td>
<td>Make theme suitable for qvga screens.
</td>
</tr>
<tr class="bz_normal bz_P2 ">
<td>
<a href="show_bug.cgi?id=198">198</a>
</td>
<td><nobr>nor</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>Neo</nobr>
</td>
<td><nobr>laforge@openmoko.org</nobr>
</td>
<td><nobr>RESO</nobr>
</td>
<td><nobr>FIXE</nobr>
</td>
<td>Please enable CONFIG_TUN as a module in defconfig-om-gta01
</td>
</tr>
<tr class="bz_major bz_P2 ">
<td>
<a href="show_bug.cgi?id=199">199</a>
</td>
<td><nobr>maj</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>Neo</nobr>
</td>
<td><nobr>sean_mosko@fic.com.tw</nobr>
</td>
<td><nobr>NEW</nobr>
</td>
<td><nobr></nobr>
</td>
<td>We need freely licensed ringtones
</td>
</tr>
<tr class="bz_minor bz_P2 ">
<td>
<a href="show_bug.cgi?id=200">200</a>
</td>
<td><nobr>min</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>PC</nobr>
</td>
<td><nobr>mickey@vanille-media.de</nobr>
</td>
<td><nobr>NEW</nobr>
</td>
<td><nobr></nobr>
</td>
<td>PARALLEL_MAKE seems to not work
</td>
</tr>
</table>
<table class="bz_buglist" cellspacing="0" cellpadding="4" width="100%">
<colgroup>
<col class="bz_id_column">
<col class="bz_severity_column">
<col class="bz_priority_column">
<col class="bz_platform_column">
<col class="bz_owner_column">
<col class="bz_status_column">
<col class="bz_resolution_column">
<col class="bz_summary_column">
</colgroup>
<tr align="left">
<th colspan="1">
<a href="buglist.cgi?short_desc_type=allwordssubstr&short_desc=&long_desc_type=allwordssubstr&long_desc=&bug_file_loc_type=allwordssubstr&bug_file_loc=&bug_status=UNCONFIRMED&bug_status=NEW&bug_status=ASSIGNED&bug_status=REOPENED&bug_status=RESOLVED&bug_status=VERIFIED&bug_status=CLOSED&emailassigned_to1=1&emailtype1=substring&email1=&emailassigned_to2=1&emailreporter2=1&emailcc2=1&emailtype2=substring&email2=&bugidtype=include&bug_id=&votes=&changedin=&chfieldfrom=&chfieldto=Now&chfieldvalue=&field0-0-0=noop&type0-0-0=noop&value0-0-0=&order=bugs.bug_id">ID</a>
</th>
<th colspan="1">
<a href="buglist.cgi?short_desc_type=allwordssubstr&short_desc=&long_desc_type=allwordssubstr&long_desc=&bug_file_loc_type=allwordssubstr&bug_file_loc=&bug_status=UNCONFIRMED&bug_status=NEW&bug_status=ASSIGNED&bug_status=REOPENED&bug_status=RESOLVED&bug_status=VERIFIED&bug_status=CLOSED&emailassigned_to1=1&emailtype1=substring&email1=&emailassigned_to2=1&emailreporter2=1&emailcc2=1&emailtype2=substring&email2=&bugidtype=include&bug_id=&votes=&changedin=&chfieldfrom=&chfieldto=Now&chfieldvalue=&field0-0-0=noop&type0-0-0=noop&value0-0-0=&order=bugs.bug_severity,bugs.bug_id">Sev</a>
</th><th colspan="1">
<a href="buglist.cgi?short_desc_type=allwordssubstr&short_desc=&long_desc_type=allwordssubstr&long_desc=&bug_file_loc_type=allwordssubstr&bug_file_loc=&bug_status=UNCONFIRMED&bug_status=NEW&bug_status=ASSIGNED&bug_status=REOPENED&bug_status=RESOLVED&bug_status=VERIFIED&bug_status=CLOSED&emailassigned_to1=1&emailtype1=substring&email1=&emailassigned_to2=1&emailreporter2=1&emailcc2=1&emailtype2=substring&email2=&bugidtype=include&bug_id=&votes=&changedin=&chfieldfrom=&chfieldto=Now&chfieldvalue=&field0-0-0=noop&type0-0-0=noop&value0-0-0=&order=bugs.priority,bugs.bug_id">Pri</a>
</th><th colspan="1">
<a href="buglist.cgi?short_desc_type=allwordssubstr&short_desc=&long_desc_type=allwordssubstr&long_desc=&bug_file_loc_type=allwordssubstr&bug_file_loc=&bug_status=UNCONFIRMED&bug_status=NEW&bug_status=ASSIGNED&bug_status=REOPENED&bug_status=RESOLVED&bug_status=VERIFIED&bug_status=CLOSED&emailassigned_to1=1&emailtype1=substring&email1=&emailassigned_to2=1&emailreporter2=1&emailcc2=1&emailtype2=substring&email2=&bugidtype=include&bug_id=&votes=&changedin=&chfieldfrom=&chfieldto=Now&chfieldvalue=&field0-0-0=noop&type0-0-0=noop&value0-0-0=&order=bugs.rep_platform,bugs.bug_id">Plt</a>
</th><th colspan="1">
<a href="buglist.cgi?short_desc_type=allwordssubstr&short_desc=&long_desc_type=allwordssubstr&long_desc=&bug_file_loc_type=allwordssubstr&bug_file_loc=&bug_status=UNCONFIRMED&bug_status=NEW&bug_status=ASSIGNED&bug_status=REOPENED&bug_status=RESOLVED&bug_status=VERIFIED&bug_status=CLOSED&emailassigned_to1=1&emailtype1=substring&email1=&emailassigned_to2=1&emailreporter2=1&emailcc2=1&emailtype2=substring&email2=&bugidtype=include&bug_id=&votes=&changedin=&chfieldfrom=&chfieldto=Now&chfieldvalue=&field0-0-0=noop&type0-0-0=noop&value0-0-0=&order=map_assigned_to.login_name,bugs.bug_id">Owner</a>
</th><th colspan="1">
<a href="buglist.cgi?short_desc_type=allwordssubstr&short_desc=&long_desc_type=allwordssubstr&long_desc=&bug_file_loc_type=allwordssubstr&bug_file_loc=&bug_status=UNCONFIRMED&bug_status=NEW&bug_status=ASSIGNED&bug_status=REOPENED&bug_status=RESOLVED&bug_status=VERIFIED&bug_status=CLOSED&emailassigned_to1=1&emailtype1=substring&email1=&emailassigned_to2=1&emailreporter2=1&emailcc2=1&emailtype2=substring&email2=&bugidtype=include&bug_id=&votes=&changedin=&chfieldfrom=&chfieldto=Now&chfieldvalue=&field0-0-0=noop&type0-0-0=noop&value0-0-0=&order=bugs.bug_status,bugs.bug_id">State</a>
</th><th colspan="1">
<a href="buglist.cgi?short_desc_type=allwordssubstr&short_desc=&long_desc_type=allwordssubstr&long_desc=&bug_file_loc_type=allwordssubstr&bug_file_loc=&bug_status=UNCONFIRMED&bug_status=NEW&bug_status=ASSIGNED&bug_status=REOPENED&bug_status=RESOLVED&bug_status=VERIFIED&bug_status=CLOSED&emailassigned_to1=1&emailtype1=substring&email1=&emailassigned_to2=1&emailreporter2=1&emailcc2=1&emailtype2=substring&email2=&bugidtype=include&bug_id=&votes=&changedin=&chfieldfrom=&chfieldto=Now&chfieldvalue=&field0-0-0=noop&type0-0-0=noop&value0-0-0=&order=bugs.resolution,bugs.bug_id">Result</a>
</th><th colspan="1">
<a href="buglist.cgi?short_desc_type=allwordssubstr&short_desc=&long_desc_type=allwordssubstr&long_desc=&bug_file_loc_type=allwordssubstr&bug_file_loc=&bug_status=UNCONFIRMED&bug_status=NEW&bug_status=ASSIGNED&bug_status=REOPENED&bug_status=RESOLVED&bug_status=VERIFIED&bug_status=CLOSED&emailassigned_to1=1&emailtype1=substring&email1=&emailassigned_to2=1&emailreporter2=1&emailcc2=1&emailtype2=substring&email2=&bugidtype=include&bug_id=&votes=&changedin=&chfieldfrom=&chfieldto=Now&chfieldvalue=&field0-0-0=noop&type0-0-0=noop&value0-0-0=&order=bugs.short_desc,bugs.bug_id">Summary</a>
</th>
</tr>
<tr class="bz_normal bz_P2 ">
<td>
<a href="show_bug.cgi?id=201">201</a>
</td>
<td><nobr>nor</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>Neo</nobr>
</td>
<td><nobr>laforge@openmoko.org</nobr>
</td>
<td><nobr>NEW</nobr>
</td>
<td><nobr></nobr>
</td>
<td>Use TEXT_BASE 0x37f80000 in u-boot on GTA01Bv2 and higher
</td>
</tr>
<tr class="bz_normal bz_P2 ">
<td>
<a href="show_bug.cgi?id=202">202</a>
</td>
<td><nobr>nor</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>Neo</nobr>
</td>
<td><nobr>werner@openmoko.org</nobr>
</td>
<td><nobr>RESO</nobr>
</td>
<td><nobr>FIXE</nobr>
</td>
<td>Start using NAND hardware ECC support
</td>
</tr>
<tr class="bz_major bz_P2 ">
<td>
<a href="show_bug.cgi?id=203">203</a>
</td>
<td><nobr>maj</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>All</nobr>
</td>
<td><nobr>mickey@vanille-media.de</nobr>
</td>
<td><nobr>RESO</nobr>
</td>
<td><nobr>FIXE</nobr>
</td>
<td>fix the web site: http://openmoko.com/
</td>
</tr>
<tr class="bz_minor bz_P2 ">
<td>
<a href="show_bug.cgi?id=204">204</a>
</td>
<td><nobr>min</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>All</nobr>
</td>
<td><nobr>buglog@lists.openmoko.org</nobr>
</td>
<td><nobr>NEW</nobr>
</td>
<td><nobr></nobr>
</td>
<td>Fatal error in Special:Newimages
</td>
</tr>
<tr class="bz_minor bz_P2 ">
<td>
<a href="show_bug.cgi?id=205">205</a>
</td>
<td><nobr>min</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>Neo</nobr>
</td>
<td><nobr>laforge@openmoko.org</nobr>
</td>
<td><nobr>NEW</nobr>
</td>
<td><nobr></nobr>
</td>
<td>add code to u-boot to query hardware revision and serial ...
</td>
</tr>
<tr class="bz_major bz_P2 ">
<td>
<a href="show_bug.cgi?id=206">206</a>
</td>
<td><nobr>maj</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>Neo</nobr>
</td>
<td><nobr>laforge@openmoko.org</nobr>
</td>
<td><nobr>NEW</nobr>
</td>
<td><nobr></nobr>
</td>
<td>Disallow setting of overvoltage via pcf50606 kernel driver
</td>
</tr>
<tr class="bz_major bz_P2 ">
<td>
<a href="show_bug.cgi?id=207">207</a>
</td>
<td><nobr>maj</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>Neo</nobr>
</td>
<td><nobr>laforge@openmoko.org</nobr>
</td>
<td><nobr>NEW</nobr>
</td>
<td><nobr></nobr>
</td>
<td>DFU mode should only be enabled when in "911 key" mode
</td>
</tr>
<tr class="bz_minor bz_P2 ">
<td>
<a href="show_bug.cgi?id=208">208</a>
</td>
<td><nobr>min</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>PC</nobr>
</td>
<td><nobr>laforge@openmoko.org</nobr>
</td>
<td><nobr>NEW</nobr>
</td>
<td><nobr></nobr>
</td>
<td>u-boot DFU upload broken
</td>
</tr>
<tr class="bz_normal bz_P2 ">
<td>
<a href="show_bug.cgi?id=209">209</a>
</td>
<td><nobr>nor</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>Neo</nobr>
</td>
<td><nobr>laforge@openmoko.org</nobr>
</td>
<td><nobr>NEW</nobr>
</td>
<td><nobr></nobr>
</td>
<td>u-boot DFU needs to block console access while in DFU mode
</td>
</tr>
<tr class="bz_major bz_P2 ">
<td>
<a href="show_bug.cgi?id=210">210</a>
</td>
<td><nobr>maj</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>Oth</nobr>
</td>
<td><nobr>henryk@ploetzli.ch</nobr>
</td>
<td><nobr>ASSI</nobr>
</td>
<td><nobr></nobr>
</td>
<td>"now" causes frequent rebuilds and fills disks
</td>
</tr>
<tr class="bz_normal bz_P2 ">
<td>
<a href="show_bug.cgi?id=211">211</a>
</td>
<td><nobr>nor</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>All</nobr>
</td>
<td><nobr>buglog@lists.openmoko.org</nobr>
</td>
<td><nobr>RESO</nobr>
</td>
<td><nobr>FIXE</nobr>
</td>
<td>sjf2410-linux-native.bb has do_deploy in the wrong location
</td>
</tr>
<tr class="bz_normal bz_P2 ">
<td>
<a href="show_bug.cgi?id=212">212</a>
</td>
<td><nobr>nor</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>Oth</nobr>
</td>
<td><nobr>werner@openmoko.org</nobr>
</td>
<td><nobr>NEW</nobr>
</td>
<td><nobr></nobr>
</td>
<td>Charging seems completely broken
</td>
</tr>
<tr class="bz_normal bz_P2 ">
<td>
<a href="show_bug.cgi?id=213">213</a>
</td>
<td><nobr>nor</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>Oth</nobr>
</td>
<td><nobr>mickey@vanille-media.de</nobr>
</td>
<td><nobr>RESO</nobr>
</td>
<td><nobr>FIXE</nobr>
</td>
<td>openmoko-dates-0.1+svnnow fails certificate check
</td>
</tr>
<tr class="bz_normal bz_P2 ">
<td>
<a href="show_bug.cgi?id=214">214</a>
</td>
<td><nobr>nor</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>PC</nobr>
</td>
<td><nobr>mickey@vanille-media.de</nobr>
</td>
<td><nobr>RESO</nobr>
</td>
<td><nobr>FIXE</nobr>
</td>
<td>Add CVS_TARBALL_STASH for missing upstream sources
</td>
</tr>
<tr class="bz_blocker bz_P2 ">
<td>
<a href="show_bug.cgi?id=215">215</a>
</td>
<td><nobr>blo</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>All</nobr>
</td>
<td><nobr>mickey@vanille-media.de</nobr>
</td>
<td><nobr>RESO</nobr>
</td>
<td><nobr>FIXE</nobr>
</td>
<td>fingerwheel crashes mainmenu when touching the black part
</td>
</tr>
<tr class="bz_blocker bz_P3 ">
<td>
<a href="show_bug.cgi?id=216">216</a>
</td>
<td><nobr>blo</nobr>
</td>
<td><nobr>P3</nobr>
</td>
<td><nobr>Neo</nobr>
</td>
<td><nobr>thomas@openedhand.com</nobr>
</td>
<td><nobr>RESO</nobr>
</td>
<td><nobr>DUPL</nobr>
</td>
<td>contacts crashes when tying to enter import widget
</td>
</tr>
<tr class="bz_normal bz_P2 ">
<td>
<a href="show_bug.cgi?id=217">217</a>
</td>
<td><nobr>nor</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>Neo</nobr>
</td>
<td><nobr>werner@openmoko.org</nobr>
</td>
<td><nobr>NEW</nobr>
</td>
<td><nobr></nobr>
</td>
<td>Implement NAND OTP area read/write as u-boot commands
</td>
</tr>
<tr class="bz_enhancement bz_P2 ">
<td>
<a href="show_bug.cgi?id=218">218</a>
</td>
<td><nobr>enh</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>Neo</nobr>
</td>
<td><nobr>laforge@openmoko.org</nobr>
</td>
<td><nobr>NEW</nobr>
</td>
<td><nobr></nobr>
</td>
<td>Distinguish stylus from finger via tslib
</td>
</tr>
<tr class="bz_blocker bz_P2 ">
<td>
<a href="show_bug.cgi?id=219">219</a>
</td>
<td><nobr>blo</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>All</nobr>
</td>
<td><nobr>tonyguan@fic-sh.com.cn</nobr>
</td>
<td><nobr>RESO</nobr>
</td>
<td><nobr>FIXE</nobr>
</td>
<td>openmoko-dialer r1159 fails to compile
</td>
</tr>
<tr class="bz_normal bz_P2 ">
<td>
<a href="show_bug.cgi?id=220">220</a>
</td>
<td><nobr>nor</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>PC</nobr>
</td>
<td><nobr>laforge@openmoko.org</nobr>
</td>
<td><nobr>RESO</nobr>
</td>
<td><nobr>FIXE</nobr>
</td>
<td>libgsmd_device.c is missing
</td>
</tr>
<tr class="bz_blocker bz_P2 ">
<td>
<a href="show_bug.cgi?id=221">221</a>
</td>
<td><nobr>blo</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>Mac</nobr>
</td>
<td><nobr>thomas@openedhand.com</nobr>
</td>
<td><nobr>RESO</nobr>
</td>
<td><nobr>FIXE</nobr>
</td>
<td>Can't add new contacts via the gui
</td>
</tr>
<tr class="bz_normal bz_P2 ">
<td>
<a href="show_bug.cgi?id=222">222</a>
</td>
<td><nobr>nor</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>Mac</nobr>
</td>
<td><nobr>thomas@openedhand.com</nobr>
</td>
<td><nobr>RESO</nobr>
</td>
<td><nobr>WORK</nobr>
</td>
<td>Can't add new events
</td>
</tr>
<tr class="bz_normal bz_P2 ">
<td>
<a href="show_bug.cgi?id=223">223</a>
</td>
<td><nobr>nor</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>Mac</nobr>
</td>
<td><nobr>thomas@openedhand.com</nobr>
</td>
<td><nobr>NEW</nobr>
</td>
<td><nobr></nobr>
</td>
<td>weekview only displays half the week
</td>
</tr>
<tr class="bz_normal bz_P2 ">
<td>
<a href="show_bug.cgi?id=224">224</a>
</td>
<td><nobr>nor</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>PC</nobr>
</td>
<td><nobr>laforge@openmoko.org</nobr>
</td>
<td><nobr>RESO</nobr>
</td>
<td><nobr>FIXE</nobr>
</td>
<td>call to uboot-mkimage requires ${STAGING_BINDIR} prefix
</td>
</tr>
<tr class="bz_normal bz_P2 ">
<td>
<a href="show_bug.cgi?id=225">225</a>
</td>
<td><nobr>nor</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>PC</nobr>
</td>
<td><nobr>mickey@vanille-media.de</nobr>
</td>
<td><nobr>RESO</nobr>
</td>
<td><nobr>FIXE</nobr>
</td>
<td>Fix ordering of do_deploy in uboot to be compatible with ...
</td>
</tr>
<tr class="bz_normal bz_P2 ">
<td>
<a href="show_bug.cgi?id=226">226</a>
</td>
<td><nobr>nor</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>PC</nobr>
</td>
<td><nobr>buglog@lists.openmoko.org</nobr>
</td>
<td><nobr>REOP</nobr>
</td>
<td><nobr></nobr>
</td>
<td>dfu-util-native do_deploy tries to install from wrong sou...
</td>
</tr>
<tr class="bz_normal bz_P2 ">
<td>
<a href="show_bug.cgi?id=227">227</a>
</td>
<td><nobr>nor</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>PC</nobr>
</td>
<td><nobr>mickey@vanille-media.de</nobr>
</td>
<td><nobr>NEW</nobr>
</td>
<td><nobr></nobr>
</td>
<td>Add openmoko-mirrors.bbclass and enable use of it
</td>
</tr>
<tr class="bz_blocker bz_P2 ">
<td>
<a href="show_bug.cgi?id=228">228</a>
</td>
<td><nobr>blo</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>Neo</nobr>
</td>
<td><nobr>mickey@vanille-media.de</nobr>
</td>
<td><nobr>NEW</nobr>
</td>
<td><nobr></nobr>
</td>
<td>openmoko applications(contacts, appmanager ...) easily c...
</td>
</tr>
<tr class="bz_normal bz_P2 ">
<td>
<a href="show_bug.cgi?id=229">229</a>
</td>
<td><nobr>nor</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>PC</nobr>
</td>
<td><nobr>davewu01@seed.net.tw</nobr>
</td>
<td><nobr>RESO</nobr>
</td>
<td><nobr>FIXE</nobr>
</td>
<td>outgoing call/incoming call/talking status should be more...
</td>
</tr>
<tr class="bz_trivial bz_P2 ">
<td>
<a href="show_bug.cgi?id=230">230</a>
</td>
<td><nobr>tri</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>PC</nobr>
</td>
<td><nobr>laforge@openmoko.org</nobr>
</td>
<td><nobr>RESO</nobr>
</td>
<td><nobr>FIXE</nobr>
</td>
<td>Use the toolchain speified in $CROSS_COMPILE in u-boot.
</td>
</tr>
<tr class="bz_minor bz_P2 ">
<td>
<a href="show_bug.cgi?id=231">231</a>
</td>
<td><nobr>min</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>Neo</nobr>
</td>
<td><nobr>werner@openmoko.org</nobr>
</td>
<td><nobr>NEW</nobr>
</td>
<td><nobr></nobr>
</td>
<td>switch display backlight GPIO to "output, off" when suspe...
</td>
</tr>
<tr class="bz_normal bz_P2 ">
<td>
<a href="show_bug.cgi?id=232">232</a>
</td>
<td><nobr>nor</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>Oth</nobr>
</td>
<td><nobr>buglog@lists.openmoko.org</nobr>
</td>
<td><nobr>NEW</nobr>
</td>
<td><nobr></nobr>
</td>
<td>n-plicate buglog mails
</td>
</tr>
<tr class="bz_critical bz_P2 ">
<td>
<a href="show_bug.cgi?id=233">233</a>
</td>
<td><nobr>cri</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>Neo</nobr>
</td>
<td><nobr>werner@openmoko.org</nobr>
</td>
<td><nobr>RESO</nobr>
</td>
<td><nobr>FIXE</nobr>
</td>
<td>power-off timer should be halted in DFU mode
</td>
</tr>
<tr class="bz_major bz_P2 ">
<td>
<a href="show_bug.cgi?id=234">234</a>
</td>
<td><nobr>maj</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>Neo</nobr>
</td>
<td><nobr>werner@openmoko.org</nobr>
</td>
<td><nobr>NEW</nobr>
</td>
<td><nobr></nobr>
</td>
<td>check for bad blocks in first _and_ second page of each b...
</td>
</tr>
<tr class="bz_normal bz_P2 ">
<td>
<a href="show_bug.cgi?id=235">235</a>
</td>
<td><nobr>nor</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>PC</nobr>
</td>
<td><nobr>laforge@openmoko.org</nobr>
</td>
<td><nobr>RESO</nobr>
</td>
<td><nobr>FIXE</nobr>
</td>
<td>Deploy openocd-native, not openocd, and make openocd-nati...
</td>
</tr>
<tr class="bz_critical bz_P2 ">
<td>
<a href="show_bug.cgi?id=236">236</a>
</td>
<td><nobr>cri</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>PC</nobr>
</td>
<td><nobr>mickey@vanille-media.de</nobr>
</td>
<td><nobr>NEW</nobr>
</td>
<td><nobr></nobr>
</td>
<td>Close moko_dialog_window several times, moko_stylus_demo ...
</td>
</tr>
<tr class="bz_normal bz_P2 ">
<td>
<a href="show_bug.cgi?id=237">237</a>
</td>
<td><nobr>nor</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>PC</nobr>
</td>
<td><nobr>mickey@vanille-media.de</nobr>
</td>
<td><nobr>RESO</nobr>
</td>
<td><nobr>FIXE</nobr>
</td>
<td>Fix remaining https urls in bitbake recipes.
</td>
</tr>
<tr class="bz_normal bz_P2 ">
<td>
<a href="show_bug.cgi?id=238">238</a>
</td>
<td><nobr>nor</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>Mac</nobr>
</td>
<td><nobr>mickey@vanille-media.de</nobr>
</td>
<td><nobr>RESO</nobr>
</td>
<td><nobr>LATE</nobr>
</td>
<td>manual test bug
</td>
</tr>
<tr class="bz_normal bz_P2 ">
<td>
<a href="show_bug.cgi?id=239">239</a>
</td>
<td><nobr>nor</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>Neo</nobr>
</td>
<td><nobr>mickey@vanille-media.de</nobr>
</td>
<td><nobr>RESO</nobr>
</td>
<td><nobr>LATE</nobr>
</td>
<td>foo
</td>
</tr>
<tr class="bz_normal bz_P2 ">
<td>
<a href="show_bug.cgi?id=240">240</a>
</td>
<td><nobr>nor</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>Neo</nobr>
</td>
<td><nobr>mickey@vanille-media.de</nobr>
</td>
<td><nobr>RESO</nobr>
</td>
<td><nobr>INVA</nobr>
</td>
<td>broken-1.0-r0-do_fetch
</td>
</tr>
<tr class="bz_normal bz_P2 ">
<td>
<a href="show_bug.cgi?id=241">241</a>
</td>
<td><nobr>nor</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>Neo</nobr>
</td>
<td><nobr>mickey@vanille-media.de</nobr>
</td>
<td><nobr>CLOS</nobr>
</td>
<td><nobr>LATE</nobr>
</td>
<td>broken-1.0-r0-do_fetch
</td>
</tr>
<tr class="bz_normal bz_P2 ">
<td>
<a href="show_bug.cgi?id=242">242</a>
</td>
<td><nobr>nor</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>Neo</nobr>
</td>
<td><nobr>mickey@vanille-media.de</nobr>
</td>
<td><nobr>RESO</nobr>
</td>
<td><nobr>INVA</nobr>
</td>
<td>broken-1.0-r0-do_compile
</td>
</tr>
<tr class="bz_normal bz_P2 ">
<td>
<a href="show_bug.cgi?id=243">243</a>
</td>
<td><nobr>nor</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>Neo</nobr>
</td>
<td><nobr>mickey@vanille-media.de</nobr>
</td>
<td><nobr>RESO</nobr>
</td>
<td><nobr>INVA</nobr>
</td>
<td>broken-1.0-r0-do_configure
</td>
</tr>
<tr class="bz_major bz_P2 ">
<td>
<a href="show_bug.cgi?id=244">244</a>
</td>
<td><nobr>maj</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>PC</nobr>
</td>
<td><nobr>mickey@vanille-media.de</nobr>
</td>
<td><nobr>NEW</nobr>
</td>
<td><nobr></nobr>
</td>
<td>I can't build Xorg7.1 from MokoMakefile
</td>
</tr>
<tr class="bz_normal bz_P2 ">
<td>
<a href="show_bug.cgi?id=245">245</a>
</td>
<td><nobr>nor</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>Neo</nobr>
</td>
<td><nobr>laforge@openmoko.org</nobr>
</td>
<td><nobr>RESO</nobr>
</td>
<td><nobr>FIXE</nobr>
</td>
<td>Neo crashes when writing large amounts of data to SD
</td>
</tr>
<tr class="bz_normal bz_P2 ">
<td>
<a href="show_bug.cgi?id=246">246</a>
</td>
<td><nobr>nor</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>PC</nobr>
</td>
<td><nobr>sean_chiang@fic.com.tw</nobr>
</td>
<td><nobr>NEW</nobr>
</td>
<td><nobr></nobr>
</td>
<td>Debug board needs to be recognized by mainline linux kernel.
</td>
</tr>
<tr class="bz_major bz_P2 ">
<td>
<a href="show_bug.cgi?id=247">247</a>
</td>
<td><nobr>maj</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>Neo</nobr>
</td>
<td><nobr>thomas@openedhand.com</nobr>
</td>
<td><nobr>RESO</nobr>
</td>
<td><nobr>FIXE</nobr>
</td>
<td>openmoko-dates svn rev. 335 does no longer build
</td>
</tr>
<tr class="bz_normal bz_P2 ">
<td>
<a href="show_bug.cgi?id=248">248</a>
</td>
<td><nobr>nor</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>PC</nobr>
</td>
<td><nobr>buglog@lists.openmoko.org</nobr>
</td>
<td><nobr>NEW</nobr>
</td>
<td><nobr></nobr>
</td>
<td>Buttons disappear under zoom
</td>
</tr>
<tr class="bz_enhancement bz_P2 ">
<td>
<a href="show_bug.cgi?id=249">249</a>
</td>
<td><nobr>enh</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>All</nobr>
</td>
<td><nobr>laforge@openmoko.org</nobr>
</td>
<td><nobr>NEW</nobr>
</td>
<td><nobr></nobr>
</td>
<td>add command to print gsmd version number
</td>
</tr>
<tr class="bz_normal bz_P2 ">
<td>
<a href="show_bug.cgi?id=250">250</a>
</td>
<td><nobr>nor</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>Neo</nobr>
</td>
<td><nobr>mickey@vanille-media.de</nobr>
</td>
<td><nobr>RESO</nobr>
</td>
<td><nobr>INVA</nobr>
</td>
<td>broken-1.0-r0-do_compile
</td>
</tr>
<tr class="bz_normal bz_P2 ">
<td>
<a href="show_bug.cgi?id=251">251</a>
</td>
<td><nobr>nor</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>Neo</nobr>
</td>
<td><nobr>mickey@vanille-media.de</nobr>
</td>
<td><nobr>RESO</nobr>
</td>
<td><nobr>INVA</nobr>
</td>
<td>broken-1.0-r0-do_compile
</td>
</tr>
<tr class="bz_normal bz_P2 ">
<td>
<a href="show_bug.cgi?id=252">252</a>
</td>
<td><nobr>nor</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>Neo</nobr>
</td>
<td><nobr>mickey@vanille-media.de</nobr>
</td>
<td><nobr>REOP</nobr>
</td>
<td><nobr></nobr>
</td>
<td>openmoko-devel-image-1.0-r0-do_rootfs
</td>
</tr>
<tr class="bz_normal bz_P2 ">
<td>
<a href="show_bug.cgi?id=253">253</a>
</td>
<td><nobr>nor</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>Neo</nobr>
</td>
<td><nobr>mickey@vanille-media.de</nobr>
</td>
<td><nobr>NEW</nobr>
</td>
<td><nobr></nobr>
</td>
<td>Mount /tmp as tmpfs
</td>
</tr>
<tr class="bz_normal bz_P2 ">
<td>
<a href="show_bug.cgi?id=254">254</a>
</td>
<td><nobr>nor</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>Oth</nobr>
</td>
<td><nobr>mickey@vanille-media.de</nobr>
</td>
<td><nobr>NEW</nobr>
</td>
<td><nobr></nobr>
</td>
<td>bug with "patch" on arklinux 2006.1??
</td>
</tr>
<tr class="bz_normal bz_P2 ">
<td>
<a href="show_bug.cgi?id=255">255</a>
</td>
<td><nobr>nor</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>Neo</nobr>
</td>
<td><nobr>tony_tu@fiwin.com.tw</nobr>
</td>
<td><nobr>NEW</nobr>
</td>
<td><nobr></nobr>
</td>
<td>battery voltage scale is not correct
</td>
</tr>
<tr class="bz_critical bz_P2 ">
<td>
<a href="show_bug.cgi?id=256">256</a>
</td>
<td><nobr>cri</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>PC</nobr>
</td>
<td><nobr>sean_chiang@fic.com.tw</nobr>
</td>
<td><nobr>NEW</nobr>
</td>
<td><nobr></nobr>
</td>
<td>GSM Modem doesn't seem to work on some devices
</td>
</tr>
<tr class="bz_normal bz_P2 ">
<td>
<a href="show_bug.cgi?id=257">257</a>
</td>
<td><nobr>nor</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>Oth</nobr>
</td>
<td><nobr>sean_chiang@fic.com.tw</nobr>
</td>
<td><nobr>NEW</nobr>
</td>
<td><nobr></nobr>
</td>
<td>AUX button sticking
</td>
</tr>
<tr class="bz_major bz_P2 ">
<td>
<a href="show_bug.cgi?id=258">258</a>
</td>
<td><nobr>maj</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>All</nobr>
</td>
<td><nobr>cj_steven@fic-sh.com.cn</nobr>
</td>
<td><nobr>NEW</nobr>
</td>
<td><nobr></nobr>
</td>
<td>Main Menu needs to have Single Instance functionality
</td>
</tr>
<tr class="bz_normal bz_P2 ">
<td>
<a href="show_bug.cgi?id=259">259</a>
</td>
<td><nobr>nor</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>Neo</nobr>
</td>
<td><nobr>stefan@openmoko.org</nobr>
</td>
<td><nobr>NEW</nobr>
</td>
<td><nobr></nobr>
</td>
<td>implement 500mA charging in u-boot
</td>
</tr>
<tr class="bz_normal bz_P2 ">
<td>
<a href="show_bug.cgi?id=260">260</a>
</td>
<td><nobr>nor</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>Neo</nobr>
</td>
<td><nobr>stefan@openmoko.org</nobr>
</td>
<td><nobr>NEW</nobr>
</td>
<td><nobr></nobr>
</td>
<td>implement 100mA charging in Linux
</td>
</tr>
<tr class="bz_normal bz_P2 ">
<td>
<a href="show_bug.cgi?id=261">261</a>
</td>
<td><nobr>nor</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>Neo</nobr>
</td>
<td><nobr>stefan@openmoko.org</nobr>
</td>
<td><nobr>NEW</nobr>
</td>
<td><nobr></nobr>
</td>
<td>Implement 500mA charging using wall-outlet charger
</td>
</tr>
<tr class="bz_enhancement bz_P2 ">
<td>
<a href="show_bug.cgi?id=262">262</a>
</td>
<td><nobr>enh</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>Neo</nobr>
</td>
<td><nobr>buglog@lists.openmoko.org</nobr>
</td>
<td><nobr>NEW</nobr>
</td>
<td><nobr></nobr>
</td>
<td>Indicate different charging mode in battery applet
</td>
</tr>
<tr class="bz_blocker bz_P2 ">
<td>
<a href="show_bug.cgi?id=263">263</a>
</td>
<td><nobr>blo</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>Neo</nobr>
</td>
<td><nobr>laforge@openmoko.org</nobr>
</td>
<td><nobr>RESO</nobr>
</td>
<td><nobr>FIXE</nobr>
</td>
<td>gsmd doesn't receive AT reply from the modem properly.
</td>
</tr>
<tr class="bz_normal bz_P2 ">
<td>
<a href="show_bug.cgi?id=264">264</a>
</td>
<td><nobr>nor</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>PC</nobr>
</td>
<td><nobr>mickey@vanille-media.de</nobr>
</td>
<td><nobr>NEW</nobr>
</td>
<td><nobr></nobr>
</td>
<td>package libelf-0.8.6-r0: task do_populate_sysroot: failed
</td>
</tr>
<tr class="bz_normal bz_P2 ">
<td>
<a href="show_bug.cgi?id=265">265</a>
</td>
<td><nobr>nor</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>PC</nobr>
</td>
<td><nobr>mickey@vanille-media.de</nobr>
</td>
<td><nobr>NEW</nobr>
</td>
<td><nobr></nobr>
</td>
<td>MokoMakefile: perl-native fix
</td>
</tr>
<tr class="bz_normal bz_P2 ">
<td>
<a href="show_bug.cgi?id=266">266</a>
</td>
<td><nobr>nor</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>PC</nobr>
</td>
<td><nobr>mickey@vanille-media.de</nobr>
</td>
<td><nobr>RESO</nobr>
</td>
<td><nobr>FIXE</nobr>
</td>
<td>ftdi-eeprom-native missing confuse-native dependency
</td>
</tr>
<tr class="bz_enhancement bz_P4 ">
<td>
<a href="show_bug.cgi?id=267">267</a>
</td>
<td><nobr>enh</nobr>
</td>
<td><nobr>P4</nobr>
</td>
<td><nobr>PC</nobr>
</td>
<td><nobr>buglog@lists.openmoko.org</nobr>
</td>
<td><nobr>RESO</nobr>
</td>
<td><nobr>FIXE</nobr>
</td>
<td>internal function duplicates strstr(3)
</td>
</tr>
<tr class="bz_normal bz_P2 ">
<td>
<a href="show_bug.cgi?id=268">268</a>
</td>
<td><nobr>nor</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>PC</nobr>
</td>
<td><nobr>buglog@lists.openmoko.org</nobr>
</td>
<td><nobr>NEW</nobr>
</td>
<td><nobr></nobr>
</td>
<td>openmoko-today crashes when one of the buttons is pressed
</td>
</tr>
<tr class="bz_normal bz_P2 ">
<td>
<a href="show_bug.cgi?id=269">269</a>
</td>
<td><nobr>nor</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>Neo</nobr>
</td>
<td><nobr>mickey@vanille-media.de</nobr>
</td>
<td><nobr>RESO</nobr>
</td>
<td><nobr>FIXE</nobr>
</td>
<td>openmoko-contacts-0.1+svnnow-r3_0_200703151745-do_unpack
</td>
</tr>
<tr class="bz_normal bz_P2 ">
<td>
<a href="show_bug.cgi?id=270">270</a>
</td>
<td><nobr>nor</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>Neo</nobr>
</td>
<td><nobr>mickey@vanille-media.de</nobr>
</td>
<td><nobr>NEW</nobr>
</td>
<td><nobr></nobr>
</td>
<td>does our xserver need security updates?
</td>
</tr>
<tr class="bz_normal bz_P2 ">
<td>
<a href="show_bug.cgi?id=271">271</a>
</td>
<td><nobr>nor</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>Neo</nobr>
</td>
<td><nobr>laforge@openmoko.org</nobr>
</td>
<td><nobr>NEW</nobr>
</td>
<td><nobr></nobr>
</td>
<td>It would be nice if ppp was supported by kernel
</td>
</tr>
<tr class="bz_normal bz_P2 ">
<td>
<a href="show_bug.cgi?id=272">272</a>
</td>
<td><nobr>nor</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>Neo</nobr>
</td>
<td><nobr>mickey@vanille-media.de</nobr>
</td>
<td><nobr>RESO</nobr>
</td>
<td><nobr>FIXE</nobr>
</td>
<td>openmoko-contacts-0.1+svnnow-r3_0_200703152250-do_unpack
</td>
</tr>
<tr class="bz_normal bz_P2 ">
<td>
<a href="show_bug.cgi?id=273">273</a>
</td>
<td><nobr>nor</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>Neo</nobr>
</td>
<td><nobr>mickey@vanille-media.de</nobr>
</td>
<td><nobr>RESO</nobr>
</td>
<td><nobr>FIXE</nobr>
</td>
<td>openmoko-contacts-0.1+svnnow-r3_0_200703160254-do_unpack
</td>
</tr>
<tr class="bz_normal bz_P2 ">
<td>
<a href="show_bug.cgi?id=274">274</a>
</td>
<td><nobr>nor</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>Neo</nobr>
</td>
<td><nobr>mickey@vanille-media.de</nobr>
</td>
<td><nobr>RESO</nobr>
</td>
<td><nobr>FIXE</nobr>
</td>
<td>openmoko-contacts-0.1+svnnow-r3_0_200703160321-do_unpack
</td>
</tr>
<tr class="bz_normal bz_P2 ">
<td>
<a href="show_bug.cgi?id=275">275</a>
</td>
<td><nobr>nor</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>Neo</nobr>
</td>
<td><nobr>mickey@vanille-media.de</nobr>
</td>
<td><nobr>RESO</nobr>
</td>
<td><nobr>FIXE</nobr>
</td>
<td>openmoko-contacts-0.1+svnnow-r3_0_200703160350-do_unpack
</td>
</tr>
<tr class="bz_normal bz_P3 ">
<td>
<a href="show_bug.cgi?id=276">276</a>
</td>
<td><nobr>nor</nobr>
</td>
<td><nobr>P3</nobr>
</td>
<td><nobr>Neo</nobr>
</td>
<td><nobr>songcw@fic-sh.com.cn</nobr>
</td>
<td><nobr>NEW</nobr>
</td>
<td><nobr></nobr>
</td>
<td>The open file window is too ugly
</td>
</tr>
<tr class="bz_normal bz_P2 ">
<td>
<a href="show_bug.cgi?id=277">277</a>
</td>
<td><nobr>nor</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>Neo</nobr>
</td>
<td><nobr>mickey@vanille-media.de</nobr>
</td>
<td><nobr>RESO</nobr>
</td>
<td><nobr>FIXE</nobr>
</td>
<td>openmoko-contacts-0.1+svnnow-r3_0_200703160712-do_unpack
</td>
</tr>
<tr class="bz_normal bz_P2 ">
<td>
<a href="show_bug.cgi?id=278">278</a>
</td>
<td><nobr>nor</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>Neo</nobr>
</td>
<td><nobr>mickey@vanille-media.de</nobr>
</td>
<td><nobr>RESO</nobr>
</td>
<td><nobr>FIXE</nobr>
</td>
<td>openmoko-contacts-0.1+svnnow-r3_0_200703160805-do_unpack
</td>
</tr>
<tr class="bz_normal bz_P2 ">
<td>
<a href="show_bug.cgi?id=279">279</a>
</td>
<td><nobr>nor</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>Neo</nobr>
</td>
<td><nobr>buglog@lists.openmoko.org</nobr>
</td>
<td><nobr>RESO</nobr>
</td>
<td><nobr>FIXE</nobr>
</td>
<td>Appmanager crush when install packages
</td>
</tr>
<tr class="bz_normal bz_P2 ">
<td>
<a href="show_bug.cgi?id=280">280</a>
</td>
<td><nobr>nor</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>Neo</nobr>
</td>
<td><nobr>songcw@fic-sh.com.cn</nobr>
</td>
<td><nobr>NEW</nobr>
</td>
<td><nobr></nobr>
</td>
<td>openmoko-appmanager not refresh the packages list after r...
</td>
</tr>
<tr class="bz_normal bz_P3 ">
<td>
<a href="show_bug.cgi?id=281">281</a>
</td>
<td><nobr>nor</nobr>
</td>
<td><nobr>P3</nobr>
</td>
<td><nobr>PC</nobr>
</td>
<td><nobr>buglog@lists.openmoko.org</nobr>
</td>
<td><nobr>NEW</nobr>
</td>
<td><nobr></nobr>
</td>
<td>implicit declaration of function `strdup'
</td>
</tr>
<tr class="bz_normal bz_P2 ">
<td>
<a href="show_bug.cgi?id=282">282</a>
</td>
<td><nobr>nor</nobr>
</td>
<td><nobr>P2</nobr>
</td>
<td><nobr>Neo</nobr>
</td>
<td><nobr>buglog@lists.openmoko.org</nobr>
</td>
<td><nobr>NEW</nobr>
</td>
<td><nobr></nobr>
</td>
<td>microSD Problem
</td>
</tr>
</table>
282 bugs found.
<br>
<form method="post" action="long_list.cgi">
<input type="hidden" name="buglist" value="1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58,59,60,61,62,63,64,65,66,67,68,69,70,71,72,73,74,75,76,77,78,79,80,81,82,83,84,85,86,87,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105,106,107,108,109,110,111,112,113,114,115,116,117,118,119,120,121,122,123,124,125,126,127,128,129,130,131,132,133,134,135,136,137,138,139,140,141,142,143,144,145,146,147,148,149,150,151,152,153,154,155,156,157,158,159,160,161,162,163,164,165,166,167,168,169,170,171,172,173,174,175,176,177,178,179,180,181,182,183,184,185,186,187,188,189,190,191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206,207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222,223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238,239,240,241,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,261,262,263,264,265,266,267,268,269,270,271,272,273,274,275,276,277,278,279,280,281,282">
<input type="submit" value="Long Format">
<a href="query.cgi">Query Page</a>
<a href="enter_bug.cgi">Enter New Bug</a>
<a href="colchange.cgi?short_desc_type=allwordssubstr&short_desc=&long_desc_type=allwordssubstr&long_desc=&bug_file_loc_type=allwordssubstr&bug_file_loc=&bug_status=UNCONFIRMED&bug_status=NEW&bug_status=ASSIGNED&bug_status=REOPENED&bug_status=RESOLVED&bug_status=VERIFIED&bug_status=CLOSED&emailassigned_to1=1&emailtype1=substring&email1=&emailassigned_to2=1&emailreporter2=1&emailcc2=1&emailtype2=substring&email2=&bugidtype=include&bug_id=&votes=&changedin=&chfieldfrom=&chfieldto=Now&chfieldvalue=&field0-0-0=noop&type0-0-0=noop&value0-0-0=">Change Columns</a>
<a href="query.cgi?short_desc_type=allwordssubstr&short_desc=&long_desc_type=allwordssubstr&long_desc=&bug_file_loc_type=allwordssubstr&bug_file_loc=&bug_status=UNCONFIRMED&bug_status=NEW&bug_status=ASSIGNED&bug_status=REOPENED&bug_status=RESOLVED&bug_status=VERIFIED&bug_status=CLOSED&emailassigned_to1=1&emailtype1=substring&email1=&emailassigned_to2=1&emailreporter2=1&emailcc2=1&emailtype2=substring&email2=&bugidtype=include&bug_id=&votes=&changedin=&chfieldfrom=&chfieldto=Now&chfieldvalue=&field0-0-0=noop&type0-0-0=noop&value0-0-0=">Edit this Query</a>
</form>
<!-- 1.0@bugzilla.org -->
</div>
<div class="footer">
<div class="group">This is <b>Bugzilla</b>: the Mozilla bug system. For more information about what Bugzilla is and what it can do, see <a href="http://www.bugzilla.org/">bugzilla.org</a>.</div>
<!-- 1.0@bugzilla.org -->
<form method="get" action="show_bug.cgi">
<div class="group">
<a href="enter_bug.cgi">New</a> | <a href="query.cgi">Query</a> | <input type="submit" value="Find"> bug # <input name="id" size="6"> | <a href="reports.cgi">Reports</a>
</div>
<div>
<a href="createaccount.cgi">New Account</a> | <a href="query.cgi?GoAheadAndLogIn=1">Log In</a>
</div>
</form>
</div>
</body>
</html>
"""
# Feed the captured Bugzilla query page through the extractor and report
# every (bug number, summary) pair it recovered, plus the total count.
bugfinder = BugQueryExtractor()
bugfinder.feed(bugs_openmoko)
print(bugfinder.result())
print(len(bugfinder.result()))
# Sanity check: the page footer says "282 bugs found.", so every ID in
# 1..282 should have been extracted.  A set of the seen numbers replaces
# the old dict-with-dummy-"Yes"-values, and membership testing replaces
# dict.has_key (removed in Python 3).  Note result() yields the numbers
# as strings, hence the str(i) conversion.
seen_numbers = set(number for (number, _) in bugfinder.result())
for i in range(1, 283):
    if str(i) not in seen_numbers:
        print("Not seen %d" % i)
| 16.970048
| 1,068
| 0.534723
| 21,123
| 140,512
| 3.478341
| 0.064385
| 0.107468
| 0.184231
| 0.230289
| 0.831435
| 0.828032
| 0.820451
| 0.816599
| 0.814626
| 0.809998
| 0
| 0.035221
| 0.245509
| 140,512
| 8,279
| 1,069
| 16.972098
| 0.657822
| 0.002612
| 0
| 0.87957
| 0
| 0.007168
| 0.983559
| 0.347375
| 0
| 0
| 0.000071
| 0
| 0
| 0
| null | null | 0.000538
| 0.000358
| null | null | 0.001971
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 12
|
3b069538dfe981f0fa7968f67f5bda04646284dd
| 152,330
|
py
|
Python
|
google/ads/google_ads/v4/types.py
|
arammaliachi/google-ads-python
|
a4fe89567bd43eb784410523a6306b5d1dd9ee67
|
[
"Apache-2.0"
] | 1
|
2021-04-09T04:28:47.000Z
|
2021-04-09T04:28:47.000Z
|
google/ads/google_ads/v4/types.py
|
arammaliachi/google-ads-python
|
a4fe89567bd43eb784410523a6306b5d1dd9ee67
|
[
"Apache-2.0"
] | null | null | null |
google/ads/google_ads/v4/types.py
|
arammaliachi/google-ads-python
|
a4fe89567bd43eb784410523a6306b5d1dd9ee67
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
#
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import importlib
import sys
from itertools import chain
from google.api_core.protobuf_helpers import get_messages
from google.ads.google_ads import util
# Fail fast on unsupported interpreters: the lazy-import machinery in this
# module requires Python 3.6+. Comparing the (major, minor) prefix is
# equivalent to comparing the full version_info tuple against (3, 6).
if sys.version_info[:2] < (3, 6):
    raise ImportError("This module requires Python 3.6 or later.")
_lazy_name_to_package_map = {
'ad_asset_pb2':"google.ads.google_ads.v4.proto.common",
'ad_type_infos_pb2':"google.ads.google_ads.v4.proto.common",
'asset_types_pb2':"google.ads.google_ads.v4.proto.common",
'bidding_pb2':"google.ads.google_ads.v4.proto.common",
'click_location_pb2':"google.ads.google_ads.v4.proto.common",
'criteria_pb2':"google.ads.google_ads.v4.proto.common",
'criterion_category_availability_pb2':"google.ads.google_ads.v4.proto.common",
'custom_parameter_pb2':"google.ads.google_ads.v4.proto.common",
'dates_pb2':"google.ads.google_ads.v4.proto.common",
'explorer_auto_optimizer_setting_pb2':"google.ads.google_ads.v4.proto.common",
'extensions_pb2':"google.ads.google_ads.v4.proto.common",
'feed_common_pb2':"google.ads.google_ads.v4.proto.common",
'final_app_url_pb2':"google.ads.google_ads.v4.proto.common",
'frequency_cap_pb2':"google.ads.google_ads.v4.proto.common",
'keyword_plan_common_pb2':"google.ads.google_ads.v4.proto.common",
'matching_function_pb2':"google.ads.google_ads.v4.proto.common",
'metrics_pb2':"google.ads.google_ads.v4.proto.common",
'offline_user_data_pb2':"google.ads.google_ads.v4.proto.common",
'policy_pb2':"google.ads.google_ads.v4.proto.common",
'real_time_bidding_setting_pb2':"google.ads.google_ads.v4.proto.common",
'segments_pb2':"google.ads.google_ads.v4.proto.common",
'simulation_pb2':"google.ads.google_ads.v4.proto.common",
'tag_snippet_pb2':"google.ads.google_ads.v4.proto.common",
'targeting_setting_pb2':"google.ads.google_ads.v4.proto.common",
'text_label_pb2':"google.ads.google_ads.v4.proto.common",
'url_collection_pb2':"google.ads.google_ads.v4.proto.common",
'user_lists_pb2':"google.ads.google_ads.v4.proto.common",
'value_pb2':"google.ads.google_ads.v4.proto.common",
'access_reason_pb2':"google.ads.google_ads.v4.proto.enums",
'access_role_pb2':"google.ads.google_ads.v4.proto.enums",
'account_budget_proposal_status_pb2':"google.ads.google_ads.v4.proto.enums",
'account_budget_proposal_type_pb2':"google.ads.google_ads.v4.proto.enums",
'account_budget_status_pb2':"google.ads.google_ads.v4.proto.enums",
'account_link_status_pb2':"google.ads.google_ads.v4.proto.enums",
'ad_customizer_placeholder_field_pb2':"google.ads.google_ads.v4.proto.enums",
'ad_group_ad_rotation_mode_pb2':"google.ads.google_ads.v4.proto.enums",
'ad_group_ad_status_pb2':"google.ads.google_ads.v4.proto.enums",
'ad_group_criterion_approval_status_pb2':"google.ads.google_ads.v4.proto.enums",
'ad_group_criterion_status_pb2':"google.ads.google_ads.v4.proto.enums",
'ad_group_status_pb2':"google.ads.google_ads.v4.proto.enums",
'ad_group_type_pb2':"google.ads.google_ads.v4.proto.enums",
'ad_network_type_pb2':"google.ads.google_ads.v4.proto.enums",
'ad_serving_optimization_status_pb2':"google.ads.google_ads.v4.proto.enums",
'ad_strength_pb2':"google.ads.google_ads.v4.proto.enums",
'ad_type_pb2':"google.ads.google_ads.v4.proto.enums",
'advertising_channel_sub_type_pb2':"google.ads.google_ads.v4.proto.enums",
'advertising_channel_type_pb2':"google.ads.google_ads.v4.proto.enums",
'affiliate_location_feed_relationship_type_pb2':"google.ads.google_ads.v4.proto.enums",
'affiliate_location_placeholder_field_pb2':"google.ads.google_ads.v4.proto.enums",
'age_range_type_pb2':"google.ads.google_ads.v4.proto.enums",
'app_campaign_app_store_pb2':"google.ads.google_ads.v4.proto.enums",
'app_campaign_bidding_strategy_goal_type_pb2':"google.ads.google_ads.v4.proto.enums",
'app_payment_model_type_pb2':"google.ads.google_ads.v4.proto.enums",
'app_placeholder_field_pb2':"google.ads.google_ads.v4.proto.enums",
'app_store_pb2':"google.ads.google_ads.v4.proto.enums",
'app_url_operating_system_type_pb2':"google.ads.google_ads.v4.proto.enums",
'asset_field_type_pb2':"google.ads.google_ads.v4.proto.enums",
'asset_performance_label_pb2':"google.ads.google_ads.v4.proto.enums",
'asset_type_pb2':"google.ads.google_ads.v4.proto.enums",
'attribution_model_pb2':"google.ads.google_ads.v4.proto.enums",
'batch_job_status_pb2':"google.ads.google_ads.v4.proto.enums",
'bid_modifier_source_pb2':"google.ads.google_ads.v4.proto.enums",
'bidding_source_pb2':"google.ads.google_ads.v4.proto.enums",
'bidding_strategy_status_pb2':"google.ads.google_ads.v4.proto.enums",
'bidding_strategy_type_pb2':"google.ads.google_ads.v4.proto.enums",
'billing_setup_status_pb2':"google.ads.google_ads.v4.proto.enums",
'brand_safety_suitability_pb2':"google.ads.google_ads.v4.proto.enums",
'budget_delivery_method_pb2':"google.ads.google_ads.v4.proto.enums",
'budget_period_pb2':"google.ads.google_ads.v4.proto.enums",
'budget_status_pb2':"google.ads.google_ads.v4.proto.enums",
'budget_type_pb2':"google.ads.google_ads.v4.proto.enums",
'call_conversion_reporting_state_pb2':"google.ads.google_ads.v4.proto.enums",
'call_placeholder_field_pb2':"google.ads.google_ads.v4.proto.enums",
'callout_placeholder_field_pb2':"google.ads.google_ads.v4.proto.enums",
'campaign_criterion_status_pb2':"google.ads.google_ads.v4.proto.enums",
'campaign_draft_status_pb2':"google.ads.google_ads.v4.proto.enums",
'campaign_experiment_status_pb2':"google.ads.google_ads.v4.proto.enums",
'campaign_experiment_traffic_split_type_pb2':"google.ads.google_ads.v4.proto.enums",
'campaign_experiment_type_pb2':"google.ads.google_ads.v4.proto.enums",
'campaign_serving_status_pb2':"google.ads.google_ads.v4.proto.enums",
'campaign_shared_set_status_pb2':"google.ads.google_ads.v4.proto.enums",
'campaign_status_pb2':"google.ads.google_ads.v4.proto.enums",
'change_status_operation_pb2':"google.ads.google_ads.v4.proto.enums",
'change_status_resource_type_pb2':"google.ads.google_ads.v4.proto.enums",
'click_type_pb2':"google.ads.google_ads.v4.proto.enums",
'content_label_type_pb2':"google.ads.google_ads.v4.proto.enums",
'conversion_action_category_pb2':"google.ads.google_ads.v4.proto.enums",
'conversion_action_counting_type_pb2':"google.ads.google_ads.v4.proto.enums",
'conversion_action_status_pb2':"google.ads.google_ads.v4.proto.enums",
'conversion_action_type_pb2':"google.ads.google_ads.v4.proto.enums",
'conversion_adjustment_type_pb2':"google.ads.google_ads.v4.proto.enums",
'conversion_attribution_event_type_pb2':"google.ads.google_ads.v4.proto.enums",
'conversion_lag_bucket_pb2':"google.ads.google_ads.v4.proto.enums",
'conversion_or_adjustment_lag_bucket_pb2':"google.ads.google_ads.v4.proto.enums",
'criterion_category_channel_availability_mode_pb2':"google.ads.google_ads.v4.proto.enums",
'criterion_category_locale_availability_mode_pb2':"google.ads.google_ads.v4.proto.enums",
'criterion_system_serving_status_pb2':"google.ads.google_ads.v4.proto.enums",
'criterion_type_pb2':"google.ads.google_ads.v4.proto.enums",
'custom_interest_member_type_pb2':"google.ads.google_ads.v4.proto.enums",
'custom_interest_status_pb2':"google.ads.google_ads.v4.proto.enums",
'custom_interest_type_pb2':"google.ads.google_ads.v4.proto.enums",
'custom_placeholder_field_pb2':"google.ads.google_ads.v4.proto.enums",
'customer_match_upload_key_type_pb2':"google.ads.google_ads.v4.proto.enums",
'customer_pay_per_conversion_eligibility_failure_reason_pb2':"google.ads.google_ads.v4.proto.enums",
'data_driven_model_status_pb2':"google.ads.google_ads.v4.proto.enums",
'day_of_week_pb2':"google.ads.google_ads.v4.proto.enums",
'device_pb2':"google.ads.google_ads.v4.proto.enums",
'display_ad_format_setting_pb2':"google.ads.google_ads.v4.proto.enums",
'display_upload_product_type_pb2':"google.ads.google_ads.v4.proto.enums",
'distance_bucket_pb2':"google.ads.google_ads.v4.proto.enums",
'dsa_page_feed_criterion_field_pb2':"google.ads.google_ads.v4.proto.enums",
'education_placeholder_field_pb2':"google.ads.google_ads.v4.proto.enums",
'extension_setting_device_pb2':"google.ads.google_ads.v4.proto.enums",
'extension_type_pb2':"google.ads.google_ads.v4.proto.enums",
'external_conversion_source_pb2':"google.ads.google_ads.v4.proto.enums",
'feed_attribute_type_pb2':"google.ads.google_ads.v4.proto.enums",
'feed_item_quality_approval_status_pb2':"google.ads.google_ads.v4.proto.enums",
'feed_item_quality_disapproval_reason_pb2':"google.ads.google_ads.v4.proto.enums",
'feed_item_status_pb2':"google.ads.google_ads.v4.proto.enums",
'feed_item_target_device_pb2':"google.ads.google_ads.v4.proto.enums",
'feed_item_target_status_pb2':"google.ads.google_ads.v4.proto.enums",
'feed_item_target_type_pb2':"google.ads.google_ads.v4.proto.enums",
'feed_item_validation_status_pb2':"google.ads.google_ads.v4.proto.enums",
'feed_link_status_pb2':"google.ads.google_ads.v4.proto.enums",
'feed_mapping_criterion_type_pb2':"google.ads.google_ads.v4.proto.enums",
'feed_mapping_status_pb2':"google.ads.google_ads.v4.proto.enums",
'feed_origin_pb2':"google.ads.google_ads.v4.proto.enums",
'feed_status_pb2':"google.ads.google_ads.v4.proto.enums",
'flight_placeholder_field_pb2':"google.ads.google_ads.v4.proto.enums",
'frequency_cap_event_type_pb2':"google.ads.google_ads.v4.proto.enums",
'frequency_cap_level_pb2':"google.ads.google_ads.v4.proto.enums",
'frequency_cap_time_unit_pb2':"google.ads.google_ads.v4.proto.enums",
'gender_type_pb2':"google.ads.google_ads.v4.proto.enums",
'geo_target_constant_status_pb2':"google.ads.google_ads.v4.proto.enums",
'geo_targeting_restriction_pb2':"google.ads.google_ads.v4.proto.enums",
'geo_targeting_type_pb2':"google.ads.google_ads.v4.proto.enums",
'google_ads_field_category_pb2':"google.ads.google_ads.v4.proto.enums",
'google_ads_field_data_type_pb2':"google.ads.google_ads.v4.proto.enums",
'hotel_date_selection_type_pb2':"google.ads.google_ads.v4.proto.enums",
'hotel_placeholder_field_pb2':"google.ads.google_ads.v4.proto.enums",
'hotel_price_bucket_pb2':"google.ads.google_ads.v4.proto.enums",
'hotel_rate_type_pb2':"google.ads.google_ads.v4.proto.enums",
'income_range_type_pb2':"google.ads.google_ads.v4.proto.enums",
'interaction_event_type_pb2':"google.ads.google_ads.v4.proto.enums",
'interaction_type_pb2':"google.ads.google_ads.v4.proto.enums",
'invoice_type_pb2':"google.ads.google_ads.v4.proto.enums",
'job_placeholder_field_pb2':"google.ads.google_ads.v4.proto.enums",
'keyword_match_type_pb2':"google.ads.google_ads.v4.proto.enums",
'keyword_plan_competition_level_pb2':"google.ads.google_ads.v4.proto.enums",
'keyword_plan_forecast_interval_pb2':"google.ads.google_ads.v4.proto.enums",
'keyword_plan_network_pb2':"google.ads.google_ads.v4.proto.enums",
'label_status_pb2':"google.ads.google_ads.v4.proto.enums",
'legacy_app_install_ad_app_store_pb2':"google.ads.google_ads.v4.proto.enums",
'linked_account_type_pb2':"google.ads.google_ads.v4.proto.enums",
'listing_group_type_pb2':"google.ads.google_ads.v4.proto.enums",
'local_placeholder_field_pb2':"google.ads.google_ads.v4.proto.enums",
'location_extension_targeting_criterion_field_pb2':"google.ads.google_ads.v4.proto.enums",
'location_group_radius_units_pb2':"google.ads.google_ads.v4.proto.enums",
'location_placeholder_field_pb2':"google.ads.google_ads.v4.proto.enums",
'location_source_type_pb2':"google.ads.google_ads.v4.proto.enums",
'manager_link_status_pb2':"google.ads.google_ads.v4.proto.enums",
'matching_function_context_type_pb2':"google.ads.google_ads.v4.proto.enums",
'matching_function_operator_pb2':"google.ads.google_ads.v4.proto.enums",
'media_type_pb2':"google.ads.google_ads.v4.proto.enums",
'merchant_center_link_status_pb2':"google.ads.google_ads.v4.proto.enums",
'message_placeholder_field_pb2':"google.ads.google_ads.v4.proto.enums",
'mime_type_pb2':"google.ads.google_ads.v4.proto.enums",
'minute_of_hour_pb2':"google.ads.google_ads.v4.proto.enums",
'mobile_app_vendor_pb2':"google.ads.google_ads.v4.proto.enums",
'mobile_device_type_pb2':"google.ads.google_ads.v4.proto.enums",
'month_of_year_pb2':"google.ads.google_ads.v4.proto.enums",
'negative_geo_target_type_pb2':"google.ads.google_ads.v4.proto.enums",
'offline_user_data_job_failure_reason_pb2':"google.ads.google_ads.v4.proto.enums",
'offline_user_data_job_status_pb2':"google.ads.google_ads.v4.proto.enums",
'offline_user_data_job_type_pb2':"google.ads.google_ads.v4.proto.enums",
'operating_system_version_operator_type_pb2':"google.ads.google_ads.v4.proto.enums",
'optimization_goal_type_pb2':"google.ads.google_ads.v4.proto.enums",
'page_one_promoted_strategy_goal_pb2':"google.ads.google_ads.v4.proto.enums",
'parental_status_type_pb2':"google.ads.google_ads.v4.proto.enums",
'payment_mode_pb2':"google.ads.google_ads.v4.proto.enums",
'placeholder_type_pb2':"google.ads.google_ads.v4.proto.enums",
'placement_type_pb2':"google.ads.google_ads.v4.proto.enums",
'policy_approval_status_pb2':"google.ads.google_ads.v4.proto.enums",
'policy_review_status_pb2':"google.ads.google_ads.v4.proto.enums",
'policy_topic_entry_type_pb2':"google.ads.google_ads.v4.proto.enums",
'policy_topic_evidence_destination_mismatch_url_type_pb2':"google.ads.google_ads.v4.proto.enums",
'policy_topic_evidence_destination_not_working_device_pb2':"google.ads.google_ads.v4.proto.enums",
'policy_topic_evidence_destination_not_working_dns_error_type_pb2':"google.ads.google_ads.v4.proto.enums",
'positive_geo_target_type_pb2':"google.ads.google_ads.v4.proto.enums",
'preferred_content_type_pb2':"google.ads.google_ads.v4.proto.enums",
'price_extension_price_qualifier_pb2':"google.ads.google_ads.v4.proto.enums",
'price_extension_price_unit_pb2':"google.ads.google_ads.v4.proto.enums",
'price_extension_type_pb2':"google.ads.google_ads.v4.proto.enums",
'price_placeholder_field_pb2':"google.ads.google_ads.v4.proto.enums",
'product_bidding_category_level_pb2':"google.ads.google_ads.v4.proto.enums",
'product_bidding_category_status_pb2':"google.ads.google_ads.v4.proto.enums",
'product_channel_exclusivity_pb2':"google.ads.google_ads.v4.proto.enums",
'product_channel_pb2':"google.ads.google_ads.v4.proto.enums",
'product_condition_pb2':"google.ads.google_ads.v4.proto.enums",
'product_custom_attribute_index_pb2':"google.ads.google_ads.v4.proto.enums",
'product_type_level_pb2':"google.ads.google_ads.v4.proto.enums",
'promotion_extension_discount_modifier_pb2':"google.ads.google_ads.v4.proto.enums",
'promotion_extension_occasion_pb2':"google.ads.google_ads.v4.proto.enums",
'promotion_placeholder_field_pb2':"google.ads.google_ads.v4.proto.enums",
'proximity_radius_units_pb2':"google.ads.google_ads.v4.proto.enums",
'quality_score_bucket_pb2':"google.ads.google_ads.v4.proto.enums",
'reach_plan_ad_length_pb2':"google.ads.google_ads.v4.proto.enums",
'reach_plan_age_range_pb2':"google.ads.google_ads.v4.proto.enums",
'reach_plan_network_pb2':"google.ads.google_ads.v4.proto.enums",
'real_estate_placeholder_field_pb2':"google.ads.google_ads.v4.proto.enums",
'recommendation_type_pb2':"google.ads.google_ads.v4.proto.enums",
'search_engine_results_page_type_pb2':"google.ads.google_ads.v4.proto.enums",
'search_term_match_type_pb2':"google.ads.google_ads.v4.proto.enums",
'search_term_targeting_status_pb2':"google.ads.google_ads.v4.proto.enums",
'served_asset_field_type_pb2':"google.ads.google_ads.v4.proto.enums",
'shared_set_status_pb2':"google.ads.google_ads.v4.proto.enums",
'shared_set_type_pb2':"google.ads.google_ads.v4.proto.enums",
'simulation_modification_method_pb2':"google.ads.google_ads.v4.proto.enums",
'simulation_type_pb2':"google.ads.google_ads.v4.proto.enums",
'sitelink_placeholder_field_pb2':"google.ads.google_ads.v4.proto.enums",
'slot_pb2':"google.ads.google_ads.v4.proto.enums",
'spending_limit_type_pb2':"google.ads.google_ads.v4.proto.enums",
'structured_snippet_placeholder_field_pb2':"google.ads.google_ads.v4.proto.enums",
'summary_row_setting_pb2':"google.ads.google_ads.v4.proto.enums",
'system_managed_entity_source_pb2':"google.ads.google_ads.v4.proto.enums",
'target_cpa_opt_in_recommendation_goal_pb2':"google.ads.google_ads.v4.proto.enums",
'target_impression_share_location_pb2':"google.ads.google_ads.v4.proto.enums",
'targeting_dimension_pb2':"google.ads.google_ads.v4.proto.enums",
'time_type_pb2':"google.ads.google_ads.v4.proto.enums",
'tracking_code_page_format_pb2':"google.ads.google_ads.v4.proto.enums",
'tracking_code_type_pb2':"google.ads.google_ads.v4.proto.enums",
'travel_placeholder_field_pb2':"google.ads.google_ads.v4.proto.enums",
'user_interest_taxonomy_type_pb2':"google.ads.google_ads.v4.proto.enums",
'user_list_access_status_pb2':"google.ads.google_ads.v4.proto.enums",
'user_list_closing_reason_pb2':"google.ads.google_ads.v4.proto.enums",
'user_list_combined_rule_operator_pb2':"google.ads.google_ads.v4.proto.enums",
'user_list_crm_data_source_type_pb2':"google.ads.google_ads.v4.proto.enums",
'user_list_date_rule_item_operator_pb2':"google.ads.google_ads.v4.proto.enums",
'user_list_logical_rule_operator_pb2':"google.ads.google_ads.v4.proto.enums",
'user_list_membership_status_pb2':"google.ads.google_ads.v4.proto.enums",
'user_list_number_rule_item_operator_pb2':"google.ads.google_ads.v4.proto.enums",
'user_list_prepopulation_status_pb2':"google.ads.google_ads.v4.proto.enums",
'user_list_rule_type_pb2':"google.ads.google_ads.v4.proto.enums",
'user_list_size_range_pb2':"google.ads.google_ads.v4.proto.enums",
'user_list_string_rule_item_operator_pb2':"google.ads.google_ads.v4.proto.enums",
'user_list_type_pb2':"google.ads.google_ads.v4.proto.enums",
'vanity_pharma_display_url_mode_pb2':"google.ads.google_ads.v4.proto.enums",
'vanity_pharma_text_pb2':"google.ads.google_ads.v4.proto.enums",
'webpage_condition_operand_pb2':"google.ads.google_ads.v4.proto.enums",
'webpage_condition_operator_pb2':"google.ads.google_ads.v4.proto.enums",
'access_invitation_error_pb2':"google.ads.google_ads.v4.proto.errors",
'account_budget_proposal_error_pb2':"google.ads.google_ads.v4.proto.errors",
'account_link_error_pb2':"google.ads.google_ads.v4.proto.errors",
'ad_customizer_error_pb2':"google.ads.google_ads.v4.proto.errors",
'ad_error_pb2':"google.ads.google_ads.v4.proto.errors",
'ad_group_ad_error_pb2':"google.ads.google_ads.v4.proto.errors",
'ad_group_bid_modifier_error_pb2':"google.ads.google_ads.v4.proto.errors",
'ad_group_criterion_error_pb2':"google.ads.google_ads.v4.proto.errors",
'ad_group_error_pb2':"google.ads.google_ads.v4.proto.errors",
'ad_group_feed_error_pb2':"google.ads.google_ads.v4.proto.errors",
'ad_parameter_error_pb2':"google.ads.google_ads.v4.proto.errors",
'ad_sharing_error_pb2':"google.ads.google_ads.v4.proto.errors",
'adx_error_pb2':"google.ads.google_ads.v4.proto.errors",
'asset_error_pb2':"google.ads.google_ads.v4.proto.errors",
'asset_link_error_pb2':"google.ads.google_ads.v4.proto.errors",
'authentication_error_pb2':"google.ads.google_ads.v4.proto.errors",
'authorization_error_pb2':"google.ads.google_ads.v4.proto.errors",
'batch_job_error_pb2':"google.ads.google_ads.v4.proto.errors",
'bidding_error_pb2':"google.ads.google_ads.v4.proto.errors",
'bidding_strategy_error_pb2':"google.ads.google_ads.v4.proto.errors",
'billing_setup_error_pb2':"google.ads.google_ads.v4.proto.errors",
'campaign_budget_error_pb2':"google.ads.google_ads.v4.proto.errors",
'campaign_criterion_error_pb2':"google.ads.google_ads.v4.proto.errors",
'campaign_draft_error_pb2':"google.ads.google_ads.v4.proto.errors",
'campaign_error_pb2':"google.ads.google_ads.v4.proto.errors",
'campaign_experiment_error_pb2':"google.ads.google_ads.v4.proto.errors",
'campaign_feed_error_pb2':"google.ads.google_ads.v4.proto.errors",
'campaign_shared_set_error_pb2':"google.ads.google_ads.v4.proto.errors",
'change_status_error_pb2':"google.ads.google_ads.v4.proto.errors",
'collection_size_error_pb2':"google.ads.google_ads.v4.proto.errors",
'context_error_pb2':"google.ads.google_ads.v4.proto.errors",
'conversion_action_error_pb2':"google.ads.google_ads.v4.proto.errors",
'conversion_adjustment_upload_error_pb2':"google.ads.google_ads.v4.proto.errors",
'conversion_upload_error_pb2':"google.ads.google_ads.v4.proto.errors",
'country_code_error_pb2':"google.ads.google_ads.v4.proto.errors",
'criterion_error_pb2':"google.ads.google_ads.v4.proto.errors",
'currency_code_error_pb2':"google.ads.google_ads.v4.proto.errors",
'custom_interest_error_pb2':"google.ads.google_ads.v4.proto.errors",
'customer_client_link_error_pb2':"google.ads.google_ads.v4.proto.errors",
'customer_error_pb2':"google.ads.google_ads.v4.proto.errors",
'customer_feed_error_pb2':"google.ads.google_ads.v4.proto.errors",
'customer_manager_link_error_pb2':"google.ads.google_ads.v4.proto.errors",
'database_error_pb2':"google.ads.google_ads.v4.proto.errors",
'date_error_pb2':"google.ads.google_ads.v4.proto.errors",
'date_range_error_pb2':"google.ads.google_ads.v4.proto.errors",
'distinct_error_pb2':"google.ads.google_ads.v4.proto.errors",
'enum_error_pb2':"google.ads.google_ads.v4.proto.errors",
'errors_pb2':"google.ads.google_ads.v4.proto.errors",
'extension_feed_item_error_pb2':"google.ads.google_ads.v4.proto.errors",
'extension_setting_error_pb2':"google.ads.google_ads.v4.proto.errors",
'feed_attribute_reference_error_pb2':"google.ads.google_ads.v4.proto.errors",
'feed_error_pb2':"google.ads.google_ads.v4.proto.errors",
'feed_item_error_pb2':"google.ads.google_ads.v4.proto.errors",
'feed_item_target_error_pb2':"google.ads.google_ads.v4.proto.errors",
'feed_item_validation_error_pb2':"google.ads.google_ads.v4.proto.errors",
'feed_mapping_error_pb2':"google.ads.google_ads.v4.proto.errors",
'field_error_pb2':"google.ads.google_ads.v4.proto.errors",
'field_mask_error_pb2':"google.ads.google_ads.v4.proto.errors",
'function_error_pb2':"google.ads.google_ads.v4.proto.errors",
'function_parsing_error_pb2':"google.ads.google_ads.v4.proto.errors",
'geo_target_constant_suggestion_error_pb2':"google.ads.google_ads.v4.proto.errors",
'header_error_pb2':"google.ads.google_ads.v4.proto.errors",
'id_error_pb2':"google.ads.google_ads.v4.proto.errors",
'image_error_pb2':"google.ads.google_ads.v4.proto.errors",
'internal_error_pb2':"google.ads.google_ads.v4.proto.errors",
'invoice_error_pb2':"google.ads.google_ads.v4.proto.errors",
'keyword_plan_ad_group_error_pb2':"google.ads.google_ads.v4.proto.errors",
'keyword_plan_ad_group_keyword_error_pb2':"google.ads.google_ads.v4.proto.errors",
'keyword_plan_campaign_error_pb2':"google.ads.google_ads.v4.proto.errors",
'keyword_plan_campaign_keyword_error_pb2':"google.ads.google_ads.v4.proto.errors",
'keyword_plan_error_pb2':"google.ads.google_ads.v4.proto.errors",
'keyword_plan_idea_error_pb2':"google.ads.google_ads.v4.proto.errors",
'label_error_pb2':"google.ads.google_ads.v4.proto.errors",
'language_code_error_pb2':"google.ads.google_ads.v4.proto.errors",
'list_operation_error_pb2':"google.ads.google_ads.v4.proto.errors",
'manager_link_error_pb2':"google.ads.google_ads.v4.proto.errors",
'media_bundle_error_pb2':"google.ads.google_ads.v4.proto.errors",
'media_file_error_pb2':"google.ads.google_ads.v4.proto.errors",
'media_upload_error_pb2':"google.ads.google_ads.v4.proto.errors",
'multiplier_error_pb2':"google.ads.google_ads.v4.proto.errors",
'mutate_error_pb2':"google.ads.google_ads.v4.proto.errors",
'new_resource_creation_error_pb2':"google.ads.google_ads.v4.proto.errors",
'not_empty_error_pb2':"google.ads.google_ads.v4.proto.errors",
'not_whitelisted_error_pb2':"google.ads.google_ads.v4.proto.errors",
'null_error_pb2':"google.ads.google_ads.v4.proto.errors",
'offline_user_data_job_error_pb2':"google.ads.google_ads.v4.proto.errors",
'operation_access_denied_error_pb2':"google.ads.google_ads.v4.proto.errors",
'operator_error_pb2':"google.ads.google_ads.v4.proto.errors",
'partial_failure_error_pb2':"google.ads.google_ads.v4.proto.errors",
'payments_account_error_pb2':"google.ads.google_ads.v4.proto.errors",
'policy_finding_error_pb2':"google.ads.google_ads.v4.proto.errors",
'policy_validation_parameter_error_pb2':"google.ads.google_ads.v4.proto.errors",
'policy_violation_error_pb2':"google.ads.google_ads.v4.proto.errors",
'query_error_pb2':"google.ads.google_ads.v4.proto.errors",
'quota_error_pb2':"google.ads.google_ads.v4.proto.errors",
'range_error_pb2':"google.ads.google_ads.v4.proto.errors",
'reach_plan_error_pb2':"google.ads.google_ads.v4.proto.errors",
'recommendation_error_pb2':"google.ads.google_ads.v4.proto.errors",
'region_code_error_pb2':"google.ads.google_ads.v4.proto.errors",
'request_error_pb2':"google.ads.google_ads.v4.proto.errors",
'resource_access_denied_error_pb2':"google.ads.google_ads.v4.proto.errors",
'resource_count_limit_exceeded_error_pb2':"google.ads.google_ads.v4.proto.errors",
'setting_error_pb2':"google.ads.google_ads.v4.proto.errors",
'shared_criterion_error_pb2':"google.ads.google_ads.v4.proto.errors",
'shared_set_error_pb2':"google.ads.google_ads.v4.proto.errors",
'size_limit_error_pb2':"google.ads.google_ads.v4.proto.errors",
'string_format_error_pb2':"google.ads.google_ads.v4.proto.errors",
'string_length_error_pb2':"google.ads.google_ads.v4.proto.errors",
'third_party_app_analytics_link_error_pb2':"google.ads.google_ads.v4.proto.errors",
'time_zone_error_pb2':"google.ads.google_ads.v4.proto.errors",
'url_field_error_pb2':"google.ads.google_ads.v4.proto.errors",
'user_data_error_pb2':"google.ads.google_ads.v4.proto.errors",
'user_list_error_pb2':"google.ads.google_ads.v4.proto.errors",
'youtube_video_registration_error_pb2':"google.ads.google_ads.v4.proto.errors",
'account_budget_pb2':"google.ads.google_ads.v4.proto.resources",
'account_budget_proposal_pb2':"google.ads.google_ads.v4.proto.resources",
'account_link_pb2':"google.ads.google_ads.v4.proto.resources",
'ad_group_ad_asset_view_pb2':"google.ads.google_ads.v4.proto.resources",
'ad_group_ad_label_pb2':"google.ads.google_ads.v4.proto.resources",
'ad_group_ad_pb2':"google.ads.google_ads.v4.proto.resources",
'ad_group_audience_view_pb2':"google.ads.google_ads.v4.proto.resources",
'ad_group_bid_modifier_pb2':"google.ads.google_ads.v4.proto.resources",
'ad_group_criterion_label_pb2':"google.ads.google_ads.v4.proto.resources",
'ad_group_criterion_pb2':"google.ads.google_ads.v4.proto.resources",
'ad_group_criterion_simulation_pb2':"google.ads.google_ads.v4.proto.resources",
'ad_group_extension_setting_pb2':"google.ads.google_ads.v4.proto.resources",
'ad_group_feed_pb2':"google.ads.google_ads.v4.proto.resources",
'ad_group_label_pb2':"google.ads.google_ads.v4.proto.resources",
'ad_group_pb2':"google.ads.google_ads.v4.proto.resources",
'ad_group_simulation_pb2':"google.ads.google_ads.v4.proto.resources",
'ad_parameter_pb2':"google.ads.google_ads.v4.proto.resources",
'ad_pb2':"google.ads.google_ads.v4.proto.resources",
'ad_schedule_view_pb2':"google.ads.google_ads.v4.proto.resources",
'age_range_view_pb2':"google.ads.google_ads.v4.proto.resources",
'asset_pb2':"google.ads.google_ads.v4.proto.resources",
'batch_job_pb2':"google.ads.google_ads.v4.proto.resources",
'bidding_strategy_pb2':"google.ads.google_ads.v4.proto.resources",
'billing_setup_pb2':"google.ads.google_ads.v4.proto.resources",
'campaign_audience_view_pb2':"google.ads.google_ads.v4.proto.resources",
'campaign_bid_modifier_pb2':"google.ads.google_ads.v4.proto.resources",
'campaign_budget_pb2':"google.ads.google_ads.v4.proto.resources",
'campaign_criterion_pb2':"google.ads.google_ads.v4.proto.resources",
'campaign_criterion_simulation_pb2':"google.ads.google_ads.v4.proto.resources",
'campaign_draft_pb2':"google.ads.google_ads.v4.proto.resources",
'campaign_experiment_pb2':"google.ads.google_ads.v4.proto.resources",
'campaign_extension_setting_pb2':"google.ads.google_ads.v4.proto.resources",
'campaign_feed_pb2':"google.ads.google_ads.v4.proto.resources",
'campaign_label_pb2':"google.ads.google_ads.v4.proto.resources",
'campaign_pb2':"google.ads.google_ads.v4.proto.resources",
'campaign_shared_set_pb2':"google.ads.google_ads.v4.proto.resources",
'carrier_constant_pb2':"google.ads.google_ads.v4.proto.resources",
'change_status_pb2':"google.ads.google_ads.v4.proto.resources",
'click_view_pb2':"google.ads.google_ads.v4.proto.resources",
'conversion_action_pb2':"google.ads.google_ads.v4.proto.resources",
'currency_constant_pb2':"google.ads.google_ads.v4.proto.resources",
'custom_interest_pb2':"google.ads.google_ads.v4.proto.resources",
'customer_client_link_pb2':"google.ads.google_ads.v4.proto.resources",
'customer_client_pb2':"google.ads.google_ads.v4.proto.resources",
'customer_extension_setting_pb2':"google.ads.google_ads.v4.proto.resources",
'customer_feed_pb2':"google.ads.google_ads.v4.proto.resources",
'customer_label_pb2':"google.ads.google_ads.v4.proto.resources",
'customer_manager_link_pb2':"google.ads.google_ads.v4.proto.resources",
'customer_negative_criterion_pb2':"google.ads.google_ads.v4.proto.resources",
'customer_pb2':"google.ads.google_ads.v4.proto.resources",
'detail_placement_view_pb2':"google.ads.google_ads.v4.proto.resources",
'display_keyword_view_pb2':"google.ads.google_ads.v4.proto.resources",
'distance_view_pb2':"google.ads.google_ads.v4.proto.resources",
'domain_category_pb2':"google.ads.google_ads.v4.proto.resources",
'dynamic_search_ads_search_term_view_pb2':"google.ads.google_ads.v4.proto.resources",
'expanded_landing_page_view_pb2':"google.ads.google_ads.v4.proto.resources",
'extension_feed_item_pb2':"google.ads.google_ads.v4.proto.resources",
'feed_item_pb2':"google.ads.google_ads.v4.proto.resources",
'feed_item_target_pb2':"google.ads.google_ads.v4.proto.resources",
'feed_mapping_pb2':"google.ads.google_ads.v4.proto.resources",
'feed_pb2':"google.ads.google_ads.v4.proto.resources",
'feed_placeholder_view_pb2':"google.ads.google_ads.v4.proto.resources",
'gender_view_pb2':"google.ads.google_ads.v4.proto.resources",
'geo_target_constant_pb2':"google.ads.google_ads.v4.proto.resources",
'geographic_view_pb2':"google.ads.google_ads.v4.proto.resources",
'google_ads_field_pb2':"google.ads.google_ads.v4.proto.resources",
'group_placement_view_pb2':"google.ads.google_ads.v4.proto.resources",
'hotel_group_view_pb2':"google.ads.google_ads.v4.proto.resources",
'hotel_performance_view_pb2':"google.ads.google_ads.v4.proto.resources",
'income_range_view_pb2':"google.ads.google_ads.v4.proto.resources",
'invoice_pb2':"google.ads.google_ads.v4.proto.resources",
'keyword_plan_ad_group_keyword_pb2':"google.ads.google_ads.v4.proto.resources",
'keyword_plan_ad_group_pb2':"google.ads.google_ads.v4.proto.resources",
'keyword_plan_campaign_keyword_pb2':"google.ads.google_ads.v4.proto.resources",
'keyword_plan_campaign_pb2':"google.ads.google_ads.v4.proto.resources",
'keyword_plan_pb2':"google.ads.google_ads.v4.proto.resources",
'keyword_view_pb2':"google.ads.google_ads.v4.proto.resources",
'label_pb2':"google.ads.google_ads.v4.proto.resources",
'landing_page_view_pb2':"google.ads.google_ads.v4.proto.resources",
'language_constant_pb2':"google.ads.google_ads.v4.proto.resources",
'location_view_pb2':"google.ads.google_ads.v4.proto.resources",
'managed_placement_view_pb2':"google.ads.google_ads.v4.proto.resources",
'media_file_pb2':"google.ads.google_ads.v4.proto.resources",
'merchant_center_link_pb2':"google.ads.google_ads.v4.proto.resources",
'mobile_app_category_constant_pb2':"google.ads.google_ads.v4.proto.resources",
'mobile_device_constant_pb2':"google.ads.google_ads.v4.proto.resources",
'offline_user_data_job_pb2':"google.ads.google_ads.v4.proto.resources",
'operating_system_version_constant_pb2':"google.ads.google_ads.v4.proto.resources",
'paid_organic_search_term_view_pb2':"google.ads.google_ads.v4.proto.resources",
'parental_status_view_pb2':"google.ads.google_ads.v4.proto.resources",
'payments_account_pb2':"google.ads.google_ads.v4.proto.resources",
'product_bidding_category_constant_pb2':"google.ads.google_ads.v4.proto.resources",
'product_group_view_pb2':"google.ads.google_ads.v4.proto.resources",
'recommendation_pb2':"google.ads.google_ads.v4.proto.resources",
'remarketing_action_pb2':"google.ads.google_ads.v4.proto.resources",
'search_term_view_pb2':"google.ads.google_ads.v4.proto.resources",
'shared_criterion_pb2':"google.ads.google_ads.v4.proto.resources",
'shared_set_pb2':"google.ads.google_ads.v4.proto.resources",
'shopping_performance_view_pb2':"google.ads.google_ads.v4.proto.resources",
'third_party_app_analytics_link_pb2':"google.ads.google_ads.v4.proto.resources",
'topic_constant_pb2':"google.ads.google_ads.v4.proto.resources",
'topic_view_pb2':"google.ads.google_ads.v4.proto.resources",
'user_interest_pb2':"google.ads.google_ads.v4.proto.resources",
'user_list_pb2':"google.ads.google_ads.v4.proto.resources",
'user_location_view_pb2':"google.ads.google_ads.v4.proto.resources",
'video_pb2':"google.ads.google_ads.v4.proto.resources",
'account_budget_proposal_service_pb2':"google.ads.google_ads.v4.proto.services",
'account_budget_service_pb2':"google.ads.google_ads.v4.proto.services",
'account_link_service_pb2':"google.ads.google_ads.v4.proto.services",
'ad_group_ad_asset_view_service_pb2':"google.ads.google_ads.v4.proto.services",
'ad_group_ad_label_service_pb2':"google.ads.google_ads.v4.proto.services",
'ad_group_ad_service_pb2':"google.ads.google_ads.v4.proto.services",
'ad_group_audience_view_service_pb2':"google.ads.google_ads.v4.proto.services",
'ad_group_bid_modifier_service_pb2':"google.ads.google_ads.v4.proto.services",
'ad_group_criterion_label_service_pb2':"google.ads.google_ads.v4.proto.services",
'ad_group_criterion_service_pb2':"google.ads.google_ads.v4.proto.services",
'ad_group_criterion_simulation_service_pb2':"google.ads.google_ads.v4.proto.services",
'ad_group_extension_setting_service_pb2':"google.ads.google_ads.v4.proto.services",
'ad_group_feed_service_pb2':"google.ads.google_ads.v4.proto.services",
'ad_group_label_service_pb2':"google.ads.google_ads.v4.proto.services",
'ad_group_service_pb2':"google.ads.google_ads.v4.proto.services",
'ad_group_simulation_service_pb2':"google.ads.google_ads.v4.proto.services",
'ad_parameter_service_pb2':"google.ads.google_ads.v4.proto.services",
'ad_schedule_view_service_pb2':"google.ads.google_ads.v4.proto.services",
'ad_service_pb2':"google.ads.google_ads.v4.proto.services",
'age_range_view_service_pb2':"google.ads.google_ads.v4.proto.services",
'asset_service_pb2':"google.ads.google_ads.v4.proto.services",
'batch_job_service_pb2':"google.ads.google_ads.v4.proto.services",
'bidding_strategy_service_pb2':"google.ads.google_ads.v4.proto.services",
'billing_setup_service_pb2':"google.ads.google_ads.v4.proto.services",
'campaign_audience_view_service_pb2':"google.ads.google_ads.v4.proto.services",
'campaign_bid_modifier_service_pb2':"google.ads.google_ads.v4.proto.services",
'campaign_budget_service_pb2':"google.ads.google_ads.v4.proto.services",
'campaign_criterion_service_pb2':"google.ads.google_ads.v4.proto.services",
'campaign_criterion_simulation_service_pb2':"google.ads.google_ads.v4.proto.services",
'campaign_draft_service_pb2':"google.ads.google_ads.v4.proto.services",
'campaign_experiment_service_pb2':"google.ads.google_ads.v4.proto.services",
'campaign_extension_setting_service_pb2':"google.ads.google_ads.v4.proto.services",
'campaign_feed_service_pb2':"google.ads.google_ads.v4.proto.services",
'campaign_label_service_pb2':"google.ads.google_ads.v4.proto.services",
'campaign_service_pb2':"google.ads.google_ads.v4.proto.services",
'campaign_shared_set_service_pb2':"google.ads.google_ads.v4.proto.services",
'carrier_constant_service_pb2':"google.ads.google_ads.v4.proto.services",
'change_status_service_pb2':"google.ads.google_ads.v4.proto.services",
'click_view_service_pb2':"google.ads.google_ads.v4.proto.services",
'conversion_action_service_pb2':"google.ads.google_ads.v4.proto.services",
'conversion_adjustment_upload_service_pb2':"google.ads.google_ads.v4.proto.services",
'conversion_upload_service_pb2':"google.ads.google_ads.v4.proto.services",
'currency_constant_service_pb2':"google.ads.google_ads.v4.proto.services",
'custom_interest_service_pb2':"google.ads.google_ads.v4.proto.services",
'customer_client_link_service_pb2':"google.ads.google_ads.v4.proto.services",
'customer_client_service_pb2':"google.ads.google_ads.v4.proto.services",
'customer_extension_setting_service_pb2':"google.ads.google_ads.v4.proto.services",
'customer_feed_service_pb2':"google.ads.google_ads.v4.proto.services",
'customer_label_service_pb2':"google.ads.google_ads.v4.proto.services",
'customer_manager_link_service_pb2':"google.ads.google_ads.v4.proto.services",
'customer_negative_criterion_service_pb2':"google.ads.google_ads.v4.proto.services",
'customer_service_pb2':"google.ads.google_ads.v4.proto.services",
'detail_placement_view_service_pb2':"google.ads.google_ads.v4.proto.services",
'display_keyword_view_service_pb2':"google.ads.google_ads.v4.proto.services",
'distance_view_service_pb2':"google.ads.google_ads.v4.proto.services",
'domain_category_service_pb2':"google.ads.google_ads.v4.proto.services",
'dynamic_search_ads_search_term_view_service_pb2':"google.ads.google_ads.v4.proto.services",
'expanded_landing_page_view_service_pb2':"google.ads.google_ads.v4.proto.services",
'extension_feed_item_service_pb2':"google.ads.google_ads.v4.proto.services",
'feed_item_service_pb2':"google.ads.google_ads.v4.proto.services",
'feed_item_target_service_pb2':"google.ads.google_ads.v4.proto.services",
'feed_mapping_service_pb2':"google.ads.google_ads.v4.proto.services",
'feed_placeholder_view_service_pb2':"google.ads.google_ads.v4.proto.services",
'feed_service_pb2':"google.ads.google_ads.v4.proto.services",
'gender_view_service_pb2':"google.ads.google_ads.v4.proto.services",
'geo_target_constant_service_pb2':"google.ads.google_ads.v4.proto.services",
'geographic_view_service_pb2':"google.ads.google_ads.v4.proto.services",
'google_ads_field_service_pb2':"google.ads.google_ads.v4.proto.services",
'google_ads_service_pb2':"google.ads.google_ads.v4.proto.services",
'group_placement_view_service_pb2':"google.ads.google_ads.v4.proto.services",
'hotel_group_view_service_pb2':"google.ads.google_ads.v4.proto.services",
'hotel_performance_view_service_pb2':"google.ads.google_ads.v4.proto.services",
'income_range_view_service_pb2':"google.ads.google_ads.v4.proto.services",
'invoice_service_pb2':"google.ads.google_ads.v4.proto.services",
'keyword_plan_ad_group_keyword_service_pb2':"google.ads.google_ads.v4.proto.services",
'keyword_plan_ad_group_service_pb2':"google.ads.google_ads.v4.proto.services",
'keyword_plan_campaign_keyword_service_pb2':"google.ads.google_ads.v4.proto.services",
'keyword_plan_campaign_service_pb2':"google.ads.google_ads.v4.proto.services",
'keyword_plan_idea_service_pb2':"google.ads.google_ads.v4.proto.services",
'keyword_plan_service_pb2':"google.ads.google_ads.v4.proto.services",
'keyword_view_service_pb2':"google.ads.google_ads.v4.proto.services",
'label_service_pb2':"google.ads.google_ads.v4.proto.services",
'landing_page_view_service_pb2':"google.ads.google_ads.v4.proto.services",
'language_constant_service_pb2':"google.ads.google_ads.v4.proto.services",
'location_view_service_pb2':"google.ads.google_ads.v4.proto.services",
'managed_placement_view_service_pb2':"google.ads.google_ads.v4.proto.services",
'media_file_service_pb2':"google.ads.google_ads.v4.proto.services",
'merchant_center_link_service_pb2':"google.ads.google_ads.v4.proto.services",
'mobile_app_category_constant_service_pb2':"google.ads.google_ads.v4.proto.services",
'mobile_device_constant_service_pb2':"google.ads.google_ads.v4.proto.services",
'offline_user_data_job_service_pb2':"google.ads.google_ads.v4.proto.services",
'operating_system_version_constant_service_pb2':"google.ads.google_ads.v4.proto.services",
'paid_organic_search_term_view_service_pb2':"google.ads.google_ads.v4.proto.services",
'parental_status_view_service_pb2':"google.ads.google_ads.v4.proto.services",
'payments_account_service_pb2':"google.ads.google_ads.v4.proto.services",
'product_bidding_category_constant_service_pb2':"google.ads.google_ads.v4.proto.services",
'product_group_view_service_pb2':"google.ads.google_ads.v4.proto.services",
'reach_plan_service_pb2':"google.ads.google_ads.v4.proto.services",
'recommendation_service_pb2':"google.ads.google_ads.v4.proto.services",
'remarketing_action_service_pb2':"google.ads.google_ads.v4.proto.services",
'search_term_view_service_pb2':"google.ads.google_ads.v4.proto.services",
'shared_criterion_service_pb2':"google.ads.google_ads.v4.proto.services",
'shared_set_service_pb2':"google.ads.google_ads.v4.proto.services",
'shopping_performance_view_service_pb2':"google.ads.google_ads.v4.proto.services",
'third_party_app_analytics_link_service_pb2':"google.ads.google_ads.v4.proto.services",
'topic_constant_service_pb2':"google.ads.google_ads.v4.proto.services",
'topic_view_service_pb2':"google.ads.google_ads.v4.proto.services",
'user_data_service_pb2':"google.ads.google_ads.v4.proto.services",
'user_interest_service_pb2':"google.ads.google_ads.v4.proto.services",
'user_list_service_pb2':"google.ads.google_ads.v4.proto.services",
'user_location_view_service_pb2':"google.ads.google_ads.v4.proto.services",
'video_service_pb2':"google.ads.google_ads.v4.proto.services",
'operations_pb2':"google.longrunning",
'any_pb2':"google.protobuf",
'empty_pb2':"google.protobuf",
'field_mask_pb2':"google.protobuf",
'wrappers_pb2':"google.protobuf",
'status_pb2':"google.rpc",
}
_lazy_class_to_package_map = {
'AccessInvitationErrorEnum':"google.ads.google_ads.v4.proto.errors.access_invitation_error_pb2",
'AccessReasonEnum':"google.ads.google_ads.v4.proto.enums.access_reason_pb2",
'AccessRoleEnum':"google.ads.google_ads.v4.proto.enums.access_role_pb2",
'AccountBudget':"google.ads.google_ads.v4.proto.resources.account_budget_pb2",
'AccountBudgetProposal':"google.ads.google_ads.v4.proto.resources.account_budget_proposal_pb2",
'AccountBudgetProposalErrorEnum':"google.ads.google_ads.v4.proto.errors.account_budget_proposal_error_pb2",
'AccountBudgetProposalOperation':"google.ads.google_ads.v4.proto.services.account_budget_proposal_service_pb2",
'AccountBudgetProposalStatusEnum':"google.ads.google_ads.v4.proto.enums.account_budget_proposal_status_pb2",
'AccountBudgetProposalTypeEnum':"google.ads.google_ads.v4.proto.enums.account_budget_proposal_type_pb2",
'AccountBudgetStatusEnum':"google.ads.google_ads.v4.proto.enums.account_budget_status_pb2",
'AccountLink':"google.ads.google_ads.v4.proto.resources.account_link_pb2",
'AccountLinkOperation':"google.ads.google_ads.v4.proto.services.account_link_service_pb2",
'AccountLinkStatusEnum':"google.ads.google_ads.v4.proto.enums.account_link_status_pb2",
'Ad':"google.ads.google_ads.v4.proto.resources.ad_pb2",
'AdCustomizerErrorEnum':"google.ads.google_ads.v4.proto.errors.ad_customizer_error_pb2",
'AdCustomizerPlaceholderFieldEnum':"google.ads.google_ads.v4.proto.enums.ad_customizer_placeholder_field_pb2",
'AdErrorEnum':"google.ads.google_ads.v4.proto.errors.ad_error_pb2",
'AdGroup':"google.ads.google_ads.v4.proto.resources.ad_group_pb2",
'AdGroupAd':"google.ads.google_ads.v4.proto.resources.ad_group_ad_pb2",
'AdGroupAdAssetPolicySummary':"google.ads.google_ads.v4.proto.resources.ad_group_ad_asset_view_pb2",
'AdGroupAdAssetView':"google.ads.google_ads.v4.proto.resources.ad_group_ad_asset_view_pb2",
'AdGroupAdErrorEnum':"google.ads.google_ads.v4.proto.errors.ad_group_ad_error_pb2",
'AdGroupAdLabel':"google.ads.google_ads.v4.proto.resources.ad_group_ad_label_pb2",
'AdGroupAdLabelOperation':"google.ads.google_ads.v4.proto.services.ad_group_ad_label_service_pb2",
'AdGroupAdOperation':"google.ads.google_ads.v4.proto.services.ad_group_ad_service_pb2",
'AdGroupAdPolicySummary':"google.ads.google_ads.v4.proto.resources.ad_group_ad_pb2",
'AdGroupAdRotationModeEnum':"google.ads.google_ads.v4.proto.enums.ad_group_ad_rotation_mode_pb2",
'AdGroupAdStatusEnum':"google.ads.google_ads.v4.proto.enums.ad_group_ad_status_pb2",
'AdGroupAudienceView':"google.ads.google_ads.v4.proto.resources.ad_group_audience_view_pb2",
'AdGroupBidModifier':"google.ads.google_ads.v4.proto.resources.ad_group_bid_modifier_pb2",
'AdGroupBidModifierErrorEnum':"google.ads.google_ads.v4.proto.errors.ad_group_bid_modifier_error_pb2",
'AdGroupBidModifierOperation':"google.ads.google_ads.v4.proto.services.ad_group_bid_modifier_service_pb2",
'AdGroupCriterion':"google.ads.google_ads.v4.proto.resources.ad_group_criterion_pb2",
'AdGroupCriterionApprovalStatusEnum':"google.ads.google_ads.v4.proto.enums.ad_group_criterion_approval_status_pb2",
'AdGroupCriterionErrorEnum':"google.ads.google_ads.v4.proto.errors.ad_group_criterion_error_pb2",
'AdGroupCriterionLabel':"google.ads.google_ads.v4.proto.resources.ad_group_criterion_label_pb2",
'AdGroupCriterionLabelOperation':"google.ads.google_ads.v4.proto.services.ad_group_criterion_label_service_pb2",
'AdGroupCriterionOperation':"google.ads.google_ads.v4.proto.services.ad_group_criterion_service_pb2",
'AdGroupCriterionSimulation':"google.ads.google_ads.v4.proto.resources.ad_group_criterion_simulation_pb2",
'AdGroupCriterionStatusEnum':"google.ads.google_ads.v4.proto.enums.ad_group_criterion_status_pb2",
'AdGroupErrorEnum':"google.ads.google_ads.v4.proto.errors.ad_group_error_pb2",
'AdGroupExtensionSetting':"google.ads.google_ads.v4.proto.resources.ad_group_extension_setting_pb2",
'AdGroupExtensionSettingOperation':"google.ads.google_ads.v4.proto.services.ad_group_extension_setting_service_pb2",
'AdGroupFeed':"google.ads.google_ads.v4.proto.resources.ad_group_feed_pb2",
'AdGroupFeedErrorEnum':"google.ads.google_ads.v4.proto.errors.ad_group_feed_error_pb2",
'AdGroupFeedOperation':"google.ads.google_ads.v4.proto.services.ad_group_feed_service_pb2",
'AdGroupLabel':"google.ads.google_ads.v4.proto.resources.ad_group_label_pb2",
'AdGroupLabelOperation':"google.ads.google_ads.v4.proto.services.ad_group_label_service_pb2",
'AdGroupOperation':"google.ads.google_ads.v4.proto.services.ad_group_service_pb2",
'AdGroupSimulation':"google.ads.google_ads.v4.proto.resources.ad_group_simulation_pb2",
'AdGroupStatusEnum':"google.ads.google_ads.v4.proto.enums.ad_group_status_pb2",
'AdGroupTypeEnum':"google.ads.google_ads.v4.proto.enums.ad_group_type_pb2",
'AdImageAsset':"google.ads.google_ads.v4.proto.common.ad_asset_pb2",
'AdMediaBundleAsset':"google.ads.google_ads.v4.proto.common.ad_asset_pb2",
'AdNetworkTypeEnum':"google.ads.google_ads.v4.proto.enums.ad_network_type_pb2",
'AdOperation':"google.ads.google_ads.v4.proto.services.ad_service_pb2",
'AdParameter':"google.ads.google_ads.v4.proto.resources.ad_parameter_pb2",
'AdParameterErrorEnum':"google.ads.google_ads.v4.proto.errors.ad_parameter_error_pb2",
'AdParameterOperation':"google.ads.google_ads.v4.proto.services.ad_parameter_service_pb2",
'AdScheduleInfo':"google.ads.google_ads.v4.proto.common.criteria_pb2",
'AdScheduleView':"google.ads.google_ads.v4.proto.resources.ad_schedule_view_pb2",
'AdServingOptimizationStatusEnum':"google.ads.google_ads.v4.proto.enums.ad_serving_optimization_status_pb2",
'AdSharingErrorEnum':"google.ads.google_ads.v4.proto.errors.ad_sharing_error_pb2",
'AdStrengthEnum':"google.ads.google_ads.v4.proto.enums.ad_strength_pb2",
'AdTextAsset':"google.ads.google_ads.v4.proto.common.ad_asset_pb2",
'AdTypeEnum':"google.ads.google_ads.v4.proto.enums.ad_type_pb2",
'AdVideoAsset':"google.ads.google_ads.v4.proto.common.ad_asset_pb2",
'AddBatchJobOperationsRequest':"google.ads.google_ads.v4.proto.services.batch_job_service_pb2",
'AddBatchJobOperationsResponse':"google.ads.google_ads.v4.proto.services.batch_job_service_pb2",
'AddOfflineUserDataJobOperationsRequest':"google.ads.google_ads.v4.proto.services.offline_user_data_job_service_pb2",
'AddOfflineUserDataJobOperationsResponse':"google.ads.google_ads.v4.proto.services.offline_user_data_job_service_pb2",
'AddressInfo':"google.ads.google_ads.v4.proto.common.criteria_pb2",
'AdvertisingChannelSubTypeEnum':"google.ads.google_ads.v4.proto.enums.advertising_channel_sub_type_pb2",
'AdvertisingChannelTypeEnum':"google.ads.google_ads.v4.proto.enums.advertising_channel_type_pb2",
'AdxErrorEnum':"google.ads.google_ads.v4.proto.errors.adx_error_pb2",
'AffiliateLocationFeedItem':"google.ads.google_ads.v4.proto.common.extensions_pb2",
'AffiliateLocationFeedRelationshipTypeEnum':"google.ads.google_ads.v4.proto.enums.affiliate_location_feed_relationship_type_pb2",
'AffiliateLocationPlaceholderFieldEnum':"google.ads.google_ads.v4.proto.enums.affiliate_location_placeholder_field_pb2",
'AgeRangeInfo':"google.ads.google_ads.v4.proto.common.criteria_pb2",
'AgeRangeTypeEnum':"google.ads.google_ads.v4.proto.enums.age_range_type_pb2",
'AgeRangeView':"google.ads.google_ads.v4.proto.resources.age_range_view_pb2",
'AppAdInfo':"google.ads.google_ads.v4.proto.common.ad_type_infos_pb2",
'AppCampaignAppStoreEnum':"google.ads.google_ads.v4.proto.enums.app_campaign_app_store_pb2",
'AppCampaignBiddingStrategyGoalTypeEnum':"google.ads.google_ads.v4.proto.enums.app_campaign_bidding_strategy_goal_type_pb2",
'AppEngagementAdInfo':"google.ads.google_ads.v4.proto.common.ad_type_infos_pb2",
'AppFeedItem':"google.ads.google_ads.v4.proto.common.extensions_pb2",
'AppPaymentModelInfo':"google.ads.google_ads.v4.proto.common.criteria_pb2",
'AppPaymentModelTypeEnum':"google.ads.google_ads.v4.proto.enums.app_payment_model_type_pb2",
'AppPlaceholderFieldEnum':"google.ads.google_ads.v4.proto.enums.app_placeholder_field_pb2",
'AppStoreEnum':"google.ads.google_ads.v4.proto.enums.app_store_pb2",
'AppUrlOperatingSystemTypeEnum':"google.ads.google_ads.v4.proto.enums.app_url_operating_system_type_pb2",
'ApplyRecommendationOperation':"google.ads.google_ads.v4.proto.services.recommendation_service_pb2",
'ApplyRecommendationRequest':"google.ads.google_ads.v4.proto.services.recommendation_service_pb2",
'ApplyRecommendationResponse':"google.ads.google_ads.v4.proto.services.recommendation_service_pb2",
'ApplyRecommendationResult':"google.ads.google_ads.v4.proto.services.recommendation_service_pb2",
'Asset':"google.ads.google_ads.v4.proto.resources.asset_pb2",
'AssetErrorEnum':"google.ads.google_ads.v4.proto.errors.asset_error_pb2",
'AssetFieldTypeEnum':"google.ads.google_ads.v4.proto.enums.asset_field_type_pb2",
'AssetLinkErrorEnum':"google.ads.google_ads.v4.proto.errors.asset_link_error_pb2",
'AssetOperation':"google.ads.google_ads.v4.proto.services.asset_service_pb2",
'AssetPerformanceLabelEnum':"google.ads.google_ads.v4.proto.enums.asset_performance_label_pb2",
'AssetTypeEnum':"google.ads.google_ads.v4.proto.enums.asset_type_pb2",
'AttributeFieldMapping':"google.ads.google_ads.v4.proto.resources.feed_mapping_pb2",
'AttributionModelEnum':"google.ads.google_ads.v4.proto.enums.attribution_model_pb2",
'AuthenticationErrorEnum':"google.ads.google_ads.v4.proto.errors.authentication_error_pb2",
'AuthorizationErrorEnum':"google.ads.google_ads.v4.proto.errors.authorization_error_pb2",
'BasicUserListInfo':"google.ads.google_ads.v4.proto.common.user_lists_pb2",
'BatchJob':"google.ads.google_ads.v4.proto.resources.batch_job_pb2",
'BatchJobErrorEnum':"google.ads.google_ads.v4.proto.errors.batch_job_error_pb2",
'BatchJobOperation':"google.ads.google_ads.v4.proto.services.batch_job_service_pb2",
'BatchJobResult':"google.ads.google_ads.v4.proto.services.batch_job_service_pb2",
'BatchJobStatusEnum':"google.ads.google_ads.v4.proto.enums.batch_job_status_pb2",
'BidModifierSimulationPoint':"google.ads.google_ads.v4.proto.common.simulation_pb2",
'BidModifierSimulationPointList':"google.ads.google_ads.v4.proto.common.simulation_pb2",
'BidModifierSourceEnum':"google.ads.google_ads.v4.proto.enums.bid_modifier_source_pb2",
'BiddingErrorEnum':"google.ads.google_ads.v4.proto.errors.bidding_error_pb2",
'BiddingSourceEnum':"google.ads.google_ads.v4.proto.enums.bidding_source_pb2",
'BiddingStrategy':"google.ads.google_ads.v4.proto.resources.bidding_strategy_pb2",
'BiddingStrategyErrorEnum':"google.ads.google_ads.v4.proto.errors.bidding_strategy_error_pb2",
'BiddingStrategyOperation':"google.ads.google_ads.v4.proto.services.bidding_strategy_service_pb2",
'BiddingStrategyStatusEnum':"google.ads.google_ads.v4.proto.enums.bidding_strategy_status_pb2",
'BiddingStrategyTypeEnum':"google.ads.google_ads.v4.proto.enums.bidding_strategy_type_pb2",
'BillingSetup':"google.ads.google_ads.v4.proto.resources.billing_setup_pb2",
'BillingSetupErrorEnum':"google.ads.google_ads.v4.proto.errors.billing_setup_error_pb2",
'BillingSetupOperation':"google.ads.google_ads.v4.proto.services.billing_setup_service_pb2",
'BillingSetupStatusEnum':"google.ads.google_ads.v4.proto.enums.billing_setup_status_pb2",
'BookOnGoogleAsset':"google.ads.google_ads.v4.proto.common.asset_types_pb2",
'BrandSafetySuitabilityEnum':"google.ads.google_ads.v4.proto.enums.brand_safety_suitability_pb2",
'BudgetDeliveryMethodEnum':"google.ads.google_ads.v4.proto.enums.budget_delivery_method_pb2",
'BudgetPeriodEnum':"google.ads.google_ads.v4.proto.enums.budget_period_pb2",
'BudgetStatusEnum':"google.ads.google_ads.v4.proto.enums.budget_status_pb2",
'BudgetTypeEnum':"google.ads.google_ads.v4.proto.enums.budget_type_pb2",
'CallConversion':"google.ads.google_ads.v4.proto.services.conversion_upload_service_pb2",
'CallConversionReportingStateEnum':"google.ads.google_ads.v4.proto.enums.call_conversion_reporting_state_pb2",
'CallConversionResult':"google.ads.google_ads.v4.proto.services.conversion_upload_service_pb2",
'CallFeedItem':"google.ads.google_ads.v4.proto.common.extensions_pb2",
'CallOnlyAdInfo':"google.ads.google_ads.v4.proto.common.ad_type_infos_pb2",
'CallPlaceholderFieldEnum':"google.ads.google_ads.v4.proto.enums.call_placeholder_field_pb2",
'CallReportingSetting':"google.ads.google_ads.v4.proto.resources.customer_pb2",
'CalloutFeedItem':"google.ads.google_ads.v4.proto.common.extensions_pb2",
'CalloutPlaceholderFieldEnum':"google.ads.google_ads.v4.proto.enums.callout_placeholder_field_pb2",
'Campaign':"google.ads.google_ads.v4.proto.resources.campaign_pb2",
'CampaignAudienceView':"google.ads.google_ads.v4.proto.resources.campaign_audience_view_pb2",
'CampaignBidModifier':"google.ads.google_ads.v4.proto.resources.campaign_bid_modifier_pb2",
'CampaignBidModifierOperation':"google.ads.google_ads.v4.proto.services.campaign_bid_modifier_service_pb2",
'CampaignBudget':"google.ads.google_ads.v4.proto.resources.campaign_budget_pb2",
'CampaignBudgetErrorEnum':"google.ads.google_ads.v4.proto.errors.campaign_budget_error_pb2",
'CampaignBudgetOperation':"google.ads.google_ads.v4.proto.services.campaign_budget_service_pb2",
'CampaignCriterion':"google.ads.google_ads.v4.proto.resources.campaign_criterion_pb2",
'CampaignCriterionErrorEnum':"google.ads.google_ads.v4.proto.errors.campaign_criterion_error_pb2",
'CampaignCriterionOperation':"google.ads.google_ads.v4.proto.services.campaign_criterion_service_pb2",
'CampaignCriterionSimulation':"google.ads.google_ads.v4.proto.resources.campaign_criterion_simulation_pb2",
'CampaignCriterionStatusEnum':"google.ads.google_ads.v4.proto.enums.campaign_criterion_status_pb2",
'CampaignDraft':"google.ads.google_ads.v4.proto.resources.campaign_draft_pb2",
'CampaignDraftErrorEnum':"google.ads.google_ads.v4.proto.errors.campaign_draft_error_pb2",
'CampaignDraftOperation':"google.ads.google_ads.v4.proto.services.campaign_draft_service_pb2",
'CampaignDraftStatusEnum':"google.ads.google_ads.v4.proto.enums.campaign_draft_status_pb2",
'CampaignDuration':"google.ads.google_ads.v4.proto.services.reach_plan_service_pb2",
'CampaignErrorEnum':"google.ads.google_ads.v4.proto.errors.campaign_error_pb2",
'CampaignExperiment':"google.ads.google_ads.v4.proto.resources.campaign_experiment_pb2",
'CampaignExperimentErrorEnum':"google.ads.google_ads.v4.proto.errors.campaign_experiment_error_pb2",
'CampaignExperimentOperation':"google.ads.google_ads.v4.proto.services.campaign_experiment_service_pb2",
'CampaignExperimentStatusEnum':"google.ads.google_ads.v4.proto.enums.campaign_experiment_status_pb2",
'CampaignExperimentTrafficSplitTypeEnum':"google.ads.google_ads.v4.proto.enums.campaign_experiment_traffic_split_type_pb2",
'CampaignExperimentTypeEnum':"google.ads.google_ads.v4.proto.enums.campaign_experiment_type_pb2",
'CampaignExtensionSetting':"google.ads.google_ads.v4.proto.resources.campaign_extension_setting_pb2",
'CampaignExtensionSettingOperation':"google.ads.google_ads.v4.proto.services.campaign_extension_setting_service_pb2",
'CampaignFeed':"google.ads.google_ads.v4.proto.resources.campaign_feed_pb2",
'CampaignFeedErrorEnum':"google.ads.google_ads.v4.proto.errors.campaign_feed_error_pb2",
'CampaignFeedOperation':"google.ads.google_ads.v4.proto.services.campaign_feed_service_pb2",
'CampaignLabel':"google.ads.google_ads.v4.proto.resources.campaign_label_pb2",
'CampaignLabelOperation':"google.ads.google_ads.v4.proto.services.campaign_label_service_pb2",
'CampaignOperation':"google.ads.google_ads.v4.proto.services.campaign_service_pb2",
'CampaignServingStatusEnum':"google.ads.google_ads.v4.proto.enums.campaign_serving_status_pb2",
'CampaignSharedSet':"google.ads.google_ads.v4.proto.resources.campaign_shared_set_pb2",
'CampaignSharedSetErrorEnum':"google.ads.google_ads.v4.proto.errors.campaign_shared_set_error_pb2",
'CampaignSharedSetOperation':"google.ads.google_ads.v4.proto.services.campaign_shared_set_service_pb2",
'CampaignSharedSetStatusEnum':"google.ads.google_ads.v4.proto.enums.campaign_shared_set_status_pb2",
'CampaignStatusEnum':"google.ads.google_ads.v4.proto.enums.campaign_status_pb2",
'CarrierConstant':"google.ads.google_ads.v4.proto.resources.carrier_constant_pb2",
'CarrierInfo':"google.ads.google_ads.v4.proto.common.criteria_pb2",
'ChangeStatus':"google.ads.google_ads.v4.proto.resources.change_status_pb2",
'ChangeStatusErrorEnum':"google.ads.google_ads.v4.proto.errors.change_status_error_pb2",
'ChangeStatusOperationEnum':"google.ads.google_ads.v4.proto.enums.change_status_operation_pb2",
'ChangeStatusResourceTypeEnum':"google.ads.google_ads.v4.proto.enums.change_status_resource_type_pb2",
'ClickConversion':"google.ads.google_ads.v4.proto.services.conversion_upload_service_pb2",
'ClickConversionResult':"google.ads.google_ads.v4.proto.services.conversion_upload_service_pb2",
'ClickLocation':"google.ads.google_ads.v4.proto.common.click_location_pb2",
'ClickTypeEnum':"google.ads.google_ads.v4.proto.enums.click_type_pb2",
'ClickView':"google.ads.google_ads.v4.proto.resources.click_view_pb2",
'CollectionSizeErrorEnum':"google.ads.google_ads.v4.proto.errors.collection_size_error_pb2",
'CombinedRuleUserListInfo':"google.ads.google_ads.v4.proto.common.user_lists_pb2",
'Commission':"google.ads.google_ads.v4.proto.common.bidding_pb2",
'ContentLabelInfo':"google.ads.google_ads.v4.proto.common.criteria_pb2",
'ContentLabelTypeEnum':"google.ads.google_ads.v4.proto.enums.content_label_type_pb2",
'ContextErrorEnum':"google.ads.google_ads.v4.proto.errors.context_error_pb2",
'ConversionAction':"google.ads.google_ads.v4.proto.resources.conversion_action_pb2",
'ConversionActionCategoryEnum':"google.ads.google_ads.v4.proto.enums.conversion_action_category_pb2",
'ConversionActionCountingTypeEnum':"google.ads.google_ads.v4.proto.enums.conversion_action_counting_type_pb2",
'ConversionActionErrorEnum':"google.ads.google_ads.v4.proto.errors.conversion_action_error_pb2",
'ConversionActionOperation':"google.ads.google_ads.v4.proto.services.conversion_action_service_pb2",
'ConversionActionStatusEnum':"google.ads.google_ads.v4.proto.enums.conversion_action_status_pb2",
'ConversionActionTypeEnum':"google.ads.google_ads.v4.proto.enums.conversion_action_type_pb2",
'ConversionAdjustment':"google.ads.google_ads.v4.proto.services.conversion_adjustment_upload_service_pb2",
'ConversionAdjustmentResult':"google.ads.google_ads.v4.proto.services.conversion_adjustment_upload_service_pb2",
'ConversionAdjustmentTypeEnum':"google.ads.google_ads.v4.proto.enums.conversion_adjustment_type_pb2",
'ConversionAdjustmentUploadErrorEnum':"google.ads.google_ads.v4.proto.errors.conversion_adjustment_upload_error_pb2",
'ConversionAttributionEventTypeEnum':"google.ads.google_ads.v4.proto.enums.conversion_attribution_event_type_pb2",
'ConversionLagBucketEnum':"google.ads.google_ads.v4.proto.enums.conversion_lag_bucket_pb2",
'ConversionOrAdjustmentLagBucketEnum':"google.ads.google_ads.v4.proto.enums.conversion_or_adjustment_lag_bucket_pb2",
'ConversionTrackingSetting':"google.ads.google_ads.v4.proto.resources.customer_pb2",
'ConversionUploadErrorEnum':"google.ads.google_ads.v4.proto.errors.conversion_upload_error_pb2",
'CountryCodeErrorEnum':"google.ads.google_ads.v4.proto.errors.country_code_error_pb2",
'CpcBidSimulationPoint':"google.ads.google_ads.v4.proto.common.simulation_pb2",
'CpcBidSimulationPointList':"google.ads.google_ads.v4.proto.common.simulation_pb2",
'CpvBidSimulationPoint':"google.ads.google_ads.v4.proto.common.simulation_pb2",
'CpvBidSimulationPointList':"google.ads.google_ads.v4.proto.common.simulation_pb2",
'CreateCampaignExperimentMetadata':"google.ads.google_ads.v4.proto.services.campaign_experiment_service_pb2",
'CreateCampaignExperimentRequest':"google.ads.google_ads.v4.proto.services.campaign_experiment_service_pb2",
'CreateCustomerClientRequest':"google.ads.google_ads.v4.proto.services.customer_service_pb2",
'CreateCustomerClientResponse':"google.ads.google_ads.v4.proto.services.customer_service_pb2",
'CreateOfflineUserDataJobRequest':"google.ads.google_ads.v4.proto.services.offline_user_data_job_service_pb2",
'CreateOfflineUserDataJobResponse':"google.ads.google_ads.v4.proto.services.offline_user_data_job_service_pb2",
'CriterionCategoryAvailability':"google.ads.google_ads.v4.proto.common.criterion_category_availability_pb2",
'CriterionCategoryChannelAvailability':"google.ads.google_ads.v4.proto.common.criterion_category_availability_pb2",
'CriterionCategoryChannelAvailabilityModeEnum':"google.ads.google_ads.v4.proto.enums.criterion_category_channel_availability_mode_pb2",
'CriterionCategoryLocaleAvailability':"google.ads.google_ads.v4.proto.common.criterion_category_availability_pb2",
'CriterionCategoryLocaleAvailabilityModeEnum':"google.ads.google_ads.v4.proto.enums.criterion_category_locale_availability_mode_pb2",
'CriterionErrorEnum':"google.ads.google_ads.v4.proto.errors.criterion_error_pb2",
'CriterionSystemServingStatusEnum':"google.ads.google_ads.v4.proto.enums.criterion_system_serving_status_pb2",
'CriterionTypeEnum':"google.ads.google_ads.v4.proto.enums.criterion_type_pb2",
'CrmBasedUserListInfo':"google.ads.google_ads.v4.proto.common.user_lists_pb2",
'CurrencyCodeErrorEnum':"google.ads.google_ads.v4.proto.errors.currency_code_error_pb2",
'CurrencyConstant':"google.ads.google_ads.v4.proto.resources.currency_constant_pb2",
'CustomAffinityInfo':"google.ads.google_ads.v4.proto.common.criteria_pb2",
'CustomIntentInfo':"google.ads.google_ads.v4.proto.common.criteria_pb2",
'CustomInterest':"google.ads.google_ads.v4.proto.resources.custom_interest_pb2",
'CustomInterestErrorEnum':"google.ads.google_ads.v4.proto.errors.custom_interest_error_pb2",
'CustomInterestMember':"google.ads.google_ads.v4.proto.resources.custom_interest_pb2",
'CustomInterestMemberTypeEnum':"google.ads.google_ads.v4.proto.enums.custom_interest_member_type_pb2",
'CustomInterestOperation':"google.ads.google_ads.v4.proto.services.custom_interest_service_pb2",
'CustomInterestStatusEnum':"google.ads.google_ads.v4.proto.enums.custom_interest_status_pb2",
'CustomInterestTypeEnum':"google.ads.google_ads.v4.proto.enums.custom_interest_type_pb2",
'CustomParameter':"google.ads.google_ads.v4.proto.common.custom_parameter_pb2",
'CustomPlaceholderFieldEnum':"google.ads.google_ads.v4.proto.enums.custom_placeholder_field_pb2",
'Customer':"google.ads.google_ads.v4.proto.resources.customer_pb2",
'CustomerClient':"google.ads.google_ads.v4.proto.resources.customer_client_pb2",
'CustomerClientLink':"google.ads.google_ads.v4.proto.resources.customer_client_link_pb2",
'CustomerClientLinkErrorEnum':"google.ads.google_ads.v4.proto.errors.customer_client_link_error_pb2",
'CustomerClientLinkOperation':"google.ads.google_ads.v4.proto.services.customer_client_link_service_pb2",
'CustomerErrorEnum':"google.ads.google_ads.v4.proto.errors.customer_error_pb2",
'CustomerExtensionSetting':"google.ads.google_ads.v4.proto.resources.customer_extension_setting_pb2",
'CustomerExtensionSettingOperation':"google.ads.google_ads.v4.proto.services.customer_extension_setting_service_pb2",
'CustomerFeed':"google.ads.google_ads.v4.proto.resources.customer_feed_pb2",
'CustomerFeedErrorEnum':"google.ads.google_ads.v4.proto.errors.customer_feed_error_pb2",
'CustomerFeedOperation':"google.ads.google_ads.v4.proto.services.customer_feed_service_pb2",
'CustomerLabel':"google.ads.google_ads.v4.proto.resources.customer_label_pb2",
'CustomerLabelOperation':"google.ads.google_ads.v4.proto.services.customer_label_service_pb2",
'CustomerManagerLink':"google.ads.google_ads.v4.proto.resources.customer_manager_link_pb2",
'CustomerManagerLinkErrorEnum':"google.ads.google_ads.v4.proto.errors.customer_manager_link_error_pb2",
'CustomerManagerLinkOperation':"google.ads.google_ads.v4.proto.services.customer_manager_link_service_pb2",
'CustomerMatchUploadKeyTypeEnum':"google.ads.google_ads.v4.proto.enums.customer_match_upload_key_type_pb2",
'CustomerMatchUserListMetadata':"google.ads.google_ads.v4.proto.common.offline_user_data_pb2",
'CustomerNegativeCriterion':"google.ads.google_ads.v4.proto.resources.customer_negative_criterion_pb2",
'CustomerNegativeCriterionOperation':"google.ads.google_ads.v4.proto.services.customer_negative_criterion_service_pb2",
'CustomerOperation':"google.ads.google_ads.v4.proto.services.customer_service_pb2",
'CustomerPayPerConversionEligibilityFailureReasonEnum':"google.ads.google_ads.v4.proto.enums.customer_pay_per_conversion_eligibility_failure_reason_pb2",
'DataDrivenModelStatusEnum':"google.ads.google_ads.v4.proto.enums.data_driven_model_status_pb2",
'DatabaseErrorEnum':"google.ads.google_ads.v4.proto.errors.database_error_pb2",
'DateErrorEnum':"google.ads.google_ads.v4.proto.errors.date_error_pb2",
'DateRange':"google.ads.google_ads.v4.proto.common.dates_pb2",
'DateRangeErrorEnum':"google.ads.google_ads.v4.proto.errors.date_range_error_pb2",
'DateSpecificRuleUserListInfo':"google.ads.google_ads.v4.proto.common.user_lists_pb2",
'DayOfWeekEnum':"google.ads.google_ads.v4.proto.enums.day_of_week_pb2",
'DetailPlacementView':"google.ads.google_ads.v4.proto.resources.detail_placement_view_pb2",
'DeviceEnum':"google.ads.google_ads.v4.proto.enums.device_pb2",
'DeviceInfo':"google.ads.google_ads.v4.proto.common.criteria_pb2",
'DismissRecommendationRequest':"google.ads.google_ads.v4.proto.services.recommendation_service_pb2",
'DismissRecommendationResponse':"google.ads.google_ads.v4.proto.services.recommendation_service_pb2",
'DisplayAdFormatSettingEnum':"google.ads.google_ads.v4.proto.enums.display_ad_format_setting_pb2",
'DisplayCallToAction':"google.ads.google_ads.v4.proto.common.ad_type_infos_pb2",
'DisplayKeywordView':"google.ads.google_ads.v4.proto.resources.display_keyword_view_pb2",
'DisplayUploadAdInfo':"google.ads.google_ads.v4.proto.common.ad_type_infos_pb2",
'DisplayUploadProductTypeEnum':"google.ads.google_ads.v4.proto.enums.display_upload_product_type_pb2",
'DistanceBucketEnum':"google.ads.google_ads.v4.proto.enums.distance_bucket_pb2",
'DistanceView':"google.ads.google_ads.v4.proto.resources.distance_view_pb2",
'DistinctErrorEnum':"google.ads.google_ads.v4.proto.errors.distinct_error_pb2",
'DomainCategory':"google.ads.google_ads.v4.proto.resources.domain_category_pb2",
'DsaPageFeedCriterionFieldEnum':"google.ads.google_ads.v4.proto.enums.dsa_page_feed_criterion_field_pb2",
'DynamicSearchAdsSearchTermView':"google.ads.google_ads.v4.proto.resources.dynamic_search_ads_search_term_view_pb2",
'EducationPlaceholderFieldEnum':"google.ads.google_ads.v4.proto.enums.education_placeholder_field_pb2",
'EndCampaignExperimentRequest':"google.ads.google_ads.v4.proto.services.campaign_experiment_service_pb2",
'EnhancedCpc':"google.ads.google_ads.v4.proto.common.bidding_pb2",
'EnumErrorEnum':"google.ads.google_ads.v4.proto.errors.enum_error_pb2",
'ErrorCode':"google.ads.google_ads.v4.proto.errors.errors_pb2",
'ErrorDetails':"google.ads.google_ads.v4.proto.errors.errors_pb2",
'ErrorLocation':"google.ads.google_ads.v4.proto.errors.errors_pb2",
'ExpandedDynamicSearchAdInfo':"google.ads.google_ads.v4.proto.common.ad_type_infos_pb2",
'ExpandedLandingPageView':"google.ads.google_ads.v4.proto.resources.expanded_landing_page_view_pb2",
'ExpandedTextAdInfo':"google.ads.google_ads.v4.proto.common.ad_type_infos_pb2",
'ExplorerAutoOptimizerSetting':"google.ads.google_ads.v4.proto.common.explorer_auto_optimizer_setting_pb2",
'ExpressionRuleUserListInfo':"google.ads.google_ads.v4.proto.common.user_lists_pb2",
'ExtensionFeedItem':"google.ads.google_ads.v4.proto.resources.extension_feed_item_pb2",
'ExtensionFeedItemErrorEnum':"google.ads.google_ads.v4.proto.errors.extension_feed_item_error_pb2",
'ExtensionFeedItemOperation':"google.ads.google_ads.v4.proto.services.extension_feed_item_service_pb2",
'ExtensionSettingDeviceEnum':"google.ads.google_ads.v4.proto.enums.extension_setting_device_pb2",
'ExtensionSettingErrorEnum':"google.ads.google_ads.v4.proto.errors.extension_setting_error_pb2",
'ExtensionTypeEnum':"google.ads.google_ads.v4.proto.enums.extension_type_pb2",
'ExternalAttributionData':"google.ads.google_ads.v4.proto.services.conversion_upload_service_pb2",
'ExternalConversionSourceEnum':"google.ads.google_ads.v4.proto.enums.external_conversion_source_pb2",
'Feed':"google.ads.google_ads.v4.proto.resources.feed_pb2",
'FeedAttribute':"google.ads.google_ads.v4.proto.resources.feed_pb2",
'FeedAttributeOperation':"google.ads.google_ads.v4.proto.resources.feed_pb2",
'FeedAttributeReferenceErrorEnum':"google.ads.google_ads.v4.proto.errors.feed_attribute_reference_error_pb2",
'FeedAttributeTypeEnum':"google.ads.google_ads.v4.proto.enums.feed_attribute_type_pb2",
'FeedErrorEnum':"google.ads.google_ads.v4.proto.errors.feed_error_pb2",
'FeedItem':"google.ads.google_ads.v4.proto.resources.feed_item_pb2",
'FeedItemAttributeValue':"google.ads.google_ads.v4.proto.resources.feed_item_pb2",
'FeedItemErrorEnum':"google.ads.google_ads.v4.proto.errors.feed_item_error_pb2",
'FeedItemOperation':"google.ads.google_ads.v4.proto.services.feed_item_service_pb2",
'FeedItemPlaceholderPolicyInfo':"google.ads.google_ads.v4.proto.resources.feed_item_pb2",
'FeedItemQualityApprovalStatusEnum':"google.ads.google_ads.v4.proto.enums.feed_item_quality_approval_status_pb2",
'FeedItemQualityDisapprovalReasonEnum':"google.ads.google_ads.v4.proto.enums.feed_item_quality_disapproval_reason_pb2",
'FeedItemStatusEnum':"google.ads.google_ads.v4.proto.enums.feed_item_status_pb2",
'FeedItemTarget':"google.ads.google_ads.v4.proto.resources.feed_item_target_pb2",
'FeedItemTargetDeviceEnum':"google.ads.google_ads.v4.proto.enums.feed_item_target_device_pb2",
'FeedItemTargetErrorEnum':"google.ads.google_ads.v4.proto.errors.feed_item_target_error_pb2",
'FeedItemTargetOperation':"google.ads.google_ads.v4.proto.services.feed_item_target_service_pb2",
'FeedItemTargetStatusEnum':"google.ads.google_ads.v4.proto.enums.feed_item_target_status_pb2",
'FeedItemTargetTypeEnum':"google.ads.google_ads.v4.proto.enums.feed_item_target_type_pb2",
'FeedItemValidationError':"google.ads.google_ads.v4.proto.resources.feed_item_pb2",
'FeedItemValidationErrorEnum':"google.ads.google_ads.v4.proto.errors.feed_item_validation_error_pb2",
'FeedItemValidationStatusEnum':"google.ads.google_ads.v4.proto.enums.feed_item_validation_status_pb2",
'FeedLinkStatusEnum':"google.ads.google_ads.v4.proto.enums.feed_link_status_pb2",
'FeedMapping':"google.ads.google_ads.v4.proto.resources.feed_mapping_pb2",
'FeedMappingCriterionTypeEnum':"google.ads.google_ads.v4.proto.enums.feed_mapping_criterion_type_pb2",
'FeedMappingErrorEnum':"google.ads.google_ads.v4.proto.errors.feed_mapping_error_pb2",
'FeedMappingOperation':"google.ads.google_ads.v4.proto.services.feed_mapping_service_pb2",
'FeedMappingStatusEnum':"google.ads.google_ads.v4.proto.enums.feed_mapping_status_pb2",
'FeedOperation':"google.ads.google_ads.v4.proto.services.feed_service_pb2",
'FeedOriginEnum':"google.ads.google_ads.v4.proto.enums.feed_origin_pb2",
'FeedPlaceholderView':"google.ads.google_ads.v4.proto.resources.feed_placeholder_view_pb2",
'FeedStatusEnum':"google.ads.google_ads.v4.proto.enums.feed_status_pb2",
'FieldErrorEnum':"google.ads.google_ads.v4.proto.errors.field_error_pb2",
'FieldMaskErrorEnum':"google.ads.google_ads.v4.proto.errors.field_mask_error_pb2",
'FinalAppUrl':"google.ads.google_ads.v4.proto.common.final_app_url_pb2",
'FlightPlaceholderFieldEnum':"google.ads.google_ads.v4.proto.enums.flight_placeholder_field_pb2",
'Forecast':"google.ads.google_ads.v4.proto.services.reach_plan_service_pb2",
'ForecastMetrics':"google.ads.google_ads.v4.proto.services.keyword_plan_service_pb2",
'FrequencyCap':"google.ads.google_ads.v4.proto.services.reach_plan_service_pb2",
'FrequencyCapEntry':"google.ads.google_ads.v4.proto.common.frequency_cap_pb2",
'FrequencyCapEventTypeEnum':"google.ads.google_ads.v4.proto.enums.frequency_cap_event_type_pb2",
'FrequencyCapKey':"google.ads.google_ads.v4.proto.common.frequency_cap_pb2",
'FrequencyCapLevelEnum':"google.ads.google_ads.v4.proto.enums.frequency_cap_level_pb2",
'FrequencyCapTimeUnitEnum':"google.ads.google_ads.v4.proto.enums.frequency_cap_time_unit_pb2",
'FunctionErrorEnum':"google.ads.google_ads.v4.proto.errors.function_error_pb2",
'FunctionParsingErrorEnum':"google.ads.google_ads.v4.proto.errors.function_parsing_error_pb2",
'GclidDateTimePair':"google.ads.google_ads.v4.proto.services.conversion_adjustment_upload_service_pb2",
'GenderInfo':"google.ads.google_ads.v4.proto.common.criteria_pb2",
'GenderTypeEnum':"google.ads.google_ads.v4.proto.enums.gender_type_pb2",
'GenderView':"google.ads.google_ads.v4.proto.resources.gender_view_pb2",
'GenerateForecastCurveRequest':"google.ads.google_ads.v4.proto.services.keyword_plan_service_pb2",
'GenerateForecastCurveResponse':"google.ads.google_ads.v4.proto.services.keyword_plan_service_pb2",
'GenerateForecastMetricsRequest':"google.ads.google_ads.v4.proto.services.keyword_plan_service_pb2",
'GenerateForecastMetricsResponse':"google.ads.google_ads.v4.proto.services.keyword_plan_service_pb2",
'GenerateHistoricalMetricsRequest':"google.ads.google_ads.v4.proto.services.keyword_plan_service_pb2",
'GenerateHistoricalMetricsResponse':"google.ads.google_ads.v4.proto.services.keyword_plan_service_pb2",
'GenerateKeywordIdeaResponse':"google.ads.google_ads.v4.proto.services.keyword_plan_idea_service_pb2",
'GenerateKeywordIdeaResult':"google.ads.google_ads.v4.proto.services.keyword_plan_idea_service_pb2",
'GenerateKeywordIdeasRequest':"google.ads.google_ads.v4.proto.services.keyword_plan_idea_service_pb2",
'GenerateProductMixIdeasRequest':"google.ads.google_ads.v4.proto.services.reach_plan_service_pb2",
'GenerateProductMixIdeasResponse':"google.ads.google_ads.v4.proto.services.reach_plan_service_pb2",
'GenerateReachForecastRequest':"google.ads.google_ads.v4.proto.services.reach_plan_service_pb2",
'GenerateReachForecastResponse':"google.ads.google_ads.v4.proto.services.reach_plan_service_pb2",
'GeoPointInfo':"google.ads.google_ads.v4.proto.common.criteria_pb2",
'GeoTargetConstant':"google.ads.google_ads.v4.proto.resources.geo_target_constant_pb2",
'GeoTargetConstantStatusEnum':"google.ads.google_ads.v4.proto.enums.geo_target_constant_status_pb2",
'GeoTargetConstantSuggestion':"google.ads.google_ads.v4.proto.services.geo_target_constant_service_pb2",
'GeoTargetConstantSuggestionErrorEnum':"google.ads.google_ads.v4.proto.errors.geo_target_constant_suggestion_error_pb2",
'GeoTargetingRestrictionEnum':"google.ads.google_ads.v4.proto.enums.geo_targeting_restriction_pb2",
'GeoTargetingTypeEnum':"google.ads.google_ads.v4.proto.enums.geo_targeting_type_pb2",
'GeographicView':"google.ads.google_ads.v4.proto.resources.geographic_view_pb2",
'GetAccountBudgetProposalRequest':"google.ads.google_ads.v4.proto.services.account_budget_proposal_service_pb2",
'GetAccountBudgetRequest':"google.ads.google_ads.v4.proto.services.account_budget_service_pb2",
'GetAccountLinkRequest':"google.ads.google_ads.v4.proto.services.account_link_service_pb2",
'GetAdGroupAdAssetViewRequest':"google.ads.google_ads.v4.proto.services.ad_group_ad_asset_view_service_pb2",
'GetAdGroupAdLabelRequest':"google.ads.google_ads.v4.proto.services.ad_group_ad_label_service_pb2",
'GetAdGroupAdRequest':"google.ads.google_ads.v4.proto.services.ad_group_ad_service_pb2",
'GetAdGroupAudienceViewRequest':"google.ads.google_ads.v4.proto.services.ad_group_audience_view_service_pb2",
'GetAdGroupBidModifierRequest':"google.ads.google_ads.v4.proto.services.ad_group_bid_modifier_service_pb2",
'GetAdGroupCriterionLabelRequest':"google.ads.google_ads.v4.proto.services.ad_group_criterion_label_service_pb2",
'GetAdGroupCriterionRequest':"google.ads.google_ads.v4.proto.services.ad_group_criterion_service_pb2",
'GetAdGroupCriterionSimulationRequest':"google.ads.google_ads.v4.proto.services.ad_group_criterion_simulation_service_pb2",
'GetAdGroupExtensionSettingRequest':"google.ads.google_ads.v4.proto.services.ad_group_extension_setting_service_pb2",
'GetAdGroupFeedRequest':"google.ads.google_ads.v4.proto.services.ad_group_feed_service_pb2",
'GetAdGroupLabelRequest':"google.ads.google_ads.v4.proto.services.ad_group_label_service_pb2",
'GetAdGroupRequest':"google.ads.google_ads.v4.proto.services.ad_group_service_pb2",
'GetAdGroupSimulationRequest':"google.ads.google_ads.v4.proto.services.ad_group_simulation_service_pb2",
'GetAdParameterRequest':"google.ads.google_ads.v4.proto.services.ad_parameter_service_pb2",
'GetAdRequest':"google.ads.google_ads.v4.proto.services.ad_service_pb2",
'GetAdScheduleViewRequest':"google.ads.google_ads.v4.proto.services.ad_schedule_view_service_pb2",
'GetAgeRangeViewRequest':"google.ads.google_ads.v4.proto.services.age_range_view_service_pb2",
'GetAssetRequest':"google.ads.google_ads.v4.proto.services.asset_service_pb2",
'GetBatchJobRequest':"google.ads.google_ads.v4.proto.services.batch_job_service_pb2",
'GetBiddingStrategyRequest':"google.ads.google_ads.v4.proto.services.bidding_strategy_service_pb2",
'GetBillingSetupRequest':"google.ads.google_ads.v4.proto.services.billing_setup_service_pb2",
'GetCampaignAudienceViewRequest':"google.ads.google_ads.v4.proto.services.campaign_audience_view_service_pb2",
'GetCampaignBidModifierRequest':"google.ads.google_ads.v4.proto.services.campaign_bid_modifier_service_pb2",
'GetCampaignBudgetRequest':"google.ads.google_ads.v4.proto.services.campaign_budget_service_pb2",
'GetCampaignCriterionRequest':"google.ads.google_ads.v4.proto.services.campaign_criterion_service_pb2",
'GetCampaignCriterionSimulationRequest':"google.ads.google_ads.v4.proto.services.campaign_criterion_simulation_service_pb2",
'GetCampaignDraftRequest':"google.ads.google_ads.v4.proto.services.campaign_draft_service_pb2",
'GetCampaignExperimentRequest':"google.ads.google_ads.v4.proto.services.campaign_experiment_service_pb2",
'GetCampaignExtensionSettingRequest':"google.ads.google_ads.v4.proto.services.campaign_extension_setting_service_pb2",
'GetCampaignFeedRequest':"google.ads.google_ads.v4.proto.services.campaign_feed_service_pb2",
'GetCampaignLabelRequest':"google.ads.google_ads.v4.proto.services.campaign_label_service_pb2",
'GetCampaignRequest':"google.ads.google_ads.v4.proto.services.campaign_service_pb2",
'GetCampaignSharedSetRequest':"google.ads.google_ads.v4.proto.services.campaign_shared_set_service_pb2",
'GetCarrierConstantRequest':"google.ads.google_ads.v4.proto.services.carrier_constant_service_pb2",
'GetChangeStatusRequest':"google.ads.google_ads.v4.proto.services.change_status_service_pb2",
'GetClickViewRequest':"google.ads.google_ads.v4.proto.services.click_view_service_pb2",
'GetConversionActionRequest':"google.ads.google_ads.v4.proto.services.conversion_action_service_pb2",
'GetCurrencyConstantRequest':"google.ads.google_ads.v4.proto.services.currency_constant_service_pb2",
'GetCustomInterestRequest':"google.ads.google_ads.v4.proto.services.custom_interest_service_pb2",
'GetCustomerClientLinkRequest':"google.ads.google_ads.v4.proto.services.customer_client_link_service_pb2",
'GetCustomerClientRequest':"google.ads.google_ads.v4.proto.services.customer_client_service_pb2",
'GetCustomerExtensionSettingRequest':"google.ads.google_ads.v4.proto.services.customer_extension_setting_service_pb2",
'GetCustomerFeedRequest':"google.ads.google_ads.v4.proto.services.customer_feed_service_pb2",
'GetCustomerLabelRequest':"google.ads.google_ads.v4.proto.services.customer_label_service_pb2",
'GetCustomerManagerLinkRequest':"google.ads.google_ads.v4.proto.services.customer_manager_link_service_pb2",
'GetCustomerNegativeCriterionRequest':"google.ads.google_ads.v4.proto.services.customer_negative_criterion_service_pb2",
'GetCustomerRequest':"google.ads.google_ads.v4.proto.services.customer_service_pb2",
'GetDetailPlacementViewRequest':"google.ads.google_ads.v4.proto.services.detail_placement_view_service_pb2",
'GetDisplayKeywordViewRequest':"google.ads.google_ads.v4.proto.services.display_keyword_view_service_pb2",
'GetDistanceViewRequest':"google.ads.google_ads.v4.proto.services.distance_view_service_pb2",
'GetDomainCategoryRequest':"google.ads.google_ads.v4.proto.services.domain_category_service_pb2",
'GetDynamicSearchAdsSearchTermViewRequest':"google.ads.google_ads.v4.proto.services.dynamic_search_ads_search_term_view_service_pb2",
'GetExpandedLandingPageViewRequest':"google.ads.google_ads.v4.proto.services.expanded_landing_page_view_service_pb2",
'GetExtensionFeedItemRequest':"google.ads.google_ads.v4.proto.services.extension_feed_item_service_pb2",
'GetFeedItemRequest':"google.ads.google_ads.v4.proto.services.feed_item_service_pb2",
'GetFeedItemTargetRequest':"google.ads.google_ads.v4.proto.services.feed_item_target_service_pb2",
'GetFeedMappingRequest':"google.ads.google_ads.v4.proto.services.feed_mapping_service_pb2",
'GetFeedPlaceholderViewRequest':"google.ads.google_ads.v4.proto.services.feed_placeholder_view_service_pb2",
'GetFeedRequest':"google.ads.google_ads.v4.proto.services.feed_service_pb2",
'GetGenderViewRequest':"google.ads.google_ads.v4.proto.services.gender_view_service_pb2",
'GetGeoTargetConstantRequest':"google.ads.google_ads.v4.proto.services.geo_target_constant_service_pb2",
'GetGeographicViewRequest':"google.ads.google_ads.v4.proto.services.geographic_view_service_pb2",
'GetGoogleAdsFieldRequest':"google.ads.google_ads.v4.proto.services.google_ads_field_service_pb2",
'GetGroupPlacementViewRequest':"google.ads.google_ads.v4.proto.services.group_placement_view_service_pb2",
'GetHotelGroupViewRequest':"google.ads.google_ads.v4.proto.services.hotel_group_view_service_pb2",
'GetHotelPerformanceViewRequest':"google.ads.google_ads.v4.proto.services.hotel_performance_view_service_pb2",
'GetIncomeRangeViewRequest':"google.ads.google_ads.v4.proto.services.income_range_view_service_pb2",
'GetKeywordPlanAdGroupKeywordRequest':"google.ads.google_ads.v4.proto.services.keyword_plan_ad_group_keyword_service_pb2",
'GetKeywordPlanAdGroupRequest':"google.ads.google_ads.v4.proto.services.keyword_plan_ad_group_service_pb2",
'GetKeywordPlanCampaignKeywordRequest':"google.ads.google_ads.v4.proto.services.keyword_plan_campaign_keyword_service_pb2",
'GetKeywordPlanCampaignRequest':"google.ads.google_ads.v4.proto.services.keyword_plan_campaign_service_pb2",
'GetKeywordPlanRequest':"google.ads.google_ads.v4.proto.services.keyword_plan_service_pb2",
'GetKeywordViewRequest':"google.ads.google_ads.v4.proto.services.keyword_view_service_pb2",
'GetLabelRequest':"google.ads.google_ads.v4.proto.services.label_service_pb2",
'GetLandingPageViewRequest':"google.ads.google_ads.v4.proto.services.landing_page_view_service_pb2",
'GetLanguageConstantRequest':"google.ads.google_ads.v4.proto.services.language_constant_service_pb2",
'GetLocationViewRequest':"google.ads.google_ads.v4.proto.services.location_view_service_pb2",
'GetManagedPlacementViewRequest':"google.ads.google_ads.v4.proto.services.managed_placement_view_service_pb2",
'GetMediaFileRequest':"google.ads.google_ads.v4.proto.services.media_file_service_pb2",
'GetMerchantCenterLinkRequest':"google.ads.google_ads.v4.proto.services.merchant_center_link_service_pb2",
'GetMobileAppCategoryConstantRequest':"google.ads.google_ads.v4.proto.services.mobile_app_category_constant_service_pb2",
'GetMobileDeviceConstantRequest':"google.ads.google_ads.v4.proto.services.mobile_device_constant_service_pb2",
'GetOfflineUserDataJobRequest':"google.ads.google_ads.v4.proto.services.offline_user_data_job_service_pb2",
'GetOperatingSystemVersionConstantRequest':"google.ads.google_ads.v4.proto.services.operating_system_version_constant_service_pb2",
'GetPaidOrganicSearchTermViewRequest':"google.ads.google_ads.v4.proto.services.paid_organic_search_term_view_service_pb2",
'GetParentalStatusViewRequest':"google.ads.google_ads.v4.proto.services.parental_status_view_service_pb2",
'GetProductBiddingCategoryConstantRequest':"google.ads.google_ads.v4.proto.services.product_bidding_category_constant_service_pb2",
'GetProductGroupViewRequest':"google.ads.google_ads.v4.proto.services.product_group_view_service_pb2",
'GetRecommendationRequest':"google.ads.google_ads.v4.proto.services.recommendation_service_pb2",
'GetRemarketingActionRequest':"google.ads.google_ads.v4.proto.services.remarketing_action_service_pb2",
'GetSearchTermViewRequest':"google.ads.google_ads.v4.proto.services.search_term_view_service_pb2",
'GetSharedCriterionRequest':"google.ads.google_ads.v4.proto.services.shared_criterion_service_pb2",
'GetSharedSetRequest':"google.ads.google_ads.v4.proto.services.shared_set_service_pb2",
'GetShoppingPerformanceViewRequest':"google.ads.google_ads.v4.proto.services.shopping_performance_view_service_pb2",
'GetThirdPartyAppAnalyticsLinkRequest':"google.ads.google_ads.v4.proto.services.third_party_app_analytics_link_service_pb2",
'GetTopicConstantRequest':"google.ads.google_ads.v4.proto.services.topic_constant_service_pb2",
'GetTopicViewRequest':"google.ads.google_ads.v4.proto.services.topic_view_service_pb2",
'GetUserInterestRequest':"google.ads.google_ads.v4.proto.services.user_interest_service_pb2",
'GetUserListRequest':"google.ads.google_ads.v4.proto.services.user_list_service_pb2",
'GetUserLocationViewRequest':"google.ads.google_ads.v4.proto.services.user_location_view_service_pb2",
'GetVideoRequest':"google.ads.google_ads.v4.proto.services.video_service_pb2",
'GmailAdInfo':"google.ads.google_ads.v4.proto.common.ad_type_infos_pb2",
'GmailTeaser':"google.ads.google_ads.v4.proto.common.ad_type_infos_pb2",
'GoogleAdsError':"google.ads.google_ads.v4.proto.errors.errors_pb2",
'GoogleAdsFailure':"google.ads.google_ads.v4.proto.errors.errors_pb2",
'GoogleAdsField':"google.ads.google_ads.v4.proto.resources.google_ads_field_pb2",
'GoogleAdsFieldCategoryEnum':"google.ads.google_ads.v4.proto.enums.google_ads_field_category_pb2",
'GoogleAdsFieldDataTypeEnum':"google.ads.google_ads.v4.proto.enums.google_ads_field_data_type_pb2",
'GoogleAdsRow':"google.ads.google_ads.v4.proto.services.google_ads_service_pb2",
'GraduateCampaignExperimentRequest':"google.ads.google_ads.v4.proto.services.campaign_experiment_service_pb2",
'GraduateCampaignExperimentResponse':"google.ads.google_ads.v4.proto.services.campaign_experiment_service_pb2",
'GroupPlacementView':"google.ads.google_ads.v4.proto.resources.group_placement_view_pb2",
'HeaderErrorEnum':"google.ads.google_ads.v4.proto.errors.header_error_pb2",
'HotelAdInfo':"google.ads.google_ads.v4.proto.common.ad_type_infos_pb2",
'HotelAdvanceBookingWindowInfo':"google.ads.google_ads.v4.proto.common.criteria_pb2",
'HotelCalloutFeedItem':"google.ads.google_ads.v4.proto.common.extensions_pb2",
'HotelCheckInDayInfo':"google.ads.google_ads.v4.proto.common.criteria_pb2",
'HotelCityInfo':"google.ads.google_ads.v4.proto.common.criteria_pb2",
'HotelClassInfo':"google.ads.google_ads.v4.proto.common.criteria_pb2",
'HotelCountryRegionInfo':"google.ads.google_ads.v4.proto.common.criteria_pb2",
'HotelDateSelectionTypeEnum':"google.ads.google_ads.v4.proto.enums.hotel_date_selection_type_pb2",
'HotelDateSelectionTypeInfo':"google.ads.google_ads.v4.proto.common.criteria_pb2",
'HotelGroupView':"google.ads.google_ads.v4.proto.resources.hotel_group_view_pb2",
'HotelIdInfo':"google.ads.google_ads.v4.proto.common.criteria_pb2",
'HotelLengthOfStayInfo':"google.ads.google_ads.v4.proto.common.criteria_pb2",
'HotelPerformanceView':"google.ads.google_ads.v4.proto.resources.hotel_performance_view_pb2",
'HotelPlaceholderFieldEnum':"google.ads.google_ads.v4.proto.enums.hotel_placeholder_field_pb2",
'HotelPriceBucketEnum':"google.ads.google_ads.v4.proto.enums.hotel_price_bucket_pb2",
'HotelRateTypeEnum':"google.ads.google_ads.v4.proto.enums.hotel_rate_type_pb2",
'HotelStateInfo':"google.ads.google_ads.v4.proto.common.criteria_pb2",
'IdErrorEnum':"google.ads.google_ads.v4.proto.errors.id_error_pb2",
'ImageAdInfo':"google.ads.google_ads.v4.proto.common.ad_type_infos_pb2",
'ImageAsset':"google.ads.google_ads.v4.proto.common.asset_types_pb2",
'ImageDimension':"google.ads.google_ads.v4.proto.common.asset_types_pb2",
'ImageErrorEnum':"google.ads.google_ads.v4.proto.errors.image_error_pb2",
'IncomeRangeInfo':"google.ads.google_ads.v4.proto.common.criteria_pb2",
'IncomeRangeTypeEnum':"google.ads.google_ads.v4.proto.enums.income_range_type_pb2",
'IncomeRangeView':"google.ads.google_ads.v4.proto.resources.income_range_view_pb2",
'InteractionEventTypeEnum':"google.ads.google_ads.v4.proto.enums.interaction_event_type_pb2",
'InteractionTypeEnum':"google.ads.google_ads.v4.proto.enums.interaction_type_pb2",
'InteractionTypeInfo':"google.ads.google_ads.v4.proto.common.criteria_pb2",
'InternalErrorEnum':"google.ads.google_ads.v4.proto.errors.internal_error_pb2",
'Invoice':"google.ads.google_ads.v4.proto.resources.invoice_pb2",
'InvoiceErrorEnum':"google.ads.google_ads.v4.proto.errors.invoice_error_pb2",
'InvoiceTypeEnum':"google.ads.google_ads.v4.proto.enums.invoice_type_pb2",
'IpBlockInfo':"google.ads.google_ads.v4.proto.common.criteria_pb2",
'JobPlaceholderFieldEnum':"google.ads.google_ads.v4.proto.enums.job_placeholder_field_pb2",
'Keyword':"google.ads.google_ads.v4.proto.common.segments_pb2",
'KeywordAndUrlSeed':"google.ads.google_ads.v4.proto.services.keyword_plan_idea_service_pb2",
'KeywordInfo':"google.ads.google_ads.v4.proto.common.criteria_pb2",
'KeywordMatchTypeEnum':"google.ads.google_ads.v4.proto.enums.keyword_match_type_pb2",
'KeywordPlan':"google.ads.google_ads.v4.proto.resources.keyword_plan_pb2",
'KeywordPlanAdGroup':"google.ads.google_ads.v4.proto.resources.keyword_plan_ad_group_pb2",
'KeywordPlanAdGroupErrorEnum':"google.ads.google_ads.v4.proto.errors.keyword_plan_ad_group_error_pb2",
'KeywordPlanAdGroupForecast':"google.ads.google_ads.v4.proto.services.keyword_plan_service_pb2",
'KeywordPlanAdGroupKeyword':"google.ads.google_ads.v4.proto.resources.keyword_plan_ad_group_keyword_pb2",
'KeywordPlanAdGroupKeywordErrorEnum':"google.ads.google_ads.v4.proto.errors.keyword_plan_ad_group_keyword_error_pb2",
'KeywordPlanAdGroupKeywordOperation':"google.ads.google_ads.v4.proto.services.keyword_plan_ad_group_keyword_service_pb2",
'KeywordPlanAdGroupOperation':"google.ads.google_ads.v4.proto.services.keyword_plan_ad_group_service_pb2",
'KeywordPlanCampaign':"google.ads.google_ads.v4.proto.resources.keyword_plan_campaign_pb2",
'KeywordPlanCampaignErrorEnum':"google.ads.google_ads.v4.proto.errors.keyword_plan_campaign_error_pb2",
'KeywordPlanCampaignForecast':"google.ads.google_ads.v4.proto.services.keyword_plan_service_pb2",
'KeywordPlanCampaignForecastCurve':"google.ads.google_ads.v4.proto.services.keyword_plan_service_pb2",
'KeywordPlanCampaignKeyword':"google.ads.google_ads.v4.proto.resources.keyword_plan_campaign_keyword_pb2",
'KeywordPlanCampaignKeywordErrorEnum':"google.ads.google_ads.v4.proto.errors.keyword_plan_campaign_keyword_error_pb2",
'KeywordPlanCampaignKeywordOperation':"google.ads.google_ads.v4.proto.services.keyword_plan_campaign_keyword_service_pb2",
'KeywordPlanCampaignOperation':"google.ads.google_ads.v4.proto.services.keyword_plan_campaign_service_pb2",
'KeywordPlanCompetitionLevelEnum':"google.ads.google_ads.v4.proto.enums.keyword_plan_competition_level_pb2",
'KeywordPlanErrorEnum':"google.ads.google_ads.v4.proto.errors.keyword_plan_error_pb2",
'KeywordPlanForecastIntervalEnum':"google.ads.google_ads.v4.proto.enums.keyword_plan_forecast_interval_pb2",
'KeywordPlanForecastPeriod':"google.ads.google_ads.v4.proto.resources.keyword_plan_pb2",
'KeywordPlanGeoTarget':"google.ads.google_ads.v4.proto.resources.keyword_plan_campaign_pb2",
'KeywordPlanHistoricalMetrics':"google.ads.google_ads.v4.proto.common.keyword_plan_common_pb2",
'KeywordPlanIdeaErrorEnum':"google.ads.google_ads.v4.proto.errors.keyword_plan_idea_error_pb2",
'KeywordPlanKeywordForecast':"google.ads.google_ads.v4.proto.services.keyword_plan_service_pb2",
'KeywordPlanKeywordHistoricalMetrics':"google.ads.google_ads.v4.proto.services.keyword_plan_service_pb2",
'KeywordPlanMaxCpcBidForecast':"google.ads.google_ads.v4.proto.services.keyword_plan_service_pb2",
'KeywordPlanMaxCpcBidForecastCurve':"google.ads.google_ads.v4.proto.services.keyword_plan_service_pb2",
'KeywordPlanNetworkEnum':"google.ads.google_ads.v4.proto.enums.keyword_plan_network_pb2",
'KeywordPlanOperation':"google.ads.google_ads.v4.proto.services.keyword_plan_service_pb2",
'KeywordSeed':"google.ads.google_ads.v4.proto.services.keyword_plan_idea_service_pb2",
'KeywordView':"google.ads.google_ads.v4.proto.resources.keyword_view_pb2",
'Label':"google.ads.google_ads.v4.proto.resources.label_pb2",
'LabelErrorEnum':"google.ads.google_ads.v4.proto.errors.label_error_pb2",
'LabelOperation':"google.ads.google_ads.v4.proto.services.label_service_pb2",
'LabelStatusEnum':"google.ads.google_ads.v4.proto.enums.label_status_pb2",
'LandingPageView':"google.ads.google_ads.v4.proto.resources.landing_page_view_pb2",
'LanguageCodeErrorEnum':"google.ads.google_ads.v4.proto.errors.language_code_error_pb2",
'LanguageConstant':"google.ads.google_ads.v4.proto.resources.language_constant_pb2",
'LanguageInfo':"google.ads.google_ads.v4.proto.common.criteria_pb2",
'LegacyAppInstallAdAppStoreEnum':"google.ads.google_ads.v4.proto.enums.legacy_app_install_ad_app_store_pb2",
'LegacyAppInstallAdInfo':"google.ads.google_ads.v4.proto.common.ad_type_infos_pb2",
'LegacyResponsiveDisplayAdInfo':"google.ads.google_ads.v4.proto.common.ad_type_infos_pb2",
'LinkedAccountTypeEnum':"google.ads.google_ads.v4.proto.enums.linked_account_type_pb2",
'ListAccessibleCustomersRequest':"google.ads.google_ads.v4.proto.services.customer_service_pb2",
'ListAccessibleCustomersResponse':"google.ads.google_ads.v4.proto.services.customer_service_pb2",
'ListBatchJobResultsRequest':"google.ads.google_ads.v4.proto.services.batch_job_service_pb2",
'ListBatchJobResultsResponse':"google.ads.google_ads.v4.proto.services.batch_job_service_pb2",
'ListCampaignDraftAsyncErrorsRequest':"google.ads.google_ads.v4.proto.services.campaign_draft_service_pb2",
'ListCampaignDraftAsyncErrorsResponse':"google.ads.google_ads.v4.proto.services.campaign_draft_service_pb2",
'ListCampaignExperimentAsyncErrorsRequest':"google.ads.google_ads.v4.proto.services.campaign_experiment_service_pb2",
'ListCampaignExperimentAsyncErrorsResponse':"google.ads.google_ads.v4.proto.services.campaign_experiment_service_pb2",
'ListInvoicesRequest':"google.ads.google_ads.v4.proto.services.invoice_service_pb2",
'ListInvoicesResponse':"google.ads.google_ads.v4.proto.services.invoice_service_pb2",
'ListMerchantCenterLinksRequest':"google.ads.google_ads.v4.proto.services.merchant_center_link_service_pb2",
'ListMerchantCenterLinksResponse':"google.ads.google_ads.v4.proto.services.merchant_center_link_service_pb2",
'ListOperationErrorEnum':"google.ads.google_ads.v4.proto.errors.list_operation_error_pb2",
'ListPaymentsAccountsRequest':"google.ads.google_ads.v4.proto.services.payments_account_service_pb2",
'ListPaymentsAccountsResponse':"google.ads.google_ads.v4.proto.services.payments_account_service_pb2",
'ListPlannableLocationsRequest':"google.ads.google_ads.v4.proto.services.reach_plan_service_pb2",
'ListPlannableLocationsResponse':"google.ads.google_ads.v4.proto.services.reach_plan_service_pb2",
'ListPlannableProductsRequest':"google.ads.google_ads.v4.proto.services.reach_plan_service_pb2",
'ListPlannableProductsResponse':"google.ads.google_ads.v4.proto.services.reach_plan_service_pb2",
'ListingDimensionInfo':"google.ads.google_ads.v4.proto.common.criteria_pb2",
'ListingGroupInfo':"google.ads.google_ads.v4.proto.common.criteria_pb2",
'ListingGroupTypeEnum':"google.ads.google_ads.v4.proto.enums.listing_group_type_pb2",
'ListingScopeInfo':"google.ads.google_ads.v4.proto.common.criteria_pb2",
'LocalAdInfo':"google.ads.google_ads.v4.proto.common.ad_type_infos_pb2",
'LocalPlaceholderFieldEnum':"google.ads.google_ads.v4.proto.enums.local_placeholder_field_pb2",
'LocationExtensionTargetingCriterionFieldEnum':"google.ads.google_ads.v4.proto.enums.location_extension_targeting_criterion_field_pb2",
'LocationFeedItem':"google.ads.google_ads.v4.proto.common.extensions_pb2",
'LocationGroupInfo':"google.ads.google_ads.v4.proto.common.criteria_pb2",
'LocationGroupRadiusUnitsEnum':"google.ads.google_ads.v4.proto.enums.location_group_radius_units_pb2",
'LocationInfo':"google.ads.google_ads.v4.proto.common.criteria_pb2",
'LocationPlaceholderFieldEnum':"google.ads.google_ads.v4.proto.enums.location_placeholder_field_pb2",
'LocationSourceTypeEnum':"google.ads.google_ads.v4.proto.enums.location_source_type_pb2",
'LocationView':"google.ads.google_ads.v4.proto.resources.location_view_pb2",
'LogicalUserListInfo':"google.ads.google_ads.v4.proto.common.user_lists_pb2",
'LogicalUserListOperandInfo':"google.ads.google_ads.v4.proto.common.user_lists_pb2",
'ManagedPlacementView':"google.ads.google_ads.v4.proto.resources.managed_placement_view_pb2",
'ManagerLinkErrorEnum':"google.ads.google_ads.v4.proto.errors.manager_link_error_pb2",
'ManagerLinkStatusEnum':"google.ads.google_ads.v4.proto.enums.manager_link_status_pb2",
'ManualCpc':"google.ads.google_ads.v4.proto.common.bidding_pb2",
'ManualCpm':"google.ads.google_ads.v4.proto.common.bidding_pb2",
'ManualCpv':"google.ads.google_ads.v4.proto.common.bidding_pb2",
'MatchingFunction':"google.ads.google_ads.v4.proto.common.matching_function_pb2",
'MatchingFunctionContextTypeEnum':"google.ads.google_ads.v4.proto.enums.matching_function_context_type_pb2",
'MatchingFunctionOperatorEnum':"google.ads.google_ads.v4.proto.enums.matching_function_operator_pb2",
'MaximizeConversionValue':"google.ads.google_ads.v4.proto.common.bidding_pb2",
'MaximizeConversions':"google.ads.google_ads.v4.proto.common.bidding_pb2",
'MediaAudio':"google.ads.google_ads.v4.proto.resources.media_file_pb2",
'MediaBundle':"google.ads.google_ads.v4.proto.resources.media_file_pb2",
'MediaBundleAsset':"google.ads.google_ads.v4.proto.common.asset_types_pb2",
'MediaBundleErrorEnum':"google.ads.google_ads.v4.proto.errors.media_bundle_error_pb2",
'MediaFile':"google.ads.google_ads.v4.proto.resources.media_file_pb2",
'MediaFileErrorEnum':"google.ads.google_ads.v4.proto.errors.media_file_error_pb2",
'MediaFileOperation':"google.ads.google_ads.v4.proto.services.media_file_service_pb2",
'MediaImage':"google.ads.google_ads.v4.proto.resources.media_file_pb2",
'MediaTypeEnum':"google.ads.google_ads.v4.proto.enums.media_type_pb2",
'MediaUploadErrorEnum':"google.ads.google_ads.v4.proto.errors.media_upload_error_pb2",
'MediaVideo':"google.ads.google_ads.v4.proto.resources.media_file_pb2",
'MerchantCenterLink':"google.ads.google_ads.v4.proto.resources.merchant_center_link_pb2",
'MerchantCenterLinkOperation':"google.ads.google_ads.v4.proto.services.merchant_center_link_service_pb2",
'MerchantCenterLinkStatusEnum':"google.ads.google_ads.v4.proto.enums.merchant_center_link_status_pb2",
'MessagePlaceholderFieldEnum':"google.ads.google_ads.v4.proto.enums.message_placeholder_field_pb2",
'Metrics':"google.ads.google_ads.v4.proto.common.metrics_pb2",
'MimeTypeEnum':"google.ads.google_ads.v4.proto.enums.mime_type_pb2",
'MinuteOfHourEnum':"google.ads.google_ads.v4.proto.enums.minute_of_hour_pb2",
'MobileAppCategoryConstant':"google.ads.google_ads.v4.proto.resources.mobile_app_category_constant_pb2",
'MobileAppCategoryInfo':"google.ads.google_ads.v4.proto.common.criteria_pb2",
'MobileAppVendorEnum':"google.ads.google_ads.v4.proto.enums.mobile_app_vendor_pb2",
'MobileApplicationInfo':"google.ads.google_ads.v4.proto.common.criteria_pb2",
'MobileDeviceConstant':"google.ads.google_ads.v4.proto.resources.mobile_device_constant_pb2",
'MobileDeviceInfo':"google.ads.google_ads.v4.proto.common.criteria_pb2",
'MobileDeviceTypeEnum':"google.ads.google_ads.v4.proto.enums.mobile_device_type_pb2",
'Money':"google.ads.google_ads.v4.proto.common.feed_common_pb2",
'MonthOfYearEnum':"google.ads.google_ads.v4.proto.enums.month_of_year_pb2",
'MonthlySearchVolume':"google.ads.google_ads.v4.proto.common.keyword_plan_common_pb2",
'MoveManagerLinkRequest':"google.ads.google_ads.v4.proto.services.customer_manager_link_service_pb2",
'MoveManagerLinkResponse':"google.ads.google_ads.v4.proto.services.customer_manager_link_service_pb2",
'MultiplierErrorEnum':"google.ads.google_ads.v4.proto.errors.multiplier_error_pb2",
'MutateAccountBudgetProposalRequest':"google.ads.google_ads.v4.proto.services.account_budget_proposal_service_pb2",
'MutateAccountBudgetProposalResponse':"google.ads.google_ads.v4.proto.services.account_budget_proposal_service_pb2",
'MutateAccountBudgetProposalResult':"google.ads.google_ads.v4.proto.services.account_budget_proposal_service_pb2",
'MutateAccountLinkRequest':"google.ads.google_ads.v4.proto.services.account_link_service_pb2",
'MutateAccountLinkResponse':"google.ads.google_ads.v4.proto.services.account_link_service_pb2",
'MutateAccountLinkResult':"google.ads.google_ads.v4.proto.services.account_link_service_pb2",
'MutateAdGroupAdLabelResult':"google.ads.google_ads.v4.proto.services.ad_group_ad_label_service_pb2",
'MutateAdGroupAdLabelsRequest':"google.ads.google_ads.v4.proto.services.ad_group_ad_label_service_pb2",
'MutateAdGroupAdLabelsResponse':"google.ads.google_ads.v4.proto.services.ad_group_ad_label_service_pb2",
'MutateAdGroupAdResult':"google.ads.google_ads.v4.proto.services.ad_group_ad_service_pb2",
'MutateAdGroupAdsRequest':"google.ads.google_ads.v4.proto.services.ad_group_ad_service_pb2",
'MutateAdGroupAdsResponse':"google.ads.google_ads.v4.proto.services.ad_group_ad_service_pb2",
'MutateAdGroupBidModifierResult':"google.ads.google_ads.v4.proto.services.ad_group_bid_modifier_service_pb2",
'MutateAdGroupBidModifiersRequest':"google.ads.google_ads.v4.proto.services.ad_group_bid_modifier_service_pb2",
'MutateAdGroupBidModifiersResponse':"google.ads.google_ads.v4.proto.services.ad_group_bid_modifier_service_pb2",
'MutateAdGroupCriteriaRequest':"google.ads.google_ads.v4.proto.services.ad_group_criterion_service_pb2",
'MutateAdGroupCriteriaResponse':"google.ads.google_ads.v4.proto.services.ad_group_criterion_service_pb2",
'MutateAdGroupCriterionLabelResult':"google.ads.google_ads.v4.proto.services.ad_group_criterion_label_service_pb2",
'MutateAdGroupCriterionLabelsRequest':"google.ads.google_ads.v4.proto.services.ad_group_criterion_label_service_pb2",
'MutateAdGroupCriterionLabelsResponse':"google.ads.google_ads.v4.proto.services.ad_group_criterion_label_service_pb2",
'MutateAdGroupCriterionResult':"google.ads.google_ads.v4.proto.services.ad_group_criterion_service_pb2",
'MutateAdGroupExtensionSettingResult':"google.ads.google_ads.v4.proto.services.ad_group_extension_setting_service_pb2",
'MutateAdGroupExtensionSettingsRequest':"google.ads.google_ads.v4.proto.services.ad_group_extension_setting_service_pb2",
'MutateAdGroupExtensionSettingsResponse':"google.ads.google_ads.v4.proto.services.ad_group_extension_setting_service_pb2",
'MutateAdGroupFeedResult':"google.ads.google_ads.v4.proto.services.ad_group_feed_service_pb2",
'MutateAdGroupFeedsRequest':"google.ads.google_ads.v4.proto.services.ad_group_feed_service_pb2",
'MutateAdGroupFeedsResponse':"google.ads.google_ads.v4.proto.services.ad_group_feed_service_pb2",
'MutateAdGroupLabelResult':"google.ads.google_ads.v4.proto.services.ad_group_label_service_pb2",
'MutateAdGroupLabelsRequest':"google.ads.google_ads.v4.proto.services.ad_group_label_service_pb2",
'MutateAdGroupLabelsResponse':"google.ads.google_ads.v4.proto.services.ad_group_label_service_pb2",
'MutateAdGroupResult':"google.ads.google_ads.v4.proto.services.ad_group_service_pb2",
'MutateAdGroupsRequest':"google.ads.google_ads.v4.proto.services.ad_group_service_pb2",
'MutateAdGroupsResponse':"google.ads.google_ads.v4.proto.services.ad_group_service_pb2",
'MutateAdParameterResult':"google.ads.google_ads.v4.proto.services.ad_parameter_service_pb2",
'MutateAdParametersRequest':"google.ads.google_ads.v4.proto.services.ad_parameter_service_pb2",
'MutateAdParametersResponse':"google.ads.google_ads.v4.proto.services.ad_parameter_service_pb2",
'MutateAdResult':"google.ads.google_ads.v4.proto.services.ad_service_pb2",
'MutateAdsRequest':"google.ads.google_ads.v4.proto.services.ad_service_pb2",
'MutateAdsResponse':"google.ads.google_ads.v4.proto.services.ad_service_pb2",
'MutateAssetResult':"google.ads.google_ads.v4.proto.services.asset_service_pb2",
'MutateAssetsRequest':"google.ads.google_ads.v4.proto.services.asset_service_pb2",
'MutateAssetsResponse':"google.ads.google_ads.v4.proto.services.asset_service_pb2",
'MutateBatchJobRequest':"google.ads.google_ads.v4.proto.services.batch_job_service_pb2",
'MutateBatchJobResponse':"google.ads.google_ads.v4.proto.services.batch_job_service_pb2",
'MutateBatchJobResult':"google.ads.google_ads.v4.proto.services.batch_job_service_pb2",
'MutateBiddingStrategiesRequest':"google.ads.google_ads.v4.proto.services.bidding_strategy_service_pb2",
'MutateBiddingStrategiesResponse':"google.ads.google_ads.v4.proto.services.bidding_strategy_service_pb2",
'MutateBiddingStrategyResult':"google.ads.google_ads.v4.proto.services.bidding_strategy_service_pb2",
'MutateBillingSetupRequest':"google.ads.google_ads.v4.proto.services.billing_setup_service_pb2",
'MutateBillingSetupResponse':"google.ads.google_ads.v4.proto.services.billing_setup_service_pb2",
'MutateBillingSetupResult':"google.ads.google_ads.v4.proto.services.billing_setup_service_pb2",
'MutateCampaignBidModifierResult':"google.ads.google_ads.v4.proto.services.campaign_bid_modifier_service_pb2",
'MutateCampaignBidModifiersRequest':"google.ads.google_ads.v4.proto.services.campaign_bid_modifier_service_pb2",
'MutateCampaignBidModifiersResponse':"google.ads.google_ads.v4.proto.services.campaign_bid_modifier_service_pb2",
'MutateCampaignBudgetResult':"google.ads.google_ads.v4.proto.services.campaign_budget_service_pb2",
'MutateCampaignBudgetsRequest':"google.ads.google_ads.v4.proto.services.campaign_budget_service_pb2",
'MutateCampaignBudgetsResponse':"google.ads.google_ads.v4.proto.services.campaign_budget_service_pb2",
'MutateCampaignCriteriaRequest':"google.ads.google_ads.v4.proto.services.campaign_criterion_service_pb2",
'MutateCampaignCriteriaResponse':"google.ads.google_ads.v4.proto.services.campaign_criterion_service_pb2",
'MutateCampaignCriterionResult':"google.ads.google_ads.v4.proto.services.campaign_criterion_service_pb2",
'MutateCampaignDraftResult':"google.ads.google_ads.v4.proto.services.campaign_draft_service_pb2",
'MutateCampaignDraftsRequest':"google.ads.google_ads.v4.proto.services.campaign_draft_service_pb2",
'MutateCampaignDraftsResponse':"google.ads.google_ads.v4.proto.services.campaign_draft_service_pb2",
'MutateCampaignExperimentResult':"google.ads.google_ads.v4.proto.services.campaign_experiment_service_pb2",
'MutateCampaignExperimentsRequest':"google.ads.google_ads.v4.proto.services.campaign_experiment_service_pb2",
'MutateCampaignExperimentsResponse':"google.ads.google_ads.v4.proto.services.campaign_experiment_service_pb2",
'MutateCampaignExtensionSettingResult':"google.ads.google_ads.v4.proto.services.campaign_extension_setting_service_pb2",
'MutateCampaignExtensionSettingsRequest':"google.ads.google_ads.v4.proto.services.campaign_extension_setting_service_pb2",
'MutateCampaignExtensionSettingsResponse':"google.ads.google_ads.v4.proto.services.campaign_extension_setting_service_pb2",
'MutateCampaignFeedResult':"google.ads.google_ads.v4.proto.services.campaign_feed_service_pb2",
'MutateCampaignFeedsRequest':"google.ads.google_ads.v4.proto.services.campaign_feed_service_pb2",
'MutateCampaignFeedsResponse':"google.ads.google_ads.v4.proto.services.campaign_feed_service_pb2",
'MutateCampaignLabelResult':"google.ads.google_ads.v4.proto.services.campaign_label_service_pb2",
'MutateCampaignLabelsRequest':"google.ads.google_ads.v4.proto.services.campaign_label_service_pb2",
'MutateCampaignLabelsResponse':"google.ads.google_ads.v4.proto.services.campaign_label_service_pb2",
'MutateCampaignResult':"google.ads.google_ads.v4.proto.services.campaign_service_pb2",
'MutateCampaignSharedSetResult':"google.ads.google_ads.v4.proto.services.campaign_shared_set_service_pb2",
'MutateCampaignSharedSetsRequest':"google.ads.google_ads.v4.proto.services.campaign_shared_set_service_pb2",
'MutateCampaignSharedSetsResponse':"google.ads.google_ads.v4.proto.services.campaign_shared_set_service_pb2",
'MutateCampaignsRequest':"google.ads.google_ads.v4.proto.services.campaign_service_pb2",
'MutateCampaignsResponse':"google.ads.google_ads.v4.proto.services.campaign_service_pb2",
'MutateConversionActionResult':"google.ads.google_ads.v4.proto.services.conversion_action_service_pb2",
'MutateConversionActionsRequest':"google.ads.google_ads.v4.proto.services.conversion_action_service_pb2",
'MutateConversionActionsResponse':"google.ads.google_ads.v4.proto.services.conversion_action_service_pb2",
'MutateCustomInterestResult':"google.ads.google_ads.v4.proto.services.custom_interest_service_pb2",
'MutateCustomInterestsRequest':"google.ads.google_ads.v4.proto.services.custom_interest_service_pb2",
'MutateCustomInterestsResponse':"google.ads.google_ads.v4.proto.services.custom_interest_service_pb2",
'MutateCustomerClientLinkRequest':"google.ads.google_ads.v4.proto.services.customer_client_link_service_pb2",
'MutateCustomerClientLinkResponse':"google.ads.google_ads.v4.proto.services.customer_client_link_service_pb2",
'MutateCustomerClientLinkResult':"google.ads.google_ads.v4.proto.services.customer_client_link_service_pb2",
'MutateCustomerExtensionSettingResult':"google.ads.google_ads.v4.proto.services.customer_extension_setting_service_pb2",
'MutateCustomerExtensionSettingsRequest':"google.ads.google_ads.v4.proto.services.customer_extension_setting_service_pb2",
'MutateCustomerExtensionSettingsResponse':"google.ads.google_ads.v4.proto.services.customer_extension_setting_service_pb2",
'MutateCustomerFeedResult':"google.ads.google_ads.v4.proto.services.customer_feed_service_pb2",
'MutateCustomerFeedsRequest':"google.ads.google_ads.v4.proto.services.customer_feed_service_pb2",
'MutateCustomerFeedsResponse':"google.ads.google_ads.v4.proto.services.customer_feed_service_pb2",
'MutateCustomerLabelResult':"google.ads.google_ads.v4.proto.services.customer_label_service_pb2",
'MutateCustomerLabelsRequest':"google.ads.google_ads.v4.proto.services.customer_label_service_pb2",
'MutateCustomerLabelsResponse':"google.ads.google_ads.v4.proto.services.customer_label_service_pb2",
'MutateCustomerManagerLinkRequest':"google.ads.google_ads.v4.proto.services.customer_manager_link_service_pb2",
'MutateCustomerManagerLinkResponse':"google.ads.google_ads.v4.proto.services.customer_manager_link_service_pb2",
'MutateCustomerManagerLinkResult':"google.ads.google_ads.v4.proto.services.customer_manager_link_service_pb2",
'MutateCustomerNegativeCriteriaRequest':"google.ads.google_ads.v4.proto.services.customer_negative_criterion_service_pb2",
'MutateCustomerNegativeCriteriaResponse':"google.ads.google_ads.v4.proto.services.customer_negative_criterion_service_pb2",
'MutateCustomerNegativeCriteriaResult':"google.ads.google_ads.v4.proto.services.customer_negative_criterion_service_pb2",
'MutateCustomerRequest':"google.ads.google_ads.v4.proto.services.customer_service_pb2",
'MutateCustomerResponse':"google.ads.google_ads.v4.proto.services.customer_service_pb2",
'MutateCustomerResult':"google.ads.google_ads.v4.proto.services.customer_service_pb2",
'MutateErrorEnum':"google.ads.google_ads.v4.proto.errors.mutate_error_pb2",
'MutateExtensionFeedItemResult':"google.ads.google_ads.v4.proto.services.extension_feed_item_service_pb2",
'MutateExtensionFeedItemsRequest':"google.ads.google_ads.v4.proto.services.extension_feed_item_service_pb2",
'MutateExtensionFeedItemsResponse':"google.ads.google_ads.v4.proto.services.extension_feed_item_service_pb2",
'MutateFeedItemResult':"google.ads.google_ads.v4.proto.services.feed_item_service_pb2",
'MutateFeedItemTargetResult':"google.ads.google_ads.v4.proto.services.feed_item_target_service_pb2",
'MutateFeedItemTargetsRequest':"google.ads.google_ads.v4.proto.services.feed_item_target_service_pb2",
'MutateFeedItemTargetsResponse':"google.ads.google_ads.v4.proto.services.feed_item_target_service_pb2",
'MutateFeedItemsRequest':"google.ads.google_ads.v4.proto.services.feed_item_service_pb2",
'MutateFeedItemsResponse':"google.ads.google_ads.v4.proto.services.feed_item_service_pb2",
'MutateFeedMappingResult':"google.ads.google_ads.v4.proto.services.feed_mapping_service_pb2",
'MutateFeedMappingsRequest':"google.ads.google_ads.v4.proto.services.feed_mapping_service_pb2",
'MutateFeedMappingsResponse':"google.ads.google_ads.v4.proto.services.feed_mapping_service_pb2",
'MutateFeedResult':"google.ads.google_ads.v4.proto.services.feed_service_pb2",
'MutateFeedsRequest':"google.ads.google_ads.v4.proto.services.feed_service_pb2",
'MutateFeedsResponse':"google.ads.google_ads.v4.proto.services.feed_service_pb2",
'MutateGoogleAdsRequest':"google.ads.google_ads.v4.proto.services.google_ads_service_pb2",
'MutateGoogleAdsResponse':"google.ads.google_ads.v4.proto.services.google_ads_service_pb2",
'MutateKeywordPlanAdGroupKeywordResult':"google.ads.google_ads.v4.proto.services.keyword_plan_ad_group_keyword_service_pb2",
'MutateKeywordPlanAdGroupKeywordsRequest':"google.ads.google_ads.v4.proto.services.keyword_plan_ad_group_keyword_service_pb2",
'MutateKeywordPlanAdGroupKeywordsResponse':"google.ads.google_ads.v4.proto.services.keyword_plan_ad_group_keyword_service_pb2",
'MutateKeywordPlanAdGroupResult':"google.ads.google_ads.v4.proto.services.keyword_plan_ad_group_service_pb2",
'MutateKeywordPlanAdGroupsRequest':"google.ads.google_ads.v4.proto.services.keyword_plan_ad_group_service_pb2",
'MutateKeywordPlanAdGroupsResponse':"google.ads.google_ads.v4.proto.services.keyword_plan_ad_group_service_pb2",
'MutateKeywordPlanCampaignKeywordResult':"google.ads.google_ads.v4.proto.services.keyword_plan_campaign_keyword_service_pb2",
'MutateKeywordPlanCampaignKeywordsRequest':"google.ads.google_ads.v4.proto.services.keyword_plan_campaign_keyword_service_pb2",
'MutateKeywordPlanCampaignKeywordsResponse':"google.ads.google_ads.v4.proto.services.keyword_plan_campaign_keyword_service_pb2",
'MutateKeywordPlanCampaignResult':"google.ads.google_ads.v4.proto.services.keyword_plan_campaign_service_pb2",
'MutateKeywordPlanCampaignsRequest':"google.ads.google_ads.v4.proto.services.keyword_plan_campaign_service_pb2",
'MutateKeywordPlanCampaignsResponse':"google.ads.google_ads.v4.proto.services.keyword_plan_campaign_service_pb2",
'MutateKeywordPlansRequest':"google.ads.google_ads.v4.proto.services.keyword_plan_service_pb2",
'MutateKeywordPlansResponse':"google.ads.google_ads.v4.proto.services.keyword_plan_service_pb2",
'MutateKeywordPlansResult':"google.ads.google_ads.v4.proto.services.keyword_plan_service_pb2",
'MutateLabelResult':"google.ads.google_ads.v4.proto.services.label_service_pb2",
'MutateLabelsRequest':"google.ads.google_ads.v4.proto.services.label_service_pb2",
'MutateLabelsResponse':"google.ads.google_ads.v4.proto.services.label_service_pb2",
'MutateMediaFileResult':"google.ads.google_ads.v4.proto.services.media_file_service_pb2",
'MutateMediaFilesRequest':"google.ads.google_ads.v4.proto.services.media_file_service_pb2",
'MutateMediaFilesResponse':"google.ads.google_ads.v4.proto.services.media_file_service_pb2",
'MutateMerchantCenterLinkRequest':"google.ads.google_ads.v4.proto.services.merchant_center_link_service_pb2",
'MutateMerchantCenterLinkResponse':"google.ads.google_ads.v4.proto.services.merchant_center_link_service_pb2",
'MutateMerchantCenterLinkResult':"google.ads.google_ads.v4.proto.services.merchant_center_link_service_pb2",
'MutateOperation':"google.ads.google_ads.v4.proto.services.google_ads_service_pb2",
'MutateOperationResponse':"google.ads.google_ads.v4.proto.services.google_ads_service_pb2",
'MutateRemarketingActionResult':"google.ads.google_ads.v4.proto.services.remarketing_action_service_pb2",
'MutateRemarketingActionsRequest':"google.ads.google_ads.v4.proto.services.remarketing_action_service_pb2",
'MutateRemarketingActionsResponse':"google.ads.google_ads.v4.proto.services.remarketing_action_service_pb2",
'MutateSharedCriteriaRequest':"google.ads.google_ads.v4.proto.services.shared_criterion_service_pb2",
'MutateSharedCriteriaResponse':"google.ads.google_ads.v4.proto.services.shared_criterion_service_pb2",
'MutateSharedCriterionResult':"google.ads.google_ads.v4.proto.services.shared_criterion_service_pb2",
'MutateSharedSetResult':"google.ads.google_ads.v4.proto.services.shared_set_service_pb2",
'MutateSharedSetsRequest':"google.ads.google_ads.v4.proto.services.shared_set_service_pb2",
'MutateSharedSetsResponse':"google.ads.google_ads.v4.proto.services.shared_set_service_pb2",
'MutateUserListResult':"google.ads.google_ads.v4.proto.services.user_list_service_pb2",
'MutateUserListsRequest':"google.ads.google_ads.v4.proto.services.user_list_service_pb2",
'MutateUserListsResponse':"google.ads.google_ads.v4.proto.services.user_list_service_pb2",
'NegativeGeoTargetTypeEnum':"google.ads.google_ads.v4.proto.enums.negative_geo_target_type_pb2",
'NewResourceCreationErrorEnum':"google.ads.google_ads.v4.proto.errors.new_resource_creation_error_pb2",
'NotEmptyErrorEnum':"google.ads.google_ads.v4.proto.errors.not_empty_error_pb2",
'NotWhitelistedErrorEnum':"google.ads.google_ads.v4.proto.errors.not_whitelisted_error_pb2",
'NullErrorEnum':"google.ads.google_ads.v4.proto.errors.null_error_pb2",
'OfflineUserAddressInfo':"google.ads.google_ads.v4.proto.common.offline_user_data_pb2",
'OfflineUserDataJob':"google.ads.google_ads.v4.proto.resources.offline_user_data_job_pb2",
'OfflineUserDataJobErrorEnum':"google.ads.google_ads.v4.proto.errors.offline_user_data_job_error_pb2",
'OfflineUserDataJobFailureReasonEnum':"google.ads.google_ads.v4.proto.enums.offline_user_data_job_failure_reason_pb2",
'OfflineUserDataJobOperation':"google.ads.google_ads.v4.proto.services.offline_user_data_job_service_pb2",
'OfflineUserDataJobStatusEnum':"google.ads.google_ads.v4.proto.enums.offline_user_data_job_status_pb2",
'OfflineUserDataJobTypeEnum':"google.ads.google_ads.v4.proto.enums.offline_user_data_job_type_pb2",
'OnTargetAudienceMetrics':"google.ads.google_ads.v4.proto.services.reach_plan_service_pb2",
'Operand':"google.ads.google_ads.v4.proto.common.matching_function_pb2",
'OperatingSystemVersionConstant':"google.ads.google_ads.v4.proto.resources.operating_system_version_constant_pb2",
'OperatingSystemVersionInfo':"google.ads.google_ads.v4.proto.common.criteria_pb2",
'OperatingSystemVersionOperatorTypeEnum':"google.ads.google_ads.v4.proto.enums.operating_system_version_operator_type_pb2",
'OperationAccessDeniedErrorEnum':"google.ads.google_ads.v4.proto.errors.operation_access_denied_error_pb2",
'OperatorErrorEnum':"google.ads.google_ads.v4.proto.errors.operator_error_pb2",
'OptimizationGoalTypeEnum':"google.ads.google_ads.v4.proto.enums.optimization_goal_type_pb2",
'PaidOrganicSearchTermView':"google.ads.google_ads.v4.proto.resources.paid_organic_search_term_view_pb2",
'ParentalStatusInfo':"google.ads.google_ads.v4.proto.common.criteria_pb2",
'ParentalStatusTypeEnum':"google.ads.google_ads.v4.proto.enums.parental_status_type_pb2",
'ParentalStatusView':"google.ads.google_ads.v4.proto.resources.parental_status_view_pb2",
'PartialFailureErrorEnum':"google.ads.google_ads.v4.proto.errors.partial_failure_error_pb2",
'PaymentModeEnum':"google.ads.google_ads.v4.proto.enums.payment_mode_pb2",
'PaymentsAccount':"google.ads.google_ads.v4.proto.resources.payments_account_pb2",
'PaymentsAccountErrorEnum':"google.ads.google_ads.v4.proto.errors.payments_account_error_pb2",
'PercentCpc':"google.ads.google_ads.v4.proto.common.bidding_pb2",
'PlaceholderTypeEnum':"google.ads.google_ads.v4.proto.enums.placeholder_type_pb2",
'PlacementInfo':"google.ads.google_ads.v4.proto.common.criteria_pb2",
'PlacementTypeEnum':"google.ads.google_ads.v4.proto.enums.placement_type_pb2",
'PlannableLocation':"google.ads.google_ads.v4.proto.services.reach_plan_service_pb2",
'PlannableTargeting':"google.ads.google_ads.v4.proto.services.reach_plan_service_pb2",
'PlannedProduct':"google.ads.google_ads.v4.proto.services.reach_plan_service_pb2",
'PolicyApprovalStatusEnum':"google.ads.google_ads.v4.proto.enums.policy_approval_status_pb2",
'PolicyFindingDetails':"google.ads.google_ads.v4.proto.errors.errors_pb2",
'PolicyFindingErrorEnum':"google.ads.google_ads.v4.proto.errors.policy_finding_error_pb2",
'PolicyReviewStatusEnum':"google.ads.google_ads.v4.proto.enums.policy_review_status_pb2",
'PolicyTopicConstraint':"google.ads.google_ads.v4.proto.common.policy_pb2",
'PolicyTopicEntry':"google.ads.google_ads.v4.proto.common.policy_pb2",
'PolicyTopicEntryTypeEnum':"google.ads.google_ads.v4.proto.enums.policy_topic_entry_type_pb2",
'PolicyTopicEvidence':"google.ads.google_ads.v4.proto.common.policy_pb2",
'PolicyTopicEvidenceDestinationMismatchUrlTypeEnum':"google.ads.google_ads.v4.proto.enums.policy_topic_evidence_destination_mismatch_url_type_pb2",
'PolicyTopicEvidenceDestinationNotWorkingDeviceEnum':"google.ads.google_ads.v4.proto.enums.policy_topic_evidence_destination_not_working_device_pb2",
'PolicyTopicEvidenceDestinationNotWorkingDnsErrorTypeEnum':"google.ads.google_ads.v4.proto.enums.policy_topic_evidence_destination_not_working_dns_error_type_pb2",
'PolicyValidationParameter':"google.ads.google_ads.v4.proto.common.policy_pb2",
'PolicyValidationParameterErrorEnum':"google.ads.google_ads.v4.proto.errors.policy_validation_parameter_error_pb2",
'PolicyViolationDetails':"google.ads.google_ads.v4.proto.errors.errors_pb2",
'PolicyViolationErrorEnum':"google.ads.google_ads.v4.proto.errors.policy_violation_error_pb2",
'PolicyViolationKey':"google.ads.google_ads.v4.proto.common.policy_pb2",
'PositiveGeoTargetTypeEnum':"google.ads.google_ads.v4.proto.enums.positive_geo_target_type_pb2",
'Preferences':"google.ads.google_ads.v4.proto.services.reach_plan_service_pb2",
'PreferredContentInfo':"google.ads.google_ads.v4.proto.common.criteria_pb2",
'PreferredContentTypeEnum':"google.ads.google_ads.v4.proto.enums.preferred_content_type_pb2",
'PriceExtensionPriceQualifierEnum':"google.ads.google_ads.v4.proto.enums.price_extension_price_qualifier_pb2",
'PriceExtensionPriceUnitEnum':"google.ads.google_ads.v4.proto.enums.price_extension_price_unit_pb2",
'PriceExtensionTypeEnum':"google.ads.google_ads.v4.proto.enums.price_extension_type_pb2",
'PriceFeedItem':"google.ads.google_ads.v4.proto.common.extensions_pb2",
'PriceOffer':"google.ads.google_ads.v4.proto.common.extensions_pb2",
'PricePlaceholderFieldEnum':"google.ads.google_ads.v4.proto.enums.price_placeholder_field_pb2",
'ProductAllocation':"google.ads.google_ads.v4.proto.services.reach_plan_service_pb2",
'ProductBiddingCategoryConstant':"google.ads.google_ads.v4.proto.resources.product_bidding_category_constant_pb2",
'ProductBiddingCategoryInfo':"google.ads.google_ads.v4.proto.common.criteria_pb2",
'ProductBiddingCategoryLevelEnum':"google.ads.google_ads.v4.proto.enums.product_bidding_category_level_pb2",
'ProductBiddingCategoryStatusEnum':"google.ads.google_ads.v4.proto.enums.product_bidding_category_status_pb2",
'ProductBrandInfo':"google.ads.google_ads.v4.proto.common.criteria_pb2",
'ProductChannelEnum':"google.ads.google_ads.v4.proto.enums.product_channel_pb2",
'ProductChannelExclusivityEnum':"google.ads.google_ads.v4.proto.enums.product_channel_exclusivity_pb2",
'ProductChannelExclusivityInfo':"google.ads.google_ads.v4.proto.common.criteria_pb2",
'ProductChannelInfo':"google.ads.google_ads.v4.proto.common.criteria_pb2",
'ProductConditionEnum':"google.ads.google_ads.v4.proto.enums.product_condition_pb2",
'ProductConditionInfo':"google.ads.google_ads.v4.proto.common.criteria_pb2",
'ProductCustomAttributeIndexEnum':"google.ads.google_ads.v4.proto.enums.product_custom_attribute_index_pb2",
'ProductCustomAttributeInfo':"google.ads.google_ads.v4.proto.common.criteria_pb2",
'ProductGroupView':"google.ads.google_ads.v4.proto.resources.product_group_view_pb2",
'ProductImage':"google.ads.google_ads.v4.proto.common.ad_type_infos_pb2",
'ProductItemIdInfo':"google.ads.google_ads.v4.proto.common.criteria_pb2",
'ProductMetadata':"google.ads.google_ads.v4.proto.services.reach_plan_service_pb2",
'ProductTypeInfo':"google.ads.google_ads.v4.proto.common.criteria_pb2",
'ProductTypeLevelEnum':"google.ads.google_ads.v4.proto.enums.product_type_level_pb2",
'ProductVideo':"google.ads.google_ads.v4.proto.common.ad_type_infos_pb2",
'PromoteCampaignDraftRequest':"google.ads.google_ads.v4.proto.services.campaign_draft_service_pb2",
'PromoteCampaignExperimentRequest':"google.ads.google_ads.v4.proto.services.campaign_experiment_service_pb2",
'PromotionExtensionDiscountModifierEnum':"google.ads.google_ads.v4.proto.enums.promotion_extension_discount_modifier_pb2",
'PromotionExtensionOccasionEnum':"google.ads.google_ads.v4.proto.enums.promotion_extension_occasion_pb2",
'PromotionFeedItem':"google.ads.google_ads.v4.proto.common.extensions_pb2",
'PromotionPlaceholderFieldEnum':"google.ads.google_ads.v4.proto.enums.promotion_placeholder_field_pb2",
'ProximityInfo':"google.ads.google_ads.v4.proto.common.criteria_pb2",
'ProximityRadiusUnitsEnum':"google.ads.google_ads.v4.proto.enums.proximity_radius_units_pb2",
'QualityScoreBucketEnum':"google.ads.google_ads.v4.proto.enums.quality_score_bucket_pb2",
'QueryErrorEnum':"google.ads.google_ads.v4.proto.errors.query_error_pb2",
'QuotaErrorEnum':"google.ads.google_ads.v4.proto.errors.quota_error_pb2",
'RangeErrorEnum':"google.ads.google_ads.v4.proto.errors.range_error_pb2",
'ReachCurve':"google.ads.google_ads.v4.proto.services.reach_plan_service_pb2",
'ReachForecast':"google.ads.google_ads.v4.proto.services.reach_plan_service_pb2",
'ReachPlanAdLengthEnum':"google.ads.google_ads.v4.proto.enums.reach_plan_ad_length_pb2",
'ReachPlanAgeRangeEnum':"google.ads.google_ads.v4.proto.enums.reach_plan_age_range_pb2",
'ReachPlanErrorEnum':"google.ads.google_ads.v4.proto.errors.reach_plan_error_pb2",
'ReachPlanNetworkEnum':"google.ads.google_ads.v4.proto.enums.reach_plan_network_pb2",
'RealEstatePlaceholderFieldEnum':"google.ads.google_ads.v4.proto.enums.real_estate_placeholder_field_pb2",
'RealTimeBiddingSetting':"google.ads.google_ads.v4.proto.common.real_time_bidding_setting_pb2",
'Recommendation':"google.ads.google_ads.v4.proto.resources.recommendation_pb2",
'RecommendationErrorEnum':"google.ads.google_ads.v4.proto.errors.recommendation_error_pb2",
'RecommendationTypeEnum':"google.ads.google_ads.v4.proto.enums.recommendation_type_pb2",
'RegionCodeErrorEnum':"google.ads.google_ads.v4.proto.errors.region_code_error_pb2",
'RemarketingAction':"google.ads.google_ads.v4.proto.resources.remarketing_action_pb2",
'RemarketingActionOperation':"google.ads.google_ads.v4.proto.services.remarketing_action_service_pb2",
'RemarketingSetting':"google.ads.google_ads.v4.proto.resources.customer_pb2",
'RequestErrorEnum':"google.ads.google_ads.v4.proto.errors.request_error_pb2",
'ResourceAccessDeniedErrorEnum':"google.ads.google_ads.v4.proto.errors.resource_access_denied_error_pb2",
'ResourceCountLimitExceededErrorEnum':"google.ads.google_ads.v4.proto.errors.resource_count_limit_exceeded_error_pb2",
'ResponsiveDisplayAdInfo':"google.ads.google_ads.v4.proto.common.ad_type_infos_pb2",
'ResponsiveSearchAdInfo':"google.ads.google_ads.v4.proto.common.ad_type_infos_pb2",
'RestatementValue':"google.ads.google_ads.v4.proto.services.conversion_adjustment_upload_service_pb2",
'RuleBasedUserListInfo':"google.ads.google_ads.v4.proto.common.user_lists_pb2",
'RunBatchJobRequest':"google.ads.google_ads.v4.proto.services.batch_job_service_pb2",
'RunOfflineUserDataJobRequest':"google.ads.google_ads.v4.proto.services.offline_user_data_job_service_pb2",
'SearchEngineResultsPageTypeEnum':"google.ads.google_ads.v4.proto.enums.search_engine_results_page_type_pb2",
'SearchGoogleAdsFieldsRequest':"google.ads.google_ads.v4.proto.services.google_ads_field_service_pb2",
'SearchGoogleAdsFieldsResponse':"google.ads.google_ads.v4.proto.services.google_ads_field_service_pb2",
'SearchGoogleAdsRequest':"google.ads.google_ads.v4.proto.services.google_ads_service_pb2",
'SearchGoogleAdsResponse':"google.ads.google_ads.v4.proto.services.google_ads_service_pb2",
'SearchGoogleAdsStreamRequest':"google.ads.google_ads.v4.proto.services.google_ads_service_pb2",
'SearchGoogleAdsStreamResponse':"google.ads.google_ads.v4.proto.services.google_ads_service_pb2",
'SearchTermMatchTypeEnum':"google.ads.google_ads.v4.proto.enums.search_term_match_type_pb2",
'SearchTermTargetingStatusEnum':"google.ads.google_ads.v4.proto.enums.search_term_targeting_status_pb2",
'SearchTermView':"google.ads.google_ads.v4.proto.resources.search_term_view_pb2",
'Segments':"google.ads.google_ads.v4.proto.common.segments_pb2",
'ServedAssetFieldTypeEnum':"google.ads.google_ads.v4.proto.enums.served_asset_field_type_pb2",
'SettingErrorEnum':"google.ads.google_ads.v4.proto.errors.setting_error_pb2",
'SharedCriterion':"google.ads.google_ads.v4.proto.resources.shared_criterion_pb2",
'SharedCriterionErrorEnum':"google.ads.google_ads.v4.proto.errors.shared_criterion_error_pb2",
'SharedCriterionOperation':"google.ads.google_ads.v4.proto.services.shared_criterion_service_pb2",
'SharedSet':"google.ads.google_ads.v4.proto.resources.shared_set_pb2",
'SharedSetErrorEnum':"google.ads.google_ads.v4.proto.errors.shared_set_error_pb2",
'SharedSetOperation':"google.ads.google_ads.v4.proto.services.shared_set_service_pb2",
'SharedSetStatusEnum':"google.ads.google_ads.v4.proto.enums.shared_set_status_pb2",
'SharedSetTypeEnum':"google.ads.google_ads.v4.proto.enums.shared_set_type_pb2",
'ShoppingComparisonListingAdInfo':"google.ads.google_ads.v4.proto.common.ad_type_infos_pb2",
'ShoppingPerformanceView':"google.ads.google_ads.v4.proto.resources.shopping_performance_view_pb2",
'ShoppingProductAdInfo':"google.ads.google_ads.v4.proto.common.ad_type_infos_pb2",
'ShoppingSmartAdInfo':"google.ads.google_ads.v4.proto.common.ad_type_infos_pb2",
'SimilarUserListInfo':"google.ads.google_ads.v4.proto.common.user_lists_pb2",
'SimulationModificationMethodEnum':"google.ads.google_ads.v4.proto.enums.simulation_modification_method_pb2",
'SimulationTypeEnum':"google.ads.google_ads.v4.proto.enums.simulation_type_pb2",
'SiteSeed':"google.ads.google_ads.v4.proto.services.keyword_plan_idea_service_pb2",
'SitelinkFeedItem':"google.ads.google_ads.v4.proto.common.extensions_pb2",
'SitelinkPlaceholderFieldEnum':"google.ads.google_ads.v4.proto.enums.sitelink_placeholder_field_pb2",
'SizeLimitErrorEnum':"google.ads.google_ads.v4.proto.errors.size_limit_error_pb2",
'SlotEnum':"google.ads.google_ads.v4.proto.enums.slot_pb2",
'SpendingLimitTypeEnum':"google.ads.google_ads.v4.proto.enums.spending_limit_type_pb2",
'StoreAttribute':"google.ads.google_ads.v4.proto.common.offline_user_data_pb2",
'StoreSalesMetadata':"google.ads.google_ads.v4.proto.common.offline_user_data_pb2",
'StoreSalesThirdPartyMetadata':"google.ads.google_ads.v4.proto.common.offline_user_data_pb2",
'StringFormatErrorEnum':"google.ads.google_ads.v4.proto.errors.string_format_error_pb2",
'StringLengthErrorEnum':"google.ads.google_ads.v4.proto.errors.string_length_error_pb2",
'StructuredSnippetFeedItem':"google.ads.google_ads.v4.proto.common.extensions_pb2",
'StructuredSnippetPlaceholderFieldEnum':"google.ads.google_ads.v4.proto.enums.structured_snippet_placeholder_field_pb2",
'SuggestGeoTargetConstantsRequest':"google.ads.google_ads.v4.proto.services.geo_target_constant_service_pb2",
'SuggestGeoTargetConstantsResponse':"google.ads.google_ads.v4.proto.services.geo_target_constant_service_pb2",
'SummaryRowSettingEnum':"google.ads.google_ads.v4.proto.enums.summary_row_setting_pb2",
'SystemManagedResourceSourceEnum':"google.ads.google_ads.v4.proto.enums.system_managed_entity_source_pb2",
'TagSnippet':"google.ads.google_ads.v4.proto.common.tag_snippet_pb2",
'TargetCpa':"google.ads.google_ads.v4.proto.common.bidding_pb2",
'TargetCpaOptInRecommendationGoalEnum':"google.ads.google_ads.v4.proto.enums.target_cpa_opt_in_recommendation_goal_pb2",
'TargetCpaSimulationPoint':"google.ads.google_ads.v4.proto.common.simulation_pb2",
'TargetCpaSimulationPointList':"google.ads.google_ads.v4.proto.common.simulation_pb2",
'TargetCpm':"google.ads.google_ads.v4.proto.common.bidding_pb2",
'TargetImpressionShare':"google.ads.google_ads.v4.proto.common.bidding_pb2",
'TargetImpressionShareLocationEnum':"google.ads.google_ads.v4.proto.enums.target_impression_share_location_pb2",
'TargetRestriction':"google.ads.google_ads.v4.proto.common.targeting_setting_pb2",
'TargetRestrictionOperation':"google.ads.google_ads.v4.proto.common.targeting_setting_pb2",
'TargetRoas':"google.ads.google_ads.v4.proto.common.bidding_pb2",
'TargetRoasSimulationPoint':"google.ads.google_ads.v4.proto.common.simulation_pb2",
'TargetRoasSimulationPointList':"google.ads.google_ads.v4.proto.common.simulation_pb2",
'TargetSpend':"google.ads.google_ads.v4.proto.common.bidding_pb2",
'Targeting':"google.ads.google_ads.v4.proto.services.reach_plan_service_pb2",
'TargetingDimensionEnum':"google.ads.google_ads.v4.proto.enums.targeting_dimension_pb2",
'TargetingSetting':"google.ads.google_ads.v4.proto.common.targeting_setting_pb2",
'TextAdInfo':"google.ads.google_ads.v4.proto.common.ad_type_infos_pb2",
'TextAsset':"google.ads.google_ads.v4.proto.common.asset_types_pb2",
'TextLabel':"google.ads.google_ads.v4.proto.common.text_label_pb2",
'TextMessageFeedItem':"google.ads.google_ads.v4.proto.common.extensions_pb2",
'ThirdPartyAppAnalyticsLink':"google.ads.google_ads.v4.proto.resources.third_party_app_analytics_link_pb2",
'ThirdPartyAppAnalyticsLinkIdentifier':"google.ads.google_ads.v4.proto.resources.account_link_pb2",
'TimeTypeEnum':"google.ads.google_ads.v4.proto.enums.time_type_pb2",
'TimeZoneErrorEnum':"google.ads.google_ads.v4.proto.errors.time_zone_error_pb2",
'TopicConstant':"google.ads.google_ads.v4.proto.resources.topic_constant_pb2",
'TopicInfo':"google.ads.google_ads.v4.proto.common.criteria_pb2",
'TopicView':"google.ads.google_ads.v4.proto.resources.topic_view_pb2",
'TrackingCodePageFormatEnum':"google.ads.google_ads.v4.proto.enums.tracking_code_page_format_pb2",
'TrackingCodeTypeEnum':"google.ads.google_ads.v4.proto.enums.tracking_code_type_pb2",
'TransactionAttribute':"google.ads.google_ads.v4.proto.common.offline_user_data_pb2",
'TravelPlaceholderFieldEnum':"google.ads.google_ads.v4.proto.enums.travel_placeholder_field_pb2",
'UnknownListingDimensionInfo':"google.ads.google_ads.v4.proto.common.criteria_pb2",
'UploadCallConversionsRequest':"google.ads.google_ads.v4.proto.services.conversion_upload_service_pb2",
'UploadCallConversionsResponse':"google.ads.google_ads.v4.proto.services.conversion_upload_service_pb2",
'UploadClickConversionsRequest':"google.ads.google_ads.v4.proto.services.conversion_upload_service_pb2",
'UploadClickConversionsResponse':"google.ads.google_ads.v4.proto.services.conversion_upload_service_pb2",
'UploadConversionAdjustmentsRequest':"google.ads.google_ads.v4.proto.services.conversion_adjustment_upload_service_pb2",
'UploadConversionAdjustmentsResponse':"google.ads.google_ads.v4.proto.services.conversion_adjustment_upload_service_pb2",
'UploadUserDataRequest':"google.ads.google_ads.v4.proto.services.user_data_service_pb2",
'UploadUserDataResponse':"google.ads.google_ads.v4.proto.services.user_data_service_pb2",
'UrlCollection':"google.ads.google_ads.v4.proto.common.url_collection_pb2",
'UrlFieldErrorEnum':"google.ads.google_ads.v4.proto.errors.url_field_error_pb2",
'UrlSeed':"google.ads.google_ads.v4.proto.services.keyword_plan_idea_service_pb2",
'UserData':"google.ads.google_ads.v4.proto.common.offline_user_data_pb2",
'UserDataErrorEnum':"google.ads.google_ads.v4.proto.errors.user_data_error_pb2",
'UserDataOperation':"google.ads.google_ads.v4.proto.services.user_data_service_pb2",
'UserIdentifier':"google.ads.google_ads.v4.proto.common.offline_user_data_pb2",
'UserInterest':"google.ads.google_ads.v4.proto.resources.user_interest_pb2",
'UserInterestInfo':"google.ads.google_ads.v4.proto.common.criteria_pb2",
'UserInterestTaxonomyTypeEnum':"google.ads.google_ads.v4.proto.enums.user_interest_taxonomy_type_pb2",
'UserList':"google.ads.google_ads.v4.proto.resources.user_list_pb2",
'UserListAccessStatusEnum':"google.ads.google_ads.v4.proto.enums.user_list_access_status_pb2",
'UserListActionInfo':"google.ads.google_ads.v4.proto.common.user_lists_pb2",
'UserListClosingReasonEnum':"google.ads.google_ads.v4.proto.enums.user_list_closing_reason_pb2",
'UserListCombinedRuleOperatorEnum':"google.ads.google_ads.v4.proto.enums.user_list_combined_rule_operator_pb2",
'UserListCrmDataSourceTypeEnum':"google.ads.google_ads.v4.proto.enums.user_list_crm_data_source_type_pb2",
'UserListDateRuleItemInfo':"google.ads.google_ads.v4.proto.common.user_lists_pb2",
'UserListDateRuleItemOperatorEnum':"google.ads.google_ads.v4.proto.enums.user_list_date_rule_item_operator_pb2",
'UserListErrorEnum':"google.ads.google_ads.v4.proto.errors.user_list_error_pb2",
'UserListInfo':"google.ads.google_ads.v4.proto.common.criteria_pb2",
'UserListLogicalRuleInfo':"google.ads.google_ads.v4.proto.common.user_lists_pb2",
'UserListLogicalRuleOperatorEnum':"google.ads.google_ads.v4.proto.enums.user_list_logical_rule_operator_pb2",
'UserListMembershipStatusEnum':"google.ads.google_ads.v4.proto.enums.user_list_membership_status_pb2",
'UserListNumberRuleItemInfo':"google.ads.google_ads.v4.proto.common.user_lists_pb2",
'UserListNumberRuleItemOperatorEnum':"google.ads.google_ads.v4.proto.enums.user_list_number_rule_item_operator_pb2",
'UserListOperation':"google.ads.google_ads.v4.proto.services.user_list_service_pb2",
'UserListPrepopulationStatusEnum':"google.ads.google_ads.v4.proto.enums.user_list_prepopulation_status_pb2",
'UserListRuleInfo':"google.ads.google_ads.v4.proto.common.user_lists_pb2",
'UserListRuleItemGroupInfo':"google.ads.google_ads.v4.proto.common.user_lists_pb2",
'UserListRuleItemInfo':"google.ads.google_ads.v4.proto.common.user_lists_pb2",
'UserListRuleTypeEnum':"google.ads.google_ads.v4.proto.enums.user_list_rule_type_pb2",
'UserListSizeRangeEnum':"google.ads.google_ads.v4.proto.enums.user_list_size_range_pb2",
'UserListStringRuleItemInfo':"google.ads.google_ads.v4.proto.common.user_lists_pb2",
'UserListStringRuleItemOperatorEnum':"google.ads.google_ads.v4.proto.enums.user_list_string_rule_item_operator_pb2",
'UserListTypeEnum':"google.ads.google_ads.v4.proto.enums.user_list_type_pb2",
'UserLocationView':"google.ads.google_ads.v4.proto.resources.user_location_view_pb2",
'Value':"google.ads.google_ads.v4.proto.common.value_pb2",
'VanityPharmaDisplayUrlModeEnum':"google.ads.google_ads.v4.proto.enums.vanity_pharma_display_url_mode_pb2",
'VanityPharmaTextEnum':"google.ads.google_ads.v4.proto.enums.vanity_pharma_text_pb2",
'Video':"google.ads.google_ads.v4.proto.resources.video_pb2",
'VideoAdInfo':"google.ads.google_ads.v4.proto.common.ad_type_infos_pb2",
'VideoBumperInStreamAdInfo':"google.ads.google_ads.v4.proto.common.ad_type_infos_pb2",
'VideoNonSkippableInStreamAdInfo':"google.ads.google_ads.v4.proto.common.ad_type_infos_pb2",
'VideoOutstreamAdInfo':"google.ads.google_ads.v4.proto.common.ad_type_infos_pb2",
'VideoTrueViewDiscoveryAdInfo':"google.ads.google_ads.v4.proto.common.ad_type_infos_pb2",
'VideoTrueViewInStreamAdInfo':"google.ads.google_ads.v4.proto.common.ad_type_infos_pb2",
'WebpageConditionInfo':"google.ads.google_ads.v4.proto.common.criteria_pb2",
'WebpageConditionOperandEnum':"google.ads.google_ads.v4.proto.enums.webpage_condition_operand_pb2",
'WebpageConditionOperatorEnum':"google.ads.google_ads.v4.proto.enums.webpage_condition_operator_pb2",
'WebpageInfo':"google.ads.google_ads.v4.proto.common.criteria_pb2",
'YouTubeChannelInfo':"google.ads.google_ads.v4.proto.common.criteria_pb2",
'YouTubeVideoInfo':"google.ads.google_ads.v4.proto.common.criteria_pb2",
'YoutubeVideoAsset':"google.ads.google_ads.v4.proto.common.asset_types_pb2",
'YoutubeVideoRegistrationErrorEnum':"google.ads.google_ads.v4.proto.errors.youtube_video_registration_error_pb2",
}
# Proto modules outside the google.ads package whose message classes are also
# exposed lazily from this package (LRO, protobuf well-known types, RPC status).
DEPENDENT_MODULE_LIST = [
    "google.longrunning.operations_pb2",
    "google.protobuf.any_pb2",
    "google.protobuf.empty_pb2",
    "google.protobuf.field_mask_pb2",
    "google.protobuf.wrappers_pb2",
    "google.rpc.status_pb2",
]
def _get_class_from_module(module_name):
    """Yield the name of every protobuf message class defined in *module_name*."""
    loaded = importlib.import_module(module_name)
    # get_messages returns a {class name: message class} mapping; iterating
    # the mapping itself iterates its keys.
    yield from get_messages(loaded)
def _populate_dependent_classes(module_list=DEPENDENT_MODULE_LIST):
    """Build a mapping of message-class name -> name of its defining module."""
    return {
        cls_name: mod_name
        for mod_name in module_list
        for cls_name in _get_class_from_module(mod_name)
    }
_lazy_dependent_class_to_package_map = _populate_dependent_classes()
def _load_module(module_name):
    """Load a module by its name.

    Names registered in _lazy_name_to_package_map are resolved to their
    fully-qualified package path before importing; any other name is
    imported as-is.

    Args:
        module_name: a str of the name of a sub-module to load.

    Returns:
        A module instance.

    Raises:
        AttributeError: if the given module can't be found.
    """
    if module_name in _lazy_name_to_package_map:
        module_path = (
            f"{_lazy_name_to_package_map[module_name]}.{module_name}"
        )
    else:
        module_path = module_name
    try:
        return importlib.import_module(module_path)
    except ModuleNotFoundError as err:
        # BUGFIX: the original caught KeyError, which can never occur here
        # (the dict is only indexed after a membership check) while
        # import_module raises ModuleNotFoundError. Converting import
        # failures to AttributeError is required for module-level
        # __getattr__ (PEP 562) so that e.g. hasattr() behaves correctly.
        raise AttributeError(f"unknown sub-module {module_name!r}.") from err
def _get_module_by_name(module_name):
    """Get a module containing one or more message classes.
    For example: google.ads.google_ads.v2.proto.services.video_service_pb2.
    Args:
        module_name: a str of the name of a module.
    Returns:
        a module class instance.
    """
    module = _load_module(module_name)
    # Cache the module on this package so later attribute access bypasses
    # the module-level __getattr__ hook entirely.
    globals()[module_name] = module
    for name, message in get_messages(module).items():
        # NOTE(review): message class names (e.g. "Video") never end with
        # "_service_pb2", so this branch looks unreachable as written —
        # possibly the intent was to test module_name. Confirm before changing.
        if name.endswith("_service_pb2"):
            # NOTE(review): "v2" disagrees with the v4 module paths used
            # throughout this file; looks like a copy-paste remnant — verify.
            message.__module__ = "google.ads.google_ads.v2.types"
        globals()[name] = message
    return module
def _get_message_class_by_name(class_name):
    """Get a message class instance by name.
    For example: VideoService
    Args:
        class_name: a str of the name of a protobuf class to load.
    Returns:
        a protobuf message class definition that inherits from
        google.protobuf.pyext.cpp_message.GeneratedProtocolMessageType.
    Raises:
        AttributeError: if the name is in neither lookup map, or the resolved
            module does not define a class with that name.
    """
    # Dependent (shared google.*) classes take precedence over Ads classes.
    if class_name in _lazy_dependent_class_to_package_map:
        module_path = _lazy_dependent_class_to_package_map[class_name]
    elif class_name in _lazy_class_to_package_map:
        module_path = _lazy_class_to_package_map[class_name]
    else:
        raise AttributeError(f"unknown sub-module {class_name!r}.")
    try:
        module = _load_module(module_path)
        message = getattr(module, class_name)
    except AttributeError:
        raise AttributeError(f"unknown message class {class_name!r}.")
    if class_name.endswith("Service"):
        # NOTE(review): "v2" disagrees with the v4 module paths used
        # throughout this file; looks like a copy-paste remnant — verify.
        message.__module__ = "google.ads.google_ads.v2.types"
    # Cache so later attribute access bypasses __getattr__.
    globals()[class_name] = message
    return message
# Background on how this behaves: https://www.python.org/dev/peps/pep-0562/
def __getattr__(name):  # Requires Python >= 3.7
    """Lazily perform imports and class definitions on first demand."""
    if name == "__all__":
        # Gather every lazily-exposed name, normalized to UpperCamelCase,
        # and memoize the sorted result on the module.
        every_key = chain(
            _lazy_name_to_package_map,
            _lazy_class_to_package_map,
            _lazy_dependent_class_to_package_map,
        )
        all_names = sorted(
            util.convert_snake_case_to_upper_case(key) for key in every_key
        )
        globals()["__all__"] = all_names
        return all_names
    if name.endswith("_pb2"):
        # snake_case proto module, e.g. video_service_pb2.
        return _get_module_by_name(name)
    if name.endswith("Pb2"):
        # CamelCase alias for a proto module, e.g. VideoServicePb2.
        module_name = f"{util.convert_upper_case_to_snake_case(name)}"
        return _get_module_by_name(module_name)
    # Anything else is treated as a message class name, e.g. VideoService.
    return _get_message_class_by_name(name)
def __dir__():
    """Return the lazily-computed __all__ as this module's dir() listing."""
    cached = globals().get("__all__")
    if cached:
        return cached
    # Not computed yet (or empty): delegate to __getattr__, which builds
    # and memoizes the list.
    return __getattr__("__all__")
# Python < 3.7 has no native module-level __getattr__/__dir__ (PEP 562),
# so fall back to the pep562 backport, which wires the hooks up manually.
if not sys.version_info >= (3, 7):
    from pep562 import Pep562
    Pep562(__name__)
| 82.698154
| 167
| 0.808029
| 20,159
| 152,330
| 5.758619
| 0.077186
| 0.259562
| 0.215009
| 0.258011
| 0.768831
| 0.766393
| 0.763257
| 0.762456
| 0.76181
| 0.709634
| 0
| 0.023579
| 0.062023
| 152,330
| 1,841
| 168
| 82.743074
| 0.788896
| 0.009486
| 0
| 0.003986
| 0
| 0
| 0.859191
| 0.794944
| 0
| 0
| 0
| 0
| 0
| 1
| 0.003986
| false
| 0
| 0.005125
| 0.000569
| 0.014237
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
3b709158cf14b1839d6b152a591a9ecf2fc0b0d2
| 141
|
py
|
Python
|
torchexpo/nlp/sentiment_analysis/__init__.py
|
torchexpo/torchexpo
|
88c875358e830065ee23f49f47d4995b5b2d3e3c
|
[
"Apache-2.0"
] | 23
|
2020-09-08T05:08:46.000Z
|
2021-08-12T07:16:53.000Z
|
torchexpo/nlp/sentiment_analysis/__init__.py
|
torchexpo/torchexpo
|
88c875358e830065ee23f49f47d4995b5b2d3e3c
|
[
"Apache-2.0"
] | 1
|
2021-12-05T06:15:18.000Z
|
2021-12-20T08:10:19.000Z
|
torchexpo/nlp/sentiment_analysis/__init__.py
|
torchexpo/torchexpo
|
88c875358e830065ee23f49f47d4995b5b2d3e3c
|
[
"Apache-2.0"
] | 2
|
2021-01-12T06:10:53.000Z
|
2021-07-24T08:21:59.000Z
|
from torchexpo.nlp.sentiment_analysis.electra import (electra_imdb)
from torchexpo.nlp.sentiment_analysis.distilbert import (distilbert_imdb)
| 70.5
| 73
| 0.879433
| 18
| 141
| 6.666667
| 0.5
| 0.216667
| 0.266667
| 0.416667
| 0.55
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.049645
| 141
| 2
| 73
| 70.5
| 0.895522
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
8e9e6af2d0e1e8b1eb883ee00252a7b5e6939c84
| 3,953
|
py
|
Python
|
speedread_ner.py
|
gsi-upm/nerdy
|
62d2a6df730e30bc1c05c91557fcfd2236b742b8
|
[
"Apache-2.0"
] | 2
|
2017-10-26T19:40:51.000Z
|
2018-03-07T12:10:49.000Z
|
speedread_ner.py
|
gsi-upm/nerdy
|
62d2a6df730e30bc1c05c91557fcfd2236b742b8
|
[
"Apache-2.0"
] | null | null | null |
speedread_ner.py
|
gsi-upm/nerdy
|
62d2a6df730e30bc1c05c91557fcfd2236b742b8
|
[
"Apache-2.0"
] | null | null | null |
__author__ = 'croman'
from pipeline import pipe
from lxml import etree
import rdflib
def ner(datasetfile, format):
tweets = ""
tweetids = []
if format == 'xml':
dataset = etree.parse(datasetfile)
for tweet in dataset.xpath('//Tweet'):
tweetText = tweet.xpath('./TweetText/text()')[0]
tweets += tweetText+"\n"
tweetids.append(tweet.xpath('./TweetId/text()')[0])
tweets = tweets.encode('utf-8')
elif format == "nif":
tweetdict = {}
a = rdflib.Graph()
a.parse(datasetfile, format='n3')
for s, p, o in a:
if s.endswith(',') and p.endswith('isString'):
tweetid = s.split('#')[0].split('.xml/')[1]
tweetdict[tweetid] = o
for key in sorted(tweetdict):
tweetids.append(key)
tweets += tweetdict[key]+'\n'
tweets = tweets.encode('utf-8')
print tweets
indexes = []
tweetlines = tweets.split('\n')
for t in tweetlines:
tweetlength = 0
for word in t.split():
tweetlength += len(word)
print tweetlength
indexes.append(tweetlength)
options = {'log':'DEBUG', 'conf': 'pipeline/settings.py', 'text': tweets}
results = pipe.main(options, [])
print 'results: ' + results
x = 0
finalresults = ''
resultslines = results.splitlines()
finalresults = ''
for i in indexes:
print i
length = 0
tweetresult = ''
print x
print resultslines[x]
while length < i:
if resultslines[x] != '':
entity = resultslines[x].split('\t')
print entity
length += len(entity[0])
tweetresult += entity[0]+'/'+entity[1]+' '
x += 1
#print 'x=', x
print 'length: ', length
else:
print 'ok'
x += 1
print tweetresult
finalresults += tweetresult[:-1]+' END\n'
print finalresults
ner("Mena Collection.ttl", "nif")
"""__author__ = 'croman'
from pipeline import pipe
from lxml import etree
import rdflib
def ner(datasetfile, format):
tweets = ""
tweetids = []
if format == 'xml':
dataset = etree.parse(datasetfile)
for tweet in dataset.xpath('//Tweet'):
tweetText = tweet.xpath('./TweetText/text()')[0]
tweets += tweetText+"\n"
tweetids.append(tweet.xpath('./TweetId/text()')[0])
tweets = tweets.encode('utf-8')
elif format == "nif":
tweetdict = {}
a = rdflib.Graph()
a.parse(datasetfile, format='n3')
for s, p, o in a:
if s.endswith(',') and p.endswith('isString'):
tweetid = s.split('#')[0].split('.xml/')[1]
tweetdict[tweetid] = o
for key in sorted(tweetdict):
tweetids.append(key)
tweets += tweetdict[key]+'\n'
tweets = tweets.encode('utf-8')
print tweets
indexes = []
tweetlines = tweets.split('\n')
for t in tweetlines:
tweetlength = 0
for word in t.split():
tweetlength += len(word)
indexes.append(tweetlength)
options = {'log':'DEBUG', 'conf': 'pipeline/settings.py', 'text': tweets}
results = pipe.main(options, [])
print results
x = 0
finalresults = ''
for i in indexes:
print i
resultslines = results.split('\n')
length = 0
while length < i:
entity = resultslines[x].split('\t')
print resultslines[x]
length += len(entity[0])
if len(entity)>1:
finalresults += entity[0]+'/'+entity[1]+' '
x += 1
print 'x=', x
print 'length: ', length
finalresults = finalresults[:-1]+' END\n'
print finalresults
ner("Mena Collection.ttl", "nif")"""
| 27.838028
| 77
| 0.517329
| 418
| 3,953
| 4.873206
| 0.186603
| 0.03191
| 0.0216
| 0.041237
| 0.831615
| 0.831615
| 0.80216
| 0.771723
| 0.771723
| 0.771723
| 0
| 0.012232
| 0.338224
| 3,953
| 142
| 78
| 27.838028
| 0.766437
| 0.003289
| 0
| 0.095238
| 0
| 0
| 0.083173
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.047619
| null | null | 0.174603
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
d9022f601f4293f1035df02426253ad8f56ff9ca
| 12,142
|
py
|
Python
|
tests/test_views.py
|
lavyaKoli/stormpath-flask
|
ca79162302e34085bef774751288d2ac95479c0d
|
[
"Apache-2.0"
] | 99
|
2015-01-04T06:27:10.000Z
|
2021-07-27T11:06:15.000Z
|
tests/test_views.py
|
lavyaKoli/stormpath-flask
|
ca79162302e34085bef774751288d2ac95479c0d
|
[
"Apache-2.0"
] | 65
|
2015-01-05T17:34:27.000Z
|
2019-01-21T09:59:01.000Z
|
tests/test_views.py
|
lavyaKoli/stormpath-flask
|
ca79162302e34085bef774751288d2ac95479c0d
|
[
"Apache-2.0"
] | 37
|
2015-03-20T16:24:44.000Z
|
2020-10-01T16:12:30.000Z
|
"""Run tests against our custom views."""
from flask.ext.stormpath.models import User
from .helpers import StormpathTestCase
class TestRegister(StormpathTestCase):
"""Test our registration view."""
def test_default_fields(self):
# By default, we'll register new users with first name, last name,
# email, and password.
with self.app.test_client() as c:
# Ensure that missing fields will cause a failure.
resp = c.post('/register', data={
'email': 'r@rdegges.com',
'password': 'woot1LoveCookies!',
})
self.assertEqual(resp.status_code, 200)
# Ensure that valid fields will result in a success.
resp = c.post('/register', data={
'username': 'rdegges',
'given_name': 'Randall',
'middle_name': 'Clark',
'surname': 'Degges',
'email': 'r@rdegges.com',
'password': 'woot1LoveCookies!',
})
self.assertEqual(resp.status_code, 302)
def test_disable_all_except_mandatory(self):
# Here we'll disable all the fields except for the mandatory fields:
# email and password.
self.app.config['STORMPATH_ENABLE_USERNAME'] = False
self.app.config['STORMPATH_ENABLE_GIVEN_NAME'] = False
self.app.config['STORMPATH_ENABLE_MIDDLE_NAME'] = False
self.app.config['STORMPATH_ENABLE_SURNAME'] = False
with self.app.test_client() as c:
# Ensure that missing fields will cause a failure.
resp = c.post('/register', data={
'email': 'r@rdegges.com',
})
self.assertEqual(resp.status_code, 200)
# Ensure that valid fields will result in a success.
resp = c.post('/register', data={
'email': 'r@rdegges.com',
'password': 'woot1LoveCookies!',
})
self.assertEqual(resp.status_code, 302)
def test_require_settings(self):
# Here we'll change our backend behavior such that users *can* enter a
# first and last name, but they aren't required server side.
# email and password.
self.app.config['STORMPATH_REQUIRE_GIVEN_NAME'] = False
self.app.config['STORMPATH_REQUIRE_SURNAME'] = False
with self.app.test_client() as c:
# Ensure that registration works *without* given name and surname
# since they aren't required.
resp = c.post('/register', data={
'email': 'r@rdegges.com',
'password': 'woot1LoveCookies!',
})
self.assertEqual(resp.status_code, 302)
# Find our user account that was just created, and ensure the given
# name and surname fields were set to our default string.
user = User.from_login('r@rdegges.com', 'woot1LoveCookies!')
self.assertEqual(user.given_name, 'Anonymous')
self.assertEqual(user.surname, 'Anonymous')
def test_error_messages(self):
with self.app.test_client() as c:
# Ensure that an error is raised if an invalid password is
# specified.
resp = c.post('/register', data={
'given_name': 'Randall',
'surname': 'Degges',
'email': 'r@rdegges.com',
'password': 'hilol',
})
self.assertEqual(resp.status_code, 200)
self.assertTrue('Account password minimum length not satisfied.' in resp.data.decode('utf-8'))
self.assertFalse('developerMessage' in resp.data.decode('utf-8'))
resp = c.post('/register', data={
'given_name': 'Randall',
'surname': 'Degges',
'email': 'r@rdegges.com',
'password': 'hilolwoot1',
})
self.assertEqual(resp.status_code, 200)
self.assertTrue('Password requires at least 1 uppercase character.' in resp.data.decode('utf-8'))
self.assertFalse('developerMessage' in resp.data.decode('utf-8'))
resp = c.post('/register', data={
'given_name': 'Randall',
'surname': 'Degges',
'email': 'r@rdegges.com',
'password': 'hilolwoothi',
})
self.assertEqual(resp.status_code, 200)
self.assertTrue('Password requires at least 1 numeric character.' in resp.data.decode('utf-8'))
self.assertFalse('developerMessage' in resp.data.decode('utf-8'))
def test_redirect_to_login_and_register_url(self):
# Setting redirect URL to something that is easy to check
stormpath_redirect_url = '/redirect_for_login_and_registration'
self.app.config['STORMPATH_REDIRECT_URL'] = stormpath_redirect_url
with self.app.test_client() as c:
# Ensure that valid registration will redirect to
# STORMPATH_REDIRECT_URL
resp = c.post(
'/register',
data=
{
'given_name': 'Randall',
'middle_name': 'Clark',
'surname': 'Degges',
'email': 'r@rdegges.com',
'password': 'woot1LoveCookies!',
})
self.assertEqual(resp.status_code, 302)
location = resp.headers.get('location')
self.assertTrue(stormpath_redirect_url in location)
def test_redirect_to_register_url(self):
# Setting redirect URLs to something that is easy to check
stormpath_redirect_url = '/redirect_for_login'
stormpath_registration_redirect_url = '/redirect_for_registration'
self.app.config['STORMPATH_REDIRECT_URL'] = stormpath_redirect_url
self.app.config['STORMPATH_REGISTRATION_REDIRECT_URL'] = \
stormpath_registration_redirect_url
with self.app.test_client() as c:
# Ensure that valid registration will redirect to
# STORMPATH_REGISTRATION_REDIRECT_URL if it exists
resp = c.post(
'/register',
data=
{
'given_name': 'Randall',
'middle_name': 'Clark',
'surname': 'Degges',
'email': 'r@rdegges.com',
'password': 'woot1LoveCookies!',
})
self.assertEqual(resp.status_code, 302)
location = resp.headers.get('location')
self.assertFalse(stormpath_redirect_url in location)
self.assertTrue(stormpath_registration_redirect_url in location)
class TestLogin(StormpathTestCase):
"""Test our login view."""
def test_email_login(self):
# Create a user.
with self.app.app_context():
User.create(
given_name = 'Randall',
surname = 'Degges',
email = 'r@rdegges.com',
password = 'woot1LoveCookies!',
)
# Attempt a login using email and password.
with self.app.test_client() as c:
resp = c.post('/login', data={
'login': 'r@rdegges.com',
'password': 'woot1LoveCookies!',
})
self.assertEqual(resp.status_code, 302)
def test_username_login(self):
# Create a user.
with self.app.app_context():
User.create(
username = 'rdegges',
given_name = 'Randall',
surname = 'Degges',
email = 'r@rdegges.com',
password = 'woot1LoveCookies!',
)
# Attempt a login using username and password.
with self.app.test_client() as c:
resp = c.post('/login', data={
'login': 'rdegges',
'password': 'woot1LoveCookies!',
})
self.assertEqual(resp.status_code, 302)
def test_error_messages(self):
# Create a user.
with self.app.app_context():
User.create(
username = 'rdegges',
given_name = 'Randall',
surname = 'Degges',
email = 'r@rdegges.com',
password = 'woot1LoveCookies!',
)
# Ensure that an error is raised if an invalid username or password is
# specified.
with self.app.test_client() as c:
resp = c.post('/login', data={
'login': 'rdegges',
'password': 'hilol',
})
self.assertEqual(resp.status_code, 200)
#self.assertTrue('Invalid username or password.' in resp.data.decode('utf-8'))
self.assertTrue('Login attempt failed because the specified password is incorrect.' in resp.data.decode('utf-8'))
self.assertFalse('developerMessage' in resp.data.decode('utf-8'))
def test_redirect_to_login_and_register_url(self):
# Create a user.
with self.app.app_context():
User.create(
username = 'rdegges',
given_name = 'Randall',
surname = 'Degges',
email = 'r@rdegges.com',
password = 'woot1LoveCookies!',
)
# Setting redirect URL to something that is easy to check
stormpath_redirect_url = '/redirect_for_login_and_registration'
self.app.config['STORMPATH_REDIRECT_URL'] = stormpath_redirect_url
with self.app.test_client() as c:
# Attempt a login using username and password.
resp = c.post(
'/login',
data={'login': 'rdegges', 'password': 'woot1LoveCookies!',})
self.assertEqual(resp.status_code, 302)
location = resp.headers.get('location')
self.assertTrue(stormpath_redirect_url in location)
def test_redirect_to_register_url(self):
# Create a user.
with self.app.app_context():
User.create(
username = 'rdegges',
given_name = 'Randall',
surname = 'Degges',
email = 'r@rdegges.com',
password = 'woot1LoveCookies!',
)
# Setting redirect URLs to something that is easy to check
stormpath_redirect_url = '/redirect_for_login'
stormpath_registration_redirect_url = '/redirect_for_registration'
self.app.config['STORMPATH_REDIRECT_URL'] = stormpath_redirect_url
self.app.config['STORMPATH_REGISTRATION_REDIRECT_URL'] = \
stormpath_registration_redirect_url
with self.app.test_client() as c:
# Attempt a login using username and password.
resp = c.post(
'/login',
data={'login': 'rdegges', 'password': 'woot1LoveCookies!',})
self.assertEqual(resp.status_code, 302)
location = resp.headers.get('location')
self.assertTrue('redirect_for_login' in location)
self.assertFalse('redirect_for_registration' in location)
class TestLogout(StormpathTestCase):
"""Test our logout view."""
def test_logout_works_with_anonymous_users(self):
with self.app.test_client() as c:
resp = c.get('/logout')
self.assertEqual(resp.status_code, 302)
def test_logout_works(self):
# Create a user.
with self.app.app_context():
User.create(
given_name = 'Randall',
surname = 'Degges',
email = 'r@rdegges.com',
password = 'woot1LoveCookies!',
)
with self.app.test_client() as c:
# Log this user in.
resp = c.post('/login', data={
'login': 'r@rdegges.com',
'password': 'woot1LoveCookies!',
})
self.assertEqual(resp.status_code, 302)
# Log this user out.
resp = c.get('/logout')
self.assertEqual(resp.status_code, 302)
| 38.18239
| 125
| 0.558887
| 1,257
| 12,142
| 5.249006
| 0.130469
| 0.032889
| 0.031676
| 0.068202
| 0.800394
| 0.785541
| 0.78054
| 0.743104
| 0.740831
| 0.722643
| 0
| 0.01039
| 0.334129
| 12,142
| 317
| 126
| 38.302839
| 0.80569
| 0.145363
| 0
| 0.819005
| 0
| 0
| 0.211496
| 0.044974
| 0
| 0
| 0
| 0
| 0.153846
| 1
| 0.058824
| false
| 0.113122
| 0.00905
| 0
| 0.081448
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 8
|
d9265bc8d9e395e0e1f4b2ccd82a397094568300
| 324
|
py
|
Python
|
flask-portal/app/referral_api.py
|
TUMTICS-Dev/Talent-Pool
|
c51376453184e7a83d684ab5f9467535d49cc784
|
[
"MIT"
] | null | null | null |
flask-portal/app/referral_api.py
|
TUMTICS-Dev/Talent-Pool
|
c51376453184e7a83d684ab5f9467535d49cc784
|
[
"MIT"
] | null | null | null |
flask-portal/app/referral_api.py
|
TUMTICS-Dev/Talent-Pool
|
c51376453184e7a83d684ab5f9467535d49cc784
|
[
"MIT"
] | null | null | null |
from abc import ABC


class Refferal_API(ABC):
    """Abstract base describing the referral-portal API surface.

    All operations are currently unimplemented stubs (they return None);
    concrete subclasses are expected to fill them in.

    NOTE(review): class name keeps the original "Refferal" spelling so
    existing imports continue to work; consider an aliased rename later.
    """

    def __init__(self):
        # TODO: set up any shared state (e.g. DB/session handles).
        pass

    def NewVacancy(self):
        """Register a new vacancy.  TODO: implement."""
        # Bug fix: original definition omitted `self`, so calling this
        # through an instance raised TypeError.  Same for the methods below.
        pass

    def NewApplication(self):
        """Record a new application for a vacancy.  TODO: implement."""
        pass

    def NotifyReferrer(self):
        """Notify the referrer of activity on their referral.  TODO: implement."""
        pass

    def NotifyApplicant(self):
        """Notify the applicant of their application status.  TODO: implement."""
        pass
| 13.5
| 26
| 0.481481
| 29
| 324
| 5.206897
| 0.517241
| 0.264901
| 0.291391
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.447531
| 324
| 24
| 27
| 13.5
| 0.843575
| 0.074074
| 0
| 0.416667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.041667
| 0
| 1
| 0.416667
| false
| 0.416667
| 0.083333
| 0
| 0.583333
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
| 0
|
0
| 7
|
d93cdda3893693209b80cab60a21164807e0bd8b
| 1,493
|
py
|
Python
|
venv/lib/python2.7/site-packages/pychart/afm/ZapfDingbats.py
|
Christian-Castro/castro_odoo8
|
8247fdb20aa39e043b6fa0c4d0af509462ab3e00
|
[
"Unlicense"
] | 1
|
2019-12-19T01:53:13.000Z
|
2019-12-19T01:53:13.000Z
|
venv/lib/python2.7/site-packages/pychart/afm/ZapfDingbats.py
|
Christian-Castro/castro_odoo8
|
8247fdb20aa39e043b6fa0c4d0af509462ab3e00
|
[
"Unlicense"
] | null | null | null |
venv/lib/python2.7/site-packages/pychart/afm/ZapfDingbats.py
|
Christian-Castro/castro_odoo8
|
8247fdb20aa39e043b6fa0c4d0af509462ab3e00
|
[
"Unlicense"
] | null | null | null |
# AFM font ZapfDingbats (path: /usr/share/fonts/afms/adobe/pzdr.afm).
# Derived from Ghostscript distribution.
# Go to www.cs.wisc.edu/~ghost to get the Ghostscript source code.
# NOTE(review): `dir` here is pychart's sibling module (pychart/afm/dir.py)
# holding the shared `afm` registry -- not the builtin dir().
import dir
# Register the metric table for ZapfDingbats.  Presumably these are
# per-character advance widths, one entry per character code starting at 0,
# as extracted from the .afm file -- TODO confirm against dir.py's consumers.
dir.afm["ZapfDingbats"] = (500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 278, 974, 961, 974, 980, 719, 789, 790, 791, 690, 960, 939, 549, 855, 911, 933, 911, 945, 974, 755, 846, 762, 761, 571, 677, 763, 760, 759, 754, 494, 552, 537, 577, 692, 786, 788, 788, 790, 793, 794, 816, 823, 789, 841, 823, 833, 816, 831, 923, 744, 723, 749, 790, 792, 695, 776, 768, 792, 759, 707, 708, 682, 701, 826, 815, 789, 789, 707, 687, 696, 689, 786, 787, 713, 791, 785, 791, 873, 761, 762, 762, 759, 759, 892, 892, 788, 784, 438, 138, 277, 415, 392, 392, 668, 668, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 732, 544, 544, 910, 667, 760, 760, 776, 595, 694, 626, 788, 788, 788, 788, 788, 788, 788, 788, 788, 788, 788, 788, 788, 788, 788, 788, 788, 788, 788, 788, 788, 788, 788, 788, 788, 788, 788, 788, 788, 788, 788, 788, 788, 788, 788, 788, 788, 788, 788, 788, 894, 838, 1016, 458, 748, 924, 748, 918, 927, 928, 928, 834, 873, 828, 924, 924, 917, 930, 931, 463, 883, 836, 836, 867, 867, 696, 696, 874, 500, 874, 760, 946, 771, 865, 771, 888, 967, 888, 831, 873, 927, 970, 918, )
| 248.833333
| 1,304
| 0.621567
| 288
| 1,493
| 3.222222
| 0.451389
| 0.413793
| 0.601293
| 0.775862
| 0.342672
| 0.342672
| 0.342672
| 0.342672
| 0.342672
| 0.342672
| 0
| 0.632013
| 0.188212
| 1,493
| 5
| 1,305
| 298.6
| 0.133663
| 0.113865
| 0
| 0
| 0
| 0
| 0.009098
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 10
|
d95eed5d862d9b62ea1af084b7b9d3c23fb6a1ce
| 13,127
|
py
|
Python
|
c-deps/krb5/src/tests/t_preauth.py
|
Yangjxxxxx/ZNBase
|
dcf993b73250dd5cb63041f4d9cf098941f67b2b
|
[
"MIT",
"BSD-3-Clause"
] | 1
|
2021-12-03T05:07:39.000Z
|
2021-12-03T05:07:39.000Z
|
c-deps/krb5/src/tests/t_preauth.py
|
Yangjxxxxx/ZNBase
|
dcf993b73250dd5cb63041f4d9cf098941f67b2b
|
[
"MIT",
"BSD-3-Clause"
] | 9
|
2020-05-13T16:28:13.000Z
|
2021-12-30T04:07:49.000Z
|
src/tests/t_preauth.py
|
frozencemetery/krb5
|
bb8fa495d00ccd931eec87a01b8920636cf7903e
|
[
"MIT"
] | 4
|
2020-07-01T12:41:46.000Z
|
2022-01-26T03:05:30.000Z
|
# Pre-authentication framework tests, driven by the test kdcpreauth/clpreauth
# plugin built in plugins/preauth/test.  Relies on the k5test harness:
# K5Realm, realm.run, realm.kinit, kadminl, password, mark, fail and success
# all come from the star import below.
from k5test import *
# Test that the kdcpreauth client_keyblock() callback matches the key
# indicated by the etype info, and returns NULL if no key was selected.
testpreauth = os.path.join(buildtop, 'plugins', 'preauth', 'test', 'test.so')
conf = {'plugins': {'kdcpreauth': {'module': 'test:' + testpreauth},
                    'clpreauth': {'module': 'test:' + testpreauth}}}
realm = K5Realm(create_host=False, get_creds=False, krb5_conf=conf)
realm.run([kadminl, 'modprinc', '+requires_preauth', realm.user_princ])
realm.run([kadminl, 'setstr', realm.user_princ, 'teststring', 'testval'])
realm.run([kadminl, 'addprinc', '-nokey', '+requires_preauth', 'nokeyuser'])
realm.kinit(realm.user_princ, password('user'), expected_msg='testval')
realm.kinit('nokeyuser', password('user'), expected_code=1,
            expected_msg='no key')
# Preauth type -123 is the test preauth module type; 133 is FAST
# PA-FX-COOKIE; 2 is encrypted timestamp.
# Test normal preauth flow.
mark('normal')
# NOTE(review): in these expected_trace tuples, entries beginning with '/'
# appear to use a different (looser) matching mode than the plain entries --
# TODO confirm the exact semantics against the k5test framework docs.
msgs = ('Sending unauthenticated request',
        '/Additional pre-authentication required',
        'Preauthenticating using KDC method data',
        'Processing preauth types:',
        'Preauth module test (-123) (real) returned: 0/Success',
        'Produced preauth for next request: PA-FX-COOKIE (133), -123',
        'Decrypted AS reply')
realm.run(['./icred', realm.user_princ, password('user')],
          expected_msg='testval', expected_trace=msgs)
# Test successful optimistic preauth.
mark('optimistic')
expected_trace = ('Attempting optimistic preauth',
                  'Processing preauth types: -123',
                  'Preauth module test (-123) (real) returned: 0/Success',
                  'Produced preauth for next request: -123',
                  'Decrypted AS reply')
realm.run(['./icred', '-o', '-123', realm.user_princ, password('user')],
          expected_trace=expected_trace)
# Test optimistic preauth failing on client, falling back to encrypted
# timestamp.
mark('optimistic (client failure)')
msgs = ('Attempting optimistic preauth',
        'Processing preauth types: -123',
        '/induced optimistic fail',
        'Sending unauthenticated request',
        '/Additional pre-authentication required',
        'Preauthenticating using KDC method data',
        'Processing preauth types:',
        'Encrypted timestamp (for ',
        'module encrypted_timestamp (2) (real) returned: 0/Success',
        'preauth for next request: PA-FX-COOKIE (133), PA-ENC-TIMESTAMP (2)',
        'Decrypted AS reply')
realm.run(['./icred', '-o', '-123', '-X', 'fail_optimistic', realm.user_princ,
           password('user')], expected_trace=msgs)
# Test optimistic preauth failing on KDC, falling back to encrypted
# timestamp.
mark('optimistic (KDC failure)')
realm.run([kadminl, 'setstr', realm.user_princ, 'failopt', 'yes'])
msgs = ('Attempting optimistic preauth',
        'Processing preauth types: -123',
        'Preauth module test (-123) (real) returned: 0/Success',
        'Produced preauth for next request: -123',
        '/Preauthentication failed',
        'Preauthenticating using KDC method data',
        'Processing preauth types:',
        'Encrypted timestamp (for ',
        'module encrypted_timestamp (2) (real) returned: 0/Success',
        'preauth for next request: PA-FX-COOKIE (133), PA-ENC-TIMESTAMP (2)',
        'Decrypted AS reply')
realm.run(['./icred', '-o', '-123', realm.user_princ, password('user')],
          expected_trace=msgs)
# Leave failopt set for the next test.
# Test optimistic preauth failing on KDC, stopping because the test
# module disabled fallback.
mark('optimistic (KDC failure, no fallback)')
msgs = ('Attempting optimistic preauth',
        'Processing preauth types: -123',
        'Preauth module test (-123) (real) returned: 0/Success',
        'Produced preauth for next request: -123',
        '/Preauthentication failed')
realm.run(['./icred', '-X', 'disable_fallback', '-o', '-123', realm.user_princ,
           password('user')], expected_code=1,
          expected_msg='Preauthentication failed', expected_trace=msgs)
realm.run([kadminl, 'delstr', realm.user_princ, 'failopt'])
# Test KDC_ERR_MORE_PREAUTH_DATA_REQUIRED and secure cookies.
mark('second round-trip')
realm.run([kadminl, 'setstr', realm.user_princ, '2rt', 'secondtrip'])
msgs = ('Sending unauthenticated request',
        '/Additional pre-authentication required',
        'Preauthenticating using KDC method data',
        'Processing preauth types:',
        'Preauth module test (-123) (real) returned: 0/Success',
        'Produced preauth for next request: PA-FX-COOKIE (133), -123',
        '/More preauthentication data is required',
        'Continuing preauth mech -123',
        'Processing preauth types: -123, PA-FX-COOKIE (133)',
        'Produced preauth for next request: PA-FX-COOKIE (133), -123',
        'Decrypted AS reply')
realm.run(['./icred', realm.user_princ, password('user')],
          expected_msg='2rt: secondtrip', expected_trace=msgs)
# Test client-side failure after KDC_ERR_MORE_PREAUTH_DATA_REQUIRED,
# falling back to encrypted timestamp.
mark('second round-trip (client failure)')
msgs = ('Sending unauthenticated request',
        '/Additional pre-authentication required',
        'Preauthenticating using KDC method data',
        'Processing preauth types:',
        'Preauth module test (-123) (real) returned: 0/Success',
        'Produced preauth for next request: PA-FX-COOKIE (133), -123',
        '/More preauthentication data is required',
        'Continuing preauth mech -123',
        'Processing preauth types: -123, PA-FX-COOKIE (133)',
        '/induced 2rt fail',
        'Preauthenticating using KDC method data',
        'Processing preauth types:',
        'Encrypted timestamp (for ',
        'module encrypted_timestamp (2) (real) returned: 0/Success',
        'preauth for next request: PA-FX-COOKIE (133), PA-ENC-TIMESTAMP (2)',
        'Decrypted AS reply')
realm.run(['./icred', '-X', 'fail_2rt', realm.user_princ, password('user')],
          expected_msg='2rt: secondtrip', expected_trace=msgs)
# Test client-side failure after KDC_ERR_MORE_PREAUTH_DATA_REQUIRED,
# stopping because the test module disabled fallback.
mark('second round-trip (client failure, no fallback)')
msgs = ('Sending unauthenticated request',
        '/Additional pre-authentication required',
        'Preauthenticating using KDC method data',
        'Processing preauth types:',
        'Preauth module test (-123) (real) returned: 0/Success',
        'Produced preauth for next request: PA-FX-COOKIE (133), -123',
        '/More preauthentication data is required',
        'Continuing preauth mech -123',
        'Processing preauth types: -123, PA-FX-COOKIE (133)',
        '/induced 2rt fail')
realm.run(['./icred', '-X', 'fail_2rt', '-X', 'disable_fallback',
           realm.user_princ, password('user')], expected_code=1,
          expected_msg='Pre-authentication failed: induced 2rt fail',
          expected_trace=msgs)
# Test KDC-side failure after KDC_ERR_MORE_PREAUTH_DATA_REQUIRED,
# falling back to encrypted timestamp.
mark('second round-trip (KDC failure)')
realm.run([kadminl, 'setstr', realm.user_princ, 'fail2rt', 'yes'])
msgs = ('Sending unauthenticated request',
        '/Additional pre-authentication required',
        'Preauthenticating using KDC method data',
        'Processing preauth types:',
        'Preauth module test (-123) (real) returned: 0/Success',
        'Produced preauth for next request: PA-FX-COOKIE (133), -123',
        '/More preauthentication data is required',
        'Continuing preauth mech -123',
        'Processing preauth types: -123, PA-FX-COOKIE (133)',
        'Preauth module test (-123) (real) returned: 0/Success',
        'Produced preauth for next request: PA-FX-COOKIE (133), -123',
        '/Preauthentication failed',
        'Preauthenticating using KDC method data',
        'Processing preauth types:',
        'Encrypted timestamp (for ',
        'module encrypted_timestamp (2) (real) returned: 0/Success',
        'preauth for next request: PA-FX-COOKIE (133), PA-ENC-TIMESTAMP (2)',
        'Decrypted AS reply')
realm.run(['./icred', realm.user_princ, password('user')],
          expected_msg='2rt: secondtrip', expected_trace=msgs)
# Leave fail2rt set for the next test.
# Test KDC-side failure after KDC_ERR_MORE_PREAUTH_DATA_REQUIRED,
# stopping because the test module disabled fallback.
mark('second round-trip (KDC failure, no fallback)')
msgs = ('Sending unauthenticated request',
        '/Additional pre-authentication required',
        'Preauthenticating using KDC method data',
        'Processing preauth types:',
        'Preauth module test (-123) (real) returned: 0/Success',
        'Produced preauth for next request: PA-FX-COOKIE (133), -123',
        '/More preauthentication data is required',
        'Continuing preauth mech -123',
        'Processing preauth types: -123, PA-FX-COOKIE (133)',
        'Preauth module test (-123) (real) returned: 0/Success',
        'Produced preauth for next request: PA-FX-COOKIE (133), -123',
        '/Preauthentication failed')
realm.run(['./icred', '-X', 'disable_fallback',
           realm.user_princ, password('user')], expected_code=1,
          expected_msg='Preauthentication failed', expected_trace=msgs)
realm.run([kadminl, 'delstr', realm.user_princ, 'fail2rt'])
# Test tryagain flow by inducing a KDC_ERR_ENCTYPE_NOSUPP error on the KDC.
mark('tryagain')
realm.run([kadminl, 'setstr', realm.user_princ, 'err', 'testagain'])
msgs = ('Sending unauthenticated request',
        '/Additional pre-authentication required',
        'Preauthenticating using KDC method data',
        'Processing preauth types:',
        'Preauth module test (-123) (real) returned: 0/Success',
        'Produced preauth for next request: PA-FX-COOKIE (133), -123',
        '/KDC has no support for encryption type',
        'Recovering from KDC error 14 using preauth mech -123',
        'Preauth tryagain input types (-123): -123, PA-FX-COOKIE (133)',
        'Preauth module test (-123) tryagain returned: 0/Success',
        'Followup preauth for next request: -123, PA-FX-COOKIE (133)',
        'Decrypted AS reply')
realm.run(['./icred', realm.user_princ, password('user')],
          expected_msg='tryagain: testagain', expected_trace=msgs)
# Test a client-side tryagain failure, falling back to encrypted
# timestamp.
mark('tryagain (client failure)')
msgs = ('Sending unauthenticated request',
        '/Additional pre-authentication required',
        'Preauthenticating using KDC method data',
        'Processing preauth types:',
        'Preauth module test (-123) (real) returned: 0/Success',
        'Produced preauth for next request: PA-FX-COOKIE (133), -123',
        '/KDC has no support for encryption type',
        'Recovering from KDC error 14 using preauth mech -123',
        'Preauth tryagain input types (-123): -123, PA-FX-COOKIE (133)',
        '/induced tryagain fail',
        'Preauthenticating using KDC method data',
        'Processing preauth types:',
        'Encrypted timestamp (for ',
        'module encrypted_timestamp (2) (real) returned: 0/Success',
        'preauth for next request: PA-FX-COOKIE (133), PA-ENC-TIMESTAMP (2)',
        'Decrypted AS reply')
realm.run(['./icred', '-X', 'fail_tryagain', realm.user_princ,
           password('user')], expected_trace=msgs)
# Test a client-side tryagain failure, stopping because the test
# module disabled fallback.
mark('tryagain (client failure, no fallback)')
msgs = ('Sending unauthenticated request',
        '/Additional pre-authentication required',
        'Preauthenticating using KDC method data',
        'Processing preauth types:',
        'Preauth module test (-123) (real) returned: 0/Success',
        'Produced preauth for next request: PA-FX-COOKIE (133), -123',
        '/KDC has no support for encryption type',
        'Recovering from KDC error 14 using preauth mech -123',
        'Preauth tryagain input types (-123): -123, PA-FX-COOKIE (133)',
        '/induced tryagain fail')
realm.run(['./icred', '-X', 'fail_tryagain', '-X', 'disable_fallback',
           realm.user_princ, password('user')], expected_code=1,
          expected_msg='KDC has no support for encryption type',
          expected_trace=msgs)
# Test that multiple stepwise initial creds operations can be
# performed with the same krb5_context, with proper tracking of
# clpreauth module request handles.
mark('interleaved')
realm.run([kadminl, 'addprinc', '-pw', 'pw', 'u1'])
realm.run([kadminl, 'addprinc', '+requires_preauth', '-pw', 'pw', 'u2'])
realm.run([kadminl, 'addprinc', '+requires_preauth', '-pw', 'pw', 'u3'])
realm.run([kadminl, 'setstr', 'u2', '2rt', 'extra'])
out = realm.run(['./icinterleave', 'pw', 'u1', 'u2', 'u3'])
# icinterleave steps three AS exchanges in lockstep; the exact ordering of
# "step"/"finish" lines below encodes which principal needed how many
# round trips (u1: none, u2: two, u3: one).
if out != ('step 1\nstep 2\nstep 3\nstep 1\nfinish 1\nstep 2\nno attr\n'
           'step 3\nno attr\nstep 2\n2rt: extra\nstep 3\nfinish 3\nstep 2\n'
           'finish 2\n'):
    fail('unexpected output from icinterleave')
success('Pre-authentication framework tests')
| 49.349624
| 79
| 0.665346
| 1,576
| 13,127
| 5.475888
| 0.120558
| 0.025029
| 0.031286
| 0.039166
| 0.829316
| 0.819235
| 0.793627
| 0.763152
| 0.728969
| 0.692005
| 0
| 0.032068
| 0.199436
| 13,127
| 265
| 80
| 49.535849
| 0.789133
| 0.11564
| 0
| 0.709677
| 0
| 0.032258
| 0.616049
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.069124
| 0.004608
| 0
| 0.004608
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 9
|
d96bfa9969aa7535edbc947eef5fd3836a546bc4
| 61,602
|
py
|
Python
|
email_client.py
|
HideyoshiNakazone/duo-db-client
|
d847f59c4b4243cf359e2fca411f0e7a6864493b
|
[
"MIT"
] | null | null | null |
email_client.py
|
HideyoshiNakazone/duo-db-client
|
d847f59c4b4243cf359e2fca411f0e7a6864493b
|
[
"MIT"
] | null | null | null |
email_client.py
|
HideyoshiNakazone/duo-db-client
|
d847f59c4b4243cf359e2fca411f0e7a6864493b
|
[
"MIT"
] | null | null | null |
from google_auth_oauthlib.flow import InstalledAppFlow
from googleapiclient.discovery import build
from datetime import datetime, timedelta
from email.mime.multipart import MIMEMultipart
from email.mime.image import MIMEImage
from email.mime.text import MIMEText
from email.header import Header
import psycopg2.extensions
import psycopg2
import smtplib
import pickle
import select
import os
# SMTP credentials come from the environment; both are None if unset.
EMAIL_ADDRESS = os.environ.get('EMAIL_ADDRESS')
EMAIL_PASSWORD = os.environ.get('EMAIL_PASSWORD')
# SECURITY(review): database credentials are hard-coded and committed to the
# repository; they should be moved to environment variables like the e-mail
# credentials above, and the exposed password rotated.
db_conn = psycopg2.connect(
host = "localhost",
port = "6432",
dbname = "server",
user = "hideyoshi",
password = "vhnb2901"
)
# Autocommit so LISTEN takes effect immediately without an open transaction.
db_conn.set_isolation_level(psycopg2.extensions.ISOLATION_LEVEL_AUTOCOMMIT)
db = db_conn.cursor()
# Subscribe to the 'novo_pedido' (new order) notification channel; the main
# loop below polls the connection for these notifications.
db.execute('LISTEN novo_pedido')
while 1:
if not select.select([db_conn], [], [], 5) == ([], [], []):
db_conn.poll()
while db_conn.notifies:
notify = db_conn.notifies.pop()
id_pedido, c_cpf, id_servico, quantidade, valor_total, date = notify.payload.replace('(','').replace(')','').split(',')
year,month,day = date.replace('"','').split()[0].split('-')
hour,minute,sec = date.replace('"','').split()[1].split(':')
sec = sec.split('.')[0]
date = datetime(int(year),int(month),int(day),int(hour),int(minute),int(sec))
months = [
"Janeiro",
"Fevereiro",
"Março",
"Abril",
"Maio",
"Junho",
"Julho",
"Augosto",
"Setembro",
"Outubro",
"Novembro",
"Dezembro"]
month = months[date.month]
week = [
"Segunda",
"Terça",
"Quarta",
"Quinta",
"Sexta",
"Sábado",
"Domingo"]
dow = week[date.weekday()]
db.execute('SELECT DISTINCT nome FROM cliente, pedido WHERE cliente.cpf = pedido.cpf and pedido.id ='+id_pedido+';')
c_nome = str(db.fetchall()).replace('[','').replace('(','').replace(')','').replace(']','').replace(',','').replace("'",'')
db.execute('SELECT DISTINCT email FROM cliente, pedido WHERE cliente.cpf = pedido.cpf and pedido.id ='+id_pedido+';')
c_email = str(db.fetchall()).replace('[','').replace('(','').replace(')','').replace(']','').replace(',','').replace("'",'')
db.execute('SELECT DISTINCT nome FROM servico, pedido WHERE servico.id = id_servico AND pedido.id = '+id_pedido+';')
p_nome = str(db.fetchall()).replace('[','').replace('(','').replace(')','').replace(']','').replace(',','').replace("'",'')
db.execute("SELECT DISTINCT endereco.logradouro,endereco.complemento,endereco.numero,endereco.cidade,' - ',endereco.estado FROM endereco, cliente, pedido WHERE cliente.cpf = pedido.cpf AND cliente.id_endereco = endereco.id AND pedido.id = "+id_pedido+";")
endereco = str(db.fetchall()).replace('[','').replace('(','').replace(')','').replace(']','').replace(',','').replace("'",'')
msgRoot = MIMEMultipart('related')
msgRoot['Subject'] = 'Compra realizada com Sucesso'
msgRoot['From'] = EMAIL_ADDRESS
msgRoot['To'] = c_email
message = """\
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional //EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
<html xmlns="http://www.w3.org/1999/xhtml" xmlns:o="urn:schemas-microsoft-com:office:office" xmlns:v="urn:schemas-microsoft-com:vml">
<head>
<!--[if gte mso 9]><xml><o:OfficeDocumentSettings><o:AllowPNG/><o:PixelsPerInch>96</o:PixelsPerInch></o:OfficeDocumentSettings></xml><![endif]-->
<meta content="text/html; charset=utf-8" http-equiv="Content-Type"/>
<meta content="width=device-width" name="viewport"/>
<!--[if !mso]><!-->
<meta content="IE=edge" http-equiv="X-UA-Compatible"/>
<!--<![endif]-->
<title></title>
<!--[if !mso]><!-->
<link href="https://fonts.googleapis.com/css?family=Oswald" rel="stylesheet" type="text/css"/>
<link href="https://fonts.googleapis.com/css?family=Open+Sans" rel="stylesheet" type="text/css"/>
<link href="https://fonts.googleapis.com/css?family=Merriweather" rel="stylesheet" type="text/css"/>
<link href="https://fonts.googleapis.com/css?family=Montserrat" rel="stylesheet" type="text/css"/>
<link href="https://fonts.googleapis.com/css?family=Source+Sans+Pro" rel="stylesheet" type="text/css"/>
<!--<![endif]-->
<style type="text/css">
body {
margin: 0;
padding: 0;
}
table,
td,
tr {
vertical-align: top;
border-collapse: collapse;
}
* {
line-height: inherit;
}
a[x-apple-data-detectors=true] {
color: inherit !important;
text-decoration: none !important;
}
</style>
<style id="media-query" type="text/css">
@media (max-width: 670px) {
.block-grid,
.col {
min-width: 320px !important;
max-width: 100% !important;
display: block !important;
}
.block-grid {
width: 100% !important;
}
.col {
width: 100% !important;
}
.col>div {
margin: 0 auto;
}
img.fullwidth,
img.fullwidthOnMobile {
max-width: 100% !important;
}
.no-stack .col {
min-width: 0 !important;
display: table-cell !important;
}
.no-stack.two-up .col {
width: 50% !important;
}
.no-stack .col.num4 {
width: 33% !important;
}
.no-stack .col.num8 {
width: 66% !important;
}
.no-stack .col.num4 {
width: 33% !important;
}
.no-stack .col.num3 {
width: 25% !important;
}
.no-stack .col.num6 {
width: 50% !important;
}
.no-stack .col.num9 {
width: 75% !important;
}
.video-block {
max-width: none !important;
}
.mobile_hide {
min-height: 0px;
max-height: 0px;
max-width: 0px;
display: none;
overflow: hidden;
font-size: 0px;
}
.desktop_hide {
display: block !important;
max-height: none !important;
}
}
</style>
</head>
<body class="clean-body" style="margin: 0; padding: 0; -webkit-text-size-adjust: 100%; background-color: #482c71;">
<!--[if IE]><div class="ie-browser"><![endif]-->
<table bgcolor="#482c71" cellpadding="0" cellspacing="0" class="nl-container" role="presentation" style="table-layout: fixed; vertical-align: top; min-width: 320px; Margin: 0 auto; border-spacing: 0; border-collapse: collapse; mso-table-lspace: 0pt; mso-table-rspace: 0pt; background-color: #482c71; width: 100%;" valign="top" width="100%">
<tbody>
<tr style="vertical-align: top;" valign="top">
<td style="word-break: break-word; vertical-align: top;" valign="top">
<!--[if (mso)|(IE)]><table width="100%" cellpadding="0" cellspacing="0" border="0"><tr><td align="center" style="background-color:#482c71"><![endif]-->
<div style="background-color:transparent;">
<div class="block-grid" style="Margin: 0 auto; min-width: 320px; max-width: 650px; overflow-wrap: break-word; word-wrap: break-word; word-break: break-word; background-color: transparent;">
<div style="border-collapse: collapse;display: table;width: 100%;background-color:transparent;">
<!--[if (mso)|(IE)]><table width="100%" cellpadding="0" cellspacing="0" border="0" style="background-color:transparent;"><tr><td align="center"><table cellpadding="0" cellspacing="0" border="0" style="width:650px"><tr class="layout-full-width" style="background-color:transparent"><![endif]-->
<!--[if (mso)|(IE)]><td align="center" width="650" style="background-color:transparent;width:650px; border-top: 0px solid transparent; border-left: 0px solid transparent; border-bottom: 0px solid transparent; border-right: 0px solid transparent;" valign="top"><table width="100%" cellpadding="0" cellspacing="0" border="0"><tr><td style="padding-right: 0px; padding-left: 0px; padding-top:0px; padding-bottom:0px;"><![endif]-->
<div class="col num12" style="min-width: 320px; max-width: 650px; display: table-cell; vertical-align: top; width: 650px;">
<div style="width:100% !important;">
<!--[if (!mso)&(!IE)]><!-->
<div style="border-top:0px solid transparent; border-left:0px solid transparent; border-bottom:0px solid transparent; border-right:0px solid transparent; padding-top:0px; padding-bottom:0px; padding-right: 0px; padding-left: 0px;">
<!--<![endif]-->
<div class="mobile_hide">
<table border="0" cellpadding="0" cellspacing="0" class="divider" role="presentation" style="table-layout: fixed; vertical-align: top; border-spacing: 0; border-collapse: collapse; mso-table-lspace: 0pt; mso-table-rspace: 0pt; min-width: 100%; -ms-text-size-adjust: 100%; -webkit-text-size-adjust: 100%;" valign="top" width="100%">
<tbody>
<tr style="vertical-align: top;" valign="top">
<td class="divider_inner" style="word-break: break-word; vertical-align: top; min-width: 100%; -ms-text-size-adjust: 100%; -webkit-text-size-adjust: 100%; padding-top: 10px; padding-right: 10px; padding-bottom: 10px; padding-left: 10px;" valign="top">
<table align="center" border="0" cellpadding="0" cellspacing="0" class="divider_content" height="15" role="presentation" style="table-layout: fixed; vertical-align: top; border-spacing: 0; border-collapse: collapse; mso-table-lspace: 0pt; mso-table-rspace: 0pt; border-top: 0px solid transparent; height: 15px; width: 100%;" valign="top" width="100%">
<tbody>
<tr style="vertical-align: top;" valign="top">
<td height="15" style="word-break: break-word; vertical-align: top; -ms-text-size-adjust: 100%; -webkit-text-size-adjust: 100%;" valign="top"><span></span></td>
</tr>
</tbody>
</table>
</td>
</tr>
</tbody>
</table>
</div>
<!--[if (!mso)&(!IE)]><!-->
</div>
<!--<![endif]-->
</div>
</div>
<!--[if (mso)|(IE)]></td></tr></table><![endif]-->
<!--[if (mso)|(IE)]></td></tr></table></td></tr></table><![endif]-->
</div>
</div>
</div>
<div style="background-color:transparent;">
<div class="block-grid" style="Margin: 0 auto; min-width: 320px; max-width: 650px; overflow-wrap: break-word; word-wrap: break-word; word-break: break-word; background-color: transparent;">
<div style="border-collapse: collapse;display: table;width: 100%;background-color:transparent;">
<!--[if (mso)|(IE)]><table width="100%" cellpadding="0" cellspacing="0" border="0" style="background-color:transparent;"><tr><td align="center"><table cellpadding="0" cellspacing="0" border="0" style="width:650px"><tr class="layout-full-width" style="background-color:transparent"><![endif]-->
<!--[if (mso)|(IE)]><td align="center" width="650" style="background-color:transparent;width:650px; border-top: 1px solid #C879F1; border-left: 1px solid #C879F1; border-bottom: 0px solid transparent; border-right: 1px solid #C879F1;" valign="top"><table width="100%" cellpadding="0" cellspacing="0" border="0"><tr><td style="padding-right: 0px; padding-left: 0px; padding-top:10px; padding-bottom:0px;"><![endif]-->
<div class="col num12" style="min-width: 320px; max-width: 650px; display: table-cell; vertical-align: top; width: 648px;">
<div style="width:100% !important;">
<!--[if (!mso)&(!IE)]><!-->
<div style="border-top:1px solid #C879F1; border-left:1px solid #C879F1; border-bottom:0px solid transparent; border-right:1px solid #C879F1; padding-top:10px; padding-bottom:0px; padding-right: 0px; padding-left: 0px;">
<!--<![endif]-->
<div align="center" class="img-container center autowidth fixedwidth" style="padding-right: 15px;padding-left: 15px;">
<!--[if mso]><table width="100%" cellpadding="0" cellspacing="0" border="0"><tr style="line-height:0px"><td style="padding-right: 15px;padding-left: 15px;" align="center"><![endif]--><img align="center" alt="Image" border="0" class="center autowidth fixedwidth" src="cid:swirls_1.png" style="text-decoration: none; -ms-interpolation-mode: bicubic; height: auto; border: 0; width: 100%; max-width: 618px; display: block;" title="Image" width="618"/>
<div style="font-size:1px;line-height:20px"> </div>
<!--[if mso]></td></tr></table><![endif]-->
</div>
<!--[if (!mso)&(!IE)]><!-->
</div>
<!--<![endif]-->
</div>
</div>
<!--[if (mso)|(IE)]></td></tr></table><![endif]-->
<!--[if (mso)|(IE)]></td></tr></table></td></tr></table><![endif]-->
</div>
</div>
</div>
<div style="background-color:transparent;">
<div class="block-grid" style="Margin: 0 auto; min-width: 320px; max-width: 650px; overflow-wrap: break-word; word-wrap: break-word; word-break: break-word; background-color: transparent;">
<div style="border-collapse: collapse;display: table;width: 100%;background-color:transparent;">
<!--[if (mso)|(IE)]><table width="100%" cellpadding="0" cellspacing="0" border="0" style="background-color:transparent;"><tr><td align="center"><table cellpadding="0" cellspacing="0" border="0" style="width:650px"><tr class="layout-full-width" style="background-color:transparent"><![endif]-->
<!--[if (mso)|(IE)]><td align="center" width="650" style="background-color:transparent;width:650px; border-top: 0px solid transparent; border-left: 1px solid #C879F1; border-bottom: 0px solid transparent; border-right: 1px solid #C879F1;" valign="top"><table width="100%" cellpadding="0" cellspacing="0" border="0"><tr><td style="padding-right: 0px; padding-left: 0px; padding-top:0px; padding-bottom:5px;"><![endif]-->
<div class="col num12" style="min-width: 320px; max-width: 650px; display: table-cell; vertical-align: top; width: 648px;">
<div style="width:100% !important;">
<!--[if (!mso)&(!IE)]><!-->
<div style="border-top:0px solid transparent; border-left:1px solid #C879F1; border-bottom:0px solid transparent; border-right:1px solid #C879F1; padding-top:0px; padding-bottom:5px; padding-right: 0px; padding-left: 0px;">
<!--<![endif]-->
<div align="center" class="img-container center fixedwidth" style="padding-right: 0px;padding-left: 0px;">
<!--[if mso]><table width="100%" cellpadding="0" cellspacing="0" border="0"><tr style="line-height:0px"><td style="padding-right: 0px;padding-left: 0px;" align="center"><![endif]--><img align="center" alt="Image" border="0" class="center fixedwidth" src="cid:logoflower.png" style="text-decoration: none; -ms-interpolation-mode: bicubic; height: auto; border: 0; width: 100%; max-width: 259px; display: block;" title="Image" width="259"/>
<div style="font-size:1px;line-height:20px"> </div>
<!--[if mso]></td></tr></table><![endif]-->
</div>
<!--[if (!mso)&(!IE)]><!-->
</div>
<!--<![endif]-->
</div>
</div>
<!--[if (mso)|(IE)]></td></tr></table><![endif]-->
<!--[if (mso)|(IE)]></td></tr></table></td></tr></table><![endif]-->
</div>
</div>
</div>
<div style="background-color:transparent;">
<div class="block-grid" style="Margin: 0 auto; min-width: 320px; max-width: 650px; overflow-wrap: break-word; word-wrap: break-word; word-break: break-word; background-color: transparent;">
<div style="border-collapse: collapse;display: table;width: 100%;background-color:transparent;">
<!--[if (mso)|(IE)]><table width="100%" cellpadding="0" cellspacing="0" border="0" style="background-color:transparent;"><tr><td align="center"><table cellpadding="0" cellspacing="0" border="0" style="width:650px"><tr class="layout-full-width" style="background-color:transparent"><![endif]-->
<!--[if (mso)|(IE)]><td align="center" width="650" style="background-color:transparent;width:650px; border-top: 0px solid transparent; border-left: 1px solid #C879F1; border-bottom: 0px solid transparent; border-right: 1px solid #C879F1;" valign="top"><table width="100%" cellpadding="0" cellspacing="0" border="0"><tr><td style="padding-right: 0px; padding-left: 0px; padding-top:5px; padding-bottom:40px;"><![endif]-->
<div class="col num12" style="min-width: 320px; max-width: 650px; display: table-cell; vertical-align: top; width: 648px;">
<div style="width:100% !important;">
<!--[if (!mso)&(!IE)]><!-->
<div style="border-top:0px solid transparent; border-left:1px solid #C879F1; border-bottom:0px solid transparent; border-right:1px solid #C879F1; padding-top:5px; padding-bottom:40px; padding-right: 0px; padding-left: 0px;">
<!--<![endif]-->
<table border="0" cellpadding="0" cellspacing="0" class="divider" role="presentation" style="table-layout: fixed; vertical-align: top; border-spacing: 0; border-collapse: collapse; mso-table-lspace: 0pt; mso-table-rspace: 0pt; min-width: 100%; -ms-text-size-adjust: 100%; -webkit-text-size-adjust: 100%;" valign="top" width="100%">
<tbody>
<tr style="vertical-align: top;" valign="top">
<td class="divider_inner" style="word-break: break-word; vertical-align: top; min-width: 100%; -ms-text-size-adjust: 100%; -webkit-text-size-adjust: 100%; padding-top: 10px; padding-right: 10px; padding-bottom: 10px; padding-left: 10px;" valign="top">
<table align="center" border="0" cellpadding="0" cellspacing="0" class="divider_content" role="presentation" style="table-layout: fixed; vertical-align: top; border-spacing: 0; border-collapse: collapse; mso-table-lspace: 0pt; mso-table-rspace: 0pt; border-top: 1px dotted #C879F1; width: 95%;" valign="top" width="95%">
<tbody>
<tr style="vertical-align: top;" valign="top">
<td style="word-break: break-word; vertical-align: top; -ms-text-size-adjust: 100%; -webkit-text-size-adjust: 100%;" valign="top"><span></span></td>
</tr>
</tbody>
</table>
</td>
</tr>
</tbody>
</table>
<!--[if mso]><table width="100%" cellpadding="0" cellspacing="0" border="0"><tr><td style="padding-right: 40px; padding-left: 40px; padding-top: 30px; padding-bottom: 15px; font-family: serif"><![endif]-->
<div style="color:#E3E3E3;font-family:'Merriwheater', 'Georgia', serif;line-height:1.5;padding-top:30px;padding-right:40px;padding-bottom:15px;padding-left:40px;">
<div style="line-height: 1.5; font-size: 12px; font-family: 'Merriwheater', 'Georgia', serif; color: #E3E3E3; mso-line-height-alt: 18px;">
<p style="line-height: 1.5; word-break: break-word; text-align: center; font-family: Merriwheater, Georgia, serif; font-size: 30px; mso-line-height-alt: 45px; margin: 0;"><span style="font-size: 30px; color: #00ad99;">SUA RESERVA</span></p>
</div>
</div>
<!--[if mso]></td></tr></table><![endif]-->
<!--[if mso]><table width="100%" cellpadding="0" cellspacing="0" border="0"><tr><td style="padding-right: 40px; padding-left: 40px; padding-top: 5px; padding-bottom: 20px; font-family: 'Trebuchet MS', Tahoma, sans-serif"><![endif]-->
<div style="color:#E3E3E3;font-family:'Montserrat', 'Trebuchet MS', 'Lucida Grande', 'Lucida Sans Unicode', 'Lucida Sans', Tahoma, sans-serif;line-height:1.5;padding-top:5px;padding-right:40px;padding-bottom:20px;padding-left:40px;">
<div style="font-size: 12px; line-height: 1.5; font-family: 'Montserrat', 'Trebuchet MS', 'Lucida Grande', 'Lucida Sans Unicode', 'Lucida Sans', Tahoma, sans-serif; color: #E3E3E3; mso-line-height-alt: 18px;">
<p style="font-size: 20px; line-height: 1.5; text-align: center; word-break: break-word; font-family: Merriwheater, Georgia, serif; mso-line-height-alt: 30px; margin: 0;"><span style="font-size: 20px;"><span style="font-size: 18px;">"""+dow+", "+day+" "+month+" "+year+"""</span><br/><span style="font-size: 18px;">Serviço: """+p_nome+"""</span><br/><span style="font-size: 18px;">Duração: """+date.strftime("%H:%M")+" - "+(date+timedelta(hours=2)).strftime("%H:%M")+"""</span><br/></span></p>
<div style="color:#E3E3E3;font-family:'Merriwheater', 'Georgia', serif;line-height:1.5;padding-top:30px;padding-right:40px;padding-bottom:15px;padding-left:40px;">
<div style="line-height: 1.5; font-size: 12px; font-family: 'Merriwheater', 'Georgia', serif; color: #E3E3E3; mso-line-height-alt: 18px;">
<p style="line-height: 1.5; word-break: break-word; text-align: center; font-family: Merriwheater, Georgia, serif; font-size: 30px; mso-line-height-alt: 45px; margin: 0;"><span style="font-size: 30px; color: #00ad99;">VALOR</span></p>
</div>
</div>
<!--[if mso]></td></tr></table><![endif]-->
<!--[if mso]><table width="100%" cellpadding="0" cellspacing="0" border="0"><tr><td style="padding-right: 40px; padding-left: 40px; padding-top: 5px; padding-bottom: 20px; font-family: 'Trebuchet MS', Tahoma, sans-serif"><![endif]-->
<div style="color:#E3E3E3;font-family:'Montserrat', 'Trebuchet MS', 'Lucida Grande', 'Lucida Sans Unicode', 'Lucida Sans', Tahoma, sans-serif;line-height:1.5;padding-top:5px;padding-right:40px;padding-bottom:20px;padding-left:40px;">
<div style="font-size: 12px; line-height: 1.5; font-family: 'Montserrat', 'Trebuchet MS', 'Lucida Grande', 'Lucida Sans Unicode', 'Lucida Sans', Tahoma, sans-serif; color: #E3E3E3; mso-line-height-alt: 18px;">
<p style="font-size: 20px; line-height: 1.5; text-align: center; word-break: break-word; font-family: Merriwheater, Georgia, serif; mso-line-height-alt: 30px; margin: 0;"><span style="font-size: 20px;"><span style="font-size: 18px;">R$ """+valor_total+"""</span><br/></span></p>
</div>
</div>
<!--[if mso]></td></tr></table><![endif]-->
<!--[if (!mso)&(!IE)]><!-->
</div>
<!--<![endif]-->
</div>
</div>
<!--[if (mso)|(IE)]></td></tr></table><![endif]-->
<!--[if (mso)|(IE)]></td></tr></table></td></tr></table><![endif]-->
</div>
</div>
</div>
<div style="background-color:transparent;">
<div class="block-grid" style="Margin: 0 auto; min-width: 320px; max-width: 650px; overflow-wrap: break-word; word-wrap: break-word; word-break: break-word; background-color: transparent;">
<div style="border-collapse: collapse;display: table;width: 100%;background-color:transparent;">
<!--[if (mso)|(IE)]><table width="100%" cellpadding="0" cellspacing="0" border="0" style="background-color:transparent;"><tr><td align="center"><table cellpadding="0" cellspacing="0" border="0" style="width:650px"><tr class="layout-full-width" style="background-color:transparent"><![endif]-->
<!--[if (mso)|(IE)]><td align="center" width="650" style="background-color:transparent;width:650px; border-top: 0px solid transparent; border-left: 1px solid #C879F1; border-bottom: 0px solid transparent; border-right: 1px solid #C879F1;" valign="top"><table width="100%" cellpadding="0" cellspacing="0" border="0"><tr><td style="padding-right: 0px; padding-left: 0px; padding-top:5px; padding-bottom:5px;"><![endif]-->
<div class="col num12" style="min-width: 320px; max-width: 650px; display: table-cell; vertical-align: top; width: 648px;">
<div style="width:100% !important;">
<!--[if (!mso)&(!IE)]><!-->
<div style="border-top:0px solid transparent; border-left:1px solid #C879F1; border-bottom:0px solid transparent; border-right:1px solid #C879F1; padding-top:5px; padding-bottom:5px; padding-right: 0px; padding-left: 0px;">
<!--<![endif]-->
<div align="center" class="img-container center autowidth fullwidth" style="padding-right: 0px;padding-left: 0px;">
<!--[if mso]><table width="100%" cellpadding="0" cellspacing="0" border="0"><tr style="line-height:0px"><td style="padding-right: 0px;padding-left: 0px;" align="center"><![endif]-->
<div style="font-size:1px;line-height:10px"> </div><img align="center" alt="Image" border="0" class="center autowidth fullwidth" src="cid:diveinto.jpeg" style="text-decoration: none; -ms-interpolation-mode: bicubic; height: auto; border: 0; width: 100%; max-width: 648px; display: block;" title="Image" width="648"/>
<!--[if mso]></td></tr></table><![endif]-->
</div>
<div class="mobile_hide">
<!--[if mso]><table width="100%" cellpadding="0" cellspacing="0" border="0"><tr><td style="padding-right: 10px; padding-left: 10px; padding-top: 55px; padding-bottom: 0px; font-family: serif"><![endif]-->
<div style="color:#FFFFFF;font-family:'Merriwheater', 'Georgia', serif;line-height:1.2;padding-top:55px;padding-right:10px;padding-bottom:0px;padding-left:10px;">
<div style="line-height: 1.2; font-family: 'Merriwheater', 'Georgia', serif; font-size: 12px; color: #FFFFFF; mso-line-height-alt: 14px;">
<p style="font-size: 38px; line-height: 1.2; text-align: center; word-break: break-word; font-family: Merriwheater, Georgia, serif; mso-line-height-alt: 46px; margin: 0;"><span style="font-size: 38px;">DIVE INTO RELAX</span></p>
</div>
</div>
<!--[if mso]></td></tr></table><![endif]-->
</div>
<!--[if mso]><table width="100%" cellpadding="0" cellspacing="0" border="0"><tr><td style="padding-right: 0px; padding-left: 0px; padding-top: 15px; padding-bottom: 0px; font-family: Georgia, 'Times New Roman', serif"><![endif]-->
<div style="color:#FFFFFF;font-family:Georgia, Times, 'Times New Roman', serif;line-height:1.2;padding-top:15px;padding-right:0px;padding-bottom:0px;padding-left:0px;">
<div style="line-height: 1.2; font-family: Georgia, Times, 'Times New Roman', serif; font-size: 12px; color: #FFFFFF; mso-line-height-alt: 14px;">
<p style="line-height: 1.2; text-align: center; font-size: 28px; word-break: break-word; font-family: Georgia, Times, Times New Roman, serif; mso-line-height-alt: 34px; margin: 0;"><span style="font-size: 28px;"><em><span style="color: #00ad99;"><span style="">Enjoy Your Day Spa</span></span></em></span></p>
</div>
</div>
<!--[if mso]></td></tr></table><![endif]-->
<!--[if (!mso)&(!IE)]><!-->
</div>
<!--<![endif]-->
</div>
</div>
<!--[if (mso)|(IE)]></td></tr></table><![endif]-->
<!--[if (mso)|(IE)]></td></tr></table></td></tr></table><![endif]-->
</div>
</div>
</div>
<div style="background-color:transparent;">
<div class="block-grid" style="Margin: 0 auto; min-width: 320px; max-width: 650px; overflow-wrap: break-word; word-wrap: break-word; word-break: break-word; background-color: transparent;">
<div style="border-collapse: collapse;display: table;width: 100%;background-color:transparent;">
<!--[if (mso)|(IE)]><table width="100%" cellpadding="0" cellspacing="0" border="0" style="background-color:transparent;"><tr><td align="center"><table cellpadding="0" cellspacing="0" border="0" style="width:650px"><tr class="layout-full-width" style="background-color:transparent"><![endif]-->
<!--[if (mso)|(IE)]><td align="center" width="650" style="background-color:transparent;width:650px; border-top: 0px solid transparent; border-left: 1px solid #C879F1; border-bottom: 0px solid transparent; border-right: 1px solid #C879F1;" valign="top"><table width="100%" cellpadding="0" cellspacing="0" border="0"><tr><td style="padding-right: 0px; padding-left: 0px; padding-top:5px; padding-bottom:55px;"><![endif]-->
<div class="col num12" style="min-width: 320px; max-width: 650px; display: table-cell; vertical-align: top; width: 648px;">
<div style="width:100% !important;">
<!--[if (!mso)&(!IE)]><!-->
<div style="border-top:0px solid transparent; border-left:1px solid #C879F1; border-bottom:0px solid transparent; border-right:1px solid #C879F1; padding-top:5px; padding-bottom:55px; padding-right: 0px; padding-left: 0px;">
<!--<![endif]-->
<!--[if mso]><table width="100%" cellpadding="0" cellspacing="0" border="0"><tr><td style="padding-right: 40px; padding-left: 40px; padding-top: 10px; padding-bottom: 20px; font-family: serif"><![endif]-->
<div style="color:#E3E3E3;font-family:'Merriwheater', 'Georgia', serif;line-height:1.5;padding-top:10px;padding-right:40px;padding-bottom:20px;padding-left:40px;">
<div style="font-size: 12px; line-height: 1.5; font-family: 'Merriwheater', 'Georgia', serif; color: #E3E3E3; mso-line-height-alt: 18px;">
<p style="font-size: 18px; line-height: 1.5; word-break: break-word; text-align: center; font-family: Merriwheater, Georgia, serif; mso-line-height-alt: 27px; margin: 0;"><span style="font-size: 18px;">Lorem ipsum dolor sit amet, consectetur adipiscing elit. Donec elementum nisl id neque ullamcorper, vel mattis nisl rutrum. Sed pulvinar aliquam dolor et euismod.</span></p>
</div>
</div>
<!--[if mso]></td></tr></table><![endif]-->
<!--[if (!mso)&(!IE)]><!-->
</div>
<!--<![endif]-->
</div>
</div>
<!--[if (mso)|(IE)]></td></tr></table><![endif]-->
<!--[if (mso)|(IE)]></td></tr></table></td></tr></table><![endif]-->
</div>
</div>
</div>
<div style="background-color:transparent;">
<div class="block-grid" style="Margin: 0 auto; min-width: 320px; max-width: 650px; overflow-wrap: break-word; word-wrap: break-word; word-break: break-word; background-color: #3f2765;">
<div style="border-collapse: collapse;display: table;width: 100%;background-color:#3f2765;">
<!--[if (mso)|(IE)]><table width="100%" cellpadding="0" cellspacing="0" border="0" style="background-color:transparent;"><tr><td align="center"><table cellpadding="0" cellspacing="0" border="0" style="width:650px"><tr class="layout-full-width" style="background-color:#3f2765"><![endif]-->
<!--[if (mso)|(IE)]><td align="center" width="650" style="background-color:#3f2765;width:650px; border-top: 0px solid transparent; border-left: 1px solid #C879F1; border-bottom: 0px solid transparent; border-right: 1px solid #C879F1;" valign="top"><table width="100%" cellpadding="0" cellspacing="0" border="0"><tr><td style="padding-right: 0px; padding-left: 0px; padding-top:30px; padding-bottom:5px;"><![endif]-->
<div class="col num12" style="min-width: 320px; max-width: 650px; display: table-cell; vertical-align: top; width: 648px;">
<div style="width:100% !important;">
<!--[if (!mso)&(!IE)]><!-->
<div style="border-top:0px solid transparent; border-left:1px solid #C879F1; border-bottom:0px solid transparent; border-right:1px solid #C879F1; padding-top:30px; padding-bottom:5px; padding-right: 0px; padding-left: 0px;">
<!--<![endif]-->
<div align="center" class="img-container center fixedwidth" style="padding-right: 15px;padding-left: 15px;">
<!--[if mso]><table width="100%" cellpadding="0" cellspacing="0" border="0"><tr style="line-height:0px"><td style="padding-right: 15px;padding-left: 15px;" align="center"><![endif]-->
<div style="font-size:1px;line-height:15px"> </div><img align="center" alt="Image" border="0" class="center fixedwidth" src="cid:swirlup.png" style="text-decoration: none; -ms-interpolation-mode: bicubic; height: auto; border: 0; width: 100%; max-width: 291px; display: block;" title="Image" width="291"/>
<div style="font-size:1px;line-height:10px"> </div>
<!--[if mso]></td></tr></table><![endif]-->
</div>
<!--[if mso]><table width="100%" cellpadding="0" cellspacing="0" border="0"><tr><td style="padding-right: 10px; padding-left: 10px; padding-top: 30px; padding-bottom: 30px; font-family: serif"><![endif]-->
<div style="color:#ffffff;font-family:'Merriwheater', 'Georgia', serif;line-height:1.2;padding-top:30px;padding-right:10px;padding-bottom:30px;padding-left:10px;">
<div style="line-height: 1.2; font-family: 'Merriwheater', 'Georgia', serif; font-size: 12px; color: #ffffff; mso-line-height-alt: 14px;">
<p style="line-height: 1.2; text-align: center; font-size: 30px; word-break: break-word; font-family: Merriwheater, Georgia, serif; mso-line-height-alt: 36px; margin: 0;"><span style="font-size: 30px;">TRATAMENTOS</span></p>
</div>
</div>
<!--[if mso]></td></tr></table><![endif]-->
<!--[if (!mso)&(!IE)]><!-->
</div>
<!--<![endif]-->
</div>
</div>
<!--[if (mso)|(IE)]></td></tr></table><![endif]-->
<!--[if (mso)|(IE)]></td></tr></table></td></tr></table><![endif]-->
</div>
</div>
</div>
<div style="background-color:transparent;">
<div class="block-grid four-up" style="Margin: 0 auto; min-width: 320px; max-width: 650px; overflow-wrap: break-word; word-wrap: break-word; word-break: break-word; background-color: #3f2765;">
<div style="border-collapse: collapse;display: table;width: 100%;background-color:#3f2765;">
<!--[if (mso)|(IE)]><table width="100%" cellpadding="0" cellspacing="0" border="0" style="background-color:transparent;"><tr><td align="center"><table cellpadding="0" cellspacing="0" border="0" style="width:650px"><tr class="layout-full-width" style="background-color:#3f2765"><![endif]-->
<!--[if (mso)|(IE)]><td align="center" width="162" style="background-color:#3f2765;width:162px; border-top: 0px solid transparent; border-left: 1px solid #C879F1; border-bottom: 0px solid #C879F1; border-right: 0px solid transparent;" valign="top"><table width="100%" cellpadding="0" cellspacing="0" border="0"><tr><td style="padding-right: 0px; padding-left: 0px; padding-top:5px; padding-bottom:25px;"><![endif]-->
<div class="col num3" style="max-width: 320px; min-width: 162px; display: table-cell; vertical-align: top; width: 161px;">
<div style="width:100% !important;">
<!--[if (!mso)&(!IE)]><!-->
<div style="border-top:0px solid transparent; border-left:1px solid #C879F1; border-bottom:0px solid #C879F1; border-right:0px solid transparent; padding-top:5px; padding-bottom:25px; padding-right: 0px; padding-left: 0px;">
<!--<![endif]-->
<div align="center" class="img-container center autowidth" style="padding-right: 0px;padding-left: 0px;">
<!--[if mso]><table width="100%" cellpadding="0" cellspacing="0" border="0"><tr style="line-height:0px"><td style="padding-right: 0px;padding-left: 0px;" align="center"><![endif]--><img align="center" alt="Alternate text" border="0" class="center autowidth" src="cid:bea_1.png" style="text-decoration: none; -ms-interpolation-mode: bicubic; height: auto; border: 0; width: 100%; max-width: 64px; display: block;" title="Alternate text" width="64"/>
<!--[if mso]></td></tr></table><![endif]-->
</div>
<!--[if mso]><table width="100%" cellpadding="0" cellspacing="0" border="0"><tr><td style="padding-right: 10px; padding-left: 10px; padding-top: 10px; padding-bottom: 0px; font-family: Georgia, 'Times New Roman', serif"><![endif]-->
<div style="color:#00ad99;font-family:Georgia, Times, 'Times New Roman', serif;line-height:1.2;padding-top:10px;padding-right:10px;padding-bottom:0px;padding-left:10px;">
<div style="line-height: 1.2; font-family: Georgia, Times, 'Times New Roman', serif; font-size: 12px; color: #00ad99; mso-line-height-alt: 14px;">
<p style="line-height: 1.2; text-align: center; font-size: 16px; word-break: break-word; font-family: Georgia, Times, Times New Roman, serif; mso-line-height-alt: 19px; margin: 0;"><span style="font-size: 16px;"><strong><em>Head Massage</em></strong></span></p>
</div>
</div>
<!--[if mso]></td></tr></table><![endif]-->
<!--[if mso]><table width="100%" cellpadding="0" cellspacing="0" border="0"><tr><td style="padding-right: 40px; padding-left: 40px; padding-top: 0px; padding-bottom: 0px; font-family: serif"><![endif]-->
<div style="color:#ffffff;font-family:'Merriwheater', 'Georgia', serif;line-height:1.5;padding-top:0px;padding-right:40px;padding-bottom:0px;padding-left:40px;">
<div style="line-height: 1.5; font-size: 12px; font-family: 'Merriwheater', 'Georgia', serif; color: #ffffff; mso-line-height-alt: 18px;">
<p style="line-height: 1.5; font-size: 34px; text-align: center; word-break: break-word; font-family: Merriwheater, Georgia, serif; mso-line-height-alt: 51px; margin: 0;"><span style="font-size: 34px;">50$</span></p>
</div>
</div>
<!--[if mso]></td></tr></table><![endif]-->
<table border="0" cellpadding="0" cellspacing="0" class="divider" role="presentation" style="table-layout: fixed; vertical-align: top; border-spacing: 0; border-collapse: collapse; mso-table-lspace: 0pt; mso-table-rspace: 0pt; min-width: 100%; -ms-text-size-adjust: 100%; -webkit-text-size-adjust: 100%;" valign="top" width="100%">
<tbody>
<tr style="vertical-align: top;" valign="top">
<td class="divider_inner" style="word-break: break-word; vertical-align: top; min-width: 100%; -ms-text-size-adjust: 100%; -webkit-text-size-adjust: 100%; padding-top: 10px; padding-right: 10px; padding-bottom: 10px; padding-left: 10px;" valign="top">
<table align="center" border="0" cellpadding="0" cellspacing="0" class="divider_content" role="presentation" style="table-layout: fixed; vertical-align: top; border-spacing: 0; border-collapse: collapse; mso-table-lspace: 0pt; mso-table-rspace: 0pt; border-top: 2px solid transparent; width: 100%;" valign="top" width="100%">
<tbody>
<tr style="vertical-align: top;" valign="top">
<td style="word-break: break-word; vertical-align: top; -ms-text-size-adjust: 100%; -webkit-text-size-adjust: 100%;" valign="top"><span></span></td>
</tr>
</tbody>
</table>
</td>
</tr>
</tbody>
</table>
<!--[if (!mso)&(!IE)]><!-->
</div>
<!--<![endif]-->
</div>
</div>
<!--[if (mso)|(IE)]></td></tr></table><![endif]-->
<!--[if (mso)|(IE)]></td><td align="center" width="162" style="background-color:#3f2765;width:162px; border-top: 0px solid transparent; border-left: 0px solid transparent; border-bottom: 0px solid transparent; border-right: 0px solid transparent;" valign="top"><table width="100%" cellpadding="0" cellspacing="0" border="0"><tr><td style="padding-right: 0px; padding-left: 0px; padding-top:5px; padding-bottom:25px;"><![endif]-->
<div class="col num3" style="max-width: 320px; min-width: 162px; display: table-cell; vertical-align: top; width: 162px;">
<div style="width:100% !important;">
<!--[if (!mso)&(!IE)]><!-->
<div style="border-top:0px solid transparent; border-left:0px solid transparent; border-bottom:0px solid transparent; border-right:0px solid transparent; padding-top:5px; padding-bottom:25px; padding-right: 0px; padding-left: 0px;">
<!--<![endif]-->
<div align="center" class="img-container center autowidth" style="padding-right: 0px;padding-left: 0px;">
<!--[if mso]><table width="100%" cellpadding="0" cellspacing="0" border="0"><tr style="line-height:0px"><td style="padding-right: 0px;padding-left: 0px;" align="center"><![endif]--><img align="center" alt="Alternate text" border="0" class="center autowidth" src="cid:feet.png" style="text-decoration: none; -ms-interpolation-mode: bicubic; height: auto; border: 0; width: 100%; max-width: 64px; display: block;" title="Alternate text" width="64"/>
<!--[if mso]></td></tr></table><![endif]-->
</div>
<!--[if mso]><table width="100%" cellpadding="0" cellspacing="0" border="0"><tr><td style="padding-right: 10px; padding-left: 10px; padding-top: 10px; padding-bottom: 0px; font-family: Georgia, 'Times New Roman', serif"><![endif]-->
<div style="color:#00ad99;font-family:Georgia, Times, 'Times New Roman', serif;line-height:1.2;padding-top:10px;padding-right:10px;padding-bottom:0px;padding-left:10px;">
<div style="line-height: 1.2; font-family: Georgia, Times, 'Times New Roman', serif; font-size: 12px; color: #00ad99; mso-line-height-alt: 14px;">
<p style="line-height: 1.2; text-align: center; font-size: 16px; word-break: break-word; font-family: Georgia, Times, Times New Roman, serif; mso-line-height-alt: 19px; margin: 0;"><span style="font-size: 16px;"><strong><em>Feet Treatment</em></strong></span></p>
</div>
</div>
<!--[if mso]></td></tr></table><![endif]-->
<!--[if mso]><table width="100%" cellpadding="0" cellspacing="0" border="0"><tr><td style="padding-right: 40px; padding-left: 40px; padding-top: 0px; padding-bottom: 0px; font-family: serif"><![endif]-->
<div style="color:#ffffff;font-family:'Merriwheater', 'Georgia', serif;line-height:1.5;padding-top:0px;padding-right:40px;padding-bottom:0px;padding-left:40px;">
<div style="line-height: 1.5; font-size: 12px; font-family: 'Merriwheater', 'Georgia', serif; color: #ffffff; mso-line-height-alt: 18px;">
<p style="line-height: 1.5; font-size: 34px; text-align: center; word-break: break-word; font-family: Merriwheater, Georgia, serif; mso-line-height-alt: 51px; margin: 0;"><span style="font-size: 34px;">65$</span></p>
</div>
</div>
<!--[if mso]></td></tr></table><![endif]-->
<table border="0" cellpadding="0" cellspacing="0" class="divider" role="presentation" style="table-layout: fixed; vertical-align: top; border-spacing: 0; border-collapse: collapse; mso-table-lspace: 0pt; mso-table-rspace: 0pt; min-width: 100%; -ms-text-size-adjust: 100%; -webkit-text-size-adjust: 100%;" valign="top" width="100%">
<tbody>
<tr style="vertical-align: top;" valign="top">
<td class="divider_inner" style="word-break: break-word; vertical-align: top; min-width: 100%; -ms-text-size-adjust: 100%; -webkit-text-size-adjust: 100%; padding-top: 10px; padding-right: 10px; padding-bottom: 10px; padding-left: 10px;" valign="top">
<table align="center" border="0" cellpadding="0" cellspacing="0" class="divider_content" role="presentation" style="table-layout: fixed; vertical-align: top; border-spacing: 0; border-collapse: collapse; mso-table-lspace: 0pt; mso-table-rspace: 0pt; border-top: 2px solid transparent; width: 100%;" valign="top" width="100%">
<tbody>
<tr style="vertical-align: top;" valign="top">
<td style="word-break: break-word; vertical-align: top; -ms-text-size-adjust: 100%; -webkit-text-size-adjust: 100%;" valign="top"><span></span></td>
</tr>
</tbody>
</table>
</td>
</tr>
</tbody>
</table>
<!--[if (!mso)&(!IE)]><!-->
</div>
<!--<![endif]-->
</div>
</div>
<!--[if (mso)|(IE)]></td></tr></table><![endif]-->
<!--[if (mso)|(IE)]></td><td align="center" width="162" style="background-color:#3f2765;width:162px; border-top: 0px solid transparent; border-left: 0px solid transparent; border-bottom: 0px solid transparent; border-right: 0px solid transparent;" valign="top"><table width="100%" cellpadding="0" cellspacing="0" border="0"><tr><td style="padding-right: 0px; padding-left: 0px; padding-top:5px; padding-bottom:25px;"><![endif]-->
<div class="col num3" style="max-width: 320px; min-width: 162px; display: table-cell; vertical-align: top; width: 162px;">
<div style="width:100% !important;">
<!--[if (!mso)&(!IE)]><!-->
<div style="border-top:0px solid transparent; border-left:0px solid transparent; border-bottom:0px solid transparent; border-right:0px solid transparent; padding-top:5px; padding-bottom:25px; padding-right: 0px; padding-left: 0px;">
<!--<![endif]-->
<div align="center" class="img-container center autowidth" style="padding-right: 0px;padding-left: 0px;">
<!--[if mso]><table width="100%" cellpadding="0" cellspacing="0" border="0"><tr style="line-height:0px"><td style="padding-right: 0px;padding-left: 0px;" align="center"><![endif]--><img align="center" alt="Alternate text" border="0" class="center autowidth" src="cid:massagstone.png" style="text-decoration: none; -ms-interpolation-mode: bicubic; height: auto; border: 0; width: 100%; max-width: 64px; display: block;" title="Alternate text" width="64"/>
<!--[if mso]></td></tr></table><![endif]-->
</div>
<!--[if mso]><table width="100%" cellpadding="0" cellspacing="0" border="0"><tr><td style="padding-right: 10px; padding-left: 10px; padding-top: 10px; padding-bottom: 0px; font-family: Georgia, 'Times New Roman', serif"><![endif]-->
<div style="color:#00ad99;font-family:Georgia, Times, 'Times New Roman', serif;line-height:1.2;padding-top:10px;padding-right:10px;padding-bottom:0px;padding-left:10px;">
<div style="line-height: 1.2; font-family: Georgia, Times, 'Times New Roman', serif; font-size: 12px; color: #00ad99; mso-line-height-alt: 14px;">
<p style="line-height: 1.2; text-align: center; font-size: 16px; word-break: break-word; font-family: Georgia, Times, Times New Roman, serif; mso-line-height-alt: 19px; margin: 0;"><span style="font-size: 16px;"><strong><em>Stone massage</em></strong></span></p>
</div>
</div>
<!--[if mso]></td></tr></table><![endif]-->
<!--[if mso]><table width="100%" cellpadding="0" cellspacing="0" border="0"><tr><td style="padding-right: 40px; padding-left: 40px; padding-top: 0px; padding-bottom: 0px; font-family: serif"><![endif]-->
<div style="color:#ffffff;font-family:'Merriwheater', 'Georgia', serif;line-height:1.5;padding-top:0px;padding-right:40px;padding-bottom:0px;padding-left:40px;">
<div style="line-height: 1.5; font-size: 12px; font-family: 'Merriwheater', 'Georgia', serif; color: #ffffff; mso-line-height-alt: 18px;">
<p style="line-height: 1.5; font-size: 34px; text-align: center; word-break: break-word; font-family: Merriwheater, Georgia, serif; mso-line-height-alt: 51px; margin: 0;"><span style="font-size: 34px;">20$</span></p>
</div>
</div>
<!--[if mso]></td></tr></table><![endif]-->
<table border="0" cellpadding="0" cellspacing="0" class="divider" role="presentation" style="table-layout: fixed; vertical-align: top; border-spacing: 0; border-collapse: collapse; mso-table-lspace: 0pt; mso-table-rspace: 0pt; min-width: 100%; -ms-text-size-adjust: 100%; -webkit-text-size-adjust: 100%;" valign="top" width="100%">
<tbody>
<tr style="vertical-align: top;" valign="top">
<td class="divider_inner" style="word-break: break-word; vertical-align: top; min-width: 100%; -ms-text-size-adjust: 100%; -webkit-text-size-adjust: 100%; padding-top: 10px; padding-right: 10px; padding-bottom: 10px; padding-left: 10px;" valign="top">
<table align="center" border="0" cellpadding="0" cellspacing="0" class="divider_content" role="presentation" style="table-layout: fixed; vertical-align: top; border-spacing: 0; border-collapse: collapse; mso-table-lspace: 0pt; mso-table-rspace: 0pt; border-top: 2px solid transparent; width: 100%;" valign="top" width="100%">
<tbody>
<tr style="vertical-align: top;" valign="top">
<td style="word-break: break-word; vertical-align: top; -ms-text-size-adjust: 100%; -webkit-text-size-adjust: 100%;" valign="top"><span></span></td>
</tr>
</tbody>
</table>
</td>
</tr>
</tbody>
</table>
<!--[if (!mso)&(!IE)]><!-->
</div>
<!--<![endif]-->
</div>
</div>
<!--[if (mso)|(IE)]></td></tr></table><![endif]-->
<!--[if (mso)|(IE)]></td><td align="center" width="162" style="background-color:#3f2765;width:162px; border-top: 0px solid ; border-left: 0px solid ; border-bottom: 0px solid ; border-right: 1px solid #C879F1;" valign="top"><table width="100%" cellpadding="0" cellspacing="0" border="0"><tr><td style="padding-right: 0px; padding-left: 0px; padding-top:5px; padding-bottom:25px;"><![endif]-->
<div class="col num3" style="max-width: 320px; min-width: 162px; display: table-cell; vertical-align: top; width: 161px;">
<div style="width:100% !important;">
<!--[if (!mso)&(!IE)]><!-->
<div style="border-top:0px solid ; border-left:0px solid ; border-bottom:0px solid ; border-right:1px solid #C879F1; padding-top:5px; padding-bottom:25px; padding-right: 0px; padding-left: 0px;">
<!--<![endif]-->
<div align="center" class="img-container center autowidth" style="padding-right: 0px;padding-left: 0px;">
<!--[if mso]><table width="100%" cellpadding="0" cellspacing="0" border="0"><tr style="line-height:0px"><td style="padding-right: 0px;padding-left: 0px;" align="center"><![endif]--><img align="center" alt="Alternate text" border="0" class="center autowidth" src="cid:face.png" style="text-decoration: none; -ms-interpolation-mode: bicubic; height: auto; border: 0; width: 100%; max-width: 64px; display: block;" title="Alternate text" width="64"/>
<!--[if mso]></td></tr></table><![endif]-->
</div>
<!--[if mso]><table width="100%" cellpadding="0" cellspacing="0" border="0"><tr><td style="padding-right: 10px; padding-left: 10px; padding-top: 10px; padding-bottom: 0px; font-family: Georgia, 'Times New Roman', serif"><![endif]-->
<div style="color:#00ad99;font-family:Georgia, Times, 'Times New Roman', serif;line-height:1.2;padding-top:10px;padding-right:10px;padding-bottom:0px;padding-left:10px;">
<div style="line-height: 1.2; font-family: Georgia, Times, 'Times New Roman', serif; font-size: 12px; color: #00ad99; mso-line-height-alt: 14px;">
<p style="line-height: 1.2; text-align: center; font-size: 16px; word-break: break-word; font-family: Georgia, Times, Times New Roman, serif; mso-line-height-alt: 19px; margin: 0;"><span style="font-size: 16px;"><strong><em>Stone massage</em></strong></span></p>
</div>
</div>
<!--[if mso]></td></tr></table><![endif]-->
<!--[if mso]><table width="100%" cellpadding="0" cellspacing="0" border="0"><tr><td style="padding-right: 40px; padding-left: 40px; padding-top: 0px; padding-bottom: 0px; font-family: serif"><![endif]-->
<div style="color:#ffffff;font-family:'Merriwheater', 'Georgia', serif;line-height:1.5;padding-top:0px;padding-right:40px;padding-bottom:0px;padding-left:40px;">
<div style="line-height: 1.5; font-size: 12px; font-family: 'Merriwheater', 'Georgia', serif; color: #ffffff; mso-line-height-alt: 18px;">
<p style="line-height: 1.5; font-size: 34px; text-align: center; word-break: break-word; font-family: Merriwheater, Georgia, serif; mso-line-height-alt: 51px; margin: 0;"><span style="font-size: 34px;">35$</span></p>
</div>
</div>
<!--[if mso]></td></tr></table><![endif]-->
<table border="0" cellpadding="0" cellspacing="0" class="divider" role="presentation" style="table-layout: fixed; vertical-align: top; border-spacing: 0; border-collapse: collapse; mso-table-lspace: 0pt; mso-table-rspace: 0pt; min-width: 100%; -ms-text-size-adjust: 100%; -webkit-text-size-adjust: 100%;" valign="top" width="100%">
<tbody>
<tr style="vertical-align: top;" valign="top">
<td class="divider_inner" style="word-break: break-word; vertical-align: top; min-width: 100%; -ms-text-size-adjust: 100%; -webkit-text-size-adjust: 100%; padding-top: 10px; padding-right: 10px; padding-bottom: 10px; padding-left: 10px;" valign="top">
<table align="center" border="0" cellpadding="0" cellspacing="0" class="divider_content" role="presentation" style="table-layout: fixed; vertical-align: top; border-spacing: 0; border-collapse: collapse; mso-table-lspace: 0pt; mso-table-rspace: 0pt; border-top: 2px solid transparent; width: 100%;" valign="top" width="100%">
<tbody>
<tr style="vertical-align: top;" valign="top">
<td style="word-break: break-word; vertical-align: top; -ms-text-size-adjust: 100%; -webkit-text-size-adjust: 100%;" valign="top"><span></span></td>
</tr>
</tbody>
</table>
</td>
</tr>
</tbody>
</table>
<!--[if (!mso)&(!IE)]><!-->
</div>
<!--<![endif]-->
</div>
</div>
<!--[if (mso)|(IE)]></td></tr></table><![endif]-->
<!--[if (mso)|(IE)]></td></tr></table></td></tr></table><![endif]-->
</div>
</div>
</div>
<div style="background-color:transparent;">
<div class="block-grid" style="Margin: 0 auto; min-width: 320px; max-width: 650px; overflow-wrap: break-word; word-wrap: break-word; word-break: break-word; background-color: #3f2765;">
<div style="border-collapse: collapse;display: table;width: 100%;background-color:#3f2765;">
<!--[if (mso)|(IE)]><table width="100%" cellpadding="0" cellspacing="0" border="0" style="background-color:transparent;"><tr><td align="center"><table cellpadding="0" cellspacing="0" border="0" style="width:650px"><tr class="layout-full-width" style="background-color:#3f2765"><![endif]-->
<!--[if (mso)|(IE)]><td align="center" width="650" style="background-color:#3f2765;width:650px; border-top: 0px solid transparent; border-left: 1px solid #C879F1; border-bottom: 0px solid transparent; border-right: 1px solid #C879F1;" valign="top"><table width="100%" cellpadding="0" cellspacing="0" border="0"><tr><td style="padding-right: 0px; padding-left: 0px; padding-top:0px; padding-bottom:40px;"><![endif]-->
<div class="col num12" style="min-width: 320px; max-width: 650px; display: table-cell; vertical-align: top; width: 648px;">
<div style="width:100% !important;">
<!--[if (!mso)&(!IE)]><!-->
<div style="border-top:0px solid transparent; border-left:1px solid #C879F1; border-bottom:0px solid transparent; border-right:1px solid #C879F1; padding-top:0px; padding-bottom:40px; padding-right: 0px; padding-left: 0px;">
<!--<![endif]-->
<div align="center" class="img-container center fixedwidth" style="padding-right: 15px;padding-left: 15px;">
<!--[if mso]><table width="100%" cellpadding="0" cellspacing="0" border="0"><tr style="line-height:0px"><td style="padding-right: 15px;padding-left: 15px;" align="center"><![endif]-->
<div style="font-size:1px;line-height:15px"> </div><img align="center" alt="Image" border="0" class="center fixedwidth" src="cid:swirl.png" style="text-decoration: none; -ms-interpolation-mode: bicubic; height: auto; border: 0; width: 100%; max-width: 291px; display: block;" title="Image" width="291"/>
<div style="font-size:1px;line-height:10px"> </div>
<!--[if mso]></td></tr></table><![endif]-->
</div>
<!--[if (!mso)&(!IE)]><!-->
</div>
<!--<![endif]-->
</div>
</div>
<!--[if (mso)|(IE)]></td></tr></table><![endif]-->
<!--[if (mso)|(IE)]></td></tr></table></td></tr></table><![endif]-->
</div>
</div>
</div>
<div style="background-color:transparent;">
<div class="block-grid" style="Margin: 0 auto; min-width: 320px; max-width: 650px; overflow-wrap: break-word; word-wrap: break-word; word-break: break-word; background-color: transparent;">
<div style="border-collapse: collapse;display: table;width: 100%;background-color:transparent;">
<!--[if (mso)|(IE)]><table width="100%" cellpadding="0" cellspacing="0" border="0" style="background-color:transparent;"><tr><td align="center"><table cellpadding="0" cellspacing="0" border="0" style="width:650px"><tr class="layout-full-width" style="background-color:transparent"><![endif]-->
<!--[if (mso)|(IE)]><td align="center" width="650" style="background-color:transparent;width:650px; border-top: 0px solid transparent; border-left: 1px solid #C879F1; border-bottom: 1px solid #C879F1; border-right: 1px solid #C879F1;" valign="top"><table width="100%" cellpadding="0" cellspacing="0" border="0"><tr><td style="padding-right: 0px; padding-left: 0px; padding-top:0px; padding-bottom:0px;"><![endif]-->
<div class="col num12" style="min-width: 320px; max-width: 650px; display: table-cell; vertical-align: top; width: 648px;">
<div style="width:100% !important;">
<!--[if (!mso)&(!IE)]><!-->
<div style="border-top:0px solid transparent; border-left:1px solid #C879F1; border-bottom:1px solid #C879F1; border-right:1px solid #C879F1; padding-top:0px; padding-bottom:0px; padding-right: 0px; padding-left: 0px;">
<!--<![endif]-->
<!--[if mso]><table width="100%" cellpadding="0" cellspacing="0" border="0"><tr><td style="padding-right: 40px; padding-left: 40px; padding-top: 35px; padding-bottom: 30px; font-family: serif"><![endif]-->
<div style="color:#E3E3E3;font-family:'Merriwheater', 'Georgia', serif;line-height:1.5;padding-top:35px;padding-right:40px;padding-bottom:30px;padding-left:40px;">
<div style="line-height: 1.5; font-size: 12px; font-family: 'Merriwheater', 'Georgia', serif; color: #E3E3E3; mso-line-height-alt: 18px;">
<p style="line-height: 1.5; font-size: 14px; text-align: center; word-break: break-word; font-family: Merriwheater, Georgia, serif; mso-line-height-alt: 21px; margin: 0;"><span style="font-size: 14px;"><span style="font-size: 14px;"><em><span style="color: #00ad99; font-size: 14px;"><strong>Flower Spa</strong></span> </em>- Barkley street, 67 - Seattle</span></span></p>
<p style="line-height: 1.5; font-size: 18px; text-align: center; word-break: break-word; font-family: Merriwheater, Georgia, serif; mso-line-height-alt: 27px; margin: 0;"><span style="font-size: 18px;"><span style="font-size: 14px;">www.example.com | bookings@example.com</span><br/><span style="font-size: 14px;">© All rights reserved </span><br/></span></p>
</div>
</div>
<!--[if mso]></td></tr></table><![endif]-->
<div align="center" class="img-container center autowidth fullwidth" style="padding-right: 15px;padding-left: 15px;">
<!--[if mso]><table width="100%" cellpadding="0" cellspacing="0" border="0"><tr style="line-height:0px"><td style="padding-right: 15px;padding-left: 15px;" align="center"><![endif]--><img align="center" alt="Image" border="0" class="center autowidth fullwidth" src="cid:swirls_2.png" style="text-decoration: none; -ms-interpolation-mode: bicubic; height: auto; border: 0; width: 100%; max-width: 618px; display: block;" title="Image" width="618"/>
<div style="font-size:1px;line-height:10px"> </div>
<!--[if mso]></td></tr></table><![endif]-->
</div>
<!--[if (!mso)&(!IE)]><!-->
</div>
<!--<![endif]-->
</div>
</div>
<!--[if (mso)|(IE)]></td></tr></table><![endif]-->
<!--[if (mso)|(IE)]></td></tr></table></td></tr></table><![endif]-->
</div>
</div>
</div>
<div style="background-color:transparent;">
<div class="block-grid" style="Margin: 0 auto; min-width: 320px; max-width: 650px; overflow-wrap: break-word; word-wrap: break-word; word-break: break-word; background-color: transparent;">
<div style="border-collapse: collapse;display: table;width: 100%;background-color:transparent;">
<!--[if (mso)|(IE)]><table width="100%" cellpadding="0" cellspacing="0" border="0" style="background-color:transparent;"><tr><td align="center"><table cellpadding="0" cellspacing="0" border="0" style="width:650px"><tr class="layout-full-width" style="background-color:transparent"><![endif]-->
<!--[if (mso)|(IE)]><td align="center" width="650" style="background-color:transparent;width:650px; border-top: 0px solid transparent; border-left: 0px solid transparent; border-bottom: 0px solid transparent; border-right: 0px solid transparent;" valign="top"><table width="100%" cellpadding="0" cellspacing="0" border="0"><tr><td style="padding-right: 10px; padding-left: 10px; padding-top:10px; padding-bottom:10px;"><![endif]-->
<div class="col num12" style="min-width: 320px; max-width: 650px; display: table-cell; vertical-align: top; width: 650px;">
<div style="width:100% !important;">
<!--[if (!mso)&(!IE)]><!-->
<div style="border-top:0px solid transparent; border-left:0px solid transparent; border-bottom:0px solid transparent; border-right:0px solid transparent; padding-top:10px; padding-bottom:10px; padding-right: 10px; padding-left: 10px;">
<!--<![endif]-->
<div class="mobile_hide">
<table border="0" cellpadding="0" cellspacing="0" class="divider" role="presentation" style="table-layout: fixed; vertical-align: top; border-spacing: 0; border-collapse: collapse; mso-table-lspace: 0pt; mso-table-rspace: 0pt; min-width: 100%; -ms-text-size-adjust: 100%; -webkit-text-size-adjust: 100%;" valign="top" width="100%">
<tbody>
<tr style="vertical-align: top;" valign="top">
<td class="divider_inner" style="word-break: break-word; vertical-align: top; min-width: 100%; -ms-text-size-adjust: 100%; -webkit-text-size-adjust: 100%; padding-top: 10px; padding-right: 10px; padding-bottom: 10px; padding-left: 10px;" valign="top">
<table align="center" border="0" cellpadding="0" cellspacing="0" class="divider_content" height="15" role="presentation" style="table-layout: fixed; vertical-align: top; border-spacing: 0; border-collapse: collapse; mso-table-lspace: 0pt; mso-table-rspace: 0pt; border-top: 0px solid transparent; height: 15px; width: 100%;" valign="top" width="100%">
<tbody>
<tr style="vertical-align: top;" valign="top">
<td height="15" style="word-break: break-word; vertical-align: top; -ms-text-size-adjust: 100%; -webkit-text-size-adjust: 100%;" valign="top"><span></span></td>
</tr>
</tbody>
</table>
</td>
</tr>
</tbody>
</table>
</div>
<!--[if (!mso)&(!IE)]><!-->
</div>
<!--<![endif]-->
</div>
</div>
<!--[if (mso)|(IE)]></td></tr></table><![endif]-->
<!--[if (mso)|(IE)]></td></tr></table></td></tr></table><![endif]-->
</div>
</div>
</div>
<!--[if (mso)|(IE)]></td></tr></table><![endif]-->
</td>
</tr>
</tbody>
</table>
<!--[if (IE)]></div><![endif]-->
</body>
</html>"""
msgText = MIMEText(message, 'html')
msgRoot.attach(msgText)
images = [
["./images/bea_1.png", "<bea_1.png>"],
["./images/diveinto.jpeg", "<diveinto.jpeg>"],
["./images/face.png", "<face.png>"],
["./images/feet.png", "<feet.png>"],
["./images/logoflower.png", "<logoflower.png>"],
["./images/massagstone.png", "<massagstone.png>"],
["./images/swirl.png", "<swirl.png>"],
["./images/swirls_1.png", "<swirls_1.png>"],
["./images/swirls_2.png", "<swirls_2.png>"],
["./images/swirlup.png", "<swirlup.png>"]
]
for image in images:
with open(image[0],'rb') as f:
msgImage = MIMEImage(f.read())
f.close()
msgImage.add_header('Content-ID', image[1])
msgRoot.attach(msgImage)
with smtplib.SMTP_SSL('smtp.gmail.com', 465) as smtp:
smtp.login(EMAIL_ADDRESS, EMAIL_PASSWORD)
smtp.send_message(msgRoot)
smtp.quit()
credentials = pickle.load(open("./token.pkl","rb"))
service = build("calendar","v3",credentials=credentials)
id = service.calendarList().list().execute()['items'][0]['id']
event = {
'summary': 'Horário marcado '+c_nome,
'location': endereco,
'description': 'Horário marcado com '+c_nome+' para '+p_nome+'.',
'start': {
'dateTime': date.strftime("%Y-%m-%dT%H:%M:%S"),
'timeZone': "America/Sao_Paulo",
},
'end': {
'dateTime': (date + timedelta(hours=4)).strftime("%Y-%m-%dT%H:%M:%S"),
'timeZone': "America/Sao_Paulo",
},
'reminders': {
'useDefault': False,
'overrides': [
{'method': 'email', 'minutes': 24 * 60},
{'method': 'email', 'minutes': 60},
{'method': 'popup', 'minutes': 30},
],
},
}
event = service.events().insert(calendarId=id, body=event).execute()
| 75.678133
| 493
| 0.65967
| 8,590
| 61,602
| 4.722352
| 0.057043
| 0.016517
| 0.013805
| 0.046148
| 0.891335
| 0.880192
| 0.874128
| 0.871268
| 0.868507
| 0.865229
| 0
| 0.04795
| 0.112675
| 61,602
| 813
| 494
| 75.771218
| 0.694151
| 0
| 0
| 0.637076
| 0
| 0.285901
| 0.928574
| 0.235593
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.003916
| 0.061358
| 0
| 0.061358
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
79549336a4241631b02e785750cc140cfc8710c5
| 15,643
|
py
|
Python
|
crawler/crawler_2.py
|
marxlee/py-tools
|
4c3699b2a5dd5cb4477a4e339b8f91161cbe3bef
|
[
"Apache-2.0"
] | null | null | null |
crawler/crawler_2.py
|
marxlee/py-tools
|
4c3699b2a5dd5cb4477a4e339b8f91161cbe3bef
|
[
"Apache-2.0"
] | null | null | null |
crawler/crawler_2.py
|
marxlee/py-tools
|
4c3699b2a5dd5cb4477a4e339b8f91161cbe3bef
|
[
"Apache-2.0"
] | null | null | null |
from urllib import request
import random
import json
# 摸你请求头
url = r'https://www.baidu.com/s?cl=3&tn=baidutop10&fr=top1000&wd=%E7%9F%B3%E7%94%B0%E7%BA%AF%E4%B8%80%E6%84%9F%E6%9F%93%E6%96%B0%E5%86%A0&rsv_idx=2&rsv_dl=fyb_n_homepage&hisfilter=1'
# 代理列表
agent_list = [
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/80.0.3987.163 Safari/537.36',
'Mozilla/5.0 (iPhone; CPU iPhone OS 13_2_3 like Mac OS X) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/13.0.3 Mobile/15E148 Safari/604.1',
]
#头信息
headers = {
'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9',
# 'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/80.0.3987.163 Safari/537.36'
}
# 随机代理
agent = random.choice(agent_list)
headers['User-Agent'] = agent
# 方法体
def print_url(url, header):
# 设置超时时间处理
time_out = 1
req_str = request.Request(url=url, headers=header)
try:
resp = request.urlopen(req_str, timeout=time_out)
data = resp.read().decode()
print(data)
except:
print("超时")
finally:
request.urlcleanup()
def print_url_http(url, header):
'''
GET
POST
PUT
DELETE
UPDATE
HEAD
OPTIONS
'''
json.loads()
pass
def get_json_val():
str = data_json_str()
jd = json.loads(str)
print(jd['sodar_query_id'])
# json = data_json()
# print(json['sodar_query_id'])
def data_json():
data = {"sodar_query_id":"YcqaXvPrIMSW2QTPjZeQAQ","injector_basename":"sodar2","bg_hash_basename":"r_kJ4x66L0q9ptqPN1EZdQZJVGt7LCWecB4z-4tOz0Y","bg_binary":"ALzbBj814lyYEaftZLAVu8KNpcS+Et40flMgUba+katdDRF9kHyC5ekeOn+SnF/oOv/75lAHEFYOblxjV5F4SQhJh/HX5oNaB6yQEscwY+2xY7zf1AOQAdXlwstcQsfcf91ydo9bJs3/nAnh41iqmA3KkV9TfstrgriG5sc8NSoUWQywuHf7ZDZeun3Y92u01kXYPGO8rYRMrwOmuOuo1G4VKz01yCxYiTBspPgxnf7FUa45yXGKR151XIRz4IxwZBgy/9IfJW7j0hUjlY/0miYrdQDTKGXvXdhU+YZvQF9FqLDIrYhg5FTB7SlWwIxZrImc8w8pALEU2idJLMue130yPHz7GfnNs6cIoIb8v+Y5v78QUCPflrJP6GxBEej+a3Fmb2hm7pk2iK4hbMb3guNpMSIou8PIP4nd5KQrpDzuG/WOiaSZIuMfkYYifAhSdi6nam3SMto07vPYW4L1XOy4QCvmkbrMwE8A8FLNrC6IzhIPi3cURKXSE6sI/UFoo8jBYaD/961bsfjDRip/stsq5XCf+P2EhgLW9Yl95ddjtReaObOpV5Di5pMhexp0DaCjfmXZyOrZ+LA3UYcOarlSsAIEJZ85HTn7EiJl+DVPSXPmQSy8LAywMyAVuPtKwanswYNiqlYtayDAlPJI26Om2TOeZzO0lRASIyxK6zkms+YajVYJ1z2wNvnv81D1PzH5N9YbWjImivcqNOHZxF/88olXY6oHG+zBqOVTOLyFahFjD7ftMXKFncA9mnEKC/UNXEkdClNu8B63x/aUHyb4u398Eru3PAupW6gnasf404viputMyvkrGgr7AhTRVJNK4Zt5GoQ8znxJCJZ0TRrGH4XgKFIkcgYopx4fmYGc5hP4q4mqFDouvH/Q0NGjx2YpICYE5CSfG1iIV76XO6nTrZ7Fn4zfE+mkgmm7LU/yAGXu2mjeTL0K2nEyOtgcuxq5POsRRtyN3BpNFRZDG06NxTEVZPbbRnm6aEaL4dntcmYsrLu2bFw2nMywczkpyV3ld+jeItdjeLaeRMjEqxhfR21xsMg3AenilDzpPaYlBCosMK3h/MA1nCwLxGENmjHp4lFYPHJohRnMj2Bbs4ROeG7uZoVg/NTmNiagecZC3+xy7+e+hNSS1Dmdq/lSpYLwJPsgrRRutCBRY/Ie2rfToKEt5juHeg9ExyWA8QJpHOPmIwgvoTXlTjWnQoObJuvlwVlJiT3fFDhmox/tAtiy4HzzQeIXekN8mZu1Lee6qlJ0HFE5jP6FVfDZsdn1VPKe8l01YpktU107evEA8rzrdoTnpPAj+d0IRwTh0HylyKHuulw6RD1MOJxPHTY06aGf5IRjpsz+YOKLR/+UPGiTZq4fc12OXYI/rHZTEfcSQu+lkh2zi2q8NAcRBrexYG6WN9UQ7+q5bPxAOEKxtB265eA1JQVd13LIPlBEJEbNCcvBiQiAzA2wDEqR793VpC0EuCDXuCuHwYGuF23YaKqhOaapZS9xVT8aDwKpdo005BdGvyu5Bux2q23npsv3xDE++5F/ny3z57M1cbpfLJQ4YzMVFyNisvqR5rdY71Ms2mTXy/DyoS022LI21D1RMsc16qKD7oCm00M/ggQVC1X7tJDwl0oe/3iisPHUJRiI79FkGbazm9AbQQKUH2LnMPjZ6GEMLkVpQGhglE/yYwVVpsP/PRdK1Cdftg7OADzPty8G1Q5uFyvdmWmIuR5nbW9bebKvhYFCJZHm2DcWgu8tN5NG5/5lrGpqxoNqxaxPwzAocDdU0xwMajHidsg0nkMruMNd997EUOEIdHPvZZFbBG+4ZDZgaYLGRuxGF2lOYNNxMG7
qZfoXV5Vw4h/G0Iy7hy6DXRnZCQWOXuGM6wGqwdG3yy085+gqnOyEclnbgsaVo7Ohz4P1u34rFRoSd+yoHs5Cy4iqCBZtu1o71jKxP+/yVbb+UGMNOOnSnrTO1Qs6MHYnQ+7yrN1AVKKwaNFFtsVKp4dW5vv0+6CJ1TmiEuVekSTR6pQ7FYjjvdAXwob0OZDFoxXY7kAFxrIuHXqgzJ0cG1DjxFJtV1JGCAU2vPtS6iYoNbpQX2GRMQx31yWVG4CO0IYJWjraUwvswrtIFbxkJMP2H8GF1AaV4gLV10ZbNsX8V1m0SwPsburH/3ECRLu3IpU6VLdP53WrtBxF4cidDtgaBin9NuQp0bP9wC3TIR0nZ2OD5yDRPw//pGAzZqIMLhvB2AbrLt4qCFvOWKDxJ39Thy9HOyqJh2DEZ/oWUr496RdSvmYqH5yn/pmYFN+gAqgB33wIsbYJxQtGfT2NsIS8yVka1031cP0azO43smM9dXbkU6HVaxOS5Y1U5PR9pjxAilePqS+PUVOIegsGpLfR4rfjXFQt72kpCTNKG+y8/XWH6Brb2THTzGEF1UrNUZfc6+jJ5fflgGAOuECRgzJwr9x0bToMdomF5vrbaLcGbX+Rqw7+ob7GQ5/E9UmFaAOOeDIGd0eX0hwLP1ZEKnkW+4LHFY5h1L51tUIZVPFnsJ1dxEeGXU7zp2SIJ8nbdcXO3WP6o9Q38Hrrw6udiFNZT9lhKujoBYgUZ/d0EDZCS0JuB/vR4u9uHKic0PBVeZpiUtjlaPJbrdHJK5J+JycwsifHqXKeMUDPOkNdPptuif8vsrXnpTgIqVEXFwYI1SCXr/0/hWhm3kz8ZVMPoPyPSehNFvD5/heLy4BCxaW60SjKfDMWiyliTQRDFsnFJZ+CguIE9tYjkwkdtv6yRQI70ltEWhYEHsX0+uZdixmo3wMPT7xjT6wL7891UFDJIFy8WtwTj5VzdN5nSgwlh+yGF9Djn9ihLSN5EebavuLDiJYNlvVOA2mMKSdeB8jvFcwyH5Q8opwQZUWrdrahdkTRK98S3HoGlyMx2u5x+YUgNxrKUJZxfbI/53aDuS2BV2LY2jtVnXQohEll0afDuVvmWNfJ8SQ2tHwX/YWuYYFKUg05ZF8yfxBdn9oezJMLorAa4wyomHtoowUL2j1ITOYZaG46V+sC6Uwf1T9VCDA3Dyugwz34e+NErKouptm99HeY22BzpTvUutUGo4/0m5Wt5CvbX1fEBeTWMb6BZ4sdP/PxJpR+vxBIFStciwLqBYIlVF/TKzKK0OR4gZp/QF4Z2GZPQUSQ4ZMQST3zhcMIsxNnzThwhDQifjvlTBhfM5bNtV6mNtPzQ9UbY5Qk6/88YFt5jJPaVhnfnaZtC9D7WlO3aNSIJ8QmNhg3J3dp6BiCjKMzBjCkXmlOcWGjTO5oQ1p2HKUubHNxQDpmmLthX8n15qLusnaQUeKSf+vFxcneT4DicqBNpECnPSfbwcIZqbDpwGLjNsRNebJwEI2xdbX+MBOPVQ303ptQHEMychPD+tbi7SCTIgJcHfAfRYAW5/AxbzelIwrk/6PC+a60CSW3OOLuOAoP5CLpeg+zRWW6CL5k9DdFDf4ve2vGu+k9V+2JagU56Ea8YCHOQ5VIzqkF3jIh6LkhCYmCyjFBGQLz4Cvu5OGI7TLC9v5/LQhshoqrEcc/JexcJzbx1i/l7In6HW5Zp1BpJvtruexwzKsbZKclmaG4HzPEGUKHgzwDDkMTFYSU2qPpncqPw6NtBp8og4n1KjyAXpfYecFU5tQVDyeUc7tMUgV0BE/WsXoheOKx7Cvo3bRuySPhSih+PBGp6FzP/S/rLxPOmZ/Lcf2F0IXXtR0Cj4gHXhigNou+PrhTgmeW1ayRnYYJ8Ps5JCP5nW5i2EAlH5SvcyAaoXIb2T3l1z7TmEEVLMRC3k5d+fqxB1AEIYZLvLMoCO2tFBh6L7u3Vyh/k0SchaqKKI9U/JVG/l4QwFq
pZ8E+C/p15UVgwMwHaAFBKULWncbwNiSk0R2H46n5Ol7+2kv2yfkFvdYrf7VsKD76/6JOCQydMM2BKmL1NL91N+Yd0hmaYBrrFIxVzxkjP8VULgCRwylKpsTBdYp0nvfVeWU+vq1CXy2hhOxzWMVRmMAE9FO6Fux0fprVdrkxDgLk50mhP7Eq8kfnzpXc3ItSgAddB1JCvUdYzhnsQh+F/viDl5iub0LIeF+Kp+HyemXDTkf9OVM1DGwp3CxgNIam2Z1/UxTVC76H8cKhjeo8yOhzoVF0p46N/o2eOmhB55ZcWKvFESKuRMbV+MjcSAhWE+76v8VgxrfwoIfhg2YlwLfMTiapbfMZ5tSh5rutxOuReIAbh8Mo/IYBesQQ2SybvA2GFg7Mcfe2rC+LEIhwXkm4GZkFahH9UWw4m1VUBmty2V9GcIUwp1/vUNfBCvDA8zyM7+r6P1SHjU4DkKVa0qIqF7AEwqASIbg2gjDMuxHyZ+c1izFQLu/8Nf3WFZUNcpMy92jd+wjICK0HzTKJYUVmraEPAQ96bvuibSo9COX9jAhC0xiG6AXurIm+bExk7Bq49uzkDe2AuK8xc3/ygHsr1pqCP/W99SKv2pds52hZb+ezghamFhznJ67EZIWawes9YJ1khIX6i2/N5qTvgFjv4C7d5IQVuMJgY9On9IbwuLJXnr8Shmy7vcc57b2irRiuKmDW4Vc4SBpRwW7wgvjpeuTwvsZyQgDrWFpKvY8PgrOK9MkXdnLPg3kkgFZF7CVHsogJZa3CVoA9uS4D7RT5hm9gsdVkxMkop+//w5bg1+fm/hrGD8wSmYNzLvld6IJOZxQWhE5JPe+WNzC5zEITxZGomzdKYDHRqp+0tQF8xVyHyZPuWPSgqAE/e5jyJ5m/sBa5Vl5oyKxajcv+gKZJhPiOfMLvgX7/+I8mFVccLz4kljK0KUhIScmYQBjWpAlN8JE2yzh2KmEhiTGqNsA9D9MbsRxZ3O3v9GauT2TYcH/EQCLvqftFn05a4Asz/car34eE7UcMcYvUvn0FYiIpHWmxHXAVCxZQ7+u4XQr/ulMxjKgOOeVFBfYcYl5uBc+U/UWM2nimDDF8q3Ugyybv6lTTke31qSGAqYvZLfHCV2CGK/Z2a83Fq6QOROsSdL1pntMU2jNLt6hC3XXzzeATmGTWPxuJXikRvueMc097kOn6G0NyU0qK4HDvymMcPhlibsSiBIPnoUzv6Had7ED6A7ccKy8hzk9ZZx0BGMoZjnAlpJJGK7HC57yTzsg05tX7NRcP5r9MNN/uBF9nJzY5ggZaQIETXUhfoxCfwY/Ce6nP0iHFHdPlsCbydHefp1dgyjPzQMvI6l9OG9n3OSLh9+rKmYQMyz1pi4aHcvt8CzqYhRKlPQEP1xNchQ0IXBhrm2Mi7SER0nimnz07nF1Ki9mPGk757hCsQz+xGwOj7oz1YeCtFT7vISs/kX9zeOtcpnfUlS0roQkwz1tQU2aTsZ5A42vyFRKRE0rv1KASXsiDNZd0/jkhmcneYQxD3L0ttYjsUg2BP/clXNyVWEoTsPs17xtZb+zZ0bAo29G0CEmFlx9n7PewUJOEqzv0s/W9jP0iIBNEsQ9mWQr6Brar3wQRrfjLk6ip8HUNh+YhhSjW0eSA9NsgQE6GaPKaGe03dNQOk8Yu5O1WrNOP+/Wjn2vWTb8TMbusjEgGG7BjGM5YlchUSurpXob/EPZAaR9gbMPt4CtHKUhB87t256CPGqoYxAVNcEhglUOM/p9hEjwkKZ3dB0AOqKswNtb+Nja9vgMFFCte6dOTXDRuHlyKL6IenAIo+5JBYX15WlGhCHiiWXQpbJoFbjeie3fxjGDjRzr8us5tvKUHXQJQCVW6SlKk1uFImLIdngwkXUpv2hypJX8KRtf4uLPu3+x50HIS5g38o9wdVgPjcPxAIEB3fcyEl0IWAx1eUm1LU8h11yx+gzQ/snBaV2vt1VEvLtNtPFZVYvIDuSp
sWY8bv8owdZd4wHB1lJZgAp9bBiSTGGEJMlCOuu4lQDOL/Aj3XMW8SSg5zTZblxdxayss3hIkrtoct1YVxe0itQSpG/OR+m3ZNOLr43J2gFN3MagHZwPuGBZC0kW+7nyZM7Sp7FZA/1+A08ddSL3luh/dCaPTVtk6tY1q1t9JH6dcsl77+Kh4nslE0YRA0qQQQIsqz75n7Bu05aFw+g6oYBgqAs4p0uVoWSKtTtfucPHy8gwCn8lh8jeIpk0mWS64OXXPWqyPptuCOZvJPemmP5uYB9MWLrf1QZmZMWgVZHuMmQXXobMTjGz+Dsw/eEVP+nVL8ftDDxwEDT0XpUckl0v3Qt3Np44jFKNLIcm6CIobyN0QQuouOZEmAVVXcJP6NYclNMd3zdKoVVGzFZS0GqX1Qmw+U4rlS0Knl9p2vDtP/HMWcCtnTNP9KZjRF6sJr2Vu+/4oi4f0JwvbUrHdkcED64VFA53ZxvqAKIPE1ebZjFq6SH6BXXl+CkWGqBUAe4HGh+u1QEKNPGA4ETZV4GNTOKbCP98CEmzf7Vo2nxTZ+0F34OUgMtQgrLTYcy0yZLB/Dk7nCgFO3zRLsNZUpX+KQRkSZ/aqiXJpwDRDh4aL2e40ENPHVI5nbWvuQaT44TG8WMIL60jr5WKgj921RMDAeCWipSP6LLtCHwZrTc2UiJugF/AC2WgY4L3/T0MTIK2"}
return data
def data_json_str():
data = {"sodar_query_id": "YcqaXvPrIMSW2QTPjZeQAQ", "injector_basename": "sodar2",
"bg_hash_basename": "r_kJ4x66L0q9ptqPN1EZdQZJVGt7LCWecB4z-4tOz0Y",
"bg_binary": "ALzbBj814lyYEaftZLAVu8KNpcS+Et40flMgUba+katdDRF9kHyC5ekeOn+SnF/oOv/75lAHEFYOblxjV5F4SQhJh/HX5oNaB6yQEscwY+2xY7zf1AOQAdXlwstcQsfcf91ydo9bJs3/nAnh41iqmA3KkV9TfstrgriG5sc8NSoUWQywuHf7ZDZeun3Y92u01kXYPGO8rYRMrwOmuOuo1G4VKz01yCxYiTBspPgxnf7FUa45yXGKR151XIRz4IxwZBgy/9IfJW7j0hUjlY/0miYrdQDTKGXvXdhU+YZvQF9FqLDIrYhg5FTB7SlWwIxZrImc8w8pALEU2idJLMue130yPHz7GfnNs6cIoIb8v+Y5v78QUCPflrJP6GxBEej+a3Fmb2hm7pk2iK4hbMb3guNpMSIou8PIP4nd5KQrpDzuG/WOiaSZIuMfkYYifAhSdi6nam3SMto07vPYW4L1XOy4QCvmkbrMwE8A8FLNrC6IzhIPi3cURKXSE6sI/UFoo8jBYaD/961bsfjDRip/stsq5XCf+P2EhgLW9Yl95ddjtReaObOpV5Di5pMhexp0DaCjfmXZyOrZ+LA3UYcOarlSsAIEJZ85HTn7EiJl+DVPSXPmQSy8LAywMyAVuPtKwanswYNiqlYtayDAlPJI26Om2TOeZzO0lRASIyxK6zkms+YajVYJ1z2wNvnv81D1PzH5N9YbWjImivcqNOHZxF/88olXY6oHG+zBqOVTOLyFahFjD7ftMXKFncA9mnEKC/UNXEkdClNu8B63x/aUHyb4u398Eru3PAupW6gnasf404viputMyvkrGgr7AhTRVJNK4Zt5GoQ8znxJCJZ0TRrGH4XgKFIkcgYopx4fmYGc5hP4q4mqFDouvH/Q0NGjx2YpICYE5CSfG1iIV76XO6nTrZ7Fn4zfE+mkgmm7LU/yAGXu2mjeTL0K2nEyOtgcuxq5POsRRtyN3BpNFRZDG06NxTEVZPbbRnm6aEaL4dntcmYsrLu2bFw2nMywczkpyV3ld+jeItdjeLaeRMjEqxhfR21xsMg3AenilDzpPaYlBCosMK3h/MA1nCwLxGENmjHp4lFYPHJohRnMj2Bbs4ROeG7uZoVg/NTmNiagecZC3+xy7+e+hNSS1Dmdq/lSpYLwJPsgrRRutCBRY/Ie2rfToKEt5juHeg9ExyWA8QJpHOPmIwgvoTXlTjWnQoObJuvlwVlJiT3fFDhmox/tAtiy4HzzQeIXekN8mZu1Lee6qlJ0HFE5jP6FVfDZsdn1VPKe8l01YpktU107evEA8rzrdoTnpPAj+d0IRwTh0HylyKHuulw6RD1MOJxPHTY06aGf5IRjpsz+YOKLR/+UPGiTZq4fc12OXYI/rHZTEfcSQu+lkh2zi2q8NAcRBrexYG6WN9UQ7+q5bPxAOEKxtB265eA1JQVd13LIPlBEJEbNCcvBiQiAzA2wDEqR793VpC0EuCDXuCuHwYGuF23YaKqhOaapZS9xVT8aDwKpdo005BdGvyu5Bux2q23npsv3xDE++5F/ny3z57M1cbpfLJQ4YzMVFyNisvqR5rdY71Ms2mTXy/DyoS022LI21D1RMsc16qKD7oCm00M/ggQVC1X7tJDwl0oe/3iisPHUJRiI79FkGbazm9AbQQKUH2LnMPjZ6GEMLkVpQGhglE/yYwVVpsP/PRdK1Cdftg7OADzPty8G1Q5uFyvdmWmIuR5nbW9bebKvhYFCJZHm2DcWgu8tN5NG5/5lrGpqxoNqxaxPwzAocDdU0xwMajHidsg0nkMruMNd997EUOEIdHPvZZFbBG+4ZDZgaYLGRuxGF2lOYNNxMG7qZfoXV5Vw4h/G0Iy7hy6DXRnZCQWOXuGM6wGqwdG3yy085+gqnOyEclnbgsaVo7Ohz4P1u34rFRoSd+yoHs5Cy4iqCBZtu1o71jKxP+/yVbb+UGMNOOnSnrTO1Qs6MHYnQ+7yrN1AVKKwaN
FFtsVKp4dW5vv0+6CJ1TmiEuVekSTR6pQ7FYjjvdAXwob0OZDFoxXY7kAFxrIuHXqgzJ0cG1DjxFJtV1JGCAU2vPtS6iYoNbpQX2GRMQx31yWVG4CO0IYJWjraUwvswrtIFbxkJMP2H8GF1AaV4gLV10ZbNsX8V1m0SwPsburH/3ECRLu3IpU6VLdP53WrtBxF4cidDtgaBin9NuQp0bP9wC3TIR0nZ2OD5yDRPw//pGAzZqIMLhvB2AbrLt4qCFvOWKDxJ39Thy9HOyqJh2DEZ/oWUr496RdSvmYqH5yn/pmYFN+gAqgB33wIsbYJxQtGfT2NsIS8yVka1031cP0azO43smM9dXbkU6HVaxOS5Y1U5PR9pjxAilePqS+PUVOIegsGpLfR4rfjXFQt72kpCTNKG+y8/XWH6Brb2THTzGEF1UrNUZfc6+jJ5fflgGAOuECRgzJwr9x0bToMdomF5vrbaLcGbX+Rqw7+ob7GQ5/E9UmFaAOOeDIGd0eX0hwLP1ZEKnkW+4LHFY5h1L51tUIZVPFnsJ1dxEeGXU7zp2SIJ8nbdcXO3WP6o9Q38Hrrw6udiFNZT9lhKujoBYgUZ/d0EDZCS0JuB/vR4u9uHKic0PBVeZpiUtjlaPJbrdHJK5J+JycwsifHqXKeMUDPOkNdPptuif8vsrXnpTgIqVEXFwYI1SCXr/0/hWhm3kz8ZVMPoPyPSehNFvD5/heLy4BCxaW60SjKfDMWiyliTQRDFsnFJZ+CguIE9tYjkwkdtv6yRQI70ltEWhYEHsX0+uZdixmo3wMPT7xjT6wL7891UFDJIFy8WtwTj5VzdN5nSgwlh+yGF9Djn9ihLSN5EebavuLDiJYNlvVOA2mMKSdeB8jvFcwyH5Q8opwQZUWrdrahdkTRK98S3HoGlyMx2u5x+YUgNxrKUJZxfbI/53aDuS2BV2LY2jtVnXQohEll0afDuVvmWNfJ8SQ2tHwX/YWuYYFKUg05ZF8yfxBdn9oezJMLorAa4wyomHtoowUL2j1ITOYZaG46V+sC6Uwf1T9VCDA3Dyugwz34e+NErKouptm99HeY22BzpTvUutUGo4/0m5Wt5CvbX1fEBeTWMb6BZ4sdP/PxJpR+vxBIFStciwLqBYIlVF/TKzKK0OR4gZp/QF4Z2GZPQUSQ4ZMQST3zhcMIsxNnzThwhDQifjvlTBhfM5bNtV6mNtPzQ9UbY5Qk6/88YFt5jJPaVhnfnaZtC9D7WlO3aNSIJ8QmNhg3J3dp6BiCjKMzBjCkXmlOcWGjTO5oQ1p2HKUubHNxQDpmmLthX8n15qLusnaQUeKSf+vFxcneT4DicqBNpECnPSfbwcIZqbDpwGLjNsRNebJwEI2xdbX+MBOPVQ303ptQHEMychPD+tbi7SCTIgJcHfAfRYAW5/AxbzelIwrk/6PC+a60CSW3OOLuOAoP5CLpeg+zRWW6CL5k9DdFDf4ve2vGu+k9V+2JagU56Ea8YCHOQ5VIzqkF3jIh6LkhCYmCyjFBGQLz4Cvu5OGI7TLC9v5/LQhshoqrEcc/JexcJzbx1i/l7In6HW5Zp1BpJvtruexwzKsbZKclmaG4HzPEGUKHgzwDDkMTFYSU2qPpncqPw6NtBp8og4n1KjyAXpfYecFU5tQVDyeUc7tMUgV0BE/WsXoheOKx7Cvo3bRuySPhSih+PBGp6FzP/S/rLxPOmZ/Lcf2F0IXXtR0Cj4gHXhigNou+PrhTgmeW1ayRnYYJ8Ps5JCP5nW5i2EAlH5SvcyAaoXIb2T3l1z7TmEEVLMRC3k5d+fqxB1AEIYZLvLMoCO2tFBh6L7u3Vyh/k0SchaqKKI9U/JVG/l4QwFqpZ8E+C/p15UVgwMwHaAFBKULWncbwNiSk0R2H46n5Ol7+2kv2yfkFvdYrf7VsKD76/6JOCQydMM2BKmL1NL91N+Yd0hmaYBrrFIxVzxkjP8VULgCRwylKpsTBdYp0nvfVeWU+vq1CXy2hhO
xzWMVRmMAE9FO6Fux0fprVdrkxDgLk50mhP7Eq8kfnzpXc3ItSgAddB1JCvUdYzhnsQh+F/viDl5iub0LIeF+Kp+HyemXDTkf9OVM1DGwp3CxgNIam2Z1/UxTVC76H8cKhjeo8yOhzoVF0p46N/o2eOmhB55ZcWKvFESKuRMbV+MjcSAhWE+76v8VgxrfwoIfhg2YlwLfMTiapbfMZ5tSh5rutxOuReIAbh8Mo/IYBesQQ2SybvA2GFg7Mcfe2rC+LEIhwXkm4GZkFahH9UWw4m1VUBmty2V9GcIUwp1/vUNfBCvDA8zyM7+r6P1SHjU4DkKVa0qIqF7AEwqASIbg2gjDMuxHyZ+c1izFQLu/8Nf3WFZUNcpMy92jd+wjICK0HzTKJYUVmraEPAQ96bvuibSo9COX9jAhC0xiG6AXurIm+bExk7Bq49uzkDe2AuK8xc3/ygHsr1pqCP/W99SKv2pds52hZb+ezghamFhznJ67EZIWawes9YJ1khIX6i2/N5qTvgFjv4C7d5IQVuMJgY9On9IbwuLJXnr8Shmy7vcc57b2irRiuKmDW4Vc4SBpRwW7wgvjpeuTwvsZyQgDrWFpKvY8PgrOK9MkXdnLPg3kkgFZF7CVHsogJZa3CVoA9uS4D7RT5hm9gsdVkxMkop+//w5bg1+fm/hrGD8wSmYNzLvld6IJOZxQWhE5JPe+WNzC5zEITxZGomzdKYDHRqp+0tQF8xVyHyZPuWPSgqAE/e5jyJ5m/sBa5Vl5oyKxajcv+gKZJhPiOfMLvgX7/+I8mFVccLz4kljK0KUhIScmYQBjWpAlN8JE2yzh2KmEhiTGqNsA9D9MbsRxZ3O3v9GauT2TYcH/EQCLvqftFn05a4Asz/car34eE7UcMcYvUvn0FYiIpHWmxHXAVCxZQ7+u4XQr/ulMxjKgOOeVFBfYcYl5uBc+U/UWM2nimDDF8q3Ugyybv6lTTke31qSGAqYvZLfHCV2CGK/Z2a83Fq6QOROsSdL1pntMU2jNLt6hC3XXzzeATmGTWPxuJXikRvueMc097kOn6G0NyU0qK4HDvymMcPhlibsSiBIPnoUzv6Had7ED6A7ccKy8hzk9ZZx0BGMoZjnAlpJJGK7HC57yTzsg05tX7NRcP5r9MNN/uBF9nJzY5ggZaQIETXUhfoxCfwY/Ce6nP0iHFHdPlsCbydHefp1dgyjPzQMvI6l9OG9n3OSLh9+rKmYQMyz1pi4aHcvt8CzqYhRKlPQEP1xNchQ0IXBhrm2Mi7SER0nimnz07nF1Ki9mPGk757hCsQz+xGwOj7oz1YeCtFT7vISs/kX9zeOtcpnfUlS0roQkwz1tQU2aTsZ5A42vyFRKRE0rv1KASXsiDNZd0/jkhmcneYQxD3L0ttYjsUg2BP/clXNyVWEoTsPs17xtZb+zZ0bAo29G0CEmFlx9n7PewUJOEqzv0s/W9jP0iIBNEsQ9mWQr6Brar3wQRrfjLk6ip8HUNh+YhhSjW0eSA9NsgQE6GaPKaGe03dNQOk8Yu5O1WrNOP+/Wjn2vWTb8TMbusjEgGG7BjGM5YlchUSurpXob/EPZAaR9gbMPt4CtHKUhB87t256CPGqoYxAVNcEhglUOM/p9hEjwkKZ3dB0AOqKswNtb+Nja9vgMFFCte6dOTXDRuHlyKL6IenAIo+5JBYX15WlGhCHiiWXQpbJoFbjeie3fxjGDjRzr8us5tvKUHXQJQCVW6SlKk1uFImLIdngwkXUpv2hypJX8KRtf4uLPu3+x50HIS5g38o9wdVgPjcPxAIEB3fcyEl0IWAx1eUm1LU8h11yx+gzQ/snBaV2vt1VEvLtNtPFZVYvIDuSpsWY8bv8owdZd4wHB1lJZgAp9bBiSTGGEJMlCOuu4lQDOL/Aj3XMW8SSg5zTZblxdxayss3hIkrtoct1YVxe0itQSpG/OR+m3ZNOLr43J2gFN3MagHZwPuGBZC0kW+7nyZM7Sp7FZA/1+A08
ddSL3luh/dCaPTVtk6tY1q1t9JH6dcsl77+Kh4nslE0YRA0qQQQIsqz75n7Bu05aFw+g6oYBgqAs4p0uVoWSKtTtfucPHy8gwCn8lh8jeIpk0mWS64OXXPWqyPptuCOZvJPemmP5uYB9MWLrf1QZmZMWgVZHuMmQXXobMTjGz+Dsw/eEVP+nVL8ftDDxwEDT0XpUckl0v3Qt3Np44jFKNLIcm6CIobyN0QQuouOZEmAVVXcJP6NYclNMd3zdKoVVGzFZS0GqX1Qmw+U4rlS0Knl9p2vDtP/HMWcCtnTNP9KZjRF6sJr2Vu+/4oi4f0JwvbUrHdkcED64VFA53ZxvqAKIPE1ebZjFq6SH6BXXl+CkWGqBUAe4HGh+u1QEKNPGA4ETZV4GNTOKbCP98CEmzf7Vo2nxTZ+0F34OUgMtQgrLTYcy0yZLB/Dk7nCgFO3zRLsNZUpX+KQRkSZ/aqiXJpwDRDh4aL2e40ENPHVI5nbWvuQaT44TG8WMIL60jr5WKgj921RMDAeCWipSP6LLtCHwZrTc2UiJugF/AC2WgY4L3/T0MTIK2"}
data = json.dumps(data)
return data
# Read the JSON payload back from the local file
def load_location():
    """Read ../files/json.txt, echo its contents, and return the parsed JSON object.

    Returns:
        The object decoded from the file's JSON text (previously this
        function discarded it and returned None; returning it is
        backward-compatible for callers that ignore the result).

    Raises:
        FileNotFoundError: if ../files/json.txt does not exist.
        json.JSONDecodeError: if the file does not contain valid JSON.
        KeyError: if the payload lacks a 'sodar_query_id' key.
    """
    # Explicit encoding so the read does not depend on the platform default.
    # presumably the file holds the payload produced by data_json_str() — TODO confirm
    with open('../files/json.txt', 'rt', encoding='utf-8') as f:
        text = f.read()
    print(text)
    print(type(text))
    js = json.loads(text)
    print(js['sodar_query_id'])
    return js
# Copy the local file to a second local path
def write_location():
    """Copy ../files/json.txt to ../files/json1.txt as text.

    Raises:
        FileNotFoundError: if the source file does not exist.
    """
    # Explicit utf-8 on both sides so the copy is byte-faithful regardless of
    # the platform's default locale encoding.
    with open('../files/json.txt', 'rt', encoding='utf-8') as src:
        text = src.read()
    with open('../files/json1.txt', 'w', encoding='utf-8') as dst:
        dst.write(text)
if __name__ == '__main__':
    # Entry point: only the file-copy step runs by default; the commented
    # calls below are alternative manual exercises.
    # print_url(url=url, header=headers)
    # load_location()
    write_location()
| 140.927928
| 6,714
| 0.915681
| 773
| 15,643
| 18.452781
| 0.463131
| 0.002804
| 0.004206
| 0.002524
| 0.925547
| 0.925547
| 0.925547
| 0.925547
| 0.925547
| 0.925547
| 0
| 0.144264
| 0.032666
| 15,643
| 111
| 6,715
| 140.927928
| 0.798374
| 0.020456
| 0
| 0.160714
| 0
| 0.107143
| 0.918402
| 0.874656
| 0
| 1
| 0
| 0
| 0
| 1
| 0.125
| false
| 0.053571
| 0.053571
| 0
| 0.214286
| 0.142857
| 0
| 0
| 1
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 13
|
30989459794313f3040cb1606c1e87631a244492
| 27,430
|
py
|
Python
|
app/tests/manual/sample_tweet_stream.py
|
MichaelCurrin/twitterverse
|
9629f848377e4346be833db70f11c593cc0d7b6c
|
[
"MIT"
] | 10
|
2019-03-22T07:07:41.000Z
|
2022-01-26T00:57:45.000Z
|
app/tests/manual/sample_tweet_stream.py
|
MichaelCurrin/twitterverse
|
9629f848377e4346be833db70f11c593cc0d7b6c
|
[
"MIT"
] | 70
|
2017-07-12T19:49:38.000Z
|
2020-09-02T10:03:28.000Z
|
app/tests/manual/sample_tweet_stream.py
|
MichaelCurrin/twitterverse
|
9629f848377e4346be833db70f11c593cc0d7b6c
|
[
"MIT"
] | 2
|
2017-06-30T07:13:39.000Z
|
2020-12-04T00:39:12.000Z
|
#!/usr/bin/env python
"""
Process a sample tweet fetched using streaming API.
Usage:
$ ipython -i FILENAME
>>> data = main()
>>> data.keys() # Then explore the `data` object in ipython.
['contributors', 'truncated', 'text', 'is_quote_status',
'in_reply_to_status_id', 'id', 'favorite_count', 'source',
'retweeted', 'coordinates', 'timestamp_ms', 'entities',
'in_reply_to_screen_name', 'id_str', 'retweet_count',
'in_reply_to_user_id', 'favorited', 'retweeted_status',
'user', 'geo', 'in_reply_to_user_id_str', 'lang',
'created_at', 'filter_level', 'in_reply_to_status_id_str',
'place']
>>> data['text']
'RT @twice_ph: [TWICETAGRAM] 170701\n\uc624\ub79c\ub9cc\uc5d0 #\ub450\ubd80\ud55c\ubaa8 \n\uc800\ud76c\ub294 \uc798 \uc788\uc5b4\uc694 \uc6b0\ub9ac #ONCE \ub294?\n#ONCE \uac00 \ubcf4\uace0\uc2f6\ub2e4\n\n\ub4a4\uc5d4 \uc0c1\ud07c\uc8fc\uc758 \ucbd4\uc704\uac00 \ucc0d\uc5b4\uc900 \uc0ac\uc9c4\u314b\u314b\u314b \n#TWICE #\ud2b8\uc640\uc774\uc2a4\u2026 '
>>> print(data['text'])
RT @twice_ph: [TWICETAGRAM] 170701
오랜만에 #두부한모
저희는 잘 있어요 우리 #ONCE 는?
#ONCE 가 보고싶다
뒤엔 상큼주의 쯔위가 찍어준 사진ㅋㅋㅋ
#TWICE #트와이스…
If copying from the command line into Python, characters need to be escaped
or the prefix `r` must be applied to the string.
Alternatives which I couldn't get to work fully:
https://stackoverflow.com/questions/22394235/invalid-control-character-with-python-json-loads
https://stackoverflow.com/questions/7262828/python-how-to-convert-string-literal-to-raw-string-literal
"""
import json
def main():
# No line breaks, straight from API
x = r'{"created_at":"Sat Jul 01 08:43:29 +0000 2017","id":881070742980313088,"id_str":"881070742980313088","text":"RT @twice_ph: [TWICETAGRAM] 170701\n\\uc624\\ub79c\\ub9cc\\uc5d0 #\\ub450\\ubd80\\ud55c\\ubaa8 \n\\uc800\\ud76c\\ub294 \\uc798 \\uc788\\uc5b4\\uc694 \\uc6b0\\ub9ac #ONCE \\ub294?\n#ONCE \\uac00 \\ubcf4\\uace0\\uc2f6\\ub2e4\n\n\\ub4a4\\uc5d4 \\uc0c1\\ud07c\\uc8fc\\uc758 \\ucbd4\\uc704\\uac00 \\ucc0d\\uc5b4\\uc900 \\uc0ac\\uc9c4\\u314b\\u314b\\u314b \n#TWICE #\\ud2b8\\uc640\\uc774\\uc2a4\\u2026 ","source":"\\u003ca href="http:\\/\\/twitter.com\\/download\\/android" rel="nofollow"\\u003eTwitter for Android\\u003c\\/a\\u003e","truncated":false,"in_reply_to_status_id":null,"in_reply_to_status_id_str":null,"in_reply_to_user_id":null,"in_reply_to_user_id_str":null,"in_reply_to_screen_name":null,"user":{"id":3722867834,"id_str":"3722867834","name":"Amazing Twice","screen_name":"metdew1","location":"\\ub300\\ud55c\\ubbfc\\uad6d \\uc11c\\uc6b8","url":null,"description":"I\'m a korean. My age too old. \nBut really really like TWICE. World ONCE\nfamily i alway thanks your support to\nTWICE. 
Wish good luck alway with U.","protected":false,"verified":false,"followers_count":977,"friends_count":1204,"listed_count":59,"favourites_count":114142,"statuses_count":100351,"created_at":"Tue Sep 29 06:19:21 +0000 2015","utc_offset":-25200,"time_zone":"Pacific Time (US & Canada)","geo_enabled":false,"lang":"ko","contributors_enabled":false,"is_translator":false,"profile_background_color":"000000","profile_background_image_url":"http:\\/\\/abs.twimg.com\\/images\\/themes\\/theme1\\/bg.png","profile_background_image_url_https":"https:\\/\\/abs.twimg.com\\/images\\/themes\\/theme1\\/bg.png","profile_background_tile":false,"profile_link_color":"9266CC","profile_sidebar_border_color":"000000","profile_sidebar_fill_color":"000000","profile_text_color":"000000","profile_use_background_image":false,"profile_image_url":"http:\\/\\/pbs.twimg.com\\/profile_images\\/858888732539207681\\/89mbzS98_normal.jpg","profile_image_url_https":"https:\\/\\/pbs.twimg.com\\/profile_images\\/858888732539207681\\/89mbzS98_normal.jpg","profile_banner_url":"https:\\/\\/pbs.twimg.com\\/profile_banners\\/3722867834\\/1494858108","default_profile":false,"default_profile_image":false,"following":null,"follow_request_sent":null,"notifications":null},"geo":null,"coordinates":null,"place":null,"contributors":null,"retweeted_status":{"created_at":"Sat Jul 01 06:10:54 +0000 2017","id":881032343829463040,"id_str":"881032343829463040","text":"[TWICETAGRAM] 170701\n\\uc624\\ub79c\\ub9cc\\uc5d0 #\\ub450\\ubd80\\ud55c\\ubaa8 \n\\uc800\\ud76c\\ub294 \\uc798 \\uc788\\uc5b4\\uc694 \\uc6b0\\ub9ac #ONCE \\ub294?\n#ONCE \\uac00 \\ubcf4\\uace0\\uc2f6\\ub2e4\n\n\\ub4a4\\uc5d4 \\uc0c1\\ud07c\\uc8fc\\uc758 \\ucbd4\\uc704\\uac00 \\ucc0d\\uc5b4\\uc900 \\uc0ac\\uc9c4\\u314b\\u314b\\u314b \n#TWICE #\\ud2b8\\uc640\\uc774\\uc2a4\\u2026 https:\\/\\/t.co\\/9CPNYQiwcq","display_text_range":[0,140],"source":"\\u003ca href="http:\\/\\/twitter.com\\/download\\/android" rel="nofollow"\\u003eTwitter for 
Android\\u003c\\/a\\u003e","truncated":true,"in_reply_to_status_id":null,"in_reply_to_status_id_str":null,"in_reply_to_user_id":null,"in_reply_to_user_id_str":null,"in_reply_to_screen_name":null,"user":{"id":3779372892,"id_str":"3779372892","name":"TWICE PHILIPPINES \\u2728","screen_name":"twice_ph","location":"Philippines \\ud544\\ub9ac\\ud540","url":null,"description":"Philippine-based support group for TWICE. Officially affiliated with JYPNPH and PKCI http:\\/\\/facebook.com\\/groups\\/TWICEPH\\u2026 ... http:\\/\\/facebook.com\\/TWICEPH\\/","protected":false,"verified":false,"followers_count":7071,"friends_count":224,"listed_count":72,"favourites_count":523,"statuses_count":12448,"created_at":"Sun Oct 04 09:05:12 +0000 2015","utc_offset":null,"time_zone":null,"geo_enabled":false,"lang":"en","contributors_enabled":false,"is_translator":false,"profile_background_color":"FFFFFF","profile_background_image_url":"http:\\/\\/pbs.twimg.com\\/profile_background_images\\/722977345053712385\\/naASDMjX.jpg","profile_background_image_url_https":"https:\\/\\/pbs.twimg.com\\/profile_background_images\\/722977345053712385\\/naASDMjX.jpg","profile_background_tile":true,"profile_link_color":"FF3485","profile_sidebar_border_color":"000000","profile_sidebar_fill_color":"000000","profile_text_color":"000000","profile_use_background_image":true,"profile_image_url":"http:\\/\\/pbs.twimg.com\\/profile_images\\/863338740629905408\\/tO_19lHj_normal.jpg","profile_image_url_https":"https:\\/\\/pbs.twimg.com\\/profile_images\\/863338740629905408\\/tO_19lHj_normal.jpg","profile_banner_url":"https:\\/\\/pbs.twimg.com\\/profile_banners\\/3779372892\\/1494670969","default_profile":false,"default_profile_image":false,"following":null,"follow_request_sent":null,"notifications":null},"geo":null,"coordinates":null,"place":null,"contributors":null,"is_quote_status":false,"extended_tweet":{"full_text":"[TWICETAGRAM] 170701\n\\uc624\\ub79c\\ub9cc\\uc5d0 #\\ub450\\ubd80\\ud55c\\ubaa8 
\n\\uc800\\ud76c\\ub294 \\uc798 \\uc788\\uc5b4\\uc694 \\uc6b0\\ub9ac #ONCE \\ub294?\n#ONCE \\uac00 \\ubcf4\\uace0\\uc2f6\\ub2e4\n\n\\ub4a4\\uc5d4 \\uc0c1\\ud07c\\uc8fc\\uc758 \\ucbd4\\uc704\\uac00 \\ucc0d\\uc5b4\\uc900 \\uc0ac\\uc9c4\\u314b\\u314b\\u314b \n#TWICE #\\ud2b8\\uc640\\uc774\\uc2a4 \nhttps:\\/\\/t.co\\/NHPtfkruR4 https:\\/\\/t.co\\/WRv9qP8Mk2","display_text_range":[0,129],"entities":{"hashtags":[{"text":"\\ub450\\ubd80\\ud55c\\ubaa8","indices":[26,31]},{"text":"ONCE","indices":[46,51]},{"text":"ONCE","indices":[55,60]},{"text":"TWICE","indices":[92,98]},{"text":"\\ud2b8\\uc640\\uc774\\uc2a4","indices":[99,104]}],"urls":[{"url":"https:\\/\\/t.co\\/NHPtfkruR4","expanded_url":"https:\\/\\/www.instagram.com\\/p\\/BV_i2J_gx1w\\/","display_url":"instagram.com\\/p\\/BV_i2J_gx1w\\/","indices":[106,129]}],"user_mentions":[],"symbols":[],"media":[{"id":881032307179573248,"id_str":"881032307179573248","indices":[130,153],"media_url":"http:\\/\\/pbs.twimg.com\\/media\\/DDoONykU0AAT3d6.jpg","media_url_https":"https:\\/\\/pbs.twimg.com\\/media\\/DDoONykU0AAT3d6.jpg","url":"https:\\/\\/t.co\\/WRv9qP8Mk2","display_url":"pic.twitter.com\\/WRv9qP8Mk2","expanded_url":"https:\\/\\/twitter.com\\/twice_ph\\/status\\/881032343829463040\\/photo\\/1","type":"photo","sizes":{"thumb":{"w":150,"h":150,"resize":"crop"},"small":{"w":680,"h":680,"resize":"fit"},"medium":{"w":960,"h":960,"resize":"fit"},"large":{"w":960,"h":960,"resize":"fit"}}},{"id":881032328272683008,"id_str":"881032328272683008","indices":[130,153],"media_url":"http:\\/\\/pbs.twimg.com\\/media\\/DDoOPBJUIAAlDMF.jpg","media_url_https":"https:\\/\\/pbs.twimg.com\\/media\\/DDoOPBJUIAAlDMF.jpg","url":"https:\\/\\/t.co\\/WRv9qP8Mk2","display_url":"pic.twitter.com\\/WRv9qP8Mk2","expanded_url":"https:\\/\\/twitter.com\\/twice_ph\\/status\\/881032343829463040\\/photo\\/1","type":"photo","sizes":{"large":{"w":734,"h":734,"resize":"fit"},"thumb":{"w":150,"h":150,"resize":"crop"},"medium":{"w":734,"h":734,"resize":"fit"},"smal
l":{"w":680,"h":680,"resize":"fit"}}}]},"extended_entities":{"media":[{"id":881032307179573248,"id_str":"881032307179573248","indices":[130,153],"media_url":"http:\\/\\/pbs.twimg.com\\/media\\/DDoONykU0AAT3d6.jpg","media_url_https":"https:\\/\\/pbs.twimg.com\\/media\\/DDoONykU0AAT3d6.jpg","url":"https:\\/\\/t.co\\/WRv9qP8Mk2","display_url":"pic.twitter.com\\/WRv9qP8Mk2","expanded_url":"https:\\/\\/twitter.com\\/twice_ph\\/status\\/881032343829463040\\/photo\\/1","type":"photo","sizes":{"thumb":{"w":150,"h":150,"resize":"crop"},"small":{"w":680,"h":680,"resize":"fit"},"medium":{"w":960,"h":960,"resize":"fit"},"large":{"w":960,"h":960,"resize":"fit"}}},{"id":881032328272683008,"id_str":"881032328272683008","indices":[130,153],"media_url":"http:\\/\\/pbs.twimg.com\\/media\\/DDoOPBJUIAAlDMF.jpg","media_url_https":"https:\\/\\/pbs.twimg.com\\/media\\/DDoOPBJUIAAlDMF.jpg","url":"https:\\/\\/t.co\\/WRv9qP8Mk2","display_url":"pic.twitter.com\\/WRv9qP8Mk2","expanded_url":"https:\\/\\/twitter.com\\/twice_ph\\/status\\/881032343829463040\\/photo\\/1","type":"photo","sizes":{"large":{"w":734,"h":734,"resize":"fit"},"thumb":{"w":150,"h":150,"resize":"crop"},"medium":{"w":734,"h":734,"resize":"fit"},"small":{"w":680,"h":680,"resize":"fit"}}}]}},"retweet_count":1,"favorite_count":40,"entities":{"hashtags":[{"text":"\\ub450\\ubd80\\ud55c\\ubaa8","indices":[26,31]},{"text":"ONCE","indices":[46,51]},{"text":"ONCE","indices":[55,60]},{"text":"TWICE","indices":[92,98]},{"text":"\\ud2b8\\uc640\\uc774\\uc2a4","indices":[99,104]}],"urls":[{"url":"https:\\/\\/t.co\\/9CPNYQiwcq","expanded_url":"https:\\/\\/twitter.com\\/i\\/web\\/status\\/881032343829463040","display_url":"twitter.com\\/i\\/web\\/status\\/8\\u2026","indices":[106,129]}],"user_mentions":[],"symbols":[]},"favorited":false,"retweeted":false,"possibly_sensitive":false,"filter_level":"low","lang":"ko"},"is_quote_status":false,"retweet_count":0,"favorite_count":0,"entities":{"hashtags":[{"text":"\\ub450\\ubd80\\ud55c\\ubaa8","ind
ices":[40,45]},{"text":"ONCE","indices":[60,65]},{"text":"ONCE","indices":[69,74]},{"text":"TWICE","indices":[106,112]},{"text":"\\ud2b8\\uc640\\uc774\\uc2a4","indices":[113,118]}],"urls":[{"url":"","expanded_url":null,"indices":[120,120]}],"user_mentions":[{"screen_name":"twice_ph","name":"TWICE PHILIPPINES \\u2728","id":3779372892,"id_str":"3779372892","indices":[3,12]}],"symbols":[]},"favorited":false,"retweeted":false,"filter_level":"low","lang":"ko","timestamp_ms":"1498898609286"}'
# With link breaks from formatting - see http://jsonviewer.stack.hu/
y = r"""{
"id": 881070742980313088,
"created_at": "Sat Jul 01 08:43:29 +0000 2017",
"id_str": "881070742980313088",
"text": "RT @twice_ph: [TWICETAGRAM] 170701\n\uc624\ub79c\ub9cc\uc5d0 #\ub450\ubd80\ud55c\ubaa8 \n\uc800\ud76c\ub294 \uc798 \uc788\uc5b4\uc694 \uc6b0\ub9ac #ONCE \ub294?\n#ONCE \uac00 \ubcf4\uace0\uc2f6\ub2e4\n\n\ub4a4\uc5d4 \uc0c1\ud07c\uc8fc\uc758 \ucbd4\uc704\uac00 \ucc0d\uc5b4\uc900 \uc0ac\uc9c4\u314b\u314b\u314b \n#TWICE #\ud2b8\uc640\uc774\uc2a4\u2026 ",
"source": "\u003ca href=\"http:\/\/twitter.com\/download\/android\" rel=\"nofollow\"\u003eTwitter for Android\u003c\/a\u003e",
"truncated": false,
"in_reply_to_status_id": null,
"in_reply_to_status_id_str": null,
"in_reply_to_user_id": null,
"in_reply_to_user_id_str": null,
"in_reply_to_screen_name": null,
"user": {
"id": 3722867834,
"id_str": "3722867834",
"name": "Amazing Twice",
"screen_name": "metdew1",
"location": "\ub300\ud55c\ubbfc\uad6d \uc11c\uc6b8",
"url": null,
"description": "I'm a korean. My age too old. \nBut really really like TWICE. World ONCE\nfamily i alway thanks your support to\nTWICE. Wish good luck alway with U.",
"protected": false,
"verified": false,
"followers_count": 977,
"friends_count": 1204,
"listed_count": 59,
"favourites_count": 114142,
"statuses_count": 100351,
"created_at": "Tue Sep 29 06:19:21 +0000 2015",
"utc_offset": -25200,
"time_zone": "Pacific Time (US & Canada)",
"geo_enabled": false,
"lang": "ko",
"contributors_enabled": false,
"is_translator": false,
"profile_background_color": "000000",
"profile_background_image_url": "http:\/\/abs.twimg.com\/images\/themes\/theme1\/bg.png",
"profile_background_image_url_https": "https:\/\/abs.twimg.com\/images\/themes\/theme1\/bg.png",
"profile_background_tile": false,
"profile_link_color": "9266CC",
"profile_sidebar_border_color": "000000",
"profile_sidebar_fill_color": "000000",
"profile_text_color": "000000",
"profile_use_background_image": false,
"profile_image_url": "http:\/\/pbs.twimg.com\/profile_images\/858888732539207681\/89mbzS98_normal.jpg",
"profile_image_url_https": "https:\/\/pbs.twimg.com\/profile_images\/858888732539207681\/89mbzS98_normal.jpg",
"profile_banner_url": "https:\/\/pbs.twimg.com\/profile_banners\/3722867834\/1494858108",
"default_profile": false,
"default_profile_image": false,
"following": null,
"follow_request_sent": null,
"notifications": null
},
"geo": null,
"coordinates": null,
"place": null,
"contributors": null,
"retweeted_status": {
"created_at": "Sat Jul 01 06:10:54 +0000 2017",
"id": 881032343829463040,
"id_str": "881032343829463040",
"text": "[TWICETAGRAM] 170701\n\uc624\ub79c\ub9cc\uc5d0 #\ub450\ubd80\ud55c\ubaa8 \n\uc800\ud76c\ub294 \uc798 \uc788\uc5b4\uc694 \uc6b0\ub9ac #ONCE \ub294?\n#ONCE \uac00 \ubcf4\uace0\uc2f6\ub2e4\n\n\ub4a4\uc5d4 \uc0c1\ud07c\uc8fc\uc758 \ucbd4\uc704\uac00 \ucc0d\uc5b4\uc900 \uc0ac\uc9c4\u314b\u314b\u314b \n#TWICE #\ud2b8\uc640\uc774\uc2a4\u2026 https:\/\/t.co\/9CPNYQiwcq",
"display_text_range": [
0,
140
],
"source": "\u003ca href=\"http:\/\/twitter.com\/download\/android\" rel=\"nofollow\"\u003eTwitter for Android\u003c\/a\u003e",
"truncated": true,
"in_reply_to_status_id": null,
"in_reply_to_status_id_str": null,
"in_reply_to_user_id": null,
"in_reply_to_user_id_str": null,
"in_reply_to_screen_name": null,
"user": {
"id": 3779372892,
"id_str": "3779372892",
"name": "TWICE PHILIPPINES \u2728",
"screen_name": "twice_ph",
"location": "Philippines \ud544\ub9ac\ud540",
"url": null,
"description": "Philippine-based support group for TWICE. Officially affiliated with JYPNPH and PKCI http:\/\/facebook.com\/groups\/TWICEPH\u2026 ... http:\/\/facebook.com\/TWICEPH\/",
"protected": false,
"verified": false,
"followers_count": 7071,
"friends_count": 224,
"listed_count": 72,
"favourites_count": 523,
"statuses_count": 12448,
"created_at": "Sun Oct 04 09:05:12 +0000 2015",
"utc_offset": null,
"time_zone": null,
"geo_enabled": false,
"lang": "en",
"contributors_enabled": false,
"is_translator": false,
"profile_background_color": "FFFFFF",
"profile_background_image_url": "http:\/\/pbs.twimg.com\/profile_background_images\/722977345053712385\/naASDMjX.jpg",
"profile_background_image_url_https": "https:\/\/pbs.twimg.com\/profile_background_images\/722977345053712385\/naASDMjX.jpg",
"profile_background_tile": true,
"profile_link_color": "FF3485",
"profile_sidebar_border_color": "000000",
"profile_sidebar_fill_color": "000000",
"profile_text_color": "000000",
"profile_use_background_image": true,
"profile_image_url": "http:\/\/pbs.twimg.com\/profile_images\/863338740629905408\/tO_19lHj_normal.jpg",
"profile_image_url_https": "https:\/\/pbs.twimg.com\/profile_images\/863338740629905408\/tO_19lHj_normal.jpg",
"profile_banner_url": "https:\/\/pbs.twimg.com\/profile_banners\/3779372892\/1494670969",
"default_profile": false,
"default_profile_image": false,
"following": null,
"follow_request_sent": null,
"notifications": null
},
"geo": null,
"coordinates": null,
"place": null,
"contributors": null,
"is_quote_status": false,
"extended_tweet": {
"full_text": "[TWICETAGRAM] 170701\n\uc624\ub79c\ub9cc\uc5d0 #\ub450\ubd80\ud55c\ubaa8 \n\uc800\ud76c\ub294 \uc798 \uc788\uc5b4\uc694 \uc6b0\ub9ac #ONCE \ub294?\n#ONCE \uac00 \ubcf4\uace0\uc2f6\ub2e4\n\n\ub4a4\uc5d4 \uc0c1\ud07c\uc8fc\uc758 \ucbd4\uc704\uac00 \ucc0d\uc5b4\uc900 \uc0ac\uc9c4\u314b\u314b\u314b \n#TWICE #\ud2b8\uc640\uc774\uc2a4 \nhttps:\/\/t.co\/NHPtfkruR4 https:\/\/t.co\/WRv9qP8Mk2",
"display_text_range": [
0,
129
],
"entities": {
"hashtags": [
{
"text": "\ub450\ubd80\ud55c\ubaa8",
"indices": [
26,
31
]
},
{
"text": "ONCE",
"indices": [
46,
51
]
},
{
"text": "ONCE",
"indices": [
55,
60
]
},
{
"text": "TWICE",
"indices": [
92,
98
]
},
{
"text": "\ud2b8\uc640\uc774\uc2a4",
"indices": [
99,
104
]
}
],
"urls": [
{
"url": "https:\/\/t.co\/NHPtfkruR4",
"expanded_url": "https:\/\/www.instagram.com\/p\/BV_i2J_gx1w\/",
"display_url": "instagram.com\/p\/BV_i2J_gx1w\/",
"indices": [
106,
129
]
}
],
"user_mentions": [
],
"symbols": [
],
"media": [
{
"id": 881032307179573248,
"id_str": "881032307179573248",
"indices": [
130,
153
],
"media_url": "http:\/\/pbs.twimg.com\/media\/DDoONykU0AAT3d6.jpg",
"media_url_https": "https:\/\/pbs.twimg.com\/media\/DDoONykU0AAT3d6.jpg",
"url": "https:\/\/t.co\/WRv9qP8Mk2",
"display_url": "pic.twitter.com\/WRv9qP8Mk2",
"expanded_url": "https:\/\/twitter.com\/twice_ph\/status\/881032343829463040\/photo\/1",
"type": "photo",
"sizes": {
"thumb": {
"w": 150,
"h": 150,
"resize": "crop"
},
"small": {
"w": 680,
"h": 680,
"resize": "fit"
},
"medium": {
"w": 960,
"h": 960,
"resize": "fit"
},
"large": {
"w": 960,
"h": 960,
"resize": "fit"
}
}
},
{
"id": 881032328272683008,
"id_str": "881032328272683008",
"indices": [
130,
153
],
"media_url": "http:\/\/pbs.twimg.com\/media\/DDoOPBJUIAAlDMF.jpg",
"media_url_https": "https:\/\/pbs.twimg.com\/media\/DDoOPBJUIAAlDMF.jpg",
"url": "https:\/\/t.co\/WRv9qP8Mk2",
"display_url": "pic.twitter.com\/WRv9qP8Mk2",
"expanded_url": "https:\/\/twitter.com\/twice_ph\/status\/881032343829463040\/photo\/1",
"type": "photo",
"sizes": {
"large": {
"w": 734,
"h": 734,
"resize": "fit"
},
"thumb": {
"w": 150,
"h": 150,
"resize": "crop"
},
"medium": {
"w": 734,
"h": 734,
"resize": "fit"
},
"small": {
"w": 680,
"h": 680,
"resize": "fit"
}
}
}
]
},
"extended_entities": {
"media": [
{
"id": 881032307179573248,
"id_str": "881032307179573248",
"indices": [
130,
153
],
"media_url": "http:\/\/pbs.twimg.com\/media\/DDoONykU0AAT3d6.jpg",
"media_url_https": "https:\/\/pbs.twimg.com\/media\/DDoONykU0AAT3d6.jpg",
"url": "https:\/\/t.co\/WRv9qP8Mk2",
"display_url": "pic.twitter.com\/WRv9qP8Mk2",
"expanded_url": "https:\/\/twitter.com\/twice_ph\/status\/881032343829463040\/photo\/1",
"type": "photo",
"sizes": {
"thumb": {
"w": 150,
"h": 150,
"resize": "crop"
},
"small": {
"w": 680,
"h": 680,
"resize": "fit"
},
"medium": {
"w": 960,
"h": 960,
"resize": "fit"
},
"large": {
"w": 960,
"h": 960,
"resize": "fit"
}
}
},
{
"id": 881032328272683008,
"id_str": "881032328272683008",
"indices": [
130,
153
],
"media_url": "http:\/\/pbs.twimg.com\/media\/DDoOPBJUIAAlDMF.jpg",
"media_url_https": "https:\/\/pbs.twimg.com\/media\/DDoOPBJUIAAlDMF.jpg",
"url": "https:\/\/t.co\/WRv9qP8Mk2",
"display_url": "pic.twitter.com\/WRv9qP8Mk2",
"expanded_url": "https:\/\/twitter.com\/twice_ph\/status\/881032343829463040\/photo\/1",
"type": "photo",
"sizes": {
"large": {
"w": 734,
"h": 734,
"resize": "fit"
},
"thumb": {
"w": 150,
"h": 150,
"resize": "crop"
},
"medium": {
"w": 734,
"h": 734,
"resize": "fit"
},
"small": {
"w": 680,
"h": 680,
"resize": "fit"
}
}
}
]
}
},
"retweet_count": 1,
"favorite_count": 40,
"entities": {
"hashtags": [
{
"text": "\ub450\ubd80\ud55c\ubaa8",
"indices": [
26,
31
]
},
{
"text": "ONCE",
"indices": [
46,
51
]
},
{
"text": "ONCE",
"indices": [
55,
60
]
},
{
"text": "TWICE",
"indices": [
92,
98
]
},
{
"text": "\ud2b8\uc640\uc774\uc2a4",
"indices": [
99,
104
]
}
],
"urls": [
{
"url": "https:\/\/t.co\/9CPNYQiwcq",
"expanded_url": "https:\/\/twitter.com\/i\/web\/status\/881032343829463040",
"display_url": "twitter.com\/i\/web\/status\/8\u2026",
"indices": [
106,
129
]
}
],
"user_mentions": [
],
"symbols": [
]
},
"favorited": false,
"retweeted": false,
"possibly_sensitive": false,
"filter_level": "low",
"lang": "ko"
},
"is_quote_status": false,
"retweet_count": 0,
"favorite_count": 0,
"entities": {
"hashtags": [
{
"text": "\ub450\ubd80\ud55c\ubaa8",
"indices": [
40,
45
]
},
{
"text": "ONCE",
"indices": [
60,
65
]
},
{
"text": "ONCE",
"indices": [
69,
74
]
},
{
"text": "TWICE",
"indices": [
106,
112
]
},
{
"text": "\ud2b8\uc640\uc774\uc2a4",
"indices": [
113,
118
]
}
],
"urls": [
{
"url": "",
"expanded_url": null,
"indices": [
120,
120
]
}
],
"user_mentions": [
{
"screen_name": "twice_ph",
"name": "TWICE PHILIPPINES \u2728",
"id": 3779372892,
"id_str": "3779372892",
"indices": [
3,
12
]
}
],
"symbols": [
]
},
"favorited": false,
"retweeted": false,
"filter_level": "low",
"lang": "ko",
"timestamp_ms": "1498898609286"
}
"""
# Works on y, but not x
data = json.loads(y)
print((json.dumps(data, indent=4)))
return data
if __name__ == "__main__":
main()
| 52.347328
| 9,560
| 0.537149
| 2,909
| 27,430
| 4.895841
| 0.140254
| 0.024716
| 0.024716
| 0.020222
| 0.937719
| 0.934138
| 0.93147
| 0.928802
| 0.928802
| 0.928802
| 0
| 0.145265
| 0.268939
| 27,430
| 523
| 9,561
| 52.447419
| 0.564803
| 0.052862
| 0
| 0.58811
| 0
| 0.012739
| 0.713109
| 0.19714
| 0
| 0
| 0
| 0
| 0
| 1
| 0.002123
| false
| 0
| 0.002123
| 0
| 0.006369
| 0.002123
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
a50784e85bfaff4fb71807d43ea79954cd711a55
| 23,041
|
py
|
Python
|
teste.py
|
dkpm/ES-17-2
|
b965400ab35dd0101c06cba14367090bf88a45a3
|
[
"Apache-2.0"
] | null | null | null |
teste.py
|
dkpm/ES-17-2
|
b965400ab35dd0101c06cba14367090bf88a45a3
|
[
"Apache-2.0"
] | 3
|
2019-08-05T23:08:42.000Z
|
2019-08-05T23:14:17.000Z
|
teste.py
|
dkpm/ES-17-2
|
b965400ab35dd0101c06cba14367090bf88a45a3
|
[
"Apache-2.0"
] | null | null | null |
import pytest
from principal import somar
from principal import sub
def test_somar():
    """somar should return the sum of its two arguments."""
    expected = 6
    assert somar(2, 4) == expected
def test_sub():
    # NOTE(review): this expects sub(3, 2) == 20, but a conventional
    # subtraction would give 1 — confirm against principal.sub whether this
    # test is deliberately meant to fail (e.g. as a CI exercise).
    assert sub(3,2) == 20
| 1,772.384615
| 22,903
| 0.004731
| 26
| 23,041
| 4.115385
| 0.538462
| 0.242991
| 0.35514
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.056
| 0.994575
| 23,041
| 12
| 22,904
| 1,920.083333
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.285714
| 1
| 0.285714
| true
| 0
| 0.428571
| 0
| 0.714286
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 9
|
eb51353e72618f1a090e46e7c1c65e0cc24ef776
| 17,982
|
py
|
Python
|
tests/lib/bes/archive/test_archive_util.py
|
reconstruir/bes
|
82ff54b2dadcaef6849d7de424787f1dedace85c
|
[
"Apache-2.0"
] | null | null | null |
tests/lib/bes/archive/test_archive_util.py
|
reconstruir/bes
|
82ff54b2dadcaef6849d7de424787f1dedace85c
|
[
"Apache-2.0"
] | null | null | null |
tests/lib/bes/archive/test_archive_util.py
|
reconstruir/bes
|
82ff54b2dadcaef6849d7de424787f1dedace85c
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python
#-*- coding:utf-8; mode:python; indent-tabs-mode: nil; c-basic-offset: 2; tab-width: 2 -*-
from bes.testing.unit_test import unit_test
from bes.archive.archive_util import archive_util
from bes.archive.archiver import archiver
from bes.archive.temp_archive import temp_archive
class test_archive_util(unit_test):
  """Unit tests for archive_util (member removal, checksums, duplicate
  detection, combining archives, pattern matching and pattern files).

  All fixtures are built with temp_archive.make_temp_item_list, which
  pairs an in-archive path with that file's text content; archives are
  zip files created on the fly (kept on disk when self.DEBUG is set).
  """

  def test_remove_members(self):
    # Removing a directory prefix drops every member underneath it.
    items = temp_archive.make_temp_item_list([
      ( self.xp_path('foo-1.2.3/fruits/apple.txt'), 'apple.txt' ),
      ( self.xp_path('foo-1.2.3/fruits/durian.txt'), 'durian.txt' ),
      ( self.xp_path('foo-1.2.3/fruits/kiwi.txt'), 'kiwi.txt' ),
      ( self.xp_path('foo-1.2.3/.import/foo.txt'), 'foo.txt' ),
      ( self.xp_path('foo-1.2.3/.import/bar.txt'), 'bar.txt' ),
    ])
    tmp_archive = temp_archive.make_temp_archive(items, 'zip', delete = not self.DEBUG)
    # Sanity check: all five members are present before the removal.
    self.assertEqual( [
      'foo-1.2.3/.import/bar.txt',
      'foo-1.2.3/.import/foo.txt',
      'foo-1.2.3/fruits/apple.txt',
      'foo-1.2.3/fruits/durian.txt',
      'foo-1.2.3/fruits/kiwi.txt',
    ], archiver.members(tmp_archive))
    archive_util.remove_members(tmp_archive, [ 'foo-1.2.3/.import' ], debug = self.DEBUG)
    # Only the .import/ members should be gone.
    self.assertEqual( [
      'foo-1.2.3/fruits/apple.txt',
      'foo-1.2.3/fruits/durian.txt',
      'foo-1.2.3/fruits/kiwi.txt',
    ], archiver.members(tmp_archive))

  def test_member_checksums(self):
    # member_checksums returns a dict of member -> checksum for just the
    # members asked for (durian.txt is present but not requested).
    # NOTE(review): the 64-hex-digit values look like sha256 of the file
    # contents - confirm against archive_util's implementation.
    a = temp_archive.make_temp_archive(temp_archive.make_temp_item_list([
      ( self.xp_path('foo-1.2.3/fruits/apple.txt'), 'apple.txt' ),
      ( self.xp_path('foo-1.2.3/fruits/durian.txt'), 'durian.txt' ),
      ( self.xp_path('foo-1.2.3/fruits/kiwi.txt'), 'kiwi.txt' ),
    ]), 'zip', delete = not self.DEBUG)
    self.assertEqual( {
      'foo-1.2.3/fruits/apple.txt': '7269b27861e2a5ba6947b6279bb5e66b23439d83a65a3c0cf529f5834ed2e7fb',
      'foo-1.2.3/fruits/kiwi.txt': 'a7be44d9dda7e951298316b34ce84a1b2da8b5e0bead26118145bda4fbca9329',
    }, archive_util.member_checksums(a, [ 'foo-1.2.3/fruits/apple.txt', 'foo-1.2.3/fruits/kiwi.txt' ]) )

  def test_duplicate_members(self):
    # duplicate_members maps each member path found in more than one
    # archive to the set of archives containing it, regardless of content.
    a1 = temp_archive.make_temp_archive(temp_archive.make_temp_item_list([
      ( self.xp_path('foo-1.2.3/fruits/apple.txt'), 'apple.txt' ),
      ( self.xp_path('foo-1.2.3/fruits/durian.txt'), 'durian.txt' ),
      ( self.xp_path('foo-1.2.3/fruits/kiwi.txt'), 'kiwi.txt' ),
    ]), 'zip', delete = not self.DEBUG, prefix = 'a1-')
    a2 = temp_archive.make_temp_archive(temp_archive.make_temp_item_list([
      ( self.xp_path('foo-1.2.3/fruits/apple.txt'), 'apple.txt' ),
      ( self.xp_path('foo-1.2.3/fruits/lemon.txt'), 'lemon.txt' ),
      ( self.xp_path('foo-1.2.3/fruits/melon.txt'), 'melon.txt' ),
      ( self.xp_path('foo-1.2.3/wine/barolo.txt'), 'barolo.txt' ),
      ( self.xp_path('foo-1.2.3/cheese/brie.txt'), 'brie.txt' ),
    ]), 'zip', delete = not self.DEBUG, prefix = 'a2-')
    a3 = temp_archive.make_temp_archive(temp_archive.make_temp_item_list([
      ( self.xp_path('foo-1.2.3/fruits/strawberry.txt'), 'strawberry.txt' ),
      ( self.xp_path('foo-1.2.3/fruits/blueberry.txt'), 'blueberry.txt' ),
      ( self.xp_path('foo-1.2.3/fruits/banana.txt'), 'banana.txt' ),
      ( self.xp_path('foo-1.2.3/wine/barolo.txt'), 'barolo.txt' ),
      ( self.xp_path('foo-1.2.3/cheese/brie.txt'), 'brie.txt' ),
    ]), 'zip', delete = not self.DEBUG, prefix = 'a3-')
    self.assertEqual( {
      'foo-1.2.3/cheese/brie.txt': { a2, a3 },
      'foo-1.2.3/fruits/apple.txt': { a1, a2 },
      'foo-1.2.3/wine/barolo.txt': { a2, a3 },
    }, archive_util.duplicate_members([ a1, a2, a3 ]) )

  def test_duplicate_members_with_conflicts(self):
    # With only_content_conficts=True, only paths whose contents actually
    # differ between archives are reported: durian.txt and barolo.txt are
    # duplicated with identical content and so are excluded, while
    # apple.txt ('apple.txt' vs 'apple2.txt') and brie.txt ('brie.txt' vs
    # 'brie2.txt') conflict.
    a1 = temp_archive.make_temp_archive(temp_archive.make_temp_item_list([
      ( self.xp_path('foo-1.2.3/fruits/apple.txt'), 'apple.txt' ),
      ( self.xp_path('foo-1.2.3/fruits/durian.txt'), 'durian.txt' ),
      ( self.xp_path('foo-1.2.3/fruits/kiwi.txt'), 'kiwi.txt' ),
    ]), 'zip', delete = not self.DEBUG, prefix = 'a1-')
    a2 = temp_archive.make_temp_archive(temp_archive.make_temp_item_list([
      ( self.xp_path('foo-1.2.3/fruits/apple.txt'), 'apple2.txt' ),
      ( self.xp_path('foo-1.2.3/fruits/durian.txt'), 'durian.txt' ),
      ( self.xp_path('foo-1.2.3/fruits/melon.txt'), 'melon.txt' ),
      ( self.xp_path('foo-1.2.3/wine/barolo.txt'), 'barolo.txt' ),
      ( self.xp_path('foo-1.2.3/cheese/brie.txt'), 'brie.txt' ),
    ]), 'zip', delete = not self.DEBUG, prefix = 'a2-')
    a3 = temp_archive.make_temp_archive(temp_archive.make_temp_item_list([
      ( self.xp_path('foo-1.2.3/fruits/strawberry.txt'), 'strawberry.txt' ),
      ( self.xp_path('foo-1.2.3/fruits/blueberry.txt'), 'blueberry.txt' ),
      ( self.xp_path('foo-1.2.3/fruits/banana.txt'), 'banana.txt' ),
      ( self.xp_path('foo-1.2.3/wine/barolo.txt'), 'barolo.txt' ),
      ( self.xp_path('foo-1.2.3/cheese/brie.txt'), 'brie2.txt' ),
    ]), 'zip', delete = not self.DEBUG, prefix = 'a3-')
    self.assertEqual( {
      'foo-1.2.3/cheese/brie.txt': { a2, a3 },
      'foo-1.2.3/fruits/apple.txt': { a1, a2 },
    }, archive_util.duplicate_members([ a1, a2, a3 ], only_content_conficts = True) )

  def test_combine(self):
    # Combining three disjoint archives yields the sorted union of members.
    a1 = temp_archive.make_temp_archive(temp_archive.make_temp_item_list([
      ( self.xp_path('fruits/apple.txt'), 'apple.txt' ),
      ( self.xp_path('fruits/durian.txt'), 'durian.txt' ),
      ( self.xp_path('fruits/kiwi.txt'), 'kiwi.txt' ),
    ]), 'zip', delete = not self.DEBUG, prefix = 'a1-')
    a2 = temp_archive.make_temp_archive(temp_archive.make_temp_item_list([
      ( self.xp_path('fruits/melon.txt'), 'melon.txt' ),
      ( self.xp_path('fruits/lemon.txt'), 'lemon.txt' ),
      ( self.xp_path('fruits/dragonfruit.txt'), 'dragonfruit.txt' ),
    ]), 'zip', delete = not self.DEBUG, prefix = 'a2-')
    a3 = temp_archive.make_temp_archive(temp_archive.make_temp_item_list([
      ( self.xp_path('fruits/strawberry.txt'), 'strawberry.txt' ),
      ( self.xp_path('fruits/pear.txt'), 'pear.txt' ),
      ( self.xp_path('fruits/plum.txt'), 'plum.txt' ),
    ]), 'zip', delete = not self.DEBUG, prefix = 'a3-')
    tmp_archive = self.make_temp_file(suffix = '.zip')
    archive_util.combine([ a1, a2, a3 ], tmp_archive)
    self.assertEqual( [
      self.xp_path('fruits/apple.txt'),
      self.xp_path('fruits/dragonfruit.txt'),
      self.xp_path('fruits/durian.txt'),
      self.xp_path('fruits/kiwi.txt'),
      self.xp_path('fruits/lemon.txt'),
      self.xp_path('fruits/melon.txt'),
      self.xp_path('fruits/pear.txt'),
      self.xp_path('fruits/plum.txt'),
      self.xp_path('fruits/strawberry.txt'),
    ], archiver.members(tmp_archive) )

  def test_combine_conflicts_same_content(self):
    # Overlapping members (kiwi, lemon, plum) with identical content are
    # deduplicated silently; each path appears once in the result.
    a1 = temp_archive.make_temp_archive(temp_archive.make_temp_item_list([
      ( self.xp_path('fruits/apple.txt'), 'apple.txt' ),
      ( self.xp_path('fruits/durian.txt'), 'durian.txt' ),
      ( self.xp_path('fruits/kiwi.txt'), 'kiwi.txt' ),
      ( self.xp_path('fruits/plum.txt'), 'plum.txt' ),
    ]), 'zip', delete = not self.DEBUG, prefix = 'a1-')
    a2 = temp_archive.make_temp_archive(temp_archive.make_temp_item_list([
      ( self.xp_path('fruits/kiwi.txt'), 'kiwi.txt' ),
      ( self.xp_path('fruits/melon.txt'), 'melon.txt' ),
      ( self.xp_path('fruits/lemon.txt'), 'lemon.txt' ),
      ( self.xp_path('fruits/dragonfruit.txt'), 'dragonfruit.txt' ),
    ]), 'zip', delete = not self.DEBUG, prefix = 'a2-')
    a3 = temp_archive.make_temp_archive(temp_archive.make_temp_item_list([
      ( self.xp_path('fruits/lemon.txt'), 'lemon.txt' ),
      ( self.xp_path('fruits/strawberry.txt'), 'strawberry.txt' ),
      ( self.xp_path('fruits/pear.txt'), 'pear.txt' ),
      ( self.xp_path('fruits/plum.txt'), 'plum.txt' ),
    ]), 'zip', delete = not self.DEBUG, prefix = 'a3-')
    tmp_archive = self.make_temp_file(suffix = '.zip')
    archive_util.combine([ a1, a2, a3 ], tmp_archive)
    self.assertEqual( [
      self.xp_path('fruits/apple.txt'),
      self.xp_path('fruits/dragonfruit.txt'),
      self.xp_path('fruits/durian.txt'),
      self.xp_path('fruits/kiwi.txt'),
      self.xp_path('fruits/lemon.txt'),
      self.xp_path('fruits/melon.txt'),
      self.xp_path('fruits/pear.txt'),
      self.xp_path('fruits/plum.txt'),
      self.xp_path('fruits/strawberry.txt'),
    ], archiver.members(tmp_archive) )

  def test_combine_conflicts_different_content_no_check(self):
    # Without check_content, conflicting contents ('1plum.txt' vs
    # '2plum.txt', '1lemon.txt' vs '2lemon.txt') do not raise; the member
    # list is still the union.  Which copy wins is not asserted here.
    a1 = temp_archive.make_temp_archive(temp_archive.make_temp_item_list([
      ( self.xp_path('fruits/apple.txt'), 'apple.txt' ),
      ( self.xp_path('fruits/durian.txt'), 'durian.txt' ),
      ( self.xp_path('fruits/kiwi.txt'), 'kiwi.txt' ),
      ( self.xp_path('fruits/plum.txt'), '1plum.txt' ),
    ]), 'zip', delete = not self.DEBUG, prefix = 'a1-')
    a2 = temp_archive.make_temp_archive(temp_archive.make_temp_item_list([
      ( self.xp_path('fruits/kiwi.txt'), 'kiwi.txt' ),
      ( self.xp_path('fruits/melon.txt'), 'melon.txt' ),
      ( self.xp_path('fruits/lemon.txt'), '1lemon.txt' ),
      ( self.xp_path('fruits/dragonfruit.txt'), 'dragonfruit.txt' ),
    ]), 'zip', delete = not self.DEBUG, prefix = 'a2-')
    a3 = temp_archive.make_temp_archive(temp_archive.make_temp_item_list([
      ( self.xp_path('fruits/lemon.txt'), '2lemon.txt' ),
      ( self.xp_path('fruits/strawberry.txt'), 'strawberry.txt' ),
      ( self.xp_path('fruits/pear.txt'), 'pear.txt' ),
      ( self.xp_path('fruits/plum.txt'), '2plum.txt' ),
    ]), 'zip', delete = not self.DEBUG, prefix = 'a3-')
    tmp_archive = self.make_temp_file(suffix = '.zip')
    archive_util.combine([ a1, a2, a3 ], tmp_archive)
    self.assertEqual( [
      self.xp_path('fruits/apple.txt'),
      self.xp_path('fruits/dragonfruit.txt'),
      self.xp_path('fruits/durian.txt'),
      self.xp_path('fruits/kiwi.txt'),
      self.xp_path('fruits/lemon.txt'),
      self.xp_path('fruits/melon.txt'),
      self.xp_path('fruits/pear.txt'),
      self.xp_path('fruits/plum.txt'),
      self.xp_path('fruits/strawberry.txt'),
    ], archiver.members(tmp_archive) )

  def test_combine_conflicts_different_content_with_check(self):
    # Same fixtures as the no_check variant, but check_content=True must
    # raise RuntimeError on the content conflicts.
    a1 = temp_archive.make_temp_archive(temp_archive.make_temp_item_list([
      ( self.xp_path('fruits/apple.txt'), 'apple.txt' ),
      ( self.xp_path('fruits/durian.txt'), 'durian.txt' ),
      ( self.xp_path('fruits/kiwi.txt'), 'kiwi.txt' ),
      ( self.xp_path('fruits/plum.txt'), '1plum.txt' ),
    ]), 'zip', delete = not self.DEBUG, prefix = 'a1-')
    a2 = temp_archive.make_temp_archive(temp_archive.make_temp_item_list([
      ( self.xp_path('fruits/kiwi.txt'), 'kiwi.txt' ),
      ( self.xp_path('fruits/melon.txt'), 'melon.txt' ),
      ( self.xp_path('fruits/lemon.txt'), '1lemon.txt' ),
      ( self.xp_path('fruits/dragonfruit.txt'), 'dragonfruit.txt' ),
    ]), 'zip', delete = not self.DEBUG, prefix = 'a2-')
    a3 = temp_archive.make_temp_archive(temp_archive.make_temp_item_list([
      ( self.xp_path('fruits/lemon.txt'), '2lemon.txt' ),
      ( self.xp_path('fruits/strawberry.txt'), 'strawberry.txt' ),
      ( self.xp_path('fruits/pear.txt'), 'pear.txt' ),
      ( self.xp_path('fruits/plum.txt'), '2plum.txt' ),
    ]), 'zip', delete = not self.DEBUG, prefix = 'a3-')
    tmp_archive = self.make_temp_file(suffix = '.zip')
    # ctx is unused; only the raise itself is being verified.
    with self.assertRaises(RuntimeError) as ctx:
      archive_util.combine([ a1, a2, a3 ], tmp_archive, check_content = True)

  def test_combine_with_base_dir(self):
    # base_dir prefixes every member of the combined archive.
    a1 = temp_archive.make_temp_archive(temp_archive.make_temp_item_list([
      ( self.xp_path('fruits/apple.txt'), 'apple.txt' ),
      ( self.xp_path('fruits/durian.txt'), 'durian.txt' ),
      ( self.xp_path('fruits/kiwi.txt'), 'kiwi.txt' ),
    ]), 'zip', delete = not self.DEBUG, prefix = 'a1-')
    a2 = temp_archive.make_temp_archive(temp_archive.make_temp_item_list([
      ( self.xp_path('fruits/melon.txt'), 'melon.txt' ),
      ( self.xp_path('fruits/lemon.txt'), 'lemon.txt' ),
      ( self.xp_path('fruits/dragonfruit.txt'), 'dragonfruit.txt' ),
    ]), 'zip', delete = not self.DEBUG, prefix = 'a2-')
    a3 = temp_archive.make_temp_archive(temp_archive.make_temp_item_list([
      ( self.xp_path('fruits/strawberry.txt'), 'strawberry.txt' ),
      ( self.xp_path('fruits/pear.txt'), 'pear.txt' ),
      ( self.xp_path('fruits/plum.txt'), 'plum.txt' ),
    ]), 'zip', delete = not self.DEBUG, prefix = 'a3-')
    tmp_archive = self.make_temp_file(suffix = '.zip')
    archive_util.combine([ a1, a2, a3 ], tmp_archive, base_dir = 'foo-1.2.3')
    self.assertEqual( [
      self.xp_path('foo-1.2.3/fruits/apple.txt'),
      self.xp_path('foo-1.2.3/fruits/dragonfruit.txt'),
      self.xp_path('foo-1.2.3/fruits/durian.txt'),
      self.xp_path('foo-1.2.3/fruits/kiwi.txt'),
      self.xp_path('foo-1.2.3/fruits/lemon.txt'),
      self.xp_path('foo-1.2.3/fruits/melon.txt'),
      self.xp_path('foo-1.2.3/fruits/pear.txt'),
      self.xp_path('foo-1.2.3/fruits/plum.txt'),
      self.xp_path('foo-1.2.3/fruits/strawberry.txt'),
    ], archiver.members(tmp_archive) )

  def test_combine_conflicts_different_content_with_check_and_exclude(self):
    # Excluding the conflicting member (plum.txt, three different contents)
    # lets check_content=True succeed; plum.txt is absent from the result.
    a1 = temp_archive.make_temp_archive(temp_archive.make_temp_item_list([
      ( self.xp_path('fruits/apple.txt'), 'apple.txt' ),
      ( self.xp_path('fruits/durian.txt'), 'durian.txt' ),
      ( self.xp_path('fruits/plum.txt'), '1plum.txt' ),
    ]), 'zip', delete = not self.DEBUG, prefix = 'a1-')
    a2 = temp_archive.make_temp_archive(temp_archive.make_temp_item_list([
      ( self.xp_path('fruits/kiwi.txt'), 'kiwi.txt' ),
      ( self.xp_path('fruits/melon.txt'), 'melon.txt' ),
      ( self.xp_path('fruits/plum.txt'), '2plum.txt' ),
    ]), 'zip', delete = not self.DEBUG, prefix = 'a2-')
    a3 = temp_archive.make_temp_archive(temp_archive.make_temp_item_list([
      ( self.xp_path('fruits/lemon.txt'), 'lemon.txt' ),
      ( self.xp_path('fruits/strawberry.txt'), 'strawberry.txt' ),
      ( self.xp_path('fruits/plum.txt'), '3plum.txt' ),
    ]), 'zip', delete = not self.DEBUG, prefix = 'a3-')
    tmp_archive = self.make_temp_file(suffix = '.zip')
    archive_util.combine([ a1, a2, a3 ], tmp_archive, check_content = True, exclude = [ 'fruits/plum.txt' ])
    self.assertEqual( [
      self.xp_path('fruits/apple.txt'),
      self.xp_path('fruits/durian.txt'),
      self.xp_path('fruits/kiwi.txt'),
      self.xp_path('fruits/lemon.txt'),
      self.xp_path('fruits/melon.txt'),
      self.xp_path('fruits/strawberry.txt'),
    ], archiver.members(tmp_archive) )

  def test_match_members(self):
    # match_members filters the member list with fnmatch-style patterns:
    # '*' matches all, 'cheese*' by prefix, '*.txt'/'*.pdf' by extension.
    tmp_archive = temp_archive.make_temp_archive(temp_archive.make_temp_item_list([
      ( self.xp_path('fruits/apple.pdf'), 'apple.pdf' ),
      ( self.xp_path('fruits/durian.pdf'), 'durian.pdf' ),
      ( self.xp_path('fruits/plum.pdf'), 'plum.pdf' ),
      ( self.xp_path('cheese/brie.txt'), 'brie.txt' ),
      ( self.xp_path('cheese/cheddar.txt'), 'cheddar.txt' ),
      ( self.xp_path('cheese/fontina.txt'), 'fontina.txt' ),
    ]), 'zip', delete = not self.DEBUG)
    self.assertEqual( [
      'cheese/brie.txt',
      'cheese/cheddar.txt',
      'cheese/fontina.txt',
      'fruits/apple.pdf',
      'fruits/durian.pdf',
      'fruits/plum.pdf',
    ], archive_util.match_members(tmp_archive, [ '*' ]) )
    self.assertEqual( [
      'cheese/brie.txt',
      'cheese/cheddar.txt',
      'cheese/fontina.txt',
    ], archive_util.match_members(tmp_archive, [ 'cheese*' ]) )
    self.assertEqual( [
      'cheese/brie.txt',
      'cheese/cheddar.txt',
      'cheese/fontina.txt',
    ], archive_util.match_members(tmp_archive, [ '*.txt' ]) )
    self.assertEqual( [
      'fruits/apple.pdf',
      'fruits/durian.pdf',
      'fruits/plum.pdf',
    ], archive_util.match_members(tmp_archive, [ '*.pdf' ]) )

  def test_remove_members_matching_patterns(self):
    # Pattern-based removal: a non-matching pattern removes nothing; the
    # archive is rebuilt fresh from the same items before each case.
    items = temp_archive.make_temp_item_list([
      ( self.xp_path('foo-1.2.3/fruits/apple.txt'), 'apple.txt' ),
      ( self.xp_path('foo-1.2.3/fruits/durian.txt'), 'durian.txt' ),
      ( self.xp_path('foo-1.2.3/fruits/kiwi.txt'), 'kiwi.txt' ),
      ( self.xp_path('foo-1.2.3/.import/foo.txt'), 'foo.txt' ),
      ( self.xp_path('foo-1.2.3/.import/bar.txt'), 'bar.txt' ),
      ( self.xp_path('foo-1.2.3/cheese/brie.jpg'), 'brie.jpg' ),
      ( self.xp_path('foo-1.2.3/cheese/halumi.jpg'), 'halumi.jpg' ),
      ( self.xp_path('foo-1.2.3/cheese/feta.jpg'), 'feta.jpg' ),
    ])
    tmp_archive = temp_archive.make_temp_archive(items, 'zip', delete = not self.DEBUG)
    archive_util.remove_members_matching_patterns(tmp_archive, [ 'notfound' ], debug = self.DEBUG)
    self.assertEqual( [
      'foo-1.2.3/.import/bar.txt',
      'foo-1.2.3/.import/foo.txt',
      'foo-1.2.3/cheese/brie.jpg',
      'foo-1.2.3/cheese/feta.jpg',
      'foo-1.2.3/cheese/halumi.jpg',
      'foo-1.2.3/fruits/apple.txt',
      'foo-1.2.3/fruits/durian.txt',
      'foo-1.2.3/fruits/kiwi.txt',
    ], archiver.members(tmp_archive))
    # '*.txt' removes by extension, keeping only the jpg members.
    tmp_archive = temp_archive.make_temp_archive(items, 'zip', delete = not self.DEBUG)
    archive_util.remove_members_matching_patterns(tmp_archive, [ '*.txt' ], debug = self.DEBUG)
    self.assertEqual( [
      'foo-1.2.3/cheese/brie.jpg',
      'foo-1.2.3/cheese/feta.jpg',
      'foo-1.2.3/cheese/halumi.jpg',
    ], archiver.members(tmp_archive))
    # '*cheese*' removes by path substring, keeping everything else.
    tmp_archive = temp_archive.make_temp_archive(items, 'zip', delete = not self.DEBUG)
    archive_util.remove_members_matching_patterns(tmp_archive, [ '*cheese*' ], debug = self.DEBUG)
    self.assertEqual( [
      'foo-1.2.3/.import/bar.txt',
      'foo-1.2.3/.import/foo.txt',
      'foo-1.2.3/fruits/apple.txt',
      'foo-1.2.3/fruits/durian.txt',
      'foo-1.2.3/fruits/kiwi.txt',
    ], archiver.members(tmp_archive))

  def test_read_patterns(self):
    # read_patterns parses one pattern per line from a text file.
    content = '''\
cheese.txt
foo.jpg
test_orange/foo.txt
test_kiwi/*
'''
    tmp_file = self.make_temp_file(content = content)
    self.assertEqual( [
      'cheese.txt',
      'foo.jpg',
      'test_orange/foo.txt',
      'test_kiwi/*',
    ], archive_util.read_patterns(tmp_file) )

if __name__ == "__main__":
  unit_test.main()
| 49.401099
| 108
| 0.638639
| 2,666
| 17,982
| 4.114029
| 0.044261
| 0.083698
| 0.139497
| 0.136306
| 0.905908
| 0.894055
| 0.892323
| 0.879559
| 0.876732
| 0.864515
| 0
| 0.028853
| 0.163497
| 17,982
| 363
| 109
| 49.53719
| 0.700306
| 0.006062
| 0
| 0.784257
| 0
| 0
| 0.319792
| 0.151195
| 0
| 0
| 0
| 0
| 0.055394
| 1
| 0.037901
| false
| 0
| 0.043732
| 0
| 0.084548
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
eb5d8a81de7fb8524c15daf24c0d23dffb316345
| 160
|
py
|
Python
|
database/tests/open_alchemy/package_database/test_models/spec/conftest.py
|
open-alchemy/OpenAlchemyPackage
|
8bf0ed62ed7f6c5015f1bf1c4658dc353395fe9b
|
[
"Apache-2.0"
] | null | null | null |
database/tests/open_alchemy/package_database/test_models/spec/conftest.py
|
open-alchemy/OpenAlchemyPackage
|
8bf0ed62ed7f6c5015f1bf1c4658dc353395fe9b
|
[
"Apache-2.0"
] | 79
|
2020-11-28T04:02:25.000Z
|
2021-01-06T08:52:30.000Z
|
database/tests/open_alchemy/package_database/test_models/spec/conftest.py
|
open-alchemy/Package
|
8bf0ed62ed7f6c5015f1bf1c4658dc353395fe9b
|
[
"Apache-2.0"
] | null | null | null |
"""Database fixtures."""
import pytest
@pytest.fixture(autouse=True)
def _auto_clean_specs_table(_clean_specs_table):
    """Apply the _clean_specs_table fixture to every test in this package.

    The body is intentionally empty: requesting _clean_specs_table as a
    parameter is what triggers the cleanup.
    """
| 17.777778
| 48
| 0.7625
| 20
| 160
| 5.6
| 0.65
| 0.267857
| 0.401786
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.1
| 160
| 8
| 49
| 20
| 0.777778
| 0.29375
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
eb7466ed23ae7f0e3c3ac0530fad95b5be3ab94b
| 10,643
|
py
|
Python
|
notebooks/eppy_residential_archetypes.py
|
rdmolony/energyplus-archetypes
|
1270db12c9e4c32a2b1ea77ba578ecc9f6dbf76e
|
[
"MIT"
] | 1
|
2021-02-01T09:25:42.000Z
|
2021-02-01T09:25:42.000Z
|
notebooks/eppy_residential_archetypes.py
|
rdmolony/energyplus-archetypes
|
1270db12c9e4c32a2b1ea77ba578ecc9f6dbf76e
|
[
"MIT"
] | 1
|
2021-01-26T17:33:58.000Z
|
2021-01-26T17:33:58.000Z
|
notebooks/eppy_residential_archetypes.py
|
rdmolony/energyplus-archetypes
|
1270db12c9e4c32a2b1ea77ba578ecc9f6dbf76e
|
[
"MIT"
] | 2
|
2021-01-13T15:31:50.000Z
|
2021-01-14T09:53:45.000Z
|
# ---
# jupyter:
#   jupytext:
#     formats: ipynb,md
#     text_representation:
#       extension: .py
#       format_name: percent
#       format_version: '1.3'
#     jupytext_version: 1.10.1
#   kernelspec:
#     display_name: Python 3
#     language: python
#     name: python3
# ---

# %% [markdown]
# # Eppy is a scripting language for EnergyPlus, used to run annual simulations and produce outputs from archetype idf files

# %%
# cd ..

# %%
from eppy import modeleditor
from eppy.modeleditor import IDF
import pandas as pd

# %%
IDF.setiddname('/usr/local/EnergyPlus-8-9-0/Energy+.idd')

# %% [markdown]
# ### Weather file used is for Dublin, Ireland
# ### Hourly outputs here represent the sum of the entire hour

# %%
def _simulate_archetype(archetype_dir, idf_filename):
    """Run an annual EnergyPlus simulation for one residential archetype.

    Runs the idf with the Dublin weather file, writing results into the
    archetype's own directory, then reads the meter csv and summary html
    tables produced by the run.

    BUGFIX: the original script ran the semi_d_pre / semi_d_post models
    with output_directory 'semid_pre' / 'semid_post' but then read
    eplusmtr.csv and eplustbl.htm from 'semi_d_pre' / 'semi_d_post' --
    i.e. files the run never produced.  Output now always goes to the
    directory the results are read from.

    Returns (peak_hourly_elec_demand_J, annual_energy_demand_kwh,
             annual_elec_demand_kwh, annual_heat_demand_kwh).
    """
    base = "data/resi_modelling/" + archetype_dir
    idf = IDF(base + "/" + idf_filename)
    idf.epw = base + "/IRL_Dublin.039690_IWEC.epw"
    idf.run(expandobjects=True, readvars=True, output_directory=base)
    # Peak hourly electricity demand in Joules (energy over the whole hour).
    meter = pd.read_csv(base + "/eplusmtr.csv")
    peak_joule = meter["Electricity:Facility [J](Hourly)"].max()
    # Annual totals come from the tabular summary report.
    # NOTE(review): the iloc positions below assume the fixed layout of the
    # EnergyPlus v8.9 summary tables -- confirm if the E+ version changes.
    tables = pd.read_html(base + "/eplustbl.htm")
    ann_energy_kwh = pd.DataFrame(tables[0]).iloc[1, 1]
    end_uses = pd.DataFrame(tables[3])
    ann_elec_kwh = end_uses.iloc[16, 1]
    ann_heat_kwh = end_uses.iloc[16, 5]
    return peak_joule, ann_energy_kwh, ann_elec_kwh, ann_heat_kwh

# %%
# (row label, archetype directory, idf file) -- same labels as the original
# hand-written peak_data rows, in the same order.
_ARCHETYPES = [
    ('Detatched housepre', 'det_pre', 'detatched_pre.idf'),
    ('Detatched housepost', 'det_post', 'detatched_post.idf'),
    ('Semi detatched housepre', 'semi_d_pre', 'semi_d_pre.idf'),
    ('Semi detatched housepost', 'semi_d_post', 'semi_d_post.idf'),
    ('Terraced housepre', 'terr_pre', 'terraced_pre.idf'),
    ('Terraced housepost', 'terr_post', 'terraced_post.idf'),
    ('Apartmentpre', 'mid_apt_pre', 'mid_apt_pre.idf'),
    ('Apartmentpost', 'mid_apt_post', 'mid_apt_post.idf'),
    ('Top floor apt.pre', 'top_apt_pre', 'top_apt_pre.idf'),
    ('Top floor apt.post', 'top_apt_post', 'top_apt_post.idf'),
]

peak_data = []
for label, archetype_dir, idf_filename in _ARCHETYPES:
    results = _simulate_archetype(archetype_dir, idf_filename)
    print(*results)
    peak_data.append([label, *results])

# %%
df_peaks = pd.DataFrame(peak_data, columns = ['dwelling_type','peak_hourly_elec_demand(J)', "annual_energy_demand_kwh", "annual_elec_demand_kwh", "annual_heat_demand_kwh"])

# %%
df_peaks

# %% [markdown]
# ### Note that the hourly elec values in J are across an entire hour thus the conversion below

# %%
# J per hour -> kW: divide by 3600 s/h * 1000 W/kW.
df_peaks["peak_elec_demand(kW)"] = df_peaks["peak_hourly_elec_demand(J)"]/3600000

# %% [markdown]
# ### Assume a power factor of 0.85

# %%
df_peaks["peak_elec_demand(kVA)"] = df_peaks["peak_elec_demand(kW)"]*0.85

# %%
df_peaks

# %%
df_peaks.to_csv("data/interim/energy_demand_by_building_type_eppy.csv")

# %%
| 47.513393
| 1,517
| 0.807855
| 1,868
| 10,643
| 4.134368
| 0.077623
| 0.108378
| 0.110061
| 0.069921
| 0.878933
| 0.858734
| 0.810048
| 0.752946
| 0.707497
| 0.63913
| 0
| 0.024646
| 0.069811
| 10,643
| 223
| 1,518
| 47.726457
| 0.755455
| 0.072818
| 0
| 0.167939
| 0
| 0
| 0.303735
| 0.247397
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.022901
| 0
| 0.022901
| 0.076336
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
eb821deb2b5a5317016e2098eafebee1b6e630ed
| 18,063
|
py
|
Python
|
alembic/versions/9e7ee952f6ae_initial_tables.py
|
SegFaulti4/lingvodoc
|
8b296b43453a46b814d3cd381f94382ebcb9c6a6
|
[
"Apache-2.0"
] | 5
|
2017-03-30T18:02:11.000Z
|
2021-07-20T16:02:34.000Z
|
alembic/versions/9e7ee952f6ae_initial_tables.py
|
SegFaulti4/lingvodoc
|
8b296b43453a46b814d3cd381f94382ebcb9c6a6
|
[
"Apache-2.0"
] | 15
|
2016-02-24T13:16:59.000Z
|
2021-09-03T11:47:15.000Z
|
alembic/versions/9e7ee952f6ae_initial_tables.py
|
Winking-maniac/lingvodoc
|
f037bf0e91ccdf020469037220a43e63849aa24a
|
[
"Apache-2.0"
] | 22
|
2015-09-25T07:13:40.000Z
|
2021-08-04T18:08:26.000Z
|
"""initial tables
Revision ID: 9e7ee952f6ae
Revises:
Create Date: 2016-10-21 17:20:27.709284
"""
# revision identifiers, used by Alembic.
revision = '9e7ee952f6ae'
down_revision = None
branch_labels = None
depends_on = None
from alembic import op
from sqlalchemy.types import Text
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
from lingvodoc.models import SLBigInteger
def upgrade():
    """Create the initial lingvodoc schema.

    Tables are created parents-first so every foreign key resolves at
    creation time. Most content tables use a composite
    (object_id, client_id) primary key; lookup/auth tables use a plain id.
    """
    ### commands auto generated by Alembic - please adjust! ###
    # Permission template rows (no FKs; referenced by 'group').
    op.create_table('basegroup',
    sa.Column('created_at', sa.TIMESTAMP(), nullable=False),
    sa.Column('id', SLBigInteger(), nullable=False),
    sa.Column('dictionary_default', sa.Boolean(), nullable=False),
    sa.Column('perspective_default', sa.Boolean(), nullable=False),
    sa.Column('name', sa.UnicodeText(), nullable=False),
    sa.Column('subject', sa.UnicodeText(), nullable=False),
    sa.Column('action', sa.UnicodeText(), nullable=False),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_table('organization',
    sa.Column('created_at', sa.TIMESTAMP(), nullable=False),
    sa.Column('id', SLBigInteger(), nullable=False),
    sa.Column('marked_for_deletion', sa.Boolean(), nullable=False),
    sa.Column('name', sa.UnicodeText(), nullable=True),
    sa.Column('about', sa.UnicodeText(), nullable=True),
    sa.Column('additional_metadata', postgresql.JSONB(astext_type=Text()), nullable=True),
    sa.PrimaryKeyConstraint('id')
    )
    # Root of the translation system; referenced by most content tables.
    op.create_table('translationgist',
    sa.Column('created_at', sa.TIMESTAMP(), nullable=False),
    sa.Column('object_id', SLBigInteger(), nullable=False),
    sa.Column('client_id', SLBigInteger(), nullable=False),
    sa.Column('marked_for_deletion', sa.Boolean(), nullable=False),
    sa.Column('type', sa.UnicodeText(), nullable=True),
    sa.PrimaryKeyConstraint('object_id', 'client_id')
    )
    op.create_table('field',
    sa.Column('created_at', sa.TIMESTAMP(), nullable=False),
    sa.Column('object_id', SLBigInteger(), nullable=False),
    sa.Column('client_id', SLBigInteger(), nullable=False),
    sa.Column('translation_gist_client_id', SLBigInteger(), nullable=False),
    sa.Column('translation_gist_object_id', SLBigInteger(), nullable=False),
    sa.Column('data_type_translation_gist_client_id', SLBigInteger(), nullable=False),
    sa.Column('data_type_translation_gist_object_id', SLBigInteger(), nullable=False),
    sa.Column('marked_for_deletion', sa.Boolean(), nullable=False),
    sa.Column('is_translatable', sa.Boolean(), nullable=False),
    sa.Column('additional_metadata', postgresql.JSONB(astext_type=Text()), nullable=True),
    sa.ForeignKeyConstraint(['data_type_translation_gist_client_id', 'data_type_translation_gist_object_id'], ['translationgist.client_id', 'translationgist.object_id'], ),
    sa.ForeignKeyConstraint(['translation_gist_object_id', 'translation_gist_client_id'], ['translationgist.object_id', 'translationgist.client_id'], ),
    sa.PrimaryKeyConstraint('object_id', 'client_id')
    )
    # UUID-keyed permission group; 'old_id' has no NOT NULL (legacy migration aid).
    op.create_table('group',
    sa.Column('created_at', sa.TIMESTAMP(), nullable=False),
    sa.Column('id', postgresql.UUID(), nullable=False),
    sa.Column('old_id', SLBigInteger()),
    sa.Column('base_group_id', SLBigInteger(), nullable=False),
    sa.Column('subject_client_id', SLBigInteger(), nullable=True),
    sa.Column('subject_object_id', SLBigInteger(), nullable=True),
    sa.Column('subject_override', sa.Boolean(), nullable=True),
    sa.ForeignKeyConstraint(['base_group_id'], ['basegroup.id'], ),
    sa.PrimaryKeyConstraint('id')
    )
    # Self-referential tree via (parent_object_id, parent_client_id).
    op.create_table('language',
    sa.Column('created_at', sa.TIMESTAMP(), nullable=False),
    sa.Column('object_id', SLBigInteger(), nullable=False),
    sa.Column('client_id', SLBigInteger(), nullable=False),
    sa.Column('parent_object_id', SLBigInteger(), nullable=True),
    sa.Column('parent_client_id', SLBigInteger(), nullable=True),
    sa.Column('translation_gist_client_id', SLBigInteger(), nullable=False),
    sa.Column('translation_gist_object_id', SLBigInteger(), nullable=False),
    sa.Column('marked_for_deletion', sa.Boolean(), nullable=False),
    sa.Column('additional_metadata', postgresql.JSONB(astext_type=Text()), nullable=True),
    sa.ForeignKeyConstraint(['parent_object_id', 'parent_client_id'], ['language.object_id', 'language.client_id'], ),
    sa.ForeignKeyConstraint(['translation_gist_object_id', 'translation_gist_client_id'], ['translationgist.object_id', 'translationgist.client_id'], ),
    sa.PrimaryKeyConstraint('object_id', 'client_id')
    )
    op.create_table('translationatom',
    sa.Column('created_at', sa.TIMESTAMP(), nullable=False),
    sa.Column('object_id', SLBigInteger(), nullable=False),
    sa.Column('client_id', SLBigInteger(), nullable=False),
    sa.Column('parent_object_id', SLBigInteger(), nullable=True),
    sa.Column('parent_client_id', SLBigInteger(), nullable=True),
    sa.Column('locale_id', SLBigInteger(), nullable=False),
    sa.Column('marked_for_deletion', sa.Boolean(), nullable=False),
    sa.Column('content', sa.UnicodeText(), nullable=False),
    sa.Column('additional_metadata', postgresql.JSONB(astext_type=Text()), nullable=True),
    sa.ForeignKeyConstraint(['parent_object_id', 'parent_client_id'], ['translationgist.object_id', 'translationgist.client_id'], ),
    sa.PrimaryKeyConstraint('object_id', 'client_id')
    )
    op.create_table('dictionary',
    sa.Column('created_at', sa.TIMESTAMP(), nullable=False),
    sa.Column('object_id', SLBigInteger(), nullable=False),
    sa.Column('client_id', SLBigInteger(), nullable=False),
    sa.Column('parent_object_id', SLBigInteger(), nullable=True),
    sa.Column('parent_client_id', SLBigInteger(), nullable=True),
    sa.Column('translation_gist_client_id', SLBigInteger(), nullable=False),
    sa.Column('translation_gist_object_id', SLBigInteger(), nullable=False),
    sa.Column('state_translation_gist_client_id', SLBigInteger(), nullable=False),
    sa.Column('state_translation_gist_object_id', SLBigInteger(), nullable=False),
    sa.Column('marked_for_deletion', sa.Boolean(), nullable=False),
    sa.Column('category', SLBigInteger(), nullable=True),
    sa.Column('domain', SLBigInteger(), nullable=True),
    sa.Column('additional_metadata', postgresql.JSONB(astext_type=Text()), nullable=True),
    sa.ForeignKeyConstraint(['parent_object_id', 'parent_client_id'], ['language.object_id', 'language.client_id'], ),
    sa.ForeignKeyConstraint(['state_translation_gist_client_id', 'state_translation_gist_object_id'], ['translationgist.client_id', 'translationgist.object_id'], ),
    sa.ForeignKeyConstraint(['translation_gist_object_id', 'translation_gist_client_id'], ['translationgist.object_id', 'translationgist.client_id'], ),
    sa.PrimaryKeyConstraint('object_id', 'client_id')
    )
    op.create_table('locale',
    sa.Column('created_at', sa.TIMESTAMP(), nullable=False),
    sa.Column('id', SLBigInteger(), nullable=False),
    sa.Column('parent_object_id', SLBigInteger(), nullable=True),
    sa.Column('parent_client_id', SLBigInteger(), nullable=True),
    sa.Column('shortcut', sa.UnicodeText(), nullable=False),
    sa.Column('intl_name', sa.UnicodeText(), nullable=False),
    sa.ForeignKeyConstraint(['parent_object_id', 'parent_client_id'], ['language.object_id', 'language.client_id'], ),
    sa.PrimaryKeyConstraint('id')
    )
    # Pure many-to-many link table (no surrogate key).
    op.create_table('organization_to_group_association',
    sa.Column('organization_id', sa.BigInteger(), nullable=True),
    sa.Column('group_id', postgresql.UUID(), nullable=True),
    sa.ForeignKeyConstraint(['group_id'], ['group.id'], ),
    sa.ForeignKeyConstraint(['organization_id'], ['organization.id'], )
    )
    op.create_table('dictionaryperspective',
    sa.Column('created_at', sa.TIMESTAMP(), nullable=False),
    sa.Column('object_id', SLBigInteger(), nullable=False),
    sa.Column('client_id', SLBigInteger(), nullable=False),
    sa.Column('parent_object_id', SLBigInteger(), nullable=True),
    sa.Column('parent_client_id', SLBigInteger(), nullable=True),
    sa.Column('translation_gist_client_id', SLBigInteger(), nullable=False),
    sa.Column('translation_gist_object_id', SLBigInteger(), nullable=False),
    sa.Column('state_translation_gist_client_id', SLBigInteger(), nullable=False),
    sa.Column('state_translation_gist_object_id', SLBigInteger(), nullable=False),
    sa.Column('marked_for_deletion', sa.Boolean(), nullable=False),
    sa.Column('is_template', sa.Boolean(), nullable=False),
    sa.Column('import_source', sa.UnicodeText(), nullable=True),
    sa.Column('import_hash', sa.UnicodeText(), nullable=True),
    sa.Column('additional_metadata', postgresql.JSONB(astext_type=Text()), nullable=True),
    sa.ForeignKeyConstraint(['parent_object_id', 'parent_client_id'], ['dictionary.object_id', 'dictionary.client_id'], ),
    sa.ForeignKeyConstraint(['state_translation_gist_client_id', 'state_translation_gist_object_id'], ['translationgist.client_id', 'translationgist.object_id'], ),
    sa.ForeignKeyConstraint(['translation_gist_object_id', 'translation_gist_client_id'], ['translationgist.object_id', 'translationgist.client_id'], ),
    sa.PrimaryKeyConstraint('object_id', 'client_id')
    )
    op.create_table('user',
    sa.Column('created_at', sa.TIMESTAMP(), nullable=False),
    sa.Column('id', SLBigInteger(), nullable=False),
    sa.Column('default_locale_id', SLBigInteger(), nullable=False),
    sa.Column('birthday', sa.Date(), nullable=True),
    sa.Column('is_active', sa.Boolean(), nullable=False),
    sa.Column('login', sa.UnicodeText(), nullable=False),
    sa.Column('intl_name', sa.UnicodeText(), nullable=False),
    sa.Column('name', sa.UnicodeText(), nullable=True),
    sa.Column('additional_metadata', postgresql.JSONB(astext_type=Text()), nullable=True),
    sa.ForeignKeyConstraint(['default_locale_id'], ['locale.id'], ),
    sa.PrimaryKeyConstraint('id'),
    sa.UniqueConstraint('login')
    )
    op.create_table('client',
    sa.Column('created_at', sa.TIMESTAMP(), nullable=False),
    sa.Column('id', SLBigInteger(), nullable=False),
    sa.Column('user_id', SLBigInteger(), nullable=False),
    sa.Column('is_browser_client', sa.Boolean(), nullable=False),
    sa.Column('counter', SLBigInteger(), nullable=False),
    sa.Column('additional_metadata', postgresql.JSONB(astext_type=Text()), nullable=True),
    sa.ForeignKeyConstraint(['user_id'], ['user.id'], ),
    sa.PrimaryKeyConstraint('id')
    )
    # Self-referential via (self_client_id, self_object_id).
    op.create_table('dictionaryperspectivetofield',
    sa.Column('created_at', sa.TIMESTAMP(), nullable=False),
    sa.Column('object_id', SLBigInteger(), nullable=False),
    sa.Column('client_id', SLBigInteger(), nullable=False),
    sa.Column('parent_object_id', SLBigInteger(), nullable=True),
    sa.Column('parent_client_id', SLBigInteger(), nullable=True),
    sa.Column('self_client_id', SLBigInteger(), nullable=True),
    sa.Column('self_object_id', SLBigInteger(), nullable=True),
    sa.Column('field_client_id', SLBigInteger(), nullable=False),
    sa.Column('field_object_id', SLBigInteger(), nullable=False),
    sa.Column('link_client_id', SLBigInteger(), nullable=True),
    sa.Column('link_object_id', SLBigInteger(), nullable=True),
    sa.Column('marked_for_deletion', sa.Boolean(), nullable=False),
    sa.Column('position', sa.Integer(), nullable=False),
    sa.ForeignKeyConstraint(['field_client_id', 'field_object_id'], ['field.client_id', 'field.object_id'], ),
    sa.ForeignKeyConstraint(['link_client_id', 'link_object_id'], ['dictionaryperspective.client_id', 'dictionaryperspective.object_id'], ),
    sa.ForeignKeyConstraint(['parent_object_id', 'parent_client_id'], ['dictionaryperspective.object_id', 'dictionaryperspective.client_id'], ),
    sa.ForeignKeyConstraint(['self_client_id', 'self_object_id'], ['dictionaryperspectivetofield.client_id', 'dictionaryperspectivetofield.object_id'], ),
    sa.PrimaryKeyConstraint('object_id', 'client_id')
    )
    op.create_table('email',
    sa.Column('created_at', sa.TIMESTAMP(), nullable=False),
    sa.Column('id', SLBigInteger(), nullable=False),
    sa.Column('user_id', SLBigInteger(), nullable=False),
    sa.Column('email', sa.UnicodeText(), nullable=True),
    sa.ForeignKeyConstraint(['user_id'], ['user.id'], ),
    sa.PrimaryKeyConstraint('id'),
    sa.UniqueConstraint('email')
    )
    op.create_table('lexicalentry',
    sa.Column('created_at', sa.TIMESTAMP(), nullable=False),
    sa.Column('object_id', SLBigInteger(), nullable=False),
    sa.Column('client_id', SLBigInteger(), nullable=False),
    sa.Column('parent_object_id', SLBigInteger(), nullable=True),
    sa.Column('parent_client_id', SLBigInteger(), nullable=True),
    sa.Column('marked_for_deletion', sa.Boolean(), nullable=True),
    sa.Column('moved_to', sa.UnicodeText(), nullable=True),
    sa.Column('additional_metadata', postgresql.JSONB(astext_type=Text()), nullable=True),
    sa.ForeignKeyConstraint(['parent_object_id', 'parent_client_id'], ['dictionaryperspective.object_id', 'dictionaryperspective.client_id'], ),
    sa.PrimaryKeyConstraint('object_id', 'client_id')
    )
    op.create_table('passhash',
    sa.Column('created_at', sa.TIMESTAMP(), nullable=False),
    sa.Column('id', SLBigInteger(), nullable=False),
    sa.Column('user_id', SLBigInteger(), nullable=False),
    sa.Column('hash', sa.UnicodeText(), nullable=False),
    sa.ForeignKeyConstraint(['user_id'], ['user.id'], ),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_table('user_to_group_association',
    sa.Column('user_id', sa.BigInteger(), nullable=True),
    sa.Column('group_id', postgresql.UUID(), nullable=True),
    sa.ForeignKeyConstraint(['group_id'], ['group.id'], ),
    sa.ForeignKeyConstraint(['user_id'], ['user.id'], )
    )
    op.create_table('user_to_organization_association',
    sa.Column('user_id', sa.BigInteger(), nullable=True),
    sa.Column('organization_id', sa.BigInteger(), nullable=True),
    sa.ForeignKeyConstraint(['organization_id'], ['organization.id'], ),
    sa.ForeignKeyConstraint(['user_id'], ['user.id'], )
    )
    op.create_table('userblobs',
    sa.Column('created_at', sa.TIMESTAMP(), nullable=False),
    sa.Column('object_id', SLBigInteger(), nullable=False),
    sa.Column('client_id', SLBigInteger(), nullable=False),
    sa.Column('marked_for_deletion', sa.Boolean(), nullable=True),
    sa.Column('user_id', SLBigInteger(), nullable=True),
    sa.Column('name', sa.UnicodeText(), nullable=False),
    sa.Column('content', sa.UnicodeText(), nullable=False),
    sa.Column('real_storage_path', sa.UnicodeText(), nullable=False),
    sa.Column('data_type', sa.UnicodeText(), nullable=False),
    sa.Column('additional_metadata', postgresql.JSONB(astext_type=Text()), nullable=True),
    sa.ForeignKeyConstraint(['user_id'], ['user.id'], ),
    sa.PrimaryKeyConstraint('object_id', 'client_id')
    )
    # Self-referential via (self_client_id, self_object_id).
    op.create_table('entity',
    sa.Column('created_at', sa.TIMESTAMP(), nullable=False),
    sa.Column('object_id', SLBigInteger(), nullable=False),
    sa.Column('client_id', SLBigInteger(), nullable=False),
    sa.Column('parent_object_id', SLBigInteger(), nullable=True),
    sa.Column('parent_client_id', SLBigInteger(), nullable=True),
    sa.Column('self_client_id', SLBigInteger(), nullable=True),
    sa.Column('self_object_id', SLBigInteger(), nullable=True),
    sa.Column('field_client_id', SLBigInteger(), nullable=False),
    sa.Column('field_object_id', SLBigInteger(), nullable=False),
    sa.Column('link_client_id', SLBigInteger(), nullable=True),
    sa.Column('link_object_id', SLBigInteger(), nullable=True),
    sa.Column('locale_id', SLBigInteger(), nullable=True),
    sa.Column('marked_for_deletion', sa.Boolean(), nullable=False),
    sa.Column('content', sa.UnicodeText(), nullable=True),
    sa.Column('additional_metadata', postgresql.JSONB(astext_type=Text()), nullable=True),
    sa.ForeignKeyConstraint(['field_client_id', 'field_object_id'], ['field.client_id', 'field.object_id'], ),
    sa.ForeignKeyConstraint(['parent_object_id', 'parent_client_id'], ['lexicalentry.object_id', 'lexicalentry.client_id'], ),
    sa.ForeignKeyConstraint(['self_client_id', 'self_object_id'], ['entity.client_id', 'entity.object_id'], ),
    sa.PrimaryKeyConstraint('object_id', 'client_id')
    )
    # 1:1 extension of 'entity' sharing its composite key.
    op.create_table('publishingentity',
    sa.Column('created_at', sa.TIMESTAMP(), nullable=False),
    sa.Column('object_id', SLBigInteger(), nullable=False),
    sa.Column('client_id', SLBigInteger(), nullable=False),
    sa.Column('published', sa.Boolean(), nullable=False),
    sa.Column('accepted', sa.Boolean(), nullable=False),
    sa.ForeignKeyConstraint(['client_id', 'object_id'], ['entity.client_id', 'entity.object_id'], ),
    sa.PrimaryKeyConstraint('object_id', 'client_id')
    )
    # Table-of-contents index over composite-keyed objects (no FKs by design).
    op.create_table('objecttoc',
    sa.Column('object_id', SLBigInteger(), nullable=False),
    sa.Column('client_id', SLBigInteger(), nullable=False),
    sa.Column('table_name', sa.UnicodeText(), nullable=True),
    sa.Column('marked_for_deletion', sa.Boolean(), nullable=False),
    sa.PrimaryKeyConstraint('object_id', 'client_id')
    )
    ### end Alembic commands ###
def downgrade():
    """Drop every table created by :func:`upgrade`.

    Tables are dropped dependents-first (exact reverse of creation order)
    so no foreign-key constraint ever blocks a drop.
    """
    ### commands auto generated by Alembic - please adjust! ###
    for table_name in (
        'objecttoc',
        'publishingentity',
        'entity',
        'userblobs',
        'user_to_organization_association',
        'user_to_group_association',
        'passhash',
        'lexicalentry',
        'email',
        'dictionaryperspectivetofield',
        'client',
        'user',
        'dictionaryperspective',
        'organization_to_group_association',
        'locale',
        'dictionary',
        'translationatom',
        'language',
        'group',
        'field',
        'translationgist',
        'organization',
        'basegroup',
    ):
        op.drop_table(table_name)
    ### end Alembic commands ###
| 55.75
| 172
| 0.716548
| 2,126
| 18,063
| 5.860301
| 0.062559
| 0.109158
| 0.130026
| 0.171924
| 0.87182
| 0.843567
| 0.792519
| 0.765952
| 0.758889
| 0.746448
| 0
| 0.001997
| 0.112717
| 18,063
| 323
| 173
| 55.922601
| 0.775379
| 0.015446
| 0
| 0.560656
| 0
| 0
| 0.286181
| 0.101403
| 0
| 0
| 0
| 0
| 0
| 1
| 0.006557
| false
| 0.006557
| 0.022951
| 0
| 0.029508
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
ebb41e91a8e2783d3b37bf3b6b979d65654f5089
| 262
|
py
|
Python
|
Python/basic-py/exercise_14.py
|
nhutnamhcmus/code
|
22b528084ed234fcabca89cf1ba02a2c347007bc
|
[
"MIT"
] | 1
|
2020-10-12T18:33:22.000Z
|
2020-10-12T18:33:22.000Z
|
Python/basic-py/exercise_14.py
|
nhutnamhcmus/code
|
22b528084ed234fcabca89cf1ba02a2c347007bc
|
[
"MIT"
] | null | null | null |
Python/basic-py/exercise_14.py
|
nhutnamhcmus/code
|
22b528084ed234fcabca89cf1ba02a2c347007bc
|
[
"MIT"
] | null | null | null |
def _get_square(list):
l1 = [x*x for x in range(0, len(list), 2) if x % 3 != 0]
return l1
print(_get_square([1, 2, 3, 4, 5]))
def get_square(list):
    """Return the squares of the elements of `list` that are even and not
    multiples of 3.

    Note: the parameter shadows the builtin ``list``; name kept for
    interface compatibility with existing callers.
    """
    return [n * n for n in list if n % 2 == 0 and n % 3 != 0]
print(get_square([1, 2, 3, 4, 5]))
| 23.818182
| 58
| 0.557252
| 58
| 262
| 2.413793
| 0.344828
| 0.257143
| 0.171429
| 0.228571
| 0.771429
| 0.771429
| 0.771429
| 0.771429
| 0.771429
| 0.771429
| 0
| 0.112821
| 0.255725
| 262
| 11
| 59
| 23.818182
| 0.605128
| 0
| 0
| 0.25
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0
| 0
| 0.5
| 0.25
| 0
| 0
| 0
| null | 1
| 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
6901b7acffb34f8f8d84f5944353762bec3fbb2a
| 2,981
|
py
|
Python
|
tests/test_xml_parser_quart.py
|
criteo/quart
|
7d018837d7b2f372da3fa83b1b78e4067d89557a
|
[
"Apache-2.0"
] | 1
|
2017-12-07T11:37:51.000Z
|
2017-12-07T11:37:51.000Z
|
tests/test_xml_parser_quart.py
|
muneebirfan/quart
|
7d018837d7b2f372da3fa83b1b78e4067d89557a
|
[
"Apache-2.0"
] | null | null | null |
tests/test_xml_parser_quart.py
|
muneebirfan/quart
|
7d018837d7b2f372da3fa83b1b78e4067d89557a
|
[
"Apache-2.0"
] | 2
|
2018-06-26T15:28:10.000Z
|
2022-02-21T11:29:35.000Z
|
import pytest
from quart.xml_parser_quart import fusion_vulnerability_dictionaries
# Expected parse/fusion results for the parametrized test below.
# EXPECTED_1 and EXPECTED_2 are per-report vulnerability dicts keyed by QID;
# EXPECTED_1_2 is their fusion (host lists of the shared QID '2' concatenated).
EXPECTED_1 = \
    {u'1': {'category': u'Category 1',
            'consequence': u'Consequence 1',
            'diagnosis': u'Diagnosis 1',
            'hosts': [{'ip': u'1.1.1.1', 'name': 'host1'},
                      {'ip': u'2.2.2.2', 'name': 'host2'}],
            'qid': u'1',
            'severity': 5,
            'solution': u'Solution 1',
            'title': u'Vulnerability Title 1'},
     u'2': {'category': u'Category 2',
            'consequence': u'Consequence 2',
            'diagnosis': u'Diagnosis 2',
            'hosts': [{'ip': u'2.2.2.2', 'name': 'host2'}],
            'qid': u'2',
            'severity': 4,
            'solution': u'Solution 2',
            'title': u'Vulnerability Title 2'}}
EXPECTED_2 = \
    {u'2': {'category': u'Category 2',
            'consequence': u'Consequence 2',
            'diagnosis': u'Diagnosis 2',
            'hosts': [{'ip': u'3.3.3.3', 'name': 'host3'}],
            'qid': u'2',
            'severity': 4,
            'solution': u'Solution 2',
            'title': u'Vulnerability Title 2'},
     u'3': {'category': u'Category 3',
            'consequence': u'Consequence 3',
            'diagnosis': u'Diagnosis 3',
            'hosts': [{'ip': u'4.4.4.4', 'name': 'host4'}],
            'qid': u'3',
            'severity': 3,
            'solution': u'Solution 3',
            'title': u'Vulnerability Title 3'}}
EXPECTED_1_2 = \
    {u'1': {'category': u'Category 1',
            'consequence': u'Consequence 1',
            'diagnosis': u'Diagnosis 1',
            'hosts': [{'ip': u'1.1.1.1', 'name': 'host1'},
                      {'ip': u'2.2.2.2', 'name': 'host2'}],
            'qid': u'1',
            'severity': 5,
            'solution': u'Solution 1',
            'title': u'Vulnerability Title 1'},
     u'2': {'category': u'Category 2',
            'consequence': u'Consequence 2',
            'diagnosis': u'Diagnosis 2',
            'hosts': [{'ip': u'2.2.2.2', 'name': 'host2'},
                      {'ip': u'3.3.3.3', 'name': 'host3'}],
            'qid': u'2',
            'severity': 4,
            'solution': u'Solution 2',
            'title': u'Vulnerability Title 2'},
     u'3': {'category': u'Category 3',
            'consequence': u'Consequence 3',
            'diagnosis': u'Diagnosis 3',
            'hosts': [{'ip': u'4.4.4.4', 'name': 'host4'}],
            'qid': u'3',
            'severity': 3,
            'solution': u'Solution 3',
            'title': u'Vulnerability Title 3'}}
@pytest.mark.parametrize('dictionary_1, dictionary_2, fusion_dictionary', (
    ({}, {}, {}),
    ({}, EXPECTED_1, EXPECTED_1),
    (EXPECTED_2, {}, EXPECTED_2),
    (EXPECTED_1, EXPECTED_1, EXPECTED_1),
    (EXPECTED_1, EXPECTED_2, EXPECTED_1_2),
))
def test_fusion_vulnerability_dictionaries(dictionary_1, dictionary_2,
                                           fusion_dictionary):
    """Fusing two vulnerability dictionaries yields the expected merged dict."""
    merged = fusion_vulnerability_dictionaries(dictionary_1, dictionary_2)
    assert merged == fusion_dictionary
| 36.353659
| 77
| 0.495136
| 352
| 2,981
| 4.099432
| 0.102273
| 0.016632
| 0.016632
| 0.116424
| 0.905059
| 0.905059
| 0.878725
| 0.817741
| 0.817741
| 0.722107
| 0
| 0.061531
| 0.307615
| 2,981
| 82
| 78
| 36.353659
| 0.637597
| 0
| 0
| 0.706667
| 0
| 0
| 0.376767
| 0
| 0
| 0
| 0
| 0
| 0.013333
| 1
| 0.013333
| false
| 0
| 0.026667
| 0
| 0.04
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
695ec52440fba379a30f79fa614d663b2b26c725
| 110
|
py
|
Python
|
MiddleKit/Design/SQLitePythonGenerator.py
|
PeaceWorksTechnologySolutions/w4py
|
74f5a03a63f1a93563502b908474aefaae2abda2
|
[
"MIT"
] | 18
|
2016-08-01T20:15:59.000Z
|
2019-12-24T16:00:03.000Z
|
MiddleKit/Design/SQLitePythonGenerator.py
|
WebwareForPython/w4py
|
bba08f5974d49f5da7e88abe3eeda1037d0824a3
|
[
"MIT"
] | 6
|
2016-09-13T05:48:45.000Z
|
2020-01-09T18:29:12.000Z
|
MiddleKit/Design/SQLitePythonGenerator.py
|
WebwareForPython/w4py
|
bba08f5974d49f5da7e88abe3eeda1037d0824a3
|
[
"MIT"
] | 6
|
2016-09-16T14:32:29.000Z
|
2020-01-03T18:52:16.000Z
|
from SQLPythonGenerator import SQLPythonGenerator
class SQLitePythonGenerator(SQLPythonGenerator):
    """Python code generator for SQLite backends.

    Inherits all behavior unchanged from SQLPythonGenerator; exists so that
    SQLite can be selected by generator-class name. No SQLite-specific
    overrides are needed at present.
    """
    pass
| 18.333333
| 49
| 0.854545
| 8
| 110
| 11.75
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.118182
| 110
| 5
| 50
| 22
| 0.969072
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.333333
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 7
|
15e6a04a8da0e44c61254eaa10ce768020d0ae51
| 186
|
py
|
Python
|
example_problem/dialogue/dialogue_functions.py
|
seakers/daphne-brain
|
1d703d468cd503a21395f986dd72e67b6e556451
|
[
"MIT"
] | null | null | null |
example_problem/dialogue/dialogue_functions.py
|
seakers/daphne-brain
|
1d703d468cd503a21395f986dd72e67b6e556451
|
[
"MIT"
] | null | null | null |
example_problem/dialogue/dialogue_functions.py
|
seakers/daphne-brain
|
1d703d468cd503a21395f986dd72e67b6e556451
|
[
"MIT"
] | null | null | null |
import example_problem.analyst.dialogue_functions as analyst
import example_problem.engineer.dialogue_functions as engineer
import example_problem.critic.dialogue_functions as critic
| 26.571429
| 62
| 0.887097
| 24
| 186
| 6.625
| 0.375
| 0.245283
| 0.377358
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.080645
| 186
| 6
| 63
| 31
| 0.929825
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
15f0c68253b44fcf807be3d2c13c0296e0b8bc18
| 47
|
py
|
Python
|
python/graph/__init__.py
|
advaithm582/algorithms
|
9a5261b69ea7bc84afb9f8561c1d497a799733f9
|
[
"MIT"
] | 2
|
2021-11-17T07:48:34.000Z
|
2021-12-10T03:09:00.000Z
|
python/graph/__init__.py
|
advaithm582/algorithms
|
9a5261b69ea7bc84afb9f8561c1d497a799733f9
|
[
"MIT"
] | null | null | null |
python/graph/__init__.py
|
advaithm582/algorithms
|
9a5261b69ea7bc84afb9f8561c1d497a799733f9
|
[
"MIT"
] | null | null | null |
import graph.bfs as bfs
import graph.dfs as dfs
| 23.5
| 23
| 0.808511
| 10
| 47
| 3.8
| 0.5
| 0.578947
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.148936
| 47
| 2
| 24
| 23.5
| 0.95
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
c63aa7bd0e0701b71a3831892875ee207bc50f62
| 2,707
|
py
|
Python
|
tests/test_chol.py
|
luk036/ellalgo
|
8e83587b271f35c906c0d0aa4175dac153e5e29b
|
[
"MIT"
] | null | null | null |
tests/test_chol.py
|
luk036/ellalgo
|
8e83587b271f35c906c0d0aa4175dac153e5e29b
|
[
"MIT"
] | null | null | null |
tests/test_chol.py
|
luk036/ellalgo
|
8e83587b271f35c906c0d0aa4175dac153e5e29b
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
from __future__ import print_function
import numpy as np
from ellalgo.oracles.chol_ext import chol_ext
def test_chol1():
    """A 3x3 matrix (classic Cholesky example) factorizes successfully."""
    l1 = [[25.0, 15.0, -5.0], [15.0, 18.0, 0.0], [-5.0, 0.0, 11.0]]
    m1 = np.array(l1)
    Q1 = chol_ext(len(m1))
    assert Q1.factorize(m1)
def test_chol2():
    """Factorization of a 4x4 matrix fails; witness stops at rows (0, 2)."""
    l2 = [
        [18.0, 22.0, 54.0, 42.0],
        [22.0, -70.0, 86.0, 62.0],
        [54.0, 86.0, -174.0, 134.0],
        [42.0, 62.0, 134.0, -106.0],
    ]
    m2 = np.array(l2)
    Q = chol_ext(len(m2))
    assert not Q.factorize(m2)
    Q.witness()
    assert Q.p == (0, 2)
    # assert ep == 1.0
def test_chol3():
    """A zero leading pivot makes factorization fail; exact witness checked.

    Expected: failure localized to rows (0, 1), first witness component 1.0,
    and a zero witness value.
    """
    l3 = [[0.0, 15.0, -5.0], [15.0, 18.0, 0.0], [-5.0, 0.0, 11.0]]
    m3 = np.array(l3)
    Q = chol_ext(len(m3))
    assert not Q.factorize(m3)
    ep = Q.witness()
    assert Q.p == (0, 1)
    assert Q.v[0] == 1.0
    assert ep == 0.0
def test_chol4():
    """The matrix from test_chol1 still factorizes with allow_semidefinite on."""
    l1 = [[25.0, 15.0, -5.0], [15.0, 18.0, 0.0], [-5.0, 0.0, 11.0]]
    m1 = np.array(l1)
    Q1 = chol_ext(len(m1))
    Q1.allow_semidefinite = True
    assert Q1.factorize(m1)
def test_chol5():
    """The matrix from test_chol2 still fails even with allow_semidefinite on."""
    l2 = [
        [18.0, 22.0, 54.0, 42.0],
        [22.0, -70.0, 86.0, 62.0],
        [54.0, 86.0, -174.0, 134.0],
        [42.0, 62.0, 134.0, -106.0],
    ]
    m2 = np.array(l2)
    Q = chol_ext(len(m2))
    Q.allow_semidefinite = True
    assert not Q.factorize(m2)
    Q.witness()
    assert Q.p == (0, 2)
    # assert ep == 1.0
def test_chol6():
    """The zero-pivot matrix from test_chol3 factorizes once
    allow_semidefinite is enabled (contrast with test_chol3's failure)."""
    l3 = [[0.0, 15.0, -5.0], [15.0, 18.0, 0.0], [-5.0, 0.0, 11.0]]
    m3 = np.array(l3)
    Q = chol_ext(len(m3))
    Q.allow_semidefinite = True
    assert Q.factorize(m3)
    # [v, ep] = Q.witness2()
    # assert len(v) == 1
    # assert v[0] == 1.0
    # assert ep == 0.0
def test_chol7():
    """A matrix with a -20 diagonal entry fails even with allow_semidefinite;
    the witness value equals 20."""
    l3 = [[0.0, 15.0, -5.0], [15.0, 18.0, 0.0], [-5.0, 0.0, -20.0]]
    m3 = np.array(l3)
    Q = chol_ext(len(m3))
    Q.allow_semidefinite = True
    assert not Q.factorize(m3)
    ep = Q.witness()
    assert ep == 20.0
def test_chol8():
    """The zero-pivot matrix is rejected when allow_semidefinite is False.

    (Companion to test_chol9, which accepts the same matrix with the flag on.)
    """
    # Defect fixed: the original body had a second bare string literal right
    # after the docstring (a leftover duplicate '[summary]'); removed.
    l3 = [[0.0, 15.0, -5.0], [15.0, 18.0, 0.0], [-5.0, 0.0, 20.0]]
    m3 = np.array(l3)
    Q = chol_ext(len(m3))
    Q.allow_semidefinite = False
    assert not Q.factorize(m3)
def test_chol9():
    """The same zero-pivot matrix as test_chol8 is accepted when
    allow_semidefinite is True.
    """
    # Defect fixed: the original body had a second bare string literal right
    # after the docstring (a leftover duplicate '[summary]'); removed.
    l3 = [[0.0, 15.0, -5.0], [15.0, 18.0, 0.0], [-5.0, 0.0, 20.0]]
    m3 = np.array(l3)
    Q = chol_ext(len(m3))
    Q.allow_semidefinite = True
    assert Q.factorize(m3)
| 23.136752
| 68
| 0.472109
| 467
| 2,707
| 2.670236
| 0.143469
| 0.056135
| 0.044908
| 0.028067
| 0.817161
| 0.800321
| 0.756215
| 0.756215
| 0.738573
| 0.684042
| 0
| 0.182733
| 0.302549
| 2,707
| 116
| 69
| 23.336207
| 0.477754
| 0.088659
| 0
| 0.716216
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.202703
| 1
| 0.121622
| false
| 0
| 0.040541
| 0
| 0.162162
| 0.013514
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
d6a31066ae56fbab5790e025522ea654a924256a
| 36
|
py
|
Python
|
raguel/__init__.py
|
WireShoutLLC/piraguel
|
edbca6338d23735d7e1d6a63273f55851ef76518
|
[
"MIT"
] | null | null | null |
raguel/__init__.py
|
WireShoutLLC/piraguel
|
edbca6338d23735d7e1d6a63273f55851ef76518
|
[
"MIT"
] | null | null | null |
raguel/__init__.py
|
WireShoutLLC/piraguel
|
edbca6338d23735d7e1d6a63273f55851ef76518
|
[
"MIT"
] | null | null | null |
import raguel.fptp
import raguel.irv
| 18
| 18
| 0.861111
| 6
| 36
| 5.166667
| 0.666667
| 0.774194
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.083333
| 36
| 2
| 19
| 18
| 0.939394
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
d6bfbf2d051a74fc9be123eeb1ea06791a6a5509
| 1,359
|
py
|
Python
|
source/Objects/XTRA_Scaling_Parameters.py
|
afarahi/XTRA
|
6550b216264abaa3ed705835aca0981f2934e069
|
[
"MIT"
] | 2
|
2018-11-01T12:38:56.000Z
|
2019-10-22T07:02:54.000Z
|
source/Objects/XTRA_Scaling_Parameters.py
|
afarahi/XTRA
|
6550b216264abaa3ed705835aca0981f2934e069
|
[
"MIT"
] | null | null | null |
source/Objects/XTRA_Scaling_Parameters.py
|
afarahi/XTRA
|
6550b216264abaa3ed705835aca0981f2934e069
|
[
"MIT"
] | null | null | null |
import os.path
import json
class Temprature_scaling:
    """Temperature (Tx) scaling-relation parameters loaded from a JSON file."""

    def __init__(self, label):
        """Load './parameters/Models/Txm/<label>_parameters.json'.

        Exits the process with status 1 when the file is missing (behavior
        kept from the original; the message mentions defaults but none are
        actually loaded — flagged below).
        """
        fname = './parameters/Models/Txm/' + label + '_parameters.json'
        if not os.path.isfile(fname):
            # Bug fix: the original formatted the message with the undefined
            # name `s`, raising NameError whenever the file was absent.
            # NOTE(review): message says defaults are used, but we exit(1)
            # instead — confirm intended behavior.
            print("Error: %s does not exists it uses Tx scaling default parameters." % fname)
            exit(1)
            # fname = './parameters/Models/Txm/default_parameters.xml'
        with open(fname) as fp:
            _param = json.load(fp)
        # Scaling-relation parameters (names as stored in the JSON file)
        self.Norm = _param['a']
        self.M_slope = _param['M_slope']
        self.E_slope = _param['E_slope']
        self.M_p = _param['M_p']
        self.z_p = _param['z_p']
        self.sig = _param['sig']
class Luminocity_scaling:
    """Luminosity (Lx) scaling-relation parameters loaded from a JSON file."""

    def __init__(self, label):
        """Load './parameters/Models/Lxm/<label>_parameters.json'.

        Exits the process with status 1 when the file is missing (behavior
        kept from the original; the message mentions defaults but none are
        actually loaded — flagged below).
        """
        fname = './parameters/Models/Lxm/' + label + '_parameters.json'
        if not os.path.isfile(fname):
            # Bug fix: the original formatted the message with the undefined
            # name `s`, raising NameError whenever the file was absent.
            # NOTE(review): message says defaults are used, but we exit(1)
            # instead — confirm intended behavior.
            print("ERROR: %s does not exists it uses Lx scaling default parameters." % fname)
            exit(1)
            # fname = './parameters/Models/Lxm/default_parameters.xml'
        with open(fname) as fp:
            _param = json.load(fp)
        # Scaling-relation parameters (names as stored in the JSON file)
        self.Norm = _param['a']
        self.M_slope = _param['M_slope']
        self.E_slope = _param['E_slope']
        self.M_p = _param['M_p']
        self.z_p = _param['z_p']
        self.sig = _param['sig']
| 22.278689
| 89
| 0.56365
| 173
| 1,359
| 4.17341
| 0.277457
| 0.083102
| 0.116343
| 0.049862
| 0.905817
| 0.905817
| 0.905817
| 0.905817
| 0.783934
| 0.642659
| 0
| 0.002116
| 0.304636
| 1,359
| 60
| 90
| 22.65
| 0.761905
| 0.099338
| 0
| 0.733333
| 0
| 0
| 0.211396
| 0.039637
| 0
| 0
| 0
| 0
| 0
| 1
| 0.066667
| false
| 0
| 0.066667
| 0
| 0.2
| 0.066667
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
d6e3fe20726371350367db02dca201ae5e10d187
| 159
|
py
|
Python
|
ASSIGN-1/assignment_1/assignment_1/envs/__init__.py
|
ShivenTripathi/CS698-Deep-Reinforcement-Learning
|
184f7887cea3065d2bfa4ba05bfb249838c3dab4
|
[
"MIT"
] | null | null | null |
ASSIGN-1/assignment_1/assignment_1/envs/__init__.py
|
ShivenTripathi/CS698-Deep-Reinforcement-Learning
|
184f7887cea3065d2bfa4ba05bfb249838c3dab4
|
[
"MIT"
] | null | null | null |
ASSIGN-1/assignment_1/assignment_1/envs/__init__.py
|
ShivenTripathi/CS698-Deep-Reinforcement-Learning
|
184f7887cea3065d2bfa4ba05bfb249838c3dab4
|
[
"MIT"
] | null | null | null |
from assignment_1.envs.gaussianBandit import gaussianBandit
from assignment_1.envs.bernoulliBandit import bernoulliBandit
from assignment_1.envs.RWE import RWE
| 53
| 61
| 0.893082
| 21
| 159
| 6.619048
| 0.380952
| 0.302158
| 0.323741
| 0.410072
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.02027
| 0.069182
| 159
| 3
| 62
| 53
| 0.918919
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
ba61b5bc2650055217908e9853377121e0e5e11e
| 5,321
|
py
|
Python
|
wilson/translate/__init__.py
|
bednya/wilson
|
2cd803bc298c3f967401aed119f617fc5d7ba5c0
|
[
"MIT"
] | 24
|
2018-04-16T15:01:39.000Z
|
2022-01-26T07:16:22.000Z
|
wilson/translate/__init__.py
|
bednya/wilson
|
2cd803bc298c3f967401aed119f617fc5d7ba5c0
|
[
"MIT"
] | 85
|
2018-04-27T08:17:00.000Z
|
2022-02-22T16:47:14.000Z
|
wilson/translate/__init__.py
|
bednya/wilson
|
2cd803bc298c3f967401aed119f617fc5d7ba5c0
|
[
"MIT"
] | 17
|
2018-04-27T07:59:35.000Z
|
2022-02-09T22:46:05.000Z
|
"""Provides basis translators for SMEFT and WET that can be used with the
`wcxf` Python package."""
from . import smeft, smeft_higgs
from . import wet
from wilson import wcxf


# Each function below is registered as a basis translator via the
# `@wcxf.translator(EFT, source_basis, target_basis)` decorator; the module
# is imported for these registration side effects.

@wcxf.translator('SMEFT', 'Higgs-Warsaw up', 'Warsaw up')
def higgs_up_to_warsaw_up(C, scale, parameters, sectors=None):
    """Translate 'Higgs-Warsaw up' -> 'Warsaw up'."""
    return smeft_higgs.higgslike_to_warsaw_up(C, parameters, sectors)


@wcxf.translator('SMEFT', 'Higgs-Warsaw up', 'Warsaw')
def higgs_up_to_warsaw(C, scale, parameters, sectors=None):
    """Translate 'Higgs-Warsaw up' -> 'Warsaw', going via 'Warsaw up'."""
    C = smeft_higgs.higgslike_to_warsaw_up(C, parameters, sectors)
    return smeft.warsaw_up_to_warsaw(C, sectors)


@wcxf.translator('SMEFT', 'Warsaw up', 'Higgs-Warsaw up')
def warsaw_up_to_higgs_up(C, scale, parameters, sectors=None):
    """Translate 'Warsaw up' -> 'Higgs-Warsaw up'."""
    return smeft_higgs.warsaw_up_to_higgslike(C, parameters, sectors)
@wcxf.translator('SMEFT', 'Warsaw', 'Higgs-Warsaw up')
def warsaw_to_higgs_up(C, scale, parameters, sectors=None):
    """Translate 'Warsaw' -> 'Higgs-Warsaw up', going via 'Warsaw up'.

    Fix: this function was previously (mis)named `warsaw_up_to_higgs_up`,
    which silently overwrote the 'Warsaw up' -> 'Higgs-Warsaw up' translator
    defined just above it at module level.
    """
    C = smeft.warsaw_to_warsaw_up(C, sectors)
    return smeft_higgs.warsaw_up_to_higgslike(C, parameters, sectors)
# SMEFT: translations among the 'Warsaw', 'Warsaw up' and 'Warsaw mass' bases.

@wcxf.translator('SMEFT', 'Warsaw', 'Warsaw mass')
def warsaw_to_warsawmass(C, scale, parameters, sectors=None):
    return smeft.warsaw_to_warsawmass(C, sectors)


@wcxf.translator('SMEFT', 'Warsaw', 'Warsaw up')
def warsaw_to_warsaw_up(C, scale, parameters, sectors=None):
    return smeft.warsaw_to_warsaw_up(C, sectors)


@wcxf.translator('SMEFT', 'Warsaw up', 'Warsaw')
def warsaw_up_to_warsaw(C, scale, parameters, sectors=None):
    return smeft.warsaw_up_to_warsaw(C, sectors)


# WET / WET-4 / WET-3: flavio <-> JMS. The *_wet4/*_wet3 variants register
# the same implementation for the lower-flavor EFTs.

@wcxf.translator('WET', 'flavio', 'JMS')
def flavio_to_JMS(C, scale, parameters, sectors=None):
    return wet.flavio_to_JMS(C, scale, parameters, sectors)


@wcxf.translator('WET-4', 'flavio', 'JMS')
def flavio_to_JMS_wet4(C, scale, parameters, sectors=None):
    return wet.flavio_to_JMS(C, scale, parameters, sectors)


@wcxf.translator('WET-3', 'flavio', 'JMS')
def flavio_to_JMS_wet3(C, scale, parameters, sectors=None):
    return wet.flavio_to_JMS(C, scale, parameters, sectors)


@wcxf.translator('WET', 'JMS', 'flavio')
def JMS_to_flavio(C, scale, parameters, sectors=None):
    return wet.JMS_to_flavio(C, scale, parameters, sectors)


@wcxf.translator('WET-4', 'JMS', 'flavio')
def JMS_to_flavio_wet4(C, scale, parameters, sectors=None):
    return wet.JMS_to_flavio(C, scale, parameters, sectors)


@wcxf.translator('WET-3', 'JMS', 'flavio')
def JMS_to_flavio_wet3(C, scale, parameters, sectors=None):
    return wet.JMS_to_flavio(C, scale, parameters, sectors)


# WET / WET-4 / WET-3: Bern <-> flavio.

@wcxf.translator('WET', 'Bern', 'flavio')
def Bern_to_flavio(C, scale, parameters, sectors=None):
    return wet.Bern_to_flavio(C, scale, parameters, sectors)


@wcxf.translator('WET', 'flavio', 'Bern')
def flavio_to_Bern(C, scale, parameters, sectors=None):
    return wet.flavio_to_Bern(C, scale, parameters, sectors)


@wcxf.translator('WET-4', 'Bern', 'flavio')
def Bern_to_flavio_wet4(C, scale, parameters, sectors=None):
    return wet.Bern_to_flavio(C, scale, parameters, sectors)


@wcxf.translator('WET-4', 'flavio', 'Bern')
def flavio_to_Bern_wet4(C, scale, parameters, sectors=None):
    return wet.flavio_to_Bern(C, scale, parameters, sectors)


@wcxf.translator('WET-3', 'Bern', 'flavio')
def Bern_to_flavio_wet3(C, scale, parameters, sectors=None):
    return wet.Bern_to_flavio(C, scale, parameters, sectors)


@wcxf.translator('WET-3', 'flavio', 'Bern')
def flavio_to_Bern_wet3(C, scale, parameters, sectors=None):
    return wet.flavio_to_Bern(C, scale, parameters, sectors)


# WET: JMS <-> Bern / EOS / formflavor / FlavorKit.

@wcxf.translator('WET', 'JMS', 'EOS')
def JMS_to_EOS(C, scale, parameters, sectors=None):
    return wet.JMS_to_EOS(C, scale, parameters, sectors)


@wcxf.translator('WET', 'JMS', 'Bern')
def JMS_to_Bern(C, scale, parameters, sectors=None):
    return wet.JMS_to_Bern(C, scale, parameters, sectors)


@wcxf.translator('WET-4', 'JMS', 'Bern')
def JMS_to_Bern_wet4(C, scale, parameters, sectors=None):
    return wet.JMS_to_Bern(C, scale, parameters, sectors)


@wcxf.translator('WET-3', 'JMS', 'Bern')
def JMS_to_Bern_wet3(C, scale, parameters, sectors=None):
    return wet.JMS_to_Bern(C, scale, parameters, sectors)


@wcxf.translator('WET', 'Bern', 'JMS')
def Bern_to_JMS(C, scale, parameters, sectors=None):
    return wet.Bern_to_JMS(C, scale, parameters, sectors)


@wcxf.translator('WET-4', 'Bern', 'JMS')
def Bern_to_JMS_wet4(C, scale, parameters, sectors=None):
    return wet.Bern_to_JMS(C, scale, parameters, sectors)


@wcxf.translator('WET-3', 'Bern', 'JMS')
def Bern_to_JMS_wet3(C, scale, parameters, sectors=None):
    return wet.Bern_to_JMS(C, scale, parameters, sectors)


@wcxf.translator('WET', 'JMS', 'formflavor')
def JMS_to_FormFlavor(C, scale, parameters, sectors=None):
    return wet.JMS_to_FormFlavor(C, scale, parameters, sectors)


@wcxf.translator('WET', 'FlavorKit', 'JMS')
def FlavorKit_to_JMS(C, scale, parameters, sectors=None):
    return wet.FlavorKit_to_JMS(C, scale, parameters, sectors)


@wcxf.translator('WET', 'JMS', 'FlavorKit')
def JMS_to_FlavorKit(C, scale, parameters, sectors=None):
    return wet.JMS_to_FlavorKit(C, scale, parameters, sectors)


@wcxf.translator('WET', 'FlavorKit', 'flavio')
def FlavorKit_to_flavio(C, scale, parameters, sectors=None):
    # Two-step translation: FlavorKit -> JMS -> flavio.
    C_JMS = wet.FlavorKit_to_JMS(C, scale, parameters, sectors)
    return wet.JMS_to_flavio(C_JMS, scale, parameters, sectors)
| 33.677215
| 77
| 0.7352
| 792
| 5,321
| 4.734848
| 0.055556
| 0.262933
| 0.3168
| 0.325067
| 0.928
| 0.9192
| 0.849067
| 0.7712
| 0.697867
| 0.592
| 0
| 0.00511
| 0.117271
| 5,321
| 157
| 78
| 33.89172
| 0.793272
| 0.01823
| 0
| 0.25
| 0
| 0
| 0.091798
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.3125
| false
| 0
| 0.03125
| 0.28125
| 0.65625
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 10
|
ba9ff79eed66de8a976604f59d5d1e814bf57989
| 21,116
|
py
|
Python
|
cloudroast/objectstorage/smoke/container_smoke.py
|
kurhula/cloudroast
|
dcccce6b3af9d150cb667fc05bd051e97b5f6e2c
|
[
"Apache-2.0"
] | null | null | null |
cloudroast/objectstorage/smoke/container_smoke.py
|
kurhula/cloudroast
|
dcccce6b3af9d150cb667fc05bd051e97b5f6e2c
|
[
"Apache-2.0"
] | null | null | null |
cloudroast/objectstorage/smoke/container_smoke.py
|
kurhula/cloudroast
|
dcccce6b3af9d150cb667fc05bd051e97b5f6e2c
|
[
"Apache-2.0"
] | 1
|
2020-04-13T17:47:04.000Z
|
2020-04-13T17:47:04.000Z
|
"""
Copyright 2013 Rackspace
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from cloudroast.objectstorage.fixtures import ObjectStorageFixture
from cloudcafe.common.tools import randomstring as randstring
CONTENT_TYPE_TEXT = 'text/plain; charset=UTF-8'
class ContainerSmokeTest(ObjectStorageFixture):
    """Smoke tests covering container listing, creation, deletion and
    metadata operations (API sections 4.2.1 - 4.2.5).

    The original file repeated the container/object setup verbatim in every
    test (and in one test built the object headers dict twice in a row);
    that boilerplate is factored into the private helpers below. Test names,
    client calls, and assertion messages are unchanged.
    """

    def _random_name(self, base):
        """Return a unique name derived from *base*."""
        return '{0}_{1}'.format(base, randstring.get_random_string())

    def _make_container(self):
        """Create a randomly named container, register its forced deletion
        on cleanup and return the container name."""
        container_name = self._random_name(self.base_container_name)
        self.client.create_container(container_name)
        self.addCleanup(
            self.client.force_delete_containers,
            [container_name])
        return container_name

    def _make_object(self, container_name, object_name=None,
                     object_data='Test file data'):
        """Create a plain-text object in *container_name* and return its
        name. A random name is generated when *object_name* is None."""
        if object_name is None:
            object_name = self._random_name(self.base_object_name)
        headers = {'Content-Length': str(len(object_data)),
                   'Content-Type': CONTENT_TYPE_TEXT}
        self.client.create_object(
            container_name,
            object_name,
            headers=headers,
            data=object_data)
        return object_name

    # 4.2.1. List Objects in a Container
    def test_objects_list_with_non_empty_container(self):
        container_name = self._make_container()
        self._make_object(container_name)
        response = self.client.list_objects(container_name)
        self.assertEqual(response.status_code, 200, 'should list object')

    # 4.2.1.1. Serialized List Output
    def test_objects_list_with_format_json_query_parameter(self):
        container_name = self._make_container()
        self._make_object(container_name)
        response = self.client.list_objects(
            container_name, params={'format': 'json'})
        self.assertEqual(
            response.status_code,
            200,
            'should list object using content-type json')

    def test_objects_list_with_format_xml_query_parameter(self):
        container_name = self._make_container()
        self._make_object(container_name)
        response = self.client.list_objects(
            container_name, params={'format': 'xml'})
        self.assertEqual(
            response.status_code,
            200,
            'should list object using content-type xml')

    def test_object_list_with_accept_header(self):
        container_name = self._make_container()
        self._make_object(container_name)
        response = self.client.list_objects(
            container_name,
            headers={'Accept': '*/*'})
        self.assertEqual(
            response.status_code,
            200,
            'should list objects using content-type text/plain')

    def test_object_list_with_text_accept_header(self):
        container_name = self._make_container()
        self._make_object(container_name)
        response = self.client.list_objects(
            container_name,
            headers={'Accept': 'text/plain'})
        self.assertEqual(
            response.status_code,
            200,
            'should list objects using content-type text/plain')

    def test_object_list_with_json_accept_header(self):
        container_name = self._make_container()
        self._make_object(container_name)
        response = self.client.list_objects(
            container_name,
            headers={'Accept': 'application/json'})
        self.assertEqual(
            response.status_code,
            200,
            'should list objects using content-type application/json')

    def test_object_list_with_xml_accept_header(self):
        container_name = self._make_container()
        self._make_object(container_name)
        response = self.client.list_objects(
            container_name,
            headers={'Accept': 'application/xml'})
        self.assertEqual(
            response.status_code,
            200,
            'should list objects using content-type application/xml')
        # The XML listing must also be reachable via the text/xml alias.
        response = self.client.list_objects(
            container_name,
            headers={'Accept': 'text/xml'})
        self.assertEqual(
            response.status_code,
            200,
            'should list objects using content-type text/xml')

    # 4.2.1.2. Controlling a Large List of Objects
    def test_objects_list_with_limit_query_parameter(self):
        container_name = self._make_container()
        self._make_object(container_name)
        response = self.client.list_objects(
            container_name, params={'limit': '3'})
        self.assertEqual(response.status_code, 200, 'should list object')

    def test_objects_list_with_marker_query_parameter(self):
        container_name = self._make_container()
        self._make_object(container_name)
        response = self.client.list_objects(
            container_name, params={'marker': container_name})
        self.assertEqual(response.status_code, 200, 'should list object')

    # 4.2.1.3. Pseudo-Hierarchical Folders/Directories
    def test_objects_list_with_prefix_query_parameter(self):
        container_name = self._make_container()
        self._make_object(container_name)
        response = self.client.list_objects(
            container_name, params={'prefix': container_name[0:3]})
        self.assertEqual(response.status_code, 200, 'should list object')

    def test_objects_list_with_path_query_parameter(self):
        """Exercise the deprecated `path` query parameter.

        This is a deprecated feature that has little documentation.
        The following things need to be done for the path parameter to work:

        1. For every 'directory' a zero-length 'directory marker' object
           must be created. E.g. for a directory 'foo/bar/' upload an
           object named 'foo/bar/'.
        2. Objects must be uploaded with names prefixed with the
           'directory' path, e.g. 'foo/object1.txt' and
           'foo/bar/object2.txt'.
        3. The `path` query string parameter then lists the objects in the
           simulated directory structure: setting path to 'foo/' should
           list 'foo/object1.txt' and 'foo/bar/'.
        """
        container_name = self._make_container()
        # Zero-length directory markers simulating the directory tree.
        for dir_marker in ('path_test/', 'path_test/nested_dir/'):
            self.client.create_object(
                container_name,
                dir_marker,
                headers={'Content-Length': '0'})
        object_name_postfix = self._random_name(self.base_object_name)
        self._make_object(
            container_name,
            object_name='path_test/nested_dir/' + object_name_postfix)
        response = self.client.list_objects(
            container_name, params={'path': 'path_test/'})
        self.assertEqual(
            response.status_code,
            200,
            'should list the simulated directory')
        response = self.client.list_objects(
            container_name, params={'path': 'path_test/nested_dir/'})
        self.assertEqual(
            response.status_code,
            200,
            'should list the object in the simulated directory')

    def test_objects_list_with_delimiter_query_parameter(self):
        container_name = self._make_container()
        object_name_prefix = 'delimiter_test/'
        object_name_postfix = self._random_name(self.base_object_name)
        self._make_object(
            container_name,
            object_name=object_name_prefix + object_name_postfix)
        response = self.client.list_objects(
            container_name, params={'delimiter': '/'})
        self.assertEqual(
            response.status_code, 200,
            'should list the simulated directory')
        response = self.client.list_objects(
            container_name,
            params={'prefix': object_name_prefix, 'delimiter': '/'})
        self.assertEqual(
            response.status_code,
            200,
            'should list the object in the simulated directory')

    # 4.2.2. Create Container
    def test_container_creation_with_valid_container_name(self):
        container_name = self._random_name(self.base_container_name)
        response = self.client.create_container(container_name)
        self.addCleanup(
            self.client.force_delete_containers,
            [container_name])
        self.assertEqual(response.status_code, 201, 'should be created')

    def test_container_creation_with_existing_container_name(self):
        container_name = self._random_name(self.base_container_name)
        response = self.client.create_container(container_name)
        self.addCleanup(
            self.client.force_delete_containers,
            [container_name])
        self.assertEqual(response.status_code, 201, 'should be created')
        # Re-creating an existing container is accepted (202), not an error.
        response = self.client.create_container(container_name)
        self.assertEqual(response.status_code, 202, 'should be successful')

    def test_container_creation_with_metadata(self):
        container_name = self._random_name(self.base_container_name)
        metadata = {'Book-One': 'fight_club',
                    'Book-Two': 'a_clockwork_orange'}
        response = self.client.create_container(
            container_name,
            metadata=metadata)
        self.addCleanup(
            self.client.force_delete_containers,
            [container_name])
        self.assertEqual(response.status_code, 201, 'should be created')

    # 4.2.3. Delete Container
    def test_container_deletion_with_existing_empty_container(self):
        container_name = self._random_name(self.base_container_name)
        response = self.client.create_container(container_name)
        self.addCleanup(
            self.client.force_delete_containers,
            [container_name])
        self.assertEqual(response.status_code, 201, 'should be created')
        response = self.client.delete_container(container_name)
        self.assertEqual(response.status_code, 204, 'should be deleted')
        response = self.client.list_objects(container_name)
        self.assertEqual(
            response.status_code,
            404,
            'should not exist after deletion')

    # 4.2.4. Retrieve Container Metadata
    def test_metadata_retrieval_with_newly_created_container(self):
        container_name = self._random_name(self.base_container_name)
        metadata = {'Book-One': 'fight_club',
                    'Book-Two': 'a_clockwork_orange'}
        response = self.client.create_container(container_name, metadata)
        self.addCleanup(
            self.client.force_delete_containers,
            [container_name])
        self.assertEqual(response.status_code, 201, 'should be created')
        response = self.client.get_container_metadata(container_name)
        self.assertEqual(
            response.status_code,
            204,
            'new container should return metadata')

    def test_metadata_retrieval_with_container_possessing_metadata(self):
        container_name = self._make_container()
        metadata = {'Book-One': 'fight_club',
                    'Book-Two': 'a_clockwork_orange'}
        response = self.client.set_container_metadata(
            container_name,
            metadata)
        response = self.client.get_container_metadata(container_name)
        self.assertEqual(
            response.status_code,
            204,
            'container should return metadata')

    # 4.2.5. Create/Update Container Metadata
    def test_metadata_update_with_container_possessing_metadata(self):
        container_name = self._make_container()
        metadata = {'Book-One': 'fight_club',
                    'Book-Two': 'a_clockwork_orange'}
        response = self.client.set_container_metadata(
            container_name,
            metadata)
        self.assertEqual(response.status_code, 204, 'metadata should be added')
        # Updating an existing key must also succeed with 204.
        metadata = {'Book-One': 'Fight_Club'}
        response = self.client.set_container_metadata(
            container_name,
            metadata)
        self.assertEqual(
            response.status_code,
            204,
            'metadata should be updated')
| 33.305994
| 79
| 0.617446
| 2,276
| 21,116
| 5.442443
| 0.095782
| 0.12279
| 0.043917
| 0.030031
| 0.839913
| 0.813837
| 0.803827
| 0.803827
| 0.789457
| 0.773149
| 0
| 0.013014
| 0.290396
| 21,116
| 633
| 80
| 33.35861
| 0.813668
| 0.027657
| 0
| 0.860927
| 0
| 0
| 0.108563
| 0.003283
| 0
| 0
| 0
| 0
| 0.059603
| 1
| 0.041943
| false
| 0
| 0.004415
| 0
| 0.048565
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
baa7798d6951efa49c7dfc86901c4f88cfb2e6a7
| 243
|
py
|
Python
|
tests/test_bz2.py
|
refi64/arclib
|
a872904fa4f4270b851b2e96c917d26d3c0a0a4c
|
[
"MIT"
] | 2
|
2019-05-04T16:41:16.000Z
|
2020-11-09T09:44:19.000Z
|
tests/test_bz2.py
|
refi64/arclib
|
a872904fa4f4270b851b2e96c917d26d3c0a0a4c
|
[
"MIT"
] | null | null | null |
tests/test_bz2.py
|
refi64/arclib
|
a872904fa4f4270b851b2e96c917d26d3c0a0a4c
|
[
"MIT"
] | null | null | null |
from util import *
from arclib import bz2
from bz2 import compress, decompress
def test_incremental_compress():
    # Round-trip check: data produced by arclib's incremental bz2 Compressor
    # must be recoverable with the stdlib `bz2.decompress`.
    # (`basic_test_c` comes from `util` via star import — presumably it feeds
    # sample data through the compressor and validates via the callback;
    # confirm against util's definition.)
    basic_test_c(bz2.Compressor(), decompress)
def test_incremental_decompress():
    # Inverse round-trip: data compressed with the stdlib `bz2.compress`
    # must be recoverable by arclib's incremental Decompressor.
    # (`basic_test_d` comes from `util` via star import — confirm its
    # contract against util's definition.)
    basic_test_d(bz2.Decompressor(), compress)
| 24.3
| 46
| 0.790123
| 32
| 243
| 5.75
| 0.46875
| 0.141304
| 0.184783
| 0.304348
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.018868
| 0.127572
| 243
| 9
| 47
| 27
| 0.849057
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.285714
| true
| 0
| 0.428571
| 0
| 0.714286
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
badc956a17a6eddf0152abe143da26f2b0200f6c
| 106
|
py
|
Python
|
security/compatibility.py
|
jsilhan/django-security
|
fd56a6ca8ab7ad2b1e91a33ea4ecb40bb10cc42b
|
[
"MIT"
] | null | null | null |
security/compatibility.py
|
jsilhan/django-security
|
fd56a6ca8ab7ad2b1e91a33ea4ecb40bb10cc42b
|
[
"MIT"
] | null | null | null |
security/compatibility.py
|
jsilhan/django-security
|
fd56a6ca8ab7ad2b1e91a33ea4ecb40bb10cc42b
|
[
"MIT"
] | null | null | null |
# Compatibility shim: the module providing `get_client_ip` differs between
# django-ipware releases, so fall back to `ipware.ip2` when the import from
# `ipware.ip` fails.
try:
    from ipware.ip import get_client_ip
except ImportError:
    from ipware.ip2 import get_client_ip
| 21.2
| 40
| 0.783019
| 17
| 106
| 4.647059
| 0.588235
| 0.253165
| 0.379747
| 0.43038
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.011494
| 0.179245
| 106
| 4
| 41
| 26.5
| 0.896552
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.75
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
baf7b7cfc3a32ed71b8ccf99a03a13c2d80fa7cf
| 155
|
py
|
Python
|
r3det/ops/polygon_geo/polygon_geo.py
|
SJTU-Thinklab-Det/r3det-pytorch
|
aed1c26ecfad7ac518d24f0f4d537e1926a7e8bd
|
[
"Apache-2.0"
] | 42
|
2021-12-09T10:02:35.000Z
|
2022-03-30T08:40:20.000Z
|
r3det/ops/polygon_geo/polygon_geo.py
|
SJTU-Thinklab-Det/r3det-pytorch
|
aed1c26ecfad7ac518d24f0f4d537e1926a7e8bd
|
[
"Apache-2.0"
] | 13
|
2021-12-14T01:47:32.000Z
|
2022-03-30T08:01:17.000Z
|
r3det/ops/polygon_geo/polygon_geo.py
|
SJTU-Thinklab-Det/r3det-pytorch
|
aed1c26ecfad7ac518d24f0f4d537e1926a7e8bd
|
[
"Apache-2.0"
] | 5
|
2021-12-14T09:57:29.000Z
|
2022-03-03T12:25:54.000Z
|
from . import polygon_geo_cpu
def polygon_iou(poly1, poly2):
    """Compute the IoU of polygons.

    Thin wrapper delegating to the compiled extension
    `polygon_geo_cpu.polygon_iou`; the expected argument layout
    (presumably arrays of polygon vertex coordinates) is defined by the
    extension — confirm against its signature.
    """
    return polygon_geo_cpu.polygon_iou(poly1, poly2)
| 22.142857
| 52
| 0.735484
| 23
| 155
| 4.695652
| 0.608696
| 0.185185
| 0.240741
| 0.37037
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.030769
| 0.16129
| 155
| 6
| 53
| 25.833333
| 0.8
| 0.180645
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
240a83d7ee6d8a5009d4cbc9cb32f8bc334441c1
| 32,053
|
py
|
Python
|
tests/test_dispatcher.py
|
electronhead/whendo
|
27112834be0935b5b0f7ade4316e35532532e047
|
[
"MIT"
] | 1
|
2022-03-04T09:25:13.000Z
|
2022-03-04T09:25:13.000Z
|
tests/test_dispatcher.py
|
electronhead/whendo
|
27112834be0935b5b0f7ade4316e35532532e047
|
[
"MIT"
] | null | null | null |
tests/test_dispatcher.py
|
electronhead/whendo
|
27112834be0935b5b0f7ade4316e35532532e047
|
[
"MIT"
] | null | null | null |
import pytest
import time
from datetime import timedelta
from typing import Optional, Dict, Any
from whendo.core.util import Rez, SystemInfo, Now, KeyTagMode, DateTime, Rez
from whendo.core.action import Action
from whendo.core.server import Server
from whendo.core.actions.list_action import (
UntilFailure,
All,
Terminate,
IfElse,
RaiseCmp,
Result,
)
from whendo.core.schedulers.timed_scheduler import Timely
from whendo.core.scheduler import Immediately
from whendo.core.dispatcher import Dispatcher
from whendo.core.programs.simple_program import PBEProgram
from whendo.core.actions.dispatch_action import (
UnscheduleProgram,
ScheduleAction,
DeferAction,
ExpireAction,
)
from whendo.core.timed import Timed
from .fixtures import port, host
pause = 3
# Server lookup by key/tags: each test registers the same two servers
# ("aqua" and "teal") and queries `get_servers_by_tags` with a key/tag set
# and a KeyTagMode, asserting how many servers match.

def test_server_all_1(friends, servers):
    # KeyTagMode.ALL with tags ["bar", "baz"] under key "foo": both
    # registered servers are expected to match.
    dispatcher, scheduler, action = friends()
    aqua, teal = servers()
    dispatcher.add_server(server_name="aqua", server=aqua)
    dispatcher.add_server(server_name="teal", server=teal)
    mode = KeyTagMode.ALL
    result = dispatcher.get_servers_by_tags(
        key_tags={"foo": ["bar", "baz"]}, key_tag_mode=mode
    )
    assert len(result) == 2


def test_server_all_2(friends, servers):
    # KeyTagMode.ALL with the single tag ["bar"]: exactly one server matches.
    dispatcher, scheduler, action = friends()
    aqua, teal = servers()
    dispatcher.add_server(server_name="aqua", server=aqua)
    dispatcher.add_server(server_name="teal", server=teal)
    mode = KeyTagMode.ALL
    result = dispatcher.get_servers_by_tags(
        key_tags={"foo": ["bar"]}, key_tag_mode=mode
    )
    assert len(result) == 1


def test_server_all_3(friends, servers):
    # KeyTagMode.ALL with an empty tag list: no server matches.
    dispatcher, scheduler, action = friends()
    aqua, teal = servers()
    dispatcher.add_server(server_name="aqua", server=aqua)
    dispatcher.add_server(server_name="teal", server=teal)
    mode = KeyTagMode.ALL
    result = dispatcher.get_servers_by_tags(key_tags={"foo": []}, key_tag_mode=mode)
    assert len(result) == 0


def test_server_all_4(friends, servers):
    # KeyTagMode.ALL with a tag ("clasp") carried by neither server: no match.
    dispatcher, scheduler, action = friends()
    aqua, teal = servers()
    dispatcher.add_server(server_name="aqua", server=aqua)
    dispatcher.add_server(server_name="teal", server=teal)
    mode = KeyTagMode.ALL
    result = dispatcher.get_servers_by_tags(
        key_tags={"foo": ["clasp"]}, key_tag_mode=mode
    )
    assert len(result) == 0


def test_server_any_1(friends, servers):
    # KeyTagMode.ANY with tags ["bar", "baz"]: both servers match.
    dispatcher, scheduler, action = friends()
    aqua, teal = servers()
    dispatcher.add_server(server_name="aqua", server=aqua)
    dispatcher.add_server(server_name="teal", server=teal)
    mode = KeyTagMode.ANY
    result = dispatcher.get_servers_by_tags(
        key_tags={"foo": ["bar", "baz"]}, key_tag_mode=mode
    )
    assert len(result) == 2


def test_server_any_2(friends, servers):
    # KeyTagMode.ANY with the single tag ["bar"]: both servers match
    # (contrast with test_server_all_2, where ALL yields only one).
    dispatcher, scheduler, action = friends()
    aqua, teal = servers()
    dispatcher.add_server(server_name="aqua", server=aqua)
    dispatcher.add_server(server_name="teal", server=teal)
    mode = KeyTagMode.ANY
    result = dispatcher.get_servers_by_tags(
        key_tags={"foo": ["bar"]}, key_tag_mode=mode
    )
    assert len(result) == 2


def test_server_any_3(friends, servers):
    # KeyTagMode.ANY with an empty tag list: no server matches.
    dispatcher, scheduler, action = friends()
    aqua, teal = servers()
    dispatcher.add_server(server_name="aqua", server=aqua)
    dispatcher.add_server(server_name="teal", server=teal)
    mode = KeyTagMode.ANY
    result = dispatcher.get_servers_by_tags(key_tags={"foo": []}, key_tag_mode=mode)
    assert len(result) == 0


def test_server_any_4(friends, servers):
    # KeyTagMode.ANY with a tag ("clasp") carried by neither server: no match.
    dispatcher, scheduler, action = friends()
    aqua, teal = servers()
    dispatcher.add_server(server_name="aqua", server=aqua)
    dispatcher.add_server(server_name="teal", server=teal)
    mode = KeyTagMode.ANY
    result = dispatcher.get_servers_by_tags(
        key_tags={"foo": ["clasp"]}, key_tag_mode=mode
    )
    assert len(result) == 0
def test_schedule_action(friends):
"""
Tests Dispatcher and Timed objects running a scheduled action.
"""
dispatcher, scheduler, action = friends()
dispatcher.add_action("foo", action)
dispatcher.add_scheduler("bar", scheduler)
dispatcher.schedule_action("bar", "foo")
assert dispatcher.get_scheduled_action_count() == 1
dispatcher.run_jobs()
time.sleep(pause)
dispatcher.stop_jobs()
dispatcher.clear_jobs()
assert action.flea_count > 0
def test_schedule_action_action(friends):
    """
    Tests Dispatcher and Timed objects running a scheduled action.
    """
    dispatcher, scheduler, action = friends()
    dispatcher.add_action("foo", action)
    dispatcher.add_scheduler("bar", scheduler)
    # NOTE(review): ScheduleAction.execute() evidently registers with this
    # same dispatcher instance -- the scheduled-action count below goes to 1
    schedule_action = ScheduleAction(scheduler_name="bar", action_name="foo")
    schedule_action.execute()
    assert dispatcher.get_scheduled_action_count() == 1
    dispatcher.run_jobs()
    # allow the interval-1 scheduler to fire at least once
    time.sleep(pause)
    dispatcher.stop_jobs()
    dispatcher.clear_jobs()
    assert action.flea_count > 0
# def test_dispatcher_action_args_1(friends):
# """
# Tests computation of args based on fields, data and mode (=field).
# """
# dispatcher, scheduler, action = friends()
# action2 = FleaCount(flea_count=100)
# dispatcher.add_action("foo", action)
# dispatcher.add_action("flea", action2)
# dispatcher.add_scheduler("bar", scheduler)
# schedule_action = ScheduleAction(
# scheduler_name="bar", action_name="foo", mode=DispActionMode.FIELD
# )
# args = schedule_action.compute_args(
# args={"scheduler_name": "bar", "action_name": "foo"},
# data={"action_name": "flea"},
# )
# assert args["scheduler_name"] == "bar"
# assert args["action_name"] == "foo"
# def test_dispatcher_action_args_2(friends):
# """
# Tests computation of args based on fields, data and mode (=data).
# """
# dispatcher, scheduler, action = friends()
# action2 = FleaCount(flea_count=100)
# dispatcher.add_action("foo", action)
# dispatcher.add_action("flea", action2)
# dispatcher.add_scheduler("bar", scheduler)
# schedule_action = ScheduleAction(
# scheduler_name="bar", action_name="foo", mode=DispActionMode.DATA
# )
# args = schedule_action.compute_args(
# args={"scheduler_name": "bar", "action_name": "foo"},
# data={"action_name": "flea"},
# )
# assert args["scheduler_name"] == "bar"
# assert args["action_name"] == "flea"
# def test_dispatcher_action_args_3(friends):
# """
# Tests computation of args based on fields, data and mode (=field).
# """
# dispatcher, scheduler, action = friends()
# action2 = FleaCount(flea_count=100)
# dispatcher.add_action("foo", action)
# dispatcher.add_action("flea", action2)
# dispatcher.add_scheduler("bar", scheduler)
# schedule_action = ScheduleAction(
# scheduler_name="bar", action_name="foo", mode=DispActionMode.FIELD
# )
# args = schedule_action.compute_args(
# args={"scheduler_name": "bar", "action_name": "foo"},
# data={"result": {"action_name": "flea"}},
# )
# assert args["scheduler_name"] == "bar"
# assert args["action_name"] == "foo"
# def test_dispatcher_action_args_4(friends):
# """
# Tests computation of args based on fields, data and mode (=data).
# """
# dispatcher, scheduler, action = friends()
# action2 = FleaCount(flea_count=100)
# dispatcher.add_action("foo", action)
# dispatcher.add_action("flea", action2)
# dispatcher.add_scheduler("bar", scheduler)
# schedule_action = ScheduleAction(
# scheduler_name="bar", action_name="foo", mode=DispActionMode.DATA
# )
# args = schedule_action.compute_args(
# args={"scheduler_name": "bar", "action_name": "foo"},
# data={"result": {"action_name": "flea"}},
# )
# assert args["scheduler_name"] == "bar"
# assert args["action_name"] == "flea"
def test_schedule_action_action_data_1(friends):
    """
    Tests Dispatcher and Timed objects running a scheduled action.
    """
    dispatcher, scheduler, action = friends()
    action2 = FleaCount(flea_count=100)
    dispatcher.add_action("foo", action)
    dispatcher.add_action("flea", action2)
    dispatcher.add_scheduler("bar", scheduler)
    # explicit action_name="foo" takes precedence over the "flea" entry
    # supplied in the Rez fields below
    schedule_action = ScheduleAction(scheduler_name="bar", action_name="foo")
    schedule_action.execute(rez=Rez(flds={"action_name": "flea"}))
    assert dispatcher.get_scheduled_action_count() == 1
    dispatcher.run_jobs()
    time.sleep(pause)
    dispatcher.stop_jobs()
    dispatcher.clear_jobs()
    # "foo" (action) ran; "flea" (action2) kept its initial count of 100
    assert action.flea_count > 1
    assert action2.flea_count == 100
def test_schedule_action_action_data_2(friends):
    """
    Tests Dispatcher and Timed objects running a scheduled action.
    """
    dispatcher, scheduler, action = friends()
    action2 = FleaCount(flea_count=100)
    dispatcher.add_action("foo", action)
    dispatcher.add_action("flea", action2)
    dispatcher.add_scheduler("bar", scheduler)
    # no action_name on the ScheduleAction, so the "action_name" in the
    # incoming Rez fields is used instead
    schedule_action = ScheduleAction(scheduler_name="bar")
    schedule_action.execute(rez=Rez(flds={"action_name": "flea"}))
    assert dispatcher.get_scheduled_action_count() == 1
    dispatcher.run_jobs()
    time.sleep(pause)
    dispatcher.stop_jobs()
    dispatcher.clear_jobs()
    # "flea" (action2) ran; "foo" (action) never did
    assert action.flea_count == 0
    assert action2.flea_count > 100
def test_unschedule_scheduler(friends):
    """
    Tests unscheduling a scheduler.
    """
    dispatcher, scheduler, action = friends()
    assert dispatcher.job_count() == 0
    dispatcher.add_action("foo", action)
    dispatcher.add_scheduler("bar", scheduler)
    dispatcher.schedule_action("bar", "foo")
    assert dispatcher.job_count() == 1
    # unscheduling drops the job and its scheduled-action bookkeeping ...
    dispatcher.unschedule_scheduler("bar")
    assert dispatcher.job_count() == 0
    assert dispatcher.get_scheduled_action_count() == dispatcher.job_count()
    # make sure that bar and foo remain
    assert dispatcher.get_scheduler("bar")
    assert dispatcher.get_action("foo")
def test_unschedule_all(friends):
    """
    Tests unscheduling all schedulers.
    """
    dispatcher, scheduler, action = friends()
    assert dispatcher.job_count() == 0
    dispatcher.add_action("foo", action)
    dispatcher.add_scheduler("bar", scheduler)
    dispatcher.schedule_action("bar", "foo")
    assert dispatcher.job_count() == 1
    assert dispatcher.get_scheduled_action_count() == 1
    # clear_jobs() drops the jobs but keeps the scheduled-action records
    dispatcher.clear_jobs()
    assert dispatcher.job_count() == 0
    dispatcher.add_action("foo2", action)
    dispatcher.schedule_action("bar", "foo2")
    assert dispatcher.get_scheduled_action_count() == 2
    # unschedule_all_schedulers() clears jobs AND scheduled actions
    dispatcher.unschedule_all_schedulers()
    assert dispatcher.job_count() == 0
    assert dispatcher.get_scheduled_action_count() == 0
def test_reschedule_all(friends):
    """
    Tests rescheduling all schedulers: cleared jobs are rebuilt from the
    surviving scheduled-action records.
    """
    dispatcher, scheduler, action = friends()
    assert dispatcher.job_count() == 0
    dispatcher.add_action("foo", action)
    dispatcher.add_scheduler("bar", scheduler)
    dispatcher.schedule_action("bar", "foo")
    assert dispatcher.job_count() == 1
    assert dispatcher.get_scheduled_action_count() == 1
    # clearing jobs leaves the scheduled-action records in place
    dispatcher.clear_jobs()
    assert dispatcher.job_count() == 0
    dispatcher.add_action("foo2", action)
    dispatcher.schedule_action("bar", "foo2")
    assert dispatcher.get_scheduled_action_count() == 2
    dispatcher.reschedule_all_schedulers()
    # one job per scheduler ("bar"), two scheduled actions on it
    assert dispatcher.job_count() == 1
    assert dispatcher.get_scheduled_action_count() == 2
def test_clear_dispatcher(friends):
    """
    Tests clearing a dispatcher.
    """
    dispatcher, scheduler, action = friends()
    assert dispatcher.job_count() == 0
    dispatcher.add_action("foo", action)
    dispatcher.add_scheduler("bar", scheduler)
    dispatcher.schedule_action("bar", "foo")
    assert dispatcher.job_count() == 1
    # clear_all() removes jobs, scheduled actions, AND registrations
    dispatcher.clear_all()
    assert dispatcher.job_count() == 0
    assert dispatcher.get_scheduled_action_count() == dispatcher.job_count()
    # make sure that bar and foo are Gone
    assert dispatcher.get_scheduler("bar") is None
    assert dispatcher.get_action("foo") is None
def test_scheduled_action_count(friends):
    """
    Tests scheduled action count
    """
    dispatcher, scheduler, action = friends()
    dispatcher.add_action("foo", action)
    dispatcher.add_scheduler("bar", scheduler)
    dispatcher.schedule_action(action_name="foo", scheduler_name="bar")
    # one scheduling produces one scheduled-action record and one job
    assert dispatcher.get_scheduled_action_count() == 1
    assert dispatcher.job_count() == 1
def test_jobs_are_running(friends):
    """
    Verifies jobs_are_running() reports True once run_jobs() has been called.
    """
    dispatcher, scheduler, action = friends()
    try:
        dispatcher.run_jobs()
        assert dispatcher.jobs_are_running()
    finally:
        # best-effort cleanup; stop_jobs() may raise if the jobs never
        # started, and that must not mask the test outcome.  A bare
        # `except:` would also swallow SystemExit/KeyboardInterrupt, so
        # catch Exception explicitly.
        try:
            dispatcher.stop_jobs()
        except Exception:
            pass
def test_jobs_are_not_running(friends):
    """
    Verifies jobs_are_running() flips back to False after stop_jobs().
    """
    dispatcher, scheduler, action = friends()
    try:
        dispatcher.run_jobs()
        assert dispatcher.jobs_are_running()
        dispatcher.stop_jobs()
        assert not dispatcher.jobs_are_running()
    finally:
        # best-effort cleanup for the already-stopped case; catch
        # Exception rather than a bare `except:` so SystemExit and
        # KeyboardInterrupt still propagate.
        try:
            dispatcher.stop_jobs()
        except Exception:
            pass
def test_replace_dispatcher(friends):
    """
    Tests replacing a dispatcher
    """
    # original
    dispatcher, scheduler, action = friends()
    saved_dir = dispatcher.get_saved_dir()
    dispatcher.add_action("foo", action)
    dispatcher.add_scheduler("bar", scheduler)
    dispatcher.schedule_action(action_name="foo", scheduler_name="bar")
    # replacement
    replacement = Dispatcher()  # no saved_dir
    replacement.add_action("flea", action)
    replacement.add_scheduler("bath", scheduler)
    replacement.schedule_action(action_name="flea", scheduler_name="bath")
    # do the business
    dispatcher.replace_all(replacement)
    # is everyone okay?
    # original registrations are gone, the replacement's are present
    assert not dispatcher.get_action("foo")
    assert not dispatcher.get_scheduler("bar")
    assert dispatcher.get_action("flea")
    assert dispatcher.get_scheduler("bath")
    assert {"bath"} == set(k for k in dispatcher.get_schedulers())
    assert {"flea"} == set(k for k in dispatcher.get_actions())
    # the scheduled-action records were replaced too
    assert {"bath"} == set(
        k for k in dispatcher.get_scheduled_actions().scheduler_names()
    )
    assert {"flea"} == dispatcher.get_scheduled_actions().actions("bath")
def test_load_dispatcher(friends):
    """
    Tests loading a dispatcher
    """
    dispatcher, scheduler, action = friends()
    saved_dir = dispatcher.get_saved_dir()
    assert saved_dir is not None
    dispatcher.add_action("foo", action)
    dispatcher.add_scheduler("bar", scheduler)
    dispatcher.schedule_action(action_name="foo", scheduler_name="bar")
    # load_current() reloads the dispatcher persisted under saved_dir
    dispatcher2 = dispatcher.load_current()
    assert dispatcher2 is not None, f"saved_dir({saved_dir})"
    # the reloaded dispatcher mirrors the original's registrations
    assert set(k for k in dispatcher.get_actions()) == set(
        k for k in dispatcher2.get_actions()
    )
    assert set(k for k in dispatcher.get_schedulers()) == set(
        k for k in dispatcher2.get_schedulers()
    )
    assert set(k for k in dispatcher.get_scheduled_actions().action_names()) == set(
        k for k in dispatcher2.get_scheduled_actions().action_names()
    )
def test_saved_dir_1(tmp_path):
    """set_saved_dir() round-trips through get_saved_dir()."""
    target = str(tmp_path)
    dispatcher = Dispatcher()
    dispatcher.set_saved_dir(saved_dir=target)
    assert dispatcher.get_saved_dir() == target
def test_saved_dir_2(tmp_path):
    """The saved_dir constructor argument round-trips through get_saved_dir()."""
    target = str(tmp_path)
    dispatcher = Dispatcher(saved_dir=target)
    assert dispatcher.get_saved_dir() == target
def test_defer_action(friends):
    """
    Want to observe the scheduling move from deferred state to ready state.
    """
    dispatcher, scheduler, action = friends()
    dispatcher.add_action("foo", action)
    dispatcher.add_scheduler("bar", scheduler)
    assert 0 == dispatcher.get_deferred_action_count()
    assert 0 == dispatcher.get_scheduled_action_count()
    # wait_until=now: eligible for promotion on the next out-of-band sweep
    dispatcher.defer_action(
        scheduler_name="bar", action_name="foo", wait_until=Now.dt()
    )
    # deferred state -- can run jobs and actions will _not_ be executed
    assert 1 == dispatcher.get_deferred_action_count()
    assert 0 == dispatcher.get_scheduled_action_count()
    time.sleep(6) # the out-of-band job runs every five seconds
    # ready state -- can run jobs and actions will be executed
    assert 0 == dispatcher.get_deferred_action_count()
    assert 1 == dispatcher.get_scheduled_action_count()
def test_defer_action_action(friends):
    """
    Want to observe the scheduling move from deferred state to ready state.
    """
    dispatcher, scheduler, action = friends()
    dispatcher.add_action("foo", action)
    dispatcher.add_scheduler("bar", scheduler)
    assert 0 == dispatcher.get_deferred_action_count()
    assert 0 == dispatcher.get_scheduled_action_count()
    # NOTE(review): DeferAction.execute() evidently routes to this same
    # dispatcher -- the deferred count below goes to 1
    defer_action = DeferAction(
        scheduler_name="bar",
        action_name="foo",
        wait_until=DateTime(dt=Now.dt()),
    )
    defer_action.execute()
    # deferred state -- can run jobs and actions will _not_ be executed
    assert 1 == dispatcher.get_deferred_action_count()
    assert 0 == dispatcher.get_scheduled_action_count()
    time.sleep(6) # the out-of-band job runs every five seconds
    # ready state -- can run jobs and actions will be executed
    assert 0 == dispatcher.get_deferred_action_count()
    assert 1 == dispatcher.get_scheduled_action_count()
def test_expire_action(friends):
    """
    Want to observe a scheduled action being removed once its expire_on
    time has passed.
    """
    dispatcher, scheduler, action = friends()
    dispatcher.add_action("foo", action)
    dispatcher.add_scheduler("bar", scheduler)
    assert 0 == dispatcher.get_expiring_action_count()
    assert 0 == dispatcher.get_scheduled_action_count()
    dispatcher.schedule_action(scheduler_name="bar", action_name="foo")
    assert 0 == dispatcher.get_expiring_action_count()
    assert 1 == dispatcher.get_scheduled_action_count()
    # mark the scheduled action to expire one second from now
    dispatcher.expire_action(
        scheduler_name="bar",
        action_name="foo",
        expire_on=Now.dt() + timedelta(seconds=1),
    )
    assert 1 == dispatcher.get_expiring_action_count()
    assert 1 == dispatcher.get_scheduled_action_count()
    time.sleep(6) # the out-of-band job runs every 2-5 seconds
    # expired: both the expiration record and the scheduled action are gone
    assert 0 == dispatcher.get_expiring_action_count()
    assert 0 == dispatcher.get_scheduled_action_count()
def test_expire_action_action(friends):
    """
    Want to observe a scheduled action being removed once its expire_on
    time has passed, driven via the ExpireAction action.
    """
    dispatcher, scheduler, action = friends()
    dispatcher.add_action("foo", action)
    dispatcher.add_scheduler("bar", scheduler)
    assert 0 == dispatcher.get_expiring_action_count()
    assert 0 == dispatcher.get_scheduled_action_count()
    dispatcher.schedule_action(scheduler_name="bar", action_name="foo")
    assert 0 == dispatcher.get_expiring_action_count()
    assert 1 == dispatcher.get_scheduled_action_count()
    # same as dispatcher.expire_action(), but via the ExpireAction action
    expire_action = ExpireAction(
        scheduler_name="bar",
        action_name="foo",
        expire_on=DateTime(dt=Now.dt() + timedelta(seconds=2)),
    )
    expire_action.execute()
    assert 1 == dispatcher.get_expiring_action_count()
    assert 1 == dispatcher.get_scheduled_action_count()
    time.sleep(6) # the out-of-band job runs every 2-5 seconds
    # expired: both the expiration record and the scheduled action are gone
    assert 0 == dispatcher.get_expiring_action_count()
    assert 0 == dispatcher.get_scheduled_action_count()
def test_immediately(friends):
    """
    Want to observe that action get executed immediately and that schedulers_actions
    is not impacted.
    """
    dispatcher, scheduler, action = friends()
    class TestAction(Action):
        # number of times execute() has run
        fleas: int = 0
        def execute(self, tag: Optional[str] = None, rez: Optional[Rez] = None):
            self.fleas += 1
            return self.action_result(result=self.fleas)
    test_action = TestAction()
    assert dispatcher.get_scheduled_action_count() == 0
    assert test_action.fleas == 0
    dispatcher.add_action("foo", test_action)
    dispatcher.add_scheduler("imm", Immediately())
    # Immediately runs the action at schedule time instead of queueing it,
    # so the scheduled-action count stays at zero while fleas goes to 1
    dispatcher.schedule_action(scheduler_name="imm", action_name="foo")
    assert dispatcher.get_scheduled_action_count() == 0
    assert test_action.fleas == 1
def test_program_1(friends):
    """
    Want to observe that a Program's actions are executed.
    """
    dispatcher, scheduler, action = friends()
    class TestAction1(Action):
        # number of times execute() has run
        fleas: int = 0
        def execute(self, tag: Optional[str] = None, rez: Optional[Rez] = None):
            self.fleas += 1
            return self.action_result()
    class TestAction2(Action):
        fleas: int = 0
        def execute(self, tag: Optional[str] = None, rez: Optional[Rez] = None):
            self.fleas += 1
            return self.action_result()
    class TestAction3(Action):
        fleas: int = 0
        def execute(self, tag: Optional[str] = None, rez: Optional[Rez] = None):
            self.fleas += 1
            return self.action_result()
    action1 = TestAction1()
    action2 = TestAction2()
    action3 = TestAction3()
    dispatcher.add_action("foo1", action1)
    dispatcher.add_action("foo2", action2)
    dispatcher.add_action("foo3", action3)
    dispatcher.add_scheduler("bar", scheduler)
    dispatcher.add_scheduler("immediately", Immediately())
    # program: prologue foo1, body foo2 on scheduler "bar", epilogue foo3
    program = PBEProgram().prologue("foo1").body_element("bar", "foo2").epilogue("foo3")
    dispatcher.add_program("baz", program)
    # the program's window: start in 1 second, stop 4 seconds after that
    start = Now().dt() + timedelta(seconds=1)
    stop = start + timedelta(seconds=4)
    dispatcher.run_jobs()
    dispatcher.schedule_program("baz", start, stop)
    # before start: prologue has not run yet
    assert action1.fleas == 0
    time.sleep(3)
    # inside the window: prologue ran exactly once
    assert action1.fleas == 1
    time.sleep(4)
    # the body fired repeatedly on the interval-1 scheduler
    assert action2.fleas >= 2
    time.sleep(2)
    # after stop: epilogue ran exactly once
    assert action3.fleas == 1
def test_unschedule_program(friends):
    """
    Want to observe that a Program's actions are not executed
    after being unscheduled prior to the deferral time.
    """
    dispatcher, scheduler, action = friends()
    class TestAction1(Action):
        # number of times execute() has run
        fleas: int = 0
        def execute(self, tag: Optional[str] = None, rez: Optional[Rez] = None):
            self.fleas += 1
            return self.action_result()
    class TestAction2(Action):
        fleas: int = 0
        def execute(self, tag: Optional[str] = None, rez: Optional[Rez] = None):
            self.fleas += 1
            return self.action_result()
    class TestAction3(Action):
        fleas: int = 0
        def execute(self, tag: Optional[str] = None, rez: Optional[Rez] = None):
            self.fleas += 1
            return self.action_result()
    action1 = TestAction1()
    action2 = TestAction2()
    action3 = TestAction3()
    dispatcher.add_action("foo1", action1)
    dispatcher.add_action("foo2", action2)
    dispatcher.add_action("foo3", action3)
    dispatcher.add_scheduler("bar", scheduler)
    dispatcher.add_scheduler("immediately", Immediately())
    program = PBEProgram().prologue("foo1").body_element("bar", "foo2").epilogue("foo3")
    dispatcher.add_program("baz", program)
    # defer the program: it would start 4 seconds from now
    start = Now().dt() + timedelta(seconds=4)
    stop = start + timedelta(seconds=4)
    dispatcher.run_jobs()
    dispatcher.schedule_program("baz", start, stop)
    assert dispatcher.get_deferred_program_count() == 1
    assert dispatcher.get_scheduled_action_count() == 0
    # unschedule while still deferred; the program itself stays registered
    dispatcher.unschedule_program("baz")
    assert len(dispatcher.get_programs()) == 1
    assert dispatcher.get_deferred_program_count() == 0
    # none of the program's actions ever run, even past the original window
    assert action1.fleas == 0
    time.sleep(3)
    assert action1.fleas == 0
    time.sleep(4)
    assert action2.fleas == 0
    time.sleep(2)
    assert action3.fleas == 0
def test_unschedule_program_action(friends):
    """
    Want to observe that a Program's actions are not executed
    after being unscheduled prior to the deferral time.
    """
    dispatcher, scheduler, action = friends()
    class TestAction1(Action):
        # number of times execute() has run
        fleas: int = 0
        def execute(self, tag: Optional[str] = None, rez: Optional[Rez] = None):
            self.fleas += 1
            return self.action_result()
    class TestAction2(Action):
        fleas: int = 0
        def execute(self, tag: Optional[str] = None, rez: Optional[Rez] = None):
            self.fleas += 1
            return self.action_result()
    class TestAction3(Action):
        fleas: int = 0
        def execute(self, tag: Optional[str] = None, rez: Optional[Rez] = None):
            self.fleas += 1
            return self.action_result()
    action1 = TestAction1()
    action2 = TestAction2()
    action3 = TestAction3()
    dispatcher.add_action("foo1", action1)
    dispatcher.add_action("foo2", action2)
    dispatcher.add_action("foo3", action3)
    dispatcher.add_scheduler("bar", scheduler)
    dispatcher.add_scheduler("immediately", Immediately())
    program = PBEProgram().prologue("foo1").body_element("bar", "foo2").epilogue("foo3")
    dispatcher.add_program("baz", program)
    # defer the program: it would start 4 seconds from now
    start = Now().dt() + timedelta(seconds=4)
    stop = start + timedelta(seconds=4)
    dispatcher.run_jobs()
    dispatcher.schedule_program("baz", start, stop)
    assert dispatcher.get_deferred_program_count() == 1
    assert dispatcher.get_scheduled_action_count() == 0
    # same as dispatcher.unschedule_program(), but via the action form
    unschedule_program = UnscheduleProgram(program_name="baz")
    unschedule_program.execute()
    time.sleep(1)
    # the program itself stays registered; only its deferral is removed
    assert len(dispatcher.get_programs()) == 1
    assert dispatcher.get_deferred_program_count() == 0
    # none of the program's actions ever run, even past the original window
    assert action1.fleas == 0
    time.sleep(3)
    assert action1.fleas == 0
    time.sleep(4)
    assert action2.fleas == 0
    time.sleep(2)
    assert action3.fleas == 0
def test_delete_program(friends):
    """
    Want to observe that a Program's actions are not executed
    after being deleted prior to the deferral time.
    """
    dispatcher, scheduler, action = friends()
    class TestAction1(Action):
        # number of times execute() has run
        fleas: int = 0
        def execute(self, tag: Optional[str] = None, rez: Optional[Rez] = None):
            self.fleas += 1
            return self.action_result()
    class TestAction2(Action):
        fleas: int = 0
        def execute(self, tag: Optional[str] = None, rez: Optional[Rez] = None):
            self.fleas += 1
            return self.action_result()
    class TestAction3(Action):
        fleas: int = 0
        def execute(self, tag: Optional[str] = None, rez: Optional[Rez] = None):
            self.fleas += 1
            return self.action_result()
    action1 = TestAction1()
    action2 = TestAction2()
    action3 = TestAction3()
    dispatcher.add_action("foo1", action1)
    dispatcher.add_action("foo2", action2)
    dispatcher.add_action("foo3", action3)
    dispatcher.add_scheduler("bar", scheduler)
    dispatcher.add_scheduler("immediately", Immediately())
    program = PBEProgram().prologue("foo1").body_element("bar", "foo2").epilogue("foo3")
    dispatcher.add_program("baz", program)
    # defer the program: it would start 4 seconds from now
    start = Now().dt() + timedelta(seconds=4)
    stop = start + timedelta(seconds=4)
    assert len(dispatcher.get_programs()) == 1
    dispatcher.run_jobs()
    dispatcher.schedule_program("baz", start, stop)
    assert dispatcher.get_deferred_program_count() == 1
    assert dispatcher.get_scheduled_action_count() == 0
    # unlike unschedule, delete removes the program registration as well
    dispatcher.delete_program("baz")
    assert len(dispatcher.get_programs()) == 0
    assert dispatcher.get_deferred_program_count() == 0
    # none of the program's actions ever run, even past the original window
    assert action1.fleas == 0
    time.sleep(3)
    assert action1.fleas == 0
    time.sleep(4)
    assert action2.fleas == 0
    time.sleep(2)
    assert action3.fleas == 0
def test_execute_with_rez(friends):
    """
    Want to see execute work with supplied dictionary.
    """
    dispatcher, scheduler, action = friends()
    # the supplied Rez result rides along unchanged in the action result
    outcome = action.execute(rez=Rez(result={"fleacount": "infinite"}))
    assert outcome.rez.result == {"fleacount": "infinite"}
def test_terminate_scheduler(friends):
    """
    Want to terminate scheduler using TerminateScheduler action.
    """
    dispatcher, scheduler, action = friends()
    action2 = Terminate()
    dispatcher.add_action("foo", action)
    dispatcher.add_action("terminate", action2)
    dispatcher.add_scheduler("bar", scheduler)
    dispatcher.run_jobs()
    dispatcher.schedule_action("bar", "foo")
    time.sleep(2)
    # "foo" has been firing on the interval-1 scheduler
    assert action.flea_count >= 1
    assert dispatcher.get_scheduled_action_count() == 1
    # schedule Terminate on the same scheduler; once it runs, everything
    # scheduled on "bar" is unscheduled (count drops from 2 to 0 below)
    dispatcher.schedule_action("bar", "terminate")
    assert dispatcher.get_scheduled_action_count() == 2
    time.sleep(2)
    assert dispatcher.get_scheduled_action_count() == 0
def test_terminate_scheduler_and(friends):
    """
    Want to terminate scheduler using TerminateScheduler action.
    """
    dispatcher, scheduler, action = friends()
    action2 = FleaCount(flea_count=100)
    # composite runs in list order: action, then Terminate, then action2
    actions = [action, Terminate(), action2]
    action3 = (
        UntilFailure()
    ) # add actions on next line to use them directly below; pydantic deep copies field values
    action3.actions = actions
    dispatcher.add_action("foo", action3)
    dispatcher.add_scheduler("bar", scheduler)
    dispatcher.run_jobs()
    dispatcher.schedule_action("bar", "foo")
    time.sleep(3)
    # Terminate stopped the composite after "action" ran once; "action2",
    # placed after the Terminate, never ran -- its count stayed at 100
    assert action.flea_count == 1
    assert action2.flea_count == 100
def test_if_else_1(friends):
    """
    Want to see the IfElse action take the if-branch when its test action
    succeeds.
    """
    dispatcher, scheduler, action = friends()
    action2 = FleaCount(flea_count=100)
    immediately = Immediately()
    dispatcher.add_action("foo1", action)
    dispatcher.add_action("foo2", action2)
    dispatcher.add_scheduler("immediately", immediately)
    # NOTE(review): RaiseCmp(value=1) compared against the upstream
    # Result(value=2) evidently succeeds here, selecting if_action
    if_else = IfElse(
        test_action=RaiseCmp(value=1),
        if_action=ScheduleAction(scheduler_name="immediately", action_name="foo1"),
        else_action=ScheduleAction(scheduler_name="immediately", action_name="foo2"),
    )
    schedule_action = All(actions=[Result(value=2), if_else])
    dispatcher.add_action("schedule_action", schedule_action)
    dispatcher.schedule_action("immediately", "schedule_action")
    # if-branch taken: "foo1" ran once, "foo2" untouched
    assert action.flea_count == 1
    assert action2.flea_count == 100
def test_if_else_2(friends):
    """
    Want to see the IfElse action take the else-branch when its test action
    fails.
    """
    dispatcher, scheduler, action = friends()
    action2 = FleaCount(flea_count=100)
    immediately = Immediately()
    dispatcher.add_action("foo1", action)
    dispatcher.add_action("foo2", action2)
    dispatcher.add_scheduler("immediately", immediately)
    # NOTE(review): RaiseCmp(value=2) compared against the upstream
    # Result(value=2) evidently fails here, selecting else_action
    if_else = IfElse(
        test_action=RaiseCmp(value=2),
        if_action=ScheduleAction(scheduler_name="immediately", action_name="foo1"),
        else_action=ScheduleAction(scheduler_name="immediately", action_name="foo2"),
    )
    schedule_action = All(actions=[Result(value=2), if_else])
    dispatcher.add_action("schedule_action", schedule_action)
    dispatcher.schedule_action("immediately", "schedule_action")
    # else-branch taken: "foo2" ran once (100 -> 101), "foo1" untouched
    assert action.flea_count == 0
    assert action2.flea_count == 101
# ====================================
class FleaCount(Action):
    """Test action that counts how many times it has been executed."""

    # number of times execute() has run; tests seed it via the constructor
    flea_count: int = 0
    # optional payload; given an explicit default so the model is
    # constructible without it (pydantic v2 treats a bare Optional field
    # as required)
    data: Optional[Dict[Any, Any]] = None

    def execute(self, tag: Optional[str] = None, rez: Optional[Rez] = None):
        """Increment the counter and report it, forwarding any incoming fields."""
        self.flea_count += 1
        return self.action_result(
            result=self.flea_count, rez=rez, flds=rez.flds if rez else {}
        )
@pytest.fixture
def friends(tmp_path, host, port):
    """Factory fixture yielding fresh (dispatcher, scheduler, action) tuples."""
    SystemInfo.init(host, port)
    def make():
        # each call builds a brand-new trio so tests never share state
        disp = Dispatcher(saved_dir=str(tmp_path))
        disp.set_timed(Timed())
        disp.initialize()
        counter = FleaCount()
        timely = Timely(interval=1)
        return disp, timely, counter
    return make
@pytest.fixture
def servers():
    """Factory fixture yielding a pair of freshly tagged Server instances."""
    def make():
        first = Server(host="localhost", port=8000)
        for key, value in (
            ("foo", "bar"),
            ("foo", "baz"),
            ("fleas", "standdown"),
            ("krimp", "kramp"),
        ):
            first.add_key_tag(key, value)
        second = Server(host="localhost", port=8000)
        for key, value in (
            ("foo", "bar"),
            ("fleas", "riseup"),
            ("slip", "slide"),
        ):
            second.add_key_tag(key, value)
        return (first, second)
    return make
| 30.939189
| 95
| 0.679063
| 3,837
| 32,053
| 5.47068
| 0.062549
| 0.062551
| 0.044352
| 0.059454
| 0.852556
| 0.829308
| 0.817684
| 0.80444
| 0.784527
| 0.769139
| 0
| 0.015133
| 0.202134
| 32,053
| 1,035
| 96
| 30.969082
| 0.805662
| 0.159236
| 0
| 0.704334
| 0
| 0
| 0.037791
| 0.000831
| 0
| 0
| 0
| 0
| 0.210526
| 1
| 0.085139
| false
| 0.003096
| 0.02322
| 0
| 0.181115
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
2447e0552af3107be48ac3a9066299a7b4696ec1
| 188
|
py
|
Python
|
backend/database/__init__.py
|
Stampeder525/police-data-trust-1
|
5fdc9d58ed51e23f1b23c626c5b97a58c3da57d8
|
[
"MIT"
] | null | null | null |
backend/database/__init__.py
|
Stampeder525/police-data-trust-1
|
5fdc9d58ed51e23f1b23c626c5b97a58c3da57d8
|
[
"MIT"
] | null | null | null |
backend/database/__init__.py
|
Stampeder525/police-data-trust-1
|
5fdc9d58ed51e23f1b23c626c5b97a58c3da57d8
|
[
"MIT"
] | null | null | null |
# flake8: noqa: F401
from .core import db
from .core import db_cli
from .core import execute_query
from .models.incidents import Incidents, IncidentSchema
from .models.users import Users
| 23.5
| 55
| 0.803191
| 28
| 188
| 5.321429
| 0.5
| 0.161074
| 0.281879
| 0.214765
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.024691
| 0.138298
| 188
| 7
| 56
| 26.857143
| 0.895062
| 0.095745
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
2465cc873ed6feb0ba74d0cd8e91b52c64dfa6ff
| 130
|
py
|
Python
|
bolt_srl/__init__.py
|
Michael-Dyq/SRL-English
|
2187b692515fb00c8f78428fa007266129d1a6b8
|
[
"MIT"
] | null | null | null |
bolt_srl/__init__.py
|
Michael-Dyq/SRL-English
|
2187b692515fb00c8f78428fa007266129d1a6b8
|
[
"MIT"
] | null | null | null |
bolt_srl/__init__.py
|
Michael-Dyq/SRL-English
|
2187b692515fb00c8f78428fa007266129d1a6b8
|
[
"MIT"
] | null | null | null |
from bolt_srl.model import BoltSRLModel
from bolt_srl.predictor import BoltSRLPredictor
from bolt_srl.reader import BoltSRLReader
| 32.5
| 47
| 0.884615
| 18
| 130
| 6.222222
| 0.555556
| 0.214286
| 0.294643
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.092308
| 130
| 3
| 48
| 43.333333
| 0.949153
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
79e321fad4040ad4eb4cb794f188196f9be074b1
| 94
|
py
|
Python
|
selection/sampling/randomized/api.py
|
Xiaoying-Tian/selective-inference
|
a20c5ad3f527beb709d5b8d7301016640738b092
|
[
"BSD-3-Clause"
] | null | null | null |
selection/sampling/randomized/api.py
|
Xiaoying-Tian/selective-inference
|
a20c5ad3f527beb709d5b8d7301016640738b092
|
[
"BSD-3-Clause"
] | null | null | null |
selection/sampling/randomized/api.py
|
Xiaoying-Tian/selective-inference
|
a20c5ad3f527beb709d5b8d7301016640738b092
|
[
"BSD-3-Clause"
] | null | null | null |
from .norms.api import *
from .losses.api import *
from .sampler import selective_sampler_MH
| 18.8
| 41
| 0.787234
| 14
| 94
| 5.142857
| 0.571429
| 0.25
| 0.361111
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.138298
| 94
| 4
| 42
| 23.5
| 0.888889
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
0306f469c225a6dd8f6632d28c4cbdab089a9d7c
| 107,563
|
py
|
Python
|
platform/core/tests/test_experiments/test_views.py
|
hackerwins/polyaxon
|
ff56a098283ca872abfbaae6ba8abba479ffa394
|
[
"Apache-2.0"
] | null | null | null |
platform/core/tests/test_experiments/test_views.py
|
hackerwins/polyaxon
|
ff56a098283ca872abfbaae6ba8abba479ffa394
|
[
"Apache-2.0"
] | null | null | null |
platform/core/tests/test_experiments/test_views.py
|
hackerwins/polyaxon
|
ff56a098283ca872abfbaae6ba8abba479ffa394
|
[
"Apache-2.0"
] | null | null | null |
# pylint:disable=too-many-lines
import os
import time
from faker import Faker
from unittest.mock import patch
import pytest
from hestia.internal_services import InternalServices
from rest_framework import status
import conf
import stores
from api.experiments import queries
from api.experiments.serializers import (
BookmarkedExperimentSerializer,
ExperimentChartViewSerializer,
ExperimentDeclarationsSerializer,
ExperimentDetailSerializer,
ExperimentJobDetailSerializer,
ExperimentJobSerializer,
ExperimentJobStatusSerializer,
ExperimentLastMetricSerializer,
ExperimentMetricSerializer,
ExperimentSerializer,
ExperimentStatusSerializer
)
from api.utils.views.protected import ProtectedView
from constants.urls import API_V1, WS_V1
from db.models.bookmarks import Bookmark
from db.models.experiment_groups import GroupTypes
from db.models.experiment_jobs import ExperimentJob, ExperimentJobStatus
from db.models.experiments import (
Experiment,
ExperimentChartView,
ExperimentMetric,
ExperimentStatus
)
from db.redis.ephemeral_tokens import RedisEphemeralTokens
from db.redis.group_check import GroupChecks
from db.redis.heartbeat import RedisHeartBeat
from db.redis.ttl import RedisTTL
from factories.factory_build_jobs import BuildJobFactory
from factories.factory_experiment_groups import ExperimentGroupFactory
from factories.factory_experiments import (
ExperimentChartViewFactory,
ExperimentFactory,
ExperimentJobFactory,
ExperimentJobStatusFactory,
ExperimentMetricFactory,
ExperimentStatusFactory
)
from factories.factory_jobs import JobFactory
from factories.factory_projects import ProjectFactory
from factories.fixtures import (
exec_experiment_outputs_refs_parsed_content,
exec_experiment_resources_parsed_content,
exec_experiment_spec_parsed_content,
exec_experiment_spec_parsed_regression_artifact_refs,
)
from lifecycles.experiments import ExperimentLifeCycle
from lifecycles.jobs import JobLifeCycle
from options.registry.archives import ARCHIVES_ROOT_ARTIFACTS
from options.registry.scheduler import SCHEDULER_GLOBAL_COUNTDOWN
from schemas import ExperimentSpecification
from tests.base.clients import EphemeralClient
from tests.base.views import BaseEntityCodeReferenceViewTest, BaseFilesViewTest, BaseViewTest
@pytest.mark.experiments_mark
class TestProjectExperimentListViewV1(BaseViewTest):
    """Tests for listing and creating experiments under a project.

    Covers plain listing, bookmark flags, pagination, ordering,
    query-string filtering, and the various POST/create flows.
    """
    serializer_class = BookmarkedExperimentSerializer
    model_class = Experiment
    factory_class = ExperimentFactory
    num_objects = 3
    HAS_AUTH = True
    DISABLE_EXECUTOR = False

    def setUp(self):
        super().setUp()
        self.project = ProjectFactory(user=self.auth_client.user)
        self.other_project = ProjectFactory()
        self.url = '/{}/{}/{}/experiments/'.format(API_V1,
                                                   self.project.user.username,
                                                   self.project.name)
        self.other_url = '/{}/{}/{}/experiments/'.format(API_V1,
                                                         self.other_project.user.username,
                                                         self.other_project.name)
        self.objects = [self.factory_class(project=self.project) for _ in range(self.num_objects)]
        # one object that does not belong to the filter
        self.factory_class()
        self.queryset = self.model_class.objects.filter(project=self.project)
        self.other_object = self.factory_class(project=self.other_project)
        self.queryset = self.queryset.order_by('-updated_at')

    def test_get(self):
        """Plain listing plus the independent/group filter combinations."""
        resp = self.auth_client.get(self.url)
        assert resp.status_code == status.HTTP_200_OK
        assert resp.data['next'] is None
        assert resp.data['count'] == len(self.objects)
        data = resp.data['results']
        assert len(data) == self.queryset.count()
        assert data == self.serializer_class(self.queryset, many=True).data
        # Test other
        resp = self.auth_client.get(self.other_url)
        assert resp.status_code == status.HTTP_200_OK
        independent_count = self.queryset.count()
        # Create group to test independent filter
        with patch('scheduler.tasks.experiment_groups.'
                   'experiments_group_create.apply_async') as mock_fct:
            group = ExperimentGroupFactory(project=self.project)
            assert mock_fct.call_count == 1
        [self.factory_class(project=self.project, experiment_group=group) for _ in range(2)]  # noqa
        all_experiment_count = self.queryset.all().count()
        assert all_experiment_count == independent_count + group.experiments.count()
        # Getting all experiments
        resp = self.auth_client.get(self.url)
        assert resp.status_code == status.HTTP_200_OK
        assert resp.data['count'] == all_experiment_count
        # Getting only independent experiments
        resp = self.auth_client.get(self.url + '?independent=true')
        assert resp.status_code == status.HTTP_200_OK
        assert resp.data['count'] == independent_count
        # Through query
        resp = self.auth_client.get(self.url + '?query=independent:true')
        assert resp.status_code == status.HTTP_200_OK
        assert resp.data['count'] == independent_count
        # Getting only group experiments
        resp = self.auth_client.get(self.url + '?group={}'.format(group.id))
        assert resp.status_code == status.HTTP_200_OK
        assert resp.data['count'] == group.experiments.count()
        # Filtering for independent and group experiments should raise
        resp = self.auth_client.get(self.url + '?independent=true&group={}'.format(group.id))
        assert resp.status_code == status.HTTP_400_BAD_REQUEST

    def test_get_with_bookmarked_objects(self):
        """The 'bookmarked' flag reflects only the requesting user's bookmarks."""
        # Other user bookmark
        Bookmark.objects.create(
            user=self.other_project.user,
            content_object=self.objects[0])
        resp = self.auth_client.get(self.url)
        assert resp.status_code == status.HTTP_200_OK
        self.assertEqual(len([1 for obj in resp.data['results'] if obj['bookmarked'] is True]), 0)
        # Authenticated user bookmark
        Bookmark.objects.create(
            user=self.auth_client.user,
            content_object=self.objects[0])
        resp = self.auth_client.get(self.url)
        assert resp.status_code == status.HTTP_200_OK
        assert len([1 for obj in resp.data['results'] if obj['bookmarked'] is True]) == 1

    def test_pagination(self):
        """A limit below the object count yields two pages covering all objects."""
        limit = self.num_objects - 1
        resp = self.auth_client.get("{}?limit={}".format(self.url, limit))
        assert resp.status_code == status.HTTP_200_OK
        next_page = resp.data.get('next')
        assert next_page is not None
        assert resp.data['count'] == self.queryset.count()
        data = resp.data['results']
        assert len(data) == limit
        assert data == self.serializer_class(self.queryset[:limit], many=True).data
        resp = self.auth_client.get(next_page)
        assert resp.status_code == status.HTTP_200_OK
        assert resp.data['next'] is None
        data = resp.data['results']
        assert len(data) == 1
        assert data == self.serializer_class(self.queryset[limit:], many=True).data

    def test_get_order(self):
        """?sort overrides the default '-updated_at' ordering."""
        resp = self.auth_client.get(self.url + '?sort=created_at,updated_at')
        assert resp.status_code == status.HTTP_200_OK
        assert resp.data['next'] is None
        assert resp.data['count'] == len(self.objects)
        data = resp.data['results']
        assert len(data) == self.queryset.count()
        assert data != self.serializer_class(self.queryset, many=True).data
        assert data == self.serializer_class(self.queryset.order_by('created_at', 'updated_at'),
                                             many=True).data
        resp = self.auth_client.get(self.url + '?sort=-started_at')
        assert resp.status_code == status.HTTP_200_OK
        assert resp.data['next'] is None
        assert resp.data['count'] == len(self.objects)
        data = resp.data['results']
        assert len(data) == self.queryset.count()
        assert data == self.serializer_class(self.queryset.order_by('-started_at'),
                                             many=True).data

    def test_get_order_pagination(self):
        """Sorting and pagination compose correctly."""
        queryset = self.queryset.order_by('created_at', 'updated_at')
        limit = self.num_objects - 1
        resp = self.auth_client.get("{}?limit={}&{}".format(self.url,
                                                            limit,
                                                            'sort=created_at,updated_at'))
        assert resp.status_code == status.HTTP_200_OK
        next_page = resp.data.get('next')
        assert next_page is not None
        assert resp.data['count'] == queryset.count()
        data = resp.data['results']
        assert len(data) == limit
        assert data == self.serializer_class(queryset[:limit], many=True).data
        resp = self.auth_client.get(next_page)
        assert resp.status_code == status.HTTP_200_OK
        assert resp.data['next'] is None
        data = resp.data['results']
        assert len(data) == 1
        assert data == self.serializer_class(queryset[limit:], many=True).data

    @pytest.mark.filterwarnings('ignore::RuntimeWarning')
    def test_get_filter(self):  # pylint:disable=too-many-statements
        """Query-spec filtering on dates, status, name, params, metrics, tags."""
        # Wrong filter raises
        resp = self.auth_client.get(self.url + '?query=created_at<2010-01-01')
        assert resp.status_code == status.HTTP_400_BAD_REQUEST
        resp = self.auth_client.get(self.url + '?query=created_at:<2010-01-01')
        assert resp.status_code == status.HTTP_200_OK
        assert resp.data['next'] is None
        assert resp.data['count'] == 0
        resp = self.auth_client.get(self.url +
                                    '?query=created_at:>=2010-01-01,status:Finished')
        assert resp.status_code == status.HTTP_200_OK
        assert resp.data['next'] is None
        assert resp.data['count'] == 0
        resp = self.auth_client.get(self.url +
                                    '?query=created_at:>=2010-01-01,status:created|running')
        assert resp.status_code == status.HTTP_200_OK
        assert resp.data['next'] is None
        assert resp.data['count'] == len(self.objects)
        data = resp.data['results']
        assert len(data) == self.queryset.count()
        assert data == self.serializer_class(self.queryset, many=True).data
        # Id
        resp = self.auth_client.get(self.url +
                                    '?query=id:{}|{}'.format(self.objects[0].id,
                                                             self.objects[1].id))
        assert resp.status_code == status.HTTP_200_OK
        assert resp.data['next'] is None
        assert resp.data['count'] == 2
        # Name
        self.objects[0].name = 'exp_foo'
        self.objects[0].save()
        resp = self.auth_client.get(self.url +
                                    '?query=name:exp_foo')
        assert resp.status_code == status.HTTP_200_OK
        assert resp.data['next'] is None
        assert resp.data['count'] == 1
        # Name Regex
        resp = self.auth_client.get(self.url +
                                    '?query=name:%foo')
        assert resp.status_code == status.HTTP_200_OK
        assert resp.data['next'] is None
        assert resp.data['count'] == 1
        resp = self.auth_client.get(self.url +
                                    '?query=project.name:{}'.format(self.project.name))
        # Fixed: the status code of this response was previously never checked.
        assert resp.status_code == status.HTTP_200_OK
        assert resp.data['next'] is None
        assert resp.data['count'] == len(self.objects)
        # Set metrics
        optimizers = ['sgd', 'sgd', 'adam']
        tags = [['tag1'], ['tag1', 'tag2'], ['tag2']]
        losses = [0.1, 0.2, 0.9]
        for i, obj in enumerate(self.objects[:3]):
            ExperimentMetricFactory(experiment=obj, values={'loss': losses[i]})
            obj.params = {'optimizer': optimizers[i]}
            obj.tags = tags[i]
            obj.save()
        resp = self.auth_client.get(
            self.url + '?query=created_at:>=2010-01-01,'
                       'params.optimizer:sgd,'
                       'metric.loss:>=0.2,'
                       'tags:tag1')
        assert resp.status_code == status.HTTP_200_OK
        assert resp.data['next'] is None
        assert resp.data['count'] == 1
        # Test that metrics works as well
        resp = self.auth_client.get(
            self.url + '?query=created_at:>=2010-01-01,'
                       'params.optimizer:sgd,'
                       'metrics.loss:>=0.2,'
                       'tags:tag1')
        assert resp.status_code == status.HTTP_200_OK
        assert resp.data['next'] is None
        assert resp.data['count'] == 1
        resp = self.auth_client.get(
            self.url + '?query=created_at:>=2010-01-01,'
                       'params.optimizer:sgd|adam,'
                       'metric.loss:>=0.2,'
                       'tags:tag1|tag2')
        assert resp.status_code == status.HTTP_200_OK
        assert resp.data['next'] is None
        assert resp.data['count'] == 2
        # Order by metrics
        resp = self.auth_client.get(self.url + '?sort=-metric.loss')
        assert resp.status_code == status.HTTP_200_OK
        assert resp.data['next'] is None
        assert resp.data['count'] == len(self.objects)
        data = resp.data['results']
        assert len(data) == self.queryset.count()
        assert data == [self.serializer_class(obj).data for obj in reversed(self.objects)]
        resp = self.auth_client.get(self.url + '?sort=metric.loss')
        assert resp.status_code == status.HTTP_200_OK
        assert resp.data['next'] is None
        assert resp.data['count'] == len(self.objects)
        data = resp.data['results']
        assert len(data) == self.queryset.count()
        assert data == [self.serializer_class(obj).data for obj in self.objects]
        # Order by metrics (plural alias)
        resp = self.auth_client.get(self.url + '?sort=-metrics.loss')
        assert resp.status_code == status.HTTP_200_OK
        assert resp.data['next'] is None
        assert resp.data['count'] == len(self.objects)
        data = resp.data['results']
        assert len(data) == self.queryset.count()
        assert data == [self.serializer_class(obj).data for obj in reversed(self.objects)]
        resp = self.auth_client.get(self.url + '?sort=metrics.loss')
        assert resp.status_code == status.HTTP_200_OK
        assert resp.data['next'] is None
        assert resp.data['count'] == len(self.objects)
        data = resp.data['results']
        assert len(data) == self.queryset.count()
        assert data == [self.serializer_class(obj).data for obj in self.objects]

    def test_get_filter_pagination(self):
        """Filtering and pagination compose correctly."""
        limit = self.num_objects - 1
        # Fixed: the query fragment previously started with a stray '?'
        # ("...&?query=..."), which made the parameter name '?query' and
        # silently disabled the filter. It must be joined with '&' only.
        resp = self.auth_client.get("{}?limit={}&{}".format(
            self.url,
            limit,
            'query=created_at:>=2010-01-01,status:created|running'))
        assert resp.status_code == status.HTTP_200_OK
        next_page = resp.data.get('next')
        assert next_page is not None
        assert resp.data['count'] == self.queryset.count()
        data = resp.data['results']
        assert len(data) == limit
        assert data == self.serializer_class(self.queryset[:limit], many=True).data
        resp = self.auth_client.get(next_page)
        assert resp.status_code == status.HTTP_200_OK
        assert resp.data['next'] is None
        data = resp.data['results']
        assert len(data) == 1
        assert data == self.serializer_class(self.queryset[limit:], many=True).data

    def test_create_ttl(self):
        """A create without ttl uses the global countdown; bad ttl is rejected."""
        data = {'is_managed': False}
        resp = self.auth_client.post(self.url, data)
        assert resp.status_code == status.HTTP_201_CREATED
        xp = Experiment.objects.last()
        assert RedisTTL.get_for_experiment(xp.id) == conf.get(SCHEDULER_GLOBAL_COUNTDOWN)
        data = {'ttl': 10, 'is_managed': False}
        resp = self.auth_client.post(self.url, data)
        assert resp.status_code == status.HTTP_201_CREATED
        xp = Experiment.objects.last()
        assert RedisTTL.get_for_experiment(xp.id) == 10
        data = {'ttl': 'foo', 'is_managed': False}
        resp = self.auth_client.post(self.url, data)
        assert resp.status_code == status.HTTP_400_BAD_REQUEST

    def test_create_is_managed(self):
        """is_managed and run_env are persisted as posted."""
        data = {'is_managed': False}
        resp = self.auth_client.post(self.url, data)
        assert resp.status_code == status.HTTP_201_CREATED
        xp = Experiment.objects.last()
        assert xp.is_managed is False
        assert xp.run_env is None
        data = {'is_managed': False, 'run_env': {'foo': 'bar'}}
        resp = self.auth_client.post(self.url, data)
        assert resp.status_code == status.HTTP_201_CREATED
        xp = Experiment.objects.last()
        assert xp.is_managed is False
        assert xp.run_env == {'foo': 'bar'}

    def test_create_with_invalid_config(self):
        """A non-parsable content payload is rejected."""
        data = {'content': 'bar'}
        resp = self.auth_client.post(self.url, data)
        assert resp.status_code == status.HTTP_400_BAD_REQUEST

    def test_create(self):
        """A valid spec creates an experiment; other users' projects are denied."""
        resp = self.auth_client.post(self.url)
        assert resp.status_code == status.HTTP_400_BAD_REQUEST
        data = {'content': exec_experiment_spec_parsed_regression_artifact_refs.raw_data}
        resp = self.auth_client.post(self.url, data)
        assert resp.status_code == status.HTTP_201_CREATED
        assert self.queryset.count() == self.num_objects + 1
        # Test other
        resp = self.auth_client.post(self.other_url, data)
        assert resp.status_code in (status.HTTP_401_UNAUTHORIZED, status.HTTP_403_FORBIDDEN)

    def test_create_with_runner(self):
        """Creating a managed experiment schedules exactly one build task."""
        resp = self.auth_client.post(self.url)
        assert resp.status_code == status.HTTP_400_BAD_REQUEST
        data = {'content': exec_experiment_spec_parsed_content.raw_data}
        with patch('scheduler.tasks.experiments.experiments_build.apply_async') as mock_fct:
            resp = self.auth_client.post(self.url, data)
        assert resp.status_code == status.HTTP_201_CREATED
        assert mock_fct.call_count == 1
        assert self.queryset.count() == self.num_objects + 1
        # Test other
        resp = self.auth_client.post(self.other_url, data)
        assert resp.status_code in (status.HTTP_401_UNAUTHORIZED, status.HTTP_403_FORBIDDEN)

    def test_create_with_outputs_refs(self):
        """Outputs refs require the referenced job to exist first."""
        data = {'content': exec_experiment_outputs_refs_parsed_content.raw_data}
        resp = self.auth_client.post(self.url, data)
        # No job refs
        assert resp.status_code == status.HTTP_400_BAD_REQUEST
        # Creating the job should pass
        JobFactory(project=self.project, name='foo')  # noqa
        with patch('scheduler.tasks.experiments.experiments_build.apply_async') as mock_fct:
            resp = self.auth_client.post(self.url, data)
        assert resp.status_code == status.HTTP_201_CREATED
        assert mock_fct.call_count == 1
        assert self.queryset.count() == self.num_objects + 1
        experiment = self.queryset.order_by('created_at').last()
        assert experiment.outputs_refs is not None
        assert len(experiment.outputs_refs_jobs) == 1
        assert experiment.outputs_refs_experiments is None
        assert len(experiment.outputs_jobs) == 1
        assert experiment.outputs_experiments is None

    def test_create_without_config_passes_if_no_spec_validation_requested(self):
        """An unmanaged experiment may be created with no content at all."""
        data = {'is_managed': False}
        resp = self.auth_client.post(self.url, data)
        assert resp.status_code == status.HTTP_201_CREATED
        assert self.queryset.count() == self.num_objects + 1
        last_object = self.model_class.objects.last()
        assert last_object.project == self.project
        assert last_object.content is None

    def test_create_with_params(self):
        """Posted params are stored verbatim on the experiment."""
        data = {
            'is_managed': False,
            'params': {
                'lr': 0.1,
                'dropout': 0.5
            }
        }
        resp = self.auth_client.post(self.url, data)
        assert resp.status_code == status.HTTP_201_CREATED
        assert self.queryset.count() == self.num_objects + 1
        last_object = self.model_class.objects.last()
        assert last_object.project == self.project
        assert last_object.content is None
        assert last_object.params == {
            'lr': 0.1,
            'dropout': 0.5
        }

    def test_create_in_group(self):
        """Creation is rejected for a foreign group and accepted for an owned one."""
        # Create in wrong group raises
        group = ExperimentGroupFactory()
        assert group.experiments.count() == 0
        data = {
            'is_managed': False,
            'params': {
                'lr': 0.1,
                'dropout': 0.5
            },
            'experiment_group': group.id
        }
        resp = self.auth_client.post(self.url, data)
        assert resp.status_code == status.HTTP_400_BAD_REQUEST
        # Create in correct group passes
        group = ExperimentGroupFactory(project=self.project)
        assert group.experiments.count() == 0
        data = {
            'is_managed': False,
            'params': {
                'lr': 0.1,
                'dropout': 0.5
            },
            'experiment_group': group.id
        }
        resp = self.auth_client.post(self.url, data)
        assert resp.status_code == status.HTTP_201_CREATED
        assert group.experiments.count() == 1

    def test_create_in_selection(self):
        """Same as test_create_in_group but for selection-type groups."""
        # Create in wrong selection raises
        group = ExperimentGroupFactory(group_type=GroupTypes.SELECTION, content=None)
        assert group.experiments.count() == 0
        data = {
            'params': {
                'lr': 0.1,
                'dropout': 0.5
            },
            'experiment_group': group.id
        }
        resp = self.auth_client.post(self.url, data)
        assert resp.status_code == status.HTTP_400_BAD_REQUEST
        # Create in correct group passes
        group = ExperimentGroupFactory(project=self.project,
                                       group_type=GroupTypes.SELECTION,
                                       content=None)
        assert group.experiments.count() == 0
        assert group.selection_experiments.count() == 0
        data = {
            'is_managed': False,
            'params': {
                'lr': 0.1,
                'dropout': 0.5
            },
            'experiment_group': group.id
        }
        resp = self.auth_client.post(self.url, data)
        assert resp.status_code == status.HTTP_201_CREATED
        assert group.selection_experiments.count() == 1

    def test_create_with_build(self):
        """An existing build job can be attached at creation time."""
        build = BuildJobFactory()
        data = {'build_job': build.id, 'is_managed': False}
        resp = self.auth_client.post(self.url, data)
        assert resp.status_code == status.HTTP_201_CREATED
        last_object = self.model_class.objects.last()
        assert last_object.build_job == build
@pytest.mark.experiments_mark
class TestProjectExperimentLastMetricListViewV1(BaseViewTest):
    """Tests the metrics/params/all query modes of the experiments listing."""
    metrics_serializer_class = ExperimentLastMetricSerializer
    params_serializer_class = ExperimentDeclarationsSerializer
    model_class = Experiment
    factory_class = ExperimentFactory
    num_objects = 3
    HAS_AUTH = True

    def setUp(self):
        super().setUp()
        self.project = ProjectFactory(user=self.auth_client.user)
        self.url = '/%s/%s/%s/experiments/' % (API_V1,
                                               self.project.user.username,
                                               self.project.name)
        self.objects = []
        for index in range(self.num_objects):
            experiment = self.factory_class(
                project=self.project,
                params={'param1': index, 'param2': index * 2})
            self.objects.append(experiment)
        # Attach one metric to every experiment.
        for experiment in self.objects:
            ExperimentMetricFactory(experiment=experiment)
        self.queryset = self.model_class.objects.filter(
            project=self.project).order_by('-updated_at')

    def test_get_metrics(self):
        """?metrics=true returns the last-metric serialization."""
        response = self.auth_client.get(self.url + '?metrics=true')
        assert response.status_code == status.HTTP_200_OK
        assert response.data['count'] == self.queryset.count()
        expected = self.metrics_serializer_class(self.queryset, many=True).data
        assert response.data['results'] == expected

    def test_get_params(self):
        """?params=true returns the declarations serialization."""
        response = self.auth_client.get(self.url + '?params=true')
        assert response.status_code == status.HTTP_200_OK
        assert response.data['count'] == self.queryset.count()
        expected = self.params_serializer_class(self.queryset, many=True).data
        assert response.data['results'] == expected

    def test_get_all(self):
        """?all=true returns every experiment instead of one page."""
        Experiment.objects.bulk_create([
            Experiment(project=self.project, user=self.auth_client.user)
            for _ in range(30)
        ])
        response = self.auth_client.get(self.url)
        assert response.status_code == status.HTTP_200_OK
        assert response.data['count'] == self.queryset.count()
        # The default listing is paginated, so it returns fewer than count.
        assert len(response.data['results']) < self.queryset.count()
        response = self.auth_client.get(self.url + '?all=true')
        assert response.status_code == status.HTTP_200_OK
        assert response.data['count'] == self.queryset.count()
        assert len(response.data['results']) == self.queryset.count()
@pytest.mark.experiments_mark
class TestExperimentGroupExperimentListViewV1(BaseViewTest):
    """Tests listing experiments filtered by an experiment group."""
    serializer_class = BookmarkedExperimentSerializer
    model_class = Experiment
    factory_class = ExperimentFactory
    num_objects = 3
    HAS_AUTH = True

    def setUp(self):
        super().setUp()
        self.project = ProjectFactory()
        self.experiment_group = ExperimentGroupFactory(project=self.project)
        self.objects = [self.factory_class(project=self.project,
                                           experiment_group=self.experiment_group)
                        for _ in range(self.num_objects)]
        self.url = '/{}/{}/{}/experiments?group={}'.format(
            API_V1,
            self.experiment_group.project.user,
            self.experiment_group.project.name,
            self.experiment_group.id)
        # one object that does not belong to the filter
        self.factory_class(project=self.experiment_group.project)
        self.queryset = self.model_class.objects.filter(experiment_group=self.experiment_group)
        self.queryset = self.queryset.order_by('-updated_at')

    def test_get(self):
        """Only the group's experiments are listed, newest update first."""
        resp = self.auth_client.get(self.url)
        assert resp.status_code == status.HTTP_200_OK
        assert resp.data['next'] is None
        assert resp.data['count'] == self.queryset.count()
        data = resp.data['results']
        assert len(data) == self.queryset.count()
        assert data == self.serializer_class(self.queryset, many=True).data

    def test_pagination(self):
        """A limit below the object count yields two pages covering all objects."""
        limit = self.num_objects - 1
        # The url already carries '?group=...', so extra params join with '&'.
        resp = self.auth_client.get("{}&limit={}".format(self.url, limit))
        assert resp.status_code == status.HTTP_200_OK
        next_page = resp.data.get('next')
        assert next_page is not None
        assert resp.data['count'] == self.queryset.count()
        data = resp.data['results']
        assert len(data) == limit
        assert data == self.serializer_class(self.queryset[:limit], many=True).data
        resp = self.auth_client.get(next_page)
        assert resp.status_code == status.HTTP_200_OK
        assert resp.data['next'] is None
        data = resp.data['results']
        assert len(data) == 1
        assert data == self.serializer_class(self.queryset[limit:], many=True).data

    def test_pagination_all(self):
        """Pagination flow (duplicate).

        NOTE(review): this body is identical to test_pagination and never
        sends ``all=true``; presumably it was meant to exercise the ``all``
        query parameter (compare
        TestProjectExperimentLastMetricListViewV1.test_get_all) -- confirm
        the intended request and adjust.
        """
        limit = self.num_objects - 1
        resp = self.auth_client.get("{}&limit={}".format(self.url, limit))
        assert resp.status_code == status.HTTP_200_OK
        next_page = resp.data.get('next')
        assert next_page is not None
        assert resp.data['count'] == self.queryset.count()
        data = resp.data['results']
        assert len(data) == limit
        assert data == self.serializer_class(self.queryset[:limit], many=True).data
        resp = self.auth_client.get(next_page)
        assert resp.status_code == status.HTTP_200_OK
        assert resp.data['next'] is None
        data = resp.data['results']
        assert len(data) == 1
        assert data == self.serializer_class(self.queryset[limit:], many=True).data

    def test_get_order(self):
        """?sort overrides the default '-updated_at' ordering."""
        resp = self.auth_client.get(self.url + '&sort=created_at,updated_at')
        assert resp.status_code == status.HTTP_200_OK
        assert resp.data['next'] is None
        assert resp.data['count'] == len(self.objects)
        data = resp.data['results']
        assert len(data) == self.queryset.count()
        assert data != self.serializer_class(self.queryset, many=True).data
        assert data == self.serializer_class(self.queryset.order_by('created_at', 'updated_at'),
                                             many=True).data

    def test_get_order_pagination(self):
        """Sorting and pagination compose correctly."""
        queryset = self.queryset.order_by('created_at', 'updated_at')
        limit = self.num_objects - 1
        resp = self.auth_client.get("{}&limit={}&{}".format(self.url,
                                                            limit,
                                                            'sort=created_at,updated_at'))
        assert resp.status_code == status.HTTP_200_OK
        next_page = resp.data.get('next')
        assert next_page is not None
        assert resp.data['count'] == queryset.count()
        data = resp.data['results']
        assert len(data) == limit
        assert data == self.serializer_class(queryset[:limit], many=True).data
        resp = self.auth_client.get(next_page)
        assert resp.status_code == status.HTTP_200_OK
        assert resp.data['next'] is None
        data = resp.data['results']
        assert len(data) == 1
        assert data == self.serializer_class(queryset[limit:], many=True).data
@pytest.mark.experiments_mark
class TestExperimentSelectionListViewV1(BaseViewTest):
    """Tests listing experiments through a selection-type group filter."""
    serializer_class = BookmarkedExperimentSerializer
    model_class = Experiment
    factory_class = ExperimentFactory
    num_objects = 3
    HAS_AUTH = True

    def setUp(self):
        super().setUp()
        self.project = ProjectFactory()
        self.experiment_group = ExperimentGroupFactory(project=self.project,
                                                       content=None,
                                                       group_type=GroupTypes.SELECTION)
        self.objects = []
        for _ in range(self.num_objects):
            self.objects.append(self.factory_class(project=self.project))
        self.experiment_group.selection_experiments.set(self.objects)
        self.url = '/%s/%s/%s/experiments?group=%s' % (
            API_V1,
            self.experiment_group.project.user,
            self.experiment_group.project.name,
            self.experiment_group.id)
        # An extra experiment outside the selection; it must not be listed.
        self.factory_class(project=self.experiment_group.project)
        self.queryset = self.experiment_group.selection_experiments.all().order_by('-updated_at')

    def test_get(self):
        """The listing returns exactly the selection, newest update first."""
        response = self.auth_client.get(self.url)
        assert response.status_code == status.HTTP_200_OK
        assert response.data['next'] is None
        assert response.data['count'] == self.queryset.count()
        results = response.data['results']
        assert len(results) == self.queryset.count()
        assert results == self.serializer_class(self.queryset, many=True).data

    def test_pagination(self):
        """A limit smaller than the selection size yields two pages."""
        page_size = self.num_objects - 1
        response = self.auth_client.get('%s&limit=%s' % (self.url, page_size))
        assert response.status_code == status.HTTP_200_OK
        next_page = response.data.get('next')
        assert next_page is not None
        assert response.data['count'] == self.queryset.count()
        results = response.data['results']
        assert len(results) == page_size
        assert results == self.serializer_class(self.queryset[:page_size], many=True).data
        response = self.auth_client.get(next_page)
        assert response.status_code == status.HTTP_200_OK
        assert response.data['next'] is None
        results = response.data['results']
        assert len(results) == 1
        assert results == self.serializer_class(self.queryset[page_size:], many=True).data

    def test_get_order(self):
        """&sort overrides the default '-updated_at' ordering."""
        response = self.auth_client.get(self.url + '&sort=created_at,updated_at')
        assert response.status_code == status.HTTP_200_OK
        assert response.data['next'] is None
        assert response.data['count'] == len(self.objects)
        results = response.data['results']
        assert len(results) == self.queryset.count()
        assert results != self.serializer_class(self.queryset, many=True).data
        ordered = self.queryset.order_by('created_at', 'updated_at')
        assert results == self.serializer_class(ordered, many=True).data

    def test_get_order_pagination(self):
        """Sorting and pagination compose correctly."""
        ordered = self.queryset.order_by('created_at', 'updated_at')
        page_size = self.num_objects - 1
        response = self.auth_client.get('%s&limit=%s&sort=created_at,updated_at'
                                        % (self.url, page_size))
        assert response.status_code == status.HTTP_200_OK
        next_page = response.data.get('next')
        assert next_page is not None
        assert response.data['count'] == ordered.count()
        results = response.data['results']
        assert len(results) == page_size
        assert results == self.serializer_class(ordered[:page_size], many=True).data
        response = self.auth_client.get(next_page)
        assert response.status_code == status.HTTP_200_OK
        assert response.data['next'] is None
        results = response.data['results']
        assert len(results) == 1
        assert results == self.serializer_class(ordered[page_size:], many=True).data
@pytest.mark.experiments_mark
class TestRunnerExperimentGroupExperimentListViewV1(BaseViewTest):
    """Tests listing the experiments spawned by a runner-managed group."""
    serializer_class = BookmarkedExperimentSerializer
    model_class = Experiment
    factory_class = ExperimentFactory
    num_objects = 3
    HAS_AUTH = True
    DISABLE_EXECUTOR = False
    DISABLE_RUNNER = False

    def setUp(self):
        super().setUp()
        # Group spec with a 3-point lr matrix; matrix_space == 3 is asserted
        # below, matching num_objects.
        content = """---
version: 1
kind: group
hptuning:
  matrix:
    lr:
      linspace: '1.:3.:3'
run:
  cmd: python -u model.py --lr={{ lr }}
"""
        self.project = ProjectFactory()
        # Patch out the scheduling side effects so the group is created
        # synchronously without touching the real task queue.
        with patch.object(GroupChecks, 'is_checked') as mock_is_check:
            with patch('hpsearch.tasks.grid.hp_grid_search_start.retry') as start_fct:
                with patch('scheduler.tasks.experiments.'
                           'experiments_build.apply_async') as build_fct:
                    mock_is_check.return_value = False
                    self.experiment_group = ExperimentGroupFactory(
                        project=self.project,
                        content=content)
        assert start_fct.call_count == 1
        assert build_fct.call_count == 1
        assert self.experiment_group.specification.matrix_space == 3
        self.url = '/{}/{}/{}/experiments?group={}'.format(
            API_V1,
            self.experiment_group.project.user,
            self.experiment_group.project.name,
            self.experiment_group.id)
        # one object that does not belong to the filter
        self.factory_class(project=self.project)
        self.queryset = self.model_class.objects.filter(experiment_group=self.experiment_group)
        self.queryset = self.queryset.order_by('-updated_at')

    def test_get(self):
        """Only the group's spawned experiments are listed."""
        resp = self.auth_client.get(self.url)
        assert resp.status_code == status.HTTP_200_OK
        assert resp.data['next'] is None
        assert resp.data['count'] == self.queryset.count()
        data = resp.data['results']
        assert len(data) == self.queryset.count()
        assert data == self.serializer_class(self.queryset, many=True).data

    def test_pagination(self):
        """A limit below the object count yields two pages covering all objects."""
        limit = self.num_objects - 1
        resp = self.auth_client.get("{}&limit={}".format(self.url, limit))
        assert resp.status_code == status.HTTP_200_OK
        next_page = resp.data.get('next')
        assert next_page is not None
        assert resp.data['count'] == self.queryset.count()
        data = resp.data['results']
        assert len(data) == limit
        assert data == self.serializer_class(self.queryset[:limit], many=True).data
        resp = self.auth_client.get(next_page)
        assert resp.status_code == status.HTTP_200_OK
        assert resp.data['next'] is None
        data = resp.data['results']
        assert len(data) == 1
        assert data == self.serializer_class(self.queryset[limit:], many=True).data

    def test_get_order(self):
        """&sort overrides the default '-updated_at' ordering."""
        resp = self.auth_client.get(self.url + '&sort=created_at,updated_at')
        assert resp.status_code == status.HTTP_200_OK
        assert resp.data['next'] is None
        assert resp.data['count'] == self.num_objects
        data = resp.data['results']
        assert len(data) == self.queryset.count()
        assert data != self.serializer_class(self.queryset, many=True).data
        assert data == self.serializer_class(self.queryset.order_by('created_at', 'updated_at'),
                                             many=True).data

    def test_get_order_pagination(self):
        """Sorting and pagination compose correctly."""
        queryset = self.queryset.order_by('created_at', 'updated_at')
        limit = self.num_objects - 1
        resp = self.auth_client.get("{}&limit={}&{}".format(self.url,
                                                            limit,
                                                            'sort=created_at,updated_at'))
        assert resp.status_code == status.HTTP_200_OK
        next_page = resp.data.get('next')
        assert next_page is not None
        assert resp.data['count'] == queryset.count()
        data = resp.data['results']
        assert len(data) == limit
        assert data == self.serializer_class(queryset[:limit], many=True).data
        resp = self.auth_client.get(next_page)
        assert resp.status_code == status.HTTP_200_OK
        assert resp.data['next'] is None
        data = resp.data['results']
        assert len(data) == 1
        assert data == self.serializer_class(queryset[limit:], many=True).data
@pytest.mark.experiments_mark
class TestExperimentDetailViewV1(BaseViewTest):
serializer_class = ExperimentDetailSerializer
model_class = Experiment
factory_class = ExperimentFactory
HAS_AUTH = True
DISABLE_RUNNER = False
DISABLE_EXECUTOR = False
def setUp(self):
super().setUp()
project = ProjectFactory(user=self.auth_client.user)
with patch('scheduler.dockerizer_scheduler.start_dockerizer') as spawner_mock_start:
self.object = self.factory_class(project=project)
assert spawner_mock_start.call_count == 1
self.url = '/{}/{}/{}/experiments/{}/'.format(API_V1,
project.user.username,
project.name,
self.object.id)
self.queryset = self.model_class.objects.all()
# Create related fields
for _ in range(2):
ExperimentJobFactory(experiment=self.object)
self.object_query = queries.experiments_details.get(id=self.object.id)
def test_get(self):
resp = self.auth_client.get(self.url)
assert resp.status_code == status.HTTP_200_OK
self.object.refresh_from_db()
assert resp.data == self.serializer_class(self.object_query).data
assert resp.data['num_jobs'] == 2
    def test_get_with_resource_reg_90(self):
        """Regression test for issue #90.

        Retrieving an experiment used to fail when the spec declared
        ``environment.resources`` without a framework section.
        """
        spec_content = """---
version: 1
kind: experiment
environment:
  node_selector:
    foo: bar
  tolerations:
    - key: "key"
      operator: "Equal"
      value: "value"
      effect: "NoSchedule"
  affinity:
    foo: bar
  resources:
    gpu:
      requests: 1
      limits: 1
    tpu:
      requests: 1
      limits: 1
build:
  image: my_image
run:
  cmd: video_prediction_train --model=DNA --num_masks=1
"""
        spec_parsed_content = ExperimentSpecification.read(spec_content)
        project = ProjectFactory(user=self.auth_client.user)
        exp = self.factory_class(project=project, content=spec_parsed_content.raw_data)
        url = '/{}/{}/{}/experiments/{}/'.format(API_V1,
                                                 project.user.username,
                                                 project.name,
                                                 exp.id)
        resp = self.auth_client.get(url)
        # The request must succeed and serialize the full details query.
        assert resp.status_code == status.HTTP_200_OK
        exp_query = queries.experiments_details.get(id=exp.id)
        assert resp.data == self.serializer_class(exp_query).data
    def test_patch_exp(self):  # pylint:disable=too-many-statements
        """PATCH updates description, is_managed, original_experiment, tags,
        params, and name, with merge semantics for tags/params."""
        new_description = 'updated_xp_name'
        data = {'description': new_description}
        assert self.object.description != data['description']
        resp = self.auth_client.patch(self.url, data=data)
        assert resp.status_code == status.HTTP_200_OK
        new_object = self.model_class.objects.get(id=self.object.id)
        assert new_object.user == self.object.user
        assert new_object.description != self.object.description
        assert new_object.description == new_description
        assert new_object.jobs.count() == 2
        # patch is_managed: True -> False
        data = {'is_managed': False}
        assert self.object.is_managed is True
        resp = self.auth_client.patch(self.url, data=data)
        assert resp.status_code == status.HTTP_200_OK
        new_object = self.model_class.objects.get(id=self.object.id)
        assert new_object.jobs.count() == 2
        assert new_object.is_managed is False
        # patch is_managed: None resets to True
        data = {'is_managed': None}
        assert new_object.is_managed is False
        resp = self.auth_client.patch(self.url, data=data)
        assert resp.status_code == status.HTTP_200_OK
        new_object = self.model_class.objects.get(id=self.object.id)
        assert new_object.jobs.count() == 2
        assert new_object.is_managed is True
        # patch is_managed: explicit False then True round-trip
        data = {'is_managed': False}
        assert new_object.is_managed is True
        resp = self.auth_client.patch(self.url, data=data)
        assert resp.status_code == status.HTTP_200_OK
        new_object = self.model_class.objects.get(id=self.object.id)
        assert new_object.jobs.count() == 2
        assert new_object.is_managed is False
        data = {'is_managed': True}
        resp = self.auth_client.patch(self.url, data=data)
        assert resp.status_code == status.HTTP_200_OK
        new_object = self.model_class.objects.get(id=self.object.id)
        assert new_object.jobs.count() == 2
        assert new_object.is_managed is True
        # Update original experiment
        assert new_object.is_clone is False
        new_experiment = ExperimentFactory()
        data = {'original_experiment': new_experiment.id}
        resp = self.auth_client.patch(self.url, data=data)
        assert resp.status_code == status.HTTP_200_OK
        new_object = self.model_class.objects.get(id=self.object.id)
        assert new_object.user == self.object.user
        assert new_object.description == new_description
        assert new_object.jobs.count() == 2
        assert new_object.is_clone is True
        assert new_object.original_experiment == new_experiment
        # Update tags: default merges, 'merge': False replaces
        assert new_object.tags == ['fixtures']
        data = {'tags': ['foo', 'bar']}
        resp = self.auth_client.patch(self.url, data=data)
        assert resp.status_code == status.HTTP_200_OK
        new_object = self.model_class.objects.get(id=self.object.id)
        assert sorted(new_object.tags) == sorted(['foo', 'bar'])
        data = {'tags': ['foo_new', 'bar_new'], 'merge': False}
        resp = self.auth_client.patch(self.url, data=data)
        assert resp.status_code == status.HTTP_200_OK
        new_object = self.model_class.objects.get(id=self.object.id)
        assert sorted(new_object.tags) == sorted(['foo_new', 'bar_new'])
        data = {'tags': ['foo', 'bar'], 'merge': True}
        resp = self.auth_client.patch(self.url, data=data)
        assert resp.status_code == status.HTTP_200_OK
        new_object = self.model_class.objects.get(id=self.object.id)
        assert sorted(new_object.tags) == sorted(['foo_new', 'bar_new', 'foo', 'bar'])
        # Update params: same merge semantics as tags
        assert new_object.params is None
        data = {'params': {'foo': 'bar'}}
        resp = self.auth_client.patch(self.url, data=data)
        assert resp.status_code == status.HTTP_200_OK
        new_object = self.model_class.objects.get(id=self.object.id)
        assert new_object.params == {'foo': 'bar'}
        data = {'params': {'foo_new': 'bar_new'}, 'merge': False}
        resp = self.auth_client.patch(self.url, data=data)
        assert resp.status_code == status.HTTP_200_OK
        new_object = self.model_class.objects.get(id=self.object.id)
        assert new_object.params == {'foo_new': 'bar_new'}
        data = {'params': {'foo': 'bar'}, 'merge': True}
        resp = self.auth_client.patch(self.url, data=data)
        assert resp.status_code == status.HTTP_200_OK
        new_object = self.model_class.objects.get(id=self.object.id)
        assert new_object.params == {'foo_new': 'bar_new', 'foo': 'bar'}
        # Update name
        data = {'name': 'new_name'}
        assert new_object.name is None
        resp = self.auth_client.patch(self.url, data=data)
        assert resp.status_code == status.HTTP_200_OK
        new_object = self.model_class.objects.get(id=self.object.id)
        assert new_object.name == data['name']
def test_delete_from_created_status_archives_and_schedules_stop(self):
assert self.model_class.objects.count() == 1
assert ExperimentJob.objects.count() == 2
with patch('scheduler.experiment_scheduler.stop_experiment') as spawner_mock_stop:
resp = self.auth_client.delete(self.url)
assert spawner_mock_stop.call_count == 1
assert resp.status_code == status.HTTP_204_NO_CONTENT
# Deleted
assert self.model_class.objects.count() == 0
assert self.model_class.all.count() == 0
assert ExperimentJob.objects.count() == 0
def test_delete_from_running_status_archives_and_schedules_stop(self):
self.object.set_status(ExperimentLifeCycle.RUNNING)
assert self.model_class.objects.count() == 1
assert ExperimentJob.objects.count() == 2
with patch('scheduler.experiment_scheduler.stop_experiment') as spawner_mock_stop:
resp = self.auth_client.delete(self.url)
assert spawner_mock_stop.call_count == 1
assert resp.status_code == status.HTTP_204_NO_CONTENT
# Deleted
assert self.model_class.objects.count() == 0
assert self.model_class.all.count() == 0
assert ExperimentJob.objects.count() == 0
def test_delete_archives_and_schedules_deletion(self):
self.object.set_status(ExperimentLifeCycle.RUNNING)
assert self.model_class.objects.count() == 1
assert ExperimentJob.objects.count() == 2
with patch('scheduler.tasks.experiments.'
'experiments_schedule_deletion.apply_async') as spawner_mock_stop:
resp = self.auth_client.delete(self.url)
assert spawner_mock_stop.call_count == 1
assert resp.status_code == status.HTTP_204_NO_CONTENT
# Patched
assert self.model_class.objects.count() == 0
assert self.model_class.all.count() == 1
assert ExperimentJob.objects.count() == 2
def test_archive_schedule_deletion(self):
self.object.set_status(ExperimentLifeCycle.RUNNING)
assert self.model_class.objects.count() == 1
assert ExperimentJob.objects.count() == 2
with patch('scheduler.tasks.experiments.'
'experiments_schedule_deletion.apply_async') as spawner_mock_stop:
resp = self.auth_client.post(self.url + 'archive/')
assert resp.status_code == status.HTTP_200_OK
assert spawner_mock_stop.call_count == 1
assert self.model_class.objects.count() == 1
assert self.model_class.all.count() == 1
def test_archive_schedule_archives_and_schedules_stop(self):
self.object.set_status(ExperimentLifeCycle.RUNNING)
assert self.model_class.objects.count() == 1
assert ExperimentJob.objects.count() == 2
with patch('scheduler.tasks.experiments.'
'experiments_stop.apply_async') as spawner_mock_stop:
resp = self.auth_client.post(self.url + 'archive/')
assert resp.status_code == status.HTTP_200_OK
assert spawner_mock_stop.call_count == 1
assert self.model_class.objects.count() == 0
assert self.model_class.all.count() == 1
assert ExperimentJob.objects.count() == 2
def test_restore(self):
self.object.archive()
assert self.model_class.objects.count() == 0
assert self.model_class.all.count() == 1
resp = self.auth_client.post(self.url + 'restore/')
assert resp.status_code == status.HTTP_200_OK
assert self.model_class.objects.count() == 1
assert self.model_class.all.count() == 1
assert ExperimentJob.objects.count() == 2
@pytest.mark.experiments_mark
class TestExperimentCodeReferenceViewV1(BaseEntityCodeReferenceViewTest):
    """Code-reference endpoint tests specialized for experiment entities."""
    entity_factory_class = ExperimentFactory

    def get_url(self):
        """Build the coderef URL for the experiment under test."""
        return (f'/{API_V1}/{self.project.user.username}'
                f'/{self.project.name}/experiments/{self.obj.id}/coderef/')
@pytest.mark.experiments_mark
class TestExperimentStatusListViewV1(BaseViewTest):
    """List and create experiment statuses, for both user and internal clients."""
    serializer_class = ExperimentStatusSerializer
    model_class = ExperimentStatus
    factory_class = ExperimentStatusFactory
    num_objects = 3
    HAS_AUTH = True
    HAS_INTERNAL = True
    INTERNAL_SERVICE = InternalServices.SIDECAR

    def setUp(self):
        super().setUp()
        # Suppress status side effects and build scheduling while the fixture
        # statuses are created.
        with patch.object(Experiment, 'set_status') as _:  # noqa
            with patch('scheduler.tasks.experiments.experiments_build.apply_async') as _:  # noqa
                project = ProjectFactory(user=self.auth_client.user)
                self.experiment = ExperimentFactory(project=project)
                self.url = (f'/{API_V1}/{project.user.username}/{project.name}'
                            f'/experiments/{self.experiment.id}/statuses/')
                self.objects = [
                    self.factory_class(experiment=self.experiment,
                                       status=ExperimentLifeCycle.CHOICES[i][0])
                    for i in range(self.num_objects)
                ]
        self.queryset = self.model_class.objects.all().order_by('created_at')

    def test_get(self):
        """Both the auth client and the internal client can list statuses."""
        response = self.auth_client.get(self.url)
        assert response.status_code == status.HTTP_200_OK
        response = self.internal_client.get(self.url)
        assert response.status_code == status.HTTP_200_OK
        assert response.data['next'] is None
        assert response.data['count'] == len(self.objects)
        results = response.data['results']
        assert len(results) == self.queryset.count()
        assert results == self.serializer_class(self.queryset, many=True).data

    def test_pagination(self):
        """A limit smaller than the object count yields exactly two pages."""
        limit = self.num_objects - 1
        response = self.auth_client.get(f"{self.url}?limit={limit}")
        assert response.status_code == status.HTTP_200_OK
        next_page = response.data.get('next')
        assert next_page is not None
        assert response.data['count'] == self.queryset.count()
        results = response.data['results']
        assert len(results) == limit
        assert results == self.serializer_class(self.queryset[:limit], many=True).data
        response = self.auth_client.get(next_page)
        assert response.status_code == status.HTTP_200_OK
        assert response.data['next'] is None
        results = response.data['results']
        assert len(results) == 1
        assert results == self.serializer_class(self.queryset[limit:], many=True).data

    def test_create(self):
        """POST creates statuses; an empty payload defaults to CREATED."""
        response = self.auth_client.post(self.url, {})
        assert response.status_code == status.HTTP_201_CREATED
        assert self.model_class.objects.count() == self.num_objects + 1
        last_object = self.model_class.objects.last()
        assert last_object.status == ExperimentLifeCycle.CREATED
        payload = {'status': ExperimentLifeCycle.RUNNING}
        response = self.auth_client.post(self.url, payload)
        assert response.status_code == status.HTTP_201_CREATED
        assert self.model_class.objects.count() == self.num_objects + 2
        last_object = self.model_class.objects.last()
        assert last_object.experiment == self.experiment
        assert last_object.status == payload['status']
        # A failed status can carry a message and a traceback.
        payload = {'status': ExperimentLifeCycle.FAILED,
                   'message': 'message1',
                   'traceback': 'traceback1'}
        response = self.auth_client.post(self.url, payload)
        assert response.status_code == status.HTTP_201_CREATED
        assert self.model_class.objects.count() == self.num_objects + 3
        last_object = self.model_class.objects.last()
        assert last_object.experiment == self.experiment
        assert last_object.message == payload['message']
        assert last_object.traceback == payload['traceback']
        # The internal (sidecar) client can also create statuses.
        response = self.internal_client.post(self.url, {})
        assert response.status_code == status.HTTP_201_CREATED
        assert self.model_class.objects.count() == self.num_objects + 4
@pytest.mark.experiments_mark
class TestExperimentMetricListViewV1(BaseViewTest):
    """List and create experiment metrics, single and bulk payloads."""
    serializer_class = ExperimentMetricSerializer
    model_class = ExperimentMetric
    factory_class = ExperimentMetricFactory
    num_objects = 3
    HAS_AUTH = True
    HAS_INTERNAL = True

    def setUp(self):
        super().setUp()
        project = ProjectFactory(user=self.auth_client.user)
        self.experiment = ExperimentFactory(project=project)
        self.url = (f'/{API_V1}/{project.user.username}/{project.name}'
                    f'/experiments/{self.experiment.id}/metrics/')
        self.objects = [
            self.factory_class(experiment=self.experiment, values={'accuracy': i / 10})
            for i in range(self.num_objects)
        ]
        self.queryset = self.model_class.objects.all().order_by('created_at')

    def test_get(self):
        """Listing returns every fixture metric in creation order."""
        response = self.auth_client.get(self.url)
        assert response.status_code == status.HTTP_200_OK
        assert response.data['next'] is None
        assert response.data['count'] == len(self.objects)
        results = response.data['results']
        assert len(results) == self.queryset.count()
        assert results == self.serializer_class(self.queryset, many=True).data

    def test_pagination(self):
        """A limit smaller than the object count yields exactly two pages."""
        limit = self.num_objects - 1
        response = self.auth_client.get(f"{self.url}?limit={limit}")
        assert response.status_code == status.HTTP_200_OK
        next_page = response.data.get('next')
        assert next_page is not None
        assert response.data['count'] == self.queryset.count()
        results = response.data['results']
        assert len(results) == limit
        assert results == self.serializer_class(self.queryset[:limit], many=True).data
        response = self.auth_client.get(next_page)
        assert response.status_code == status.HTTP_200_OK
        assert response.data['next'] is None
        results = response.data['results']
        assert len(results) == 1
        assert results == self.serializer_class(self.queryset[limit:], many=True).data

    def test_create(self):
        """A metric requires values; a valid payload creates one row."""
        response = self.auth_client.post(self.url, {})
        assert response.status_code == status.HTTP_400_BAD_REQUEST
        payload = {'values': {'precision': 0.9}}
        response = self.auth_client.post(self.url, payload)
        assert response.status_code == status.HTTP_201_CREATED
        assert self.model_class.objects.count() == self.num_objects + 1
        last_object = self.model_class.objects.last()
        assert last_object.experiment == self.experiment
        assert last_object.values == payload['values']

    def test_create_many(self):
        """A list payload creates one metric per entry."""
        response = self.auth_client.post(self.url, {})
        assert response.status_code == status.HTTP_400_BAD_REQUEST
        payload = [
            {'values': {'precision': 0.9}},
            {'values': {'precision': 0.95}},
            {'values': {'precision': 0.99}}
        ]
        response = self.auth_client.post(self.url, payload)
        assert response.status_code == status.HTTP_201_CREATED
        assert self.model_class.objects.count() == self.num_objects + 3
        last_object = self.model_class.objects.last()
        assert last_object.experiment == self.experiment
        assert last_object.values == payload[-1]['values']
        # Creation goes through the async set-metrics task exactly once.
        with patch('scheduler.tasks.experiments.experiments_set_metrics.apply_async') as mock_task:
            response = self.auth_client.post(self.url, payload)
        assert response.status_code == status.HTTP_201_CREATED
        assert mock_task.call_count == 1

    def test_create_internal(self):
        """The internal client follows the same validation and creation rules."""
        response = self.internal_client.post(self.url, {})
        assert response.status_code == status.HTTP_400_BAD_REQUEST
        payload = {'values': {'precision': 0.9}}
        response = self.internal_client.post(self.url, payload)
        assert response.status_code == status.HTTP_201_CREATED
        assert self.model_class.objects.count() == self.num_objects + 1
        last_object = self.model_class.objects.last()
        assert last_object.experiment == self.experiment
        assert last_object.values == payload['values']
@pytest.mark.experiments_mark
class TestExperimentStatusDetailViewV1(BaseViewTest):
    """A single experiment status is read-only: GET works, PATCH/DELETE do not."""
    serializer_class = ExperimentStatusSerializer
    model_class = ExperimentStatus
    factory_class = ExperimentStatusFactory
    HAS_AUTH = True

    def setUp(self):
        super().setUp()
        # Suppress status side effects and build scheduling during fixture setup.
        with patch.object(Experiment, 'set_status') as _:  # noqa
            with patch('scheduler.tasks.experiments.experiments_build.apply_async') as _:  # noqa
                self.experiment = ExperimentFactory()
                self.object = self.factory_class(experiment=self.experiment)
        self.url = (f'/{API_V1}/{self.experiment.project.user.username}'
                    f'/{self.experiment.project.name}'
                    f'/experiments/{self.experiment.id}'
                    f'/statuses/{self.object.uuid.hex}/')
        self.queryset = self.model_class.objects.all()

    def test_get(self):
        response = self.auth_client.get(self.url)
        assert response.status_code == status.HTTP_200_OK
        assert response.data == self.serializer_class(self.object).data

    def test_patch(self):
        payload = {'status': ExperimentLifeCycle.SUCCEEDED}
        response = self.auth_client.patch(self.url, data=payload)
        assert response.status_code == status.HTTP_405_METHOD_NOT_ALLOWED

    def test_delete(self):
        assert self.model_class.objects.count() == 1
        response = self.auth_client.delete(self.url)
        assert response.status_code == status.HTTP_405_METHOD_NOT_ALLOWED
        assert self.model_class.objects.count() == 1
@pytest.mark.experiments_mark
class TestExperimentJobListViewV1(BaseViewTest):
    """List and create tests for an experiment's jobs."""
    serializer_class = ExperimentJobSerializer
    model_class = ExperimentJob
    factory_class = ExperimentJobFactory
    num_objects = 3
    HAS_AUTH = True

    def setUp(self):
        super().setUp()
        project = ProjectFactory(user=self.auth_client.user)
        self.experiment = ExperimentFactory(project=project)
        self.url = (f'/{API_V1}/{project.user.username}/{project.name}'
                    f'/experiments/{self.experiment.id}/jobs/')
        self.objects = [self.factory_class(experiment=self.experiment)
                        for _ in range(self.num_objects)]
        # Jobs are listed most-recently-updated first.
        self.queryset = self.model_class.objects.all().order_by('-updated_at')

    def test_get(self):
        """Listing returns all jobs, newest update first."""
        response = self.auth_client.get(self.url)
        assert response.status_code == status.HTTP_200_OK
        assert response.data['next'] is None
        assert response.data['count'] == len(self.objects)
        results = response.data['results']
        assert len(results) == self.queryset.count()
        assert results == self.serializer_class(self.queryset, many=True).data

    def test_pagination(self):
        """A limit smaller than the object count yields exactly two pages."""
        limit = self.num_objects - 1
        response = self.auth_client.get(f"{self.url}?limit={limit}")
        assert response.status_code == status.HTTP_200_OK
        next_page = response.data.get('next')
        assert next_page is not None
        assert response.data['count'] == self.queryset.count()
        results = response.data['results']
        assert len(results) == limit
        assert results == self.serializer_class(self.queryset[:limit], many=True).data
        response = self.auth_client.get(next_page)
        assert response.status_code == status.HTTP_200_OK
        assert response.data['next'] is None
        results = response.data['results']
        assert len(results) == 1
        assert results == self.serializer_class(self.queryset[limit:], many=True).data

    def test_create(self):
        """POSTing a definition creates a new job on the experiment."""
        payload = {'definition': {'key': 'my new kob k8s'}}
        response = self.auth_client.post(self.url, payload)
        assert response.status_code == status.HTTP_201_CREATED
        assert self.model_class.objects.count() == self.num_objects + 1
        last_object = self.model_class.objects.last()
        assert last_object.experiment == self.experiment
        assert last_object.definition == payload['definition']
@pytest.mark.experiments_mark
class TestExperimentJobDetailViewV1(BaseViewTest):
    """Retrieve, patch and delete tests for a single experiment job."""
    serializer_class = ExperimentJobDetailSerializer
    model_class = ExperimentJob
    factory_class = ExperimentJobFactory
    HAS_AUTH = True

    def setUp(self):
        super().setUp()
        project = ProjectFactory(user=self.auth_client.user)
        self.experiment = ExperimentFactory(project=project)
        self.object = self.factory_class(experiment=self.experiment)
        self.url = '/{}/{}/{}/experiments/{}/jobs/{}/'.format(
            API_V1,
            project.user.username,
            project.name,
            self.experiment.id,
            self.object.id)
        self.queryset = self.model_class.objects.filter(experiment=self.experiment)

    def test_get(self):
        """GET returns the serialized job."""
        resp = self.auth_client.get(self.url)
        assert resp.status_code == status.HTTP_200_OK
        assert resp.data == self.serializer_class(self.object).data

    def test_patch(self):
        """PATCH replaces the job definition."""
        data = {'definition': {'new_key': 'new_value'}}
        resp = self.auth_client.patch(self.url, data=data)
        assert resp.status_code == status.HTTP_200_OK
        new_object = self.model_class.objects.get(id=self.object.id)
        assert new_object.experiment == self.object.experiment
        assert new_object.definition != self.object.definition
        assert new_object.definition == data['definition']

    # Renamed from `test_cannot_path_experiment` (typo: "path" -> "patch").
    def test_cannot_patch_experiment(self):
        """PATCHing the experiment field is silently ignored."""
        data = {'experiment': ExperimentFactory().id}
        resp = self.auth_client.patch(self.url, data=data)
        assert resp.status_code == status.HTTP_200_OK
        new_object = self.model_class.objects.get(id=self.object.id)
        assert new_object.experiment == self.object.experiment

    def test_delete(self):
        """DELETE removes the job."""
        assert self.model_class.objects.count() == 1
        resp = self.auth_client.delete(self.url)
        assert resp.status_code == status.HTTP_204_NO_CONTENT
        assert self.model_class.objects.count() == 0
@pytest.mark.experiments_mark
class TestExperimentJobStatusListViewV1(BaseViewTest):
    """List and create statuses for a single experiment job."""
    serializer_class = ExperimentJobStatusSerializer
    model_class = ExperimentJobStatus
    factory_class = ExperimentJobStatusFactory
    num_objects = 3
    HAS_AUTH = True

    def setUp(self):
        super().setUp()
        # Suppress build scheduling and job status side effects during setup.
        with patch('scheduler.tasks.experiments.experiments_build.apply_async') as _:  # noqa
            with patch.object(ExperimentJob, 'set_status') as _:  # noqa
                project = ProjectFactory(user=self.auth_client.user)
                experiment = ExperimentFactory(project=project)
                self.experiment_job = ExperimentJobFactory(experiment=experiment)
                self.url = (f'/{API_V1}/{project.user.username}/{project.name}'
                            f'/experiments/{experiment.id}'
                            f'/jobs/{self.experiment_job.id}/statuses/')
                self.objects = [
                    self.factory_class(job=self.experiment_job,
                                       status=JobLifeCycle.CHOICES[i][0])
                    for i in range(self.num_objects)
                ]
        self.queryset = self.model_class.objects.filter(
            job=self.experiment_job).order_by('created_at')

    def test_get(self):
        """Listing returns every fixture status in creation order."""
        response = self.auth_client.get(self.url)
        assert response.status_code == status.HTTP_200_OK
        assert response.data['next'] is None
        assert response.data['count'] == len(self.objects)
        results = response.data['results']
        assert len(results) == self.queryset.count()
        assert results == self.serializer_class(self.queryset, many=True).data

    def test_pagination(self):
        """A limit smaller than the object count yields exactly two pages."""
        limit = self.num_objects - 1
        response = self.auth_client.get(f"{self.url}?limit={limit}")
        assert response.status_code == status.HTTP_200_OK
        next_page = response.data.get('next')
        assert next_page is not None
        assert response.data['count'] == self.queryset.count()
        results = response.data['results']
        assert len(results) == limit
        assert results == self.serializer_class(self.queryset[:limit], many=True).data
        response = self.auth_client.get(next_page)
        assert response.status_code == status.HTTP_200_OK
        assert response.data['next'] is None
        results = response.data['results']
        assert len(results) == 1
        assert results == self.serializer_class(self.queryset[limit:], many=True).data

    def test_create(self):
        """POST creates job statuses; an empty payload defaults to CREATED."""
        response = self.auth_client.post(self.url, {})
        assert response.status_code == status.HTTP_201_CREATED
        assert self.model_class.objects.count() == self.num_objects + 1
        last_object = self.model_class.objects.last()
        assert last_object.status == JobLifeCycle.CREATED
        payload = {'status': JobLifeCycle.SUCCEEDED}
        response = self.auth_client.post(self.url, payload)
        assert response.status_code == status.HTTP_201_CREATED
        assert self.model_class.objects.count() == self.num_objects + 2
        last_object = self.model_class.objects.last()
        assert last_object.job == self.experiment_job
        assert last_object.status == payload['status']
@pytest.mark.experiments_mark
class TestExperimentJobStatusDetailViewV1(BaseViewTest):
    """GET and PATCH work on a job status; DELETE is not allowed."""
    serializer_class = ExperimentJobStatusSerializer
    model_class = ExperimentJobStatus
    factory_class = ExperimentJobStatusFactory
    HAS_AUTH = True

    def setUp(self):
        super().setUp()
        # Suppress build scheduling and job status side effects during setup.
        with patch('scheduler.tasks.experiments.experiments_build.apply_async') as _:  # noqa
            with patch.object(ExperimentJob, 'set_status') as _:  # noqa
                project = ProjectFactory(user=self.auth_client.user)
                experiment = ExperimentFactory(project=project)
                self.experiment_job = ExperimentJobFactory(experiment=experiment)
                self.object = self.factory_class(job=self.experiment_job)
        # NB: this detail URL has no trailing slash.
        self.url = (f'/{API_V1}/{project.user.username}/{project.name}'
                    f'/experiments/{experiment.id}'
                    f'/jobs/{self.experiment_job.id}'
                    f'/statuses/{self.object.uuid.hex}')
        self.queryset = self.model_class.objects.filter(job=self.experiment_job)

    def test_get(self):
        response = self.auth_client.get(self.url)
        assert response.status_code == status.HTTP_200_OK
        assert response.data == self.serializer_class(self.object).data

    def test_patch(self):
        payload = {'details': {'message': 'bla', 'reason': 'some reason'}}
        response = self.auth_client.patch(self.url, data=payload)
        assert response.status_code == status.HTTP_200_OK
        # The in-memory instance is stale; only the DB row got the details.
        assert self.object.details == {}
        updated = self.model_class.objects.get(id=self.object.id)
        assert updated.details == {'message': 'bla', 'reason': 'some reason'}
        payload = {'message': 'new reason',
                   'details': {'message': 'bla2', 'reason': 'some reason3'}}
        response = self.auth_client.patch(self.url, data=payload)
        assert response.status_code == status.HTTP_200_OK
        updated = self.model_class.objects.get(id=self.object.id)
        assert updated.message == 'new reason'
        assert updated.details == {'message': 'bla2', 'reason': 'some reason3'}

    def test_delete(self):
        assert self.model_class.objects.count() == 1
        response = self.auth_client.delete(self.url)
        assert response.status_code == status.HTTP_405_METHOD_NOT_ALLOWED
        assert self.model_class.objects.count() == 1
@pytest.mark.experiments_mark
class TestExperimentJobLogsViewV1(BaseViewTest):
    """Log retrieval for a single experiment job (file serving + stream redirect)."""
    num_log_lines = 10
    HAS_AUTH = True

    def setUp(self):
        super().setUp()
        project = ProjectFactory(user=self.auth_client.user)
        self.experiment = ExperimentFactory(
            project=project,
            content=exec_experiment_resources_parsed_content.raw_data)
        self.experiment_job = ExperimentJobFactory(experiment=self.experiment)
        self.logs = []
        # All three endpoints share the same path suffix.
        suffix = (f'{project.user.username}/{project.name}'
                  f'/experiments/{self.experiment.id}'
                  f'/jobs/{self.experiment_job.id}/logs')
        self.url = f'/{API_V1}/{suffix}'
        self.stream_url = f'/{API_V1}/{suffix}/stream'
        self.ws_url = f'/{WS_V1}/{suffix}'

    def create_logs(self, temp):
        """Generate fake log lines and write them to the temp or final log path."""
        job_name = self.experiment_job.unique_name
        log_path = stores.get_experiment_job_logs_path(
            experiment_job_name=job_name,
            temp=temp)
        stores.create_experiment_job_logs_path(
            experiment_job_name=job_name,
            temp=temp)
        fake = Faker()
        self.logs = [fake.sentence() for _ in range(self.num_log_lines)]
        with open(log_path, 'w') as log_file:
            log_file.write('\n'.join(self.logs))
            log_file.write('\n')

    def _assert_log_response(self, resp):
        """Check that a streamed response carries exactly the created log lines."""
        chunks = [chunk for chunk in resp._iterator]  # pylint:disable=protected-access
        lines = [line for line in chunks[0].decode('utf-8').split('\n') if line]
        assert len(lines) == len(self.logs)
        assert lines == self.logs

    def test_get_done_experiment(self):
        self.experiment.set_status(ExperimentLifeCycle.SUCCEEDED)
        self.assertTrue(self.experiment.is_done)
        # No log file at all -> 404.
        resp = self.auth_client.get(self.url)
        assert resp.status_code == status.HTTP_404_NOT_FOUND
        # A temp file is ignored for a finished experiment -> 404.
        self.create_logs(temp=True)
        resp = self.auth_client.get(self.url)
        assert resp.status_code == status.HTTP_404_NOT_FOUND
        # The final (non-temp) log file is served.
        self.create_logs(temp=False)
        resp = self.auth_client.get(self.url)
        assert resp.status_code == status.HTTP_200_OK
        self._assert_log_response(resp)

    @patch('api.experiments.views.process_experiment_job_logs')
    def test_get_non_done_experiment(self, _):
        self.assertFalse(self.experiment.is_done)
        # No log file at all -> 404.
        resp = self.auth_client.get(self.url)
        assert resp.status_code == status.HTTP_404_NOT_FOUND
        # The final file is ignored while the experiment is still running -> 404.
        self.create_logs(temp=False)
        resp = self.auth_client.get(self.url)
        assert resp.status_code == status.HTTP_404_NOT_FOUND
        # The temp file is served while the experiment is still running.
        self.create_logs(temp=True)
        resp = self.auth_client.get(self.url)
        assert resp.status_code == status.HTTP_200_OK
        self._assert_log_response(resp)

    def test_stream_redirects_to_internal_service(self):
        response = self.auth_client.get(self.stream_url)
        self.assertEqual(response.status_code, 200)
        self.assertTrue(ProtectedView.NGINX_REDIRECT_HEADER in response)
        self.assertEqual(response[ProtectedView.NGINX_REDIRECT_HEADER], self.ws_url)
@pytest.mark.experiments_mark
class TestRestartExperimentViewV1(BaseViewTest):
    """Tests for the experiment restart endpoint."""
    serializer_class = ExperimentSerializer
    model_class = Experiment
    factory_class = ExperimentFactory
    HAS_AUTH = True
    DISABLE_RUNNER = False
    DISABLE_EXECUTOR = False

    def setUp(self):
        super().setUp()
        project = ProjectFactory(user=self.auth_client.user)
        self.object = self.factory_class(project=project)
        self.url = (f'/{API_V1}/{project.user.username}/{project.name}'
                    f'/experiments/{self.object.id}/restart')
        self.queryset = self.model_class.objects.all()

    def _assert_restart_clone(self, experiment):
        """The clone created by a restart is flagged as a restart only."""
        assert experiment.is_clone is True
        assert experiment.is_restart is True
        assert experiment.is_copy is False
        assert experiment.is_resume is False
        assert experiment.original_experiment == self.object
        assert experiment.original_unique_name == self.object.unique_name

    def test_restart(self):
        assert self.queryset.count() == 1
        with patch('scheduler.tasks.experiments.experiments_build.apply_async') as mock_build:
            response = self.auth_client.post(self.url, {})
        assert response.status_code == status.HTTP_201_CREATED
        assert mock_build.call_count == 1
        assert self.queryset.count() == 2
        self._assert_restart_clone(self.queryset.last())

    def test_restart_patch_config(self):
        payload = {'content': "{'params': {'lr': 0.1}}"}
        assert self.queryset.first().params is None
        with patch('scheduler.tasks.experiments.experiments_build.apply_async') as mock_build:
            response = self.auth_client.post(self.url, payload)
        assert response.status_code == status.HTTP_201_CREATED
        assert mock_build.call_count == 1
        assert self.queryset.count() == 2
        # The original keeps its params; only the clone gets the declared ones.
        assert self.queryset.first().params is None
        assert self.queryset.last().params == {'lr': 0.1}
        self._assert_restart_clone(self.queryset.last())

    def test_restart_patch_wrong_config_raises(self):
        payload = {'content': "{'lr': 0.1}"}
        assert self.queryset.first().params is None
        with patch('scheduler.tasks.experiments.experiments_build.apply_async') as mock_build:
            response = self.auth_client.post(self.url, payload)
        assert response.status_code == status.HTTP_400_BAD_REQUEST
        assert mock_build.call_count == 0
        # An invalid config must not create a clone.
        assert self.queryset.count() == 1
@pytest.mark.experiments_mark
class TestResumeExperimentViewV1(BaseViewTest):
    """Tests for the experiment resume endpoint."""
    serializer_class = ExperimentSerializer
    model_class = Experiment
    factory_class = ExperimentFactory
    HAS_AUTH = True

    def setUp(self):
        super().setUp()
        project = ProjectFactory(user=self.auth_client.user)
        self.object = self.factory_class(project=project)
        self.url = (f'/{API_V1}/{project.user.username}/{project.name}'
                    f'/experiments/{self.object.id}/resume')
        self.queryset = self.model_class.objects.all()

    def _assert_resume_clone(self, experiment):
        """The clone created by a resume is flagged as a resume only."""
        assert experiment.is_clone is True
        assert experiment.is_restart is False
        assert experiment.is_copy is False
        assert experiment.is_resume is True
        assert experiment.original_experiment == self.object
        assert experiment.original_unique_name == self.object.unique_name

    def test_resume(self):
        assert self.queryset.count() == 1
        with patch('scheduler.tasks.experiments.experiments_build.apply_async') as mock_build:
            response = self.auth_client.post(self.url, {})
        assert response.status_code == status.HTTP_201_CREATED
        assert mock_build.call_count == 1
        assert self.queryset.count() == 2
        self._assert_resume_clone(self.queryset.last())

    def test_resume_patch_config(self):
        payload = {'content': "{'params': {'lr': 0.1}}"}
        assert self.queryset.first().params is None
        with patch('scheduler.tasks.experiments.experiments_build.apply_async') as mock_build:
            response = self.auth_client.post(self.url, payload)
        assert response.status_code == status.HTTP_201_CREATED
        assert mock_build.call_count == 1
        assert self.queryset.count() == 2
        # The original keeps its params; only the clone gets the declared ones.
        assert self.queryset.first().params is None
        assert self.queryset.last().params == {'lr': 0.1}
        self._assert_resume_clone(self.queryset.last())

    def test_resume_patch_wrong_config_raises(self):
        payload = {'content': "{'lr': 0.1}"}
        assert self.queryset.first().params is None
        with patch('scheduler.tasks.experiments.experiments_build.apply_async') as mock_build:
            response = self.auth_client.post(self.url, payload)
        assert response.status_code == status.HTTP_400_BAD_REQUEST
        assert mock_build.call_count == 0
        # An invalid config must not create a clone.
        assert self.queryset.count() == 1
@pytest.mark.experiments_mark
class TestCopyExperimentViewV1(BaseViewTest):
    """Tests for the experiment copy endpoint.

    The test methods were misnamed `test_resume*` (copy-paste from the resume
    test class); they are renamed to `test_copy*` to match what they exercise.
    """
    serializer_class = ExperimentSerializer
    model_class = Experiment
    factory_class = ExperimentFactory
    HAS_AUTH = True
    DISABLE_RUNNER = False
    DISABLE_EXECUTOR = False

    def setUp(self):
        super().setUp()
        project = ProjectFactory(user=self.auth_client.user)
        self.object = self.factory_class(project=project)
        self.url = '/{}/{}/{}/experiments/{}/copy'.format(
            API_V1,
            project.user.username,
            project.name,
            self.object.id)
        self.queryset = self.model_class.objects.all()

    def test_copy(self):
        """Copying creates a clone flagged as a copy and schedules a build."""
        data = {}
        assert self.queryset.count() == 1
        with patch('scheduler.tasks.experiments.experiments_build.apply_async') as mock_fct:
            resp = self.auth_client.post(self.url, data)
        assert resp.status_code == status.HTTP_201_CREATED
        assert mock_fct.call_count == 1
        assert self.queryset.count() == 2
        last_experiment = self.queryset.last()
        assert last_experiment.is_clone is True
        assert last_experiment.is_restart is False
        assert last_experiment.is_copy is True
        assert last_experiment.is_resume is False
        assert last_experiment.original_experiment == self.object
        assert last_experiment.original_unique_name == self.object.unique_name

    def test_copy_patch_config(self):
        """A config payload applies to the copy, not the original."""
        data = {'content': "{'params': {'lr': 0.1}}"}
        assert self.queryset.first().params is None
        with patch('scheduler.tasks.experiments.experiments_build.apply_async') as mock_fct:
            resp = self.auth_client.post(self.url, data)
        assert resp.status_code == status.HTTP_201_CREATED
        assert mock_fct.call_count == 1
        assert self.queryset.count() == 2
        assert self.queryset.first().params is None
        assert self.queryset.last().params == {'lr': 0.1}
        last_experiment = self.queryset.last()
        assert last_experiment.is_clone is True
        assert last_experiment.is_restart is False
        assert last_experiment.is_copy is True
        assert last_experiment.is_resume is False
        assert last_experiment.original_experiment == self.object
        assert last_experiment.original_unique_name == self.object.unique_name

    def test_copy_patch_wrong_config_raises(self):
        """An invalid config payload is rejected and nothing is copied."""
        data = {'content': "{'lr': 0.1}"}
        assert self.queryset.first().params is None
        with patch('scheduler.tasks.experiments.experiments_build.apply_async') as mock_fct:
            resp = self.auth_client.post(self.url, data)
        assert resp.status_code == status.HTTP_400_BAD_REQUEST
        assert mock_fct.call_count == 0
        assert self.queryset.count() == 1
@pytest.mark.experiments_mark
class TestStopExperimentViewV1(BaseViewTest):
    """Tests for the single-experiment stop endpoint."""
    model_class = Experiment
    factory_class = ExperimentFactory
    HAS_AUTH = True

    def setUp(self):
        super().setUp()
        project = ProjectFactory(user=self.auth_client.user)
        self.object = self.factory_class(project=project)
        self.url = (f'/{API_V1}/{project.user.username}/{project.name}'
                    f'/experiments/{self.object.id}/stop')
        self.queryset = self.model_class.objects.all()

    def test_stop(self):
        assert self.queryset.count() == 1
        with patch('scheduler.tasks.experiments.experiments_stop.apply_async') as mock_stop:
            response = self.auth_client.post(self.url, {})
        assert mock_stop.call_count == 1
        assert response.status_code == status.HTTP_200_OK
        # Stopping does not delete the experiment.
        assert self.queryset.count() == 1
@pytest.mark.experiments_mark
class TestStopExperimentManyViewV1(BaseViewTest):
    """Tests for the bulk experiment stop endpoint."""
    model_class = Experiment
    factory_class = ExperimentFactory
    HAS_AUTH = True

    def setUp(self):
        super().setUp()
        project = ProjectFactory(user=self.auth_client.user)
        self.objects = [self.factory_class(project=project) for _ in range(3)]
        self.url = (f'/{API_V1}/{project.user.username}/{project.name}'
                    f'/experiments/stop')
        self.queryset = self.model_class.objects.all()

    def test_stop_many(self):
        assert self.queryset.count() == 3
        # Without ids nothing is stopped.
        with patch('scheduler.tasks.experiments.experiments_stop.apply_async') as mock_stop:
            response = self.auth_client.post(self.url, {})
        assert response.status_code == status.HTTP_200_OK
        assert mock_stop.call_count == 0
        # Explicit ids schedule one stop per experiment.
        payload = {'ids': [obj.id for obj in self.objects]}
        with patch('scheduler.tasks.experiments.experiments_stop.apply_async') as mock_stop:
            response = self.auth_client.post(self.url, payload)
        assert response.status_code == status.HTTP_200_OK
        assert mock_stop.call_count == 3
        # Stopping does not delete any experiment.
        assert self.queryset.count() == 3
@pytest.mark.experiments_mark
class TestDeleteExperimentManyViewV1(BaseViewTest):
    """Tests for the bulk experiment delete endpoint."""
    model_class = Experiment
    factory_class = ExperimentFactory
    HAS_AUTH = True

    def setUp(self):
        super().setUp()
        project = ProjectFactory(user=self.auth_client.user)
        self.objects = [self.factory_class(project=project) for _ in range(3)]
        self.url = (f'/{API_V1}/{project.user.username}/{project.name}'
                    f'/experiments/delete')
        self.queryset = self.model_class.objects.all()

    def test_delete_many(self):
        assert self.queryset.count() == 3
        # Without ids nothing is deleted.
        response = self.auth_client.delete(self.url, {})
        assert response.status_code == status.HTTP_200_OK
        assert self.queryset.count() == 3
        # Explicit ids delete all selected experiments.
        payload = {'ids': [obj.id for obj in self.objects]}
        response = self.auth_client.delete(self.url, payload)
        assert response.status_code == status.HTTP_200_OK
        assert self.queryset.count() == 0
@pytest.mark.experiments_mark
class TestExperimentLogsViewV1(BaseViewTest):
num_log_lines = 10
HAS_AUTH = True
def setUp(self):
super().setUp()
project = ProjectFactory(user=self.auth_client.user)
self.experiment = ExperimentFactory(project=project)
self.logs = []
self.url = '/{}/{}/{}/experiments/{}/logs'.format(
API_V1,
project.user.username,
project.name,
self.experiment.id)
self.stream_url = '/{}/{}/{}/experiments/{}/logs/stream'.format(
API_V1,
project.user.username,
project.name,
self.experiment.id)
self.ws_url = '/{}/{}/{}/experiments/{}/logs'.format(
WS_V1,
project.user.username,
project.name,
self.experiment.id)
def create_logs(self, temp):
log_path = stores.get_experiment_logs_path(
experiment_name=self.experiment.unique_name,
temp=temp)
stores.create_experiment_logs_path(experiment_name=self.experiment.unique_name, temp=temp)
fake = Faker()
self.logs = []
for _ in range(self.num_log_lines):
self.logs.append(fake.sentence())
with open(log_path, 'w') as file:
for line in self.logs:
file.write(line)
file.write('\n')
def test_get_done_experiment(self):
self.experiment.set_status(ExperimentLifeCycle.SUCCEEDED)
self.assertTrue(self.experiment.is_done)
# No logs
resp = self.auth_client.get(self.url)
assert resp.status_code == status.HTTP_404_NOT_FOUND
# Check the it does not return temp file
self.create_logs(temp=True)
resp = self.auth_client.get(self.url)
assert resp.status_code == status.HTTP_404_NOT_FOUND
# Check returns the correct file
self.create_logs(temp=False)
resp = self.auth_client.get(self.url)
assert resp.status_code == status.HTTP_200_OK
data = [i for i in resp._iterator] # pylint:disable=protected-access
data = [d for d in data[0].decode('utf-8').split('\n') if d]
assert len(data) == len(self.logs)
assert data == self.logs
@patch('api.experiments.views.process_logs')
def test_get_non_done_experiment(self, _):
self.assertFalse(self.experiment.is_done)
# No logs
resp = self.auth_client.get(self.url)
assert resp.status_code == status.HTTP_404_NOT_FOUND
# Check the it does not return non temp file
self.create_logs(temp=False)
resp = self.auth_client.get(self.url)
assert resp.status_code == status.HTTP_404_NOT_FOUND
# Check returns the correct file
self.create_logs(temp=True)
resp = self.auth_client.get(self.url)
assert resp.status_code == status.HTTP_200_OK
data = [i for i in resp._iterator] # pylint:disable=protected-access
data = [d for d in data[0].decode('utf-8').split('\n') if d]
assert len(data) == len(self.logs)
assert data == self.logs
def test_post_logs(self):
resp = self.auth_client.post(self.url)
assert resp.status_code == status.HTTP_400_BAD_REQUEST
data = 'logs here'
with patch('logs_handlers.tasks.logs_handle_experiment_job.apply_async') as mock_fct:
resp = self.auth_client.post(self.url, data)
assert resp.status_code == status.HTTP_200_OK
assert mock_fct.call_count == 1
data = ['logs here', 'dfg dfg']
with patch('logs_handlers.tasks.logs_handle_experiment_job.apply_async') as mock_fct:
resp = self.auth_client.post(self.url, data)
assert resp.status_code == status.HTTP_200_OK
assert mock_fct.call_count == 1
def test_stream_redirects_to_internal_service(self):
response = self.auth_client.get(self.stream_url)
self.assertEqual(response.status_code, 200)
self.assertTrue(ProtectedView.NGINX_REDIRECT_HEADER in response)
self.assertEqual(response[ProtectedView.NGINX_REDIRECT_HEADER], self.ws_url)
@pytest.mark.experiments_mark
class TestExperimentOutputsTreeViewV1(BaseFilesViewTest):
num_log_lines = 10
HAS_AUTH = True
def setUp(self):
super().setUp()
project = ProjectFactory(user=self.auth_client.user)
experiment = ExperimentFactory(project=project)
self.url = '/{}/{}/{}/experiments/{}/outputs/tree'.format(
API_V1,
project.user.username,
project.name,
experiment.id)
outputs_path = stores.get_experiment_outputs_path(
persistence=experiment.persistence_outputs,
experiment_name=experiment.unique_name,
original_name=experiment.original_unique_name,
cloning_strategy=experiment.cloning_strategy)
stores.create_experiment_outputs_path(
persistence=experiment.persistence_outputs,
experiment_name=experiment.unique_name)
self.create_paths(path=outputs_path, url=self.url)
def test_get(self):
resp = self.auth_client.get(self.url)
assert resp.status_code == status.HTTP_200_OK
self.assert_same_content(resp.data['files'], self.top_level['files'])
self.assert_same_content(resp.data['dirs'], self.top_level['dirs'])
resp = self.auth_client.get(self.url_second_level)
assert resp.status_code == status.HTTP_200_OK
self.assert_same_content(resp.data['files'], self.second_level['files'])
self.assert_same_content(resp.data['dirs'], self.second_level['dirs'])
resp = self.auth_client.get(self.url_second_level2)
assert resp.status_code == status.HTTP_200_OK
self.assert_same_content(resp.data['files'], self.second_level['files'])
self.assert_same_content(resp.data['dirs'], self.second_level['dirs'])
@pytest.mark.experiments_mark
class TestExperimentOutputsFilesViewV1(BaseFilesViewTest):
num_log_lines = 10
HAS_AUTH = True
def setUp(self):
super().setUp()
project = ProjectFactory(user=self.auth_client.user)
experiment = ExperimentFactory(project=project)
self.url = '/{}/{}/{}/experiments/{}/outputs/files'.format(
API_V1,
project.user.username,
project.name,
experiment.id)
outputs_path = stores.get_experiment_outputs_path(
persistence=experiment.persistence_outputs,
experiment_name=experiment.unique_name,
original_name=experiment.original_unique_name,
cloning_strategy=experiment.cloning_strategy)
stores.create_experiment_outputs_path(
persistence=experiment.persistence_outputs,
experiment_name=experiment.unique_name)
self.create_paths(path=outputs_path, url=self.url)
def test_get(self):
for file_content in self.top_level_files:
resp = self.auth_client.get(self.url + '?path={}'.format(file_content['file']))
assert resp.status_code == status.HTTP_200_OK
data = [i for i in resp._iterator] # pylint:disable=protected-access
assert data[0].decode('utf-8') == file_content['data']
for file_content in self.second_level_files:
resp = self.auth_client.get(self.url + '?path={}'.format(file_content['file']))
assert resp.status_code == status.HTTP_200_OK
data = [i for i in resp._iterator] # pylint:disable=protected-access
assert data[0].decode('utf-8') == file_content['data']
@pytest.mark.experiments_mark
class DownloadExperimentOutputsViewTest(BaseViewTest):
model_class = Experiment
factory_class = ExperimentFactory
HAS_AUTH = True
HAS_INTERNAL = True
def setUp(self):
super().setUp()
self.project = ProjectFactory(user=self.auth_client.user)
self.experiment = self.factory_class(project=self.project)
self.download_url = '/{}/{}/{}/experiments/{}/outputs/download'.format(
API_V1,
self.project.user.username,
self.project.name,
self.experiment.id)
self.experiment_outputs_path = stores.get_experiment_outputs_path(
persistence=self.experiment.persistence_outputs,
experiment_name=self.experiment.unique_name)
self.url = self.download_url
def create_tmp_outputs(self):
stores.create_experiment_outputs_path(
persistence=self.experiment.persistence_outputs,
experiment_name=self.experiment.unique_name)
for i in range(4):
open('{}/{}'.format(self.experiment_outputs_path, i), '+w')
def test_redirects_nginx_to_file(self):
self.create_tmp_outputs()
# Assert that the experiment outputs
self.assertTrue(os.path.exists(self.experiment_outputs_path))
response = self.auth_client.get(self.download_url)
self.assertEqual(response.status_code, 200)
self.assertTrue(ProtectedView.NGINX_REDIRECT_HEADER in response)
self.assertEqual(response[ProtectedView.NGINX_REDIRECT_HEADER],
'{}/{}.tar.gz'.format(conf.get(ARCHIVES_ROOT_ARTIFACTS),
self.experiment.unique_name.replace('.', '_')))
@pytest.mark.experiments_mark
class TestExperimentEphemeralTokenViewV1(BaseViewTest):
HAS_AUTH = False
factory_class = ExperimentFactory
def setUp(self):
super().setUp()
self.auth_user = self.auth_client.user
self.project = ProjectFactory(user=self.auth_client.user)
self.experiment = self.factory_class(project=self.project)
self.other_experiment = self.factory_class(project=self.project)
self.url = '/{}/{}/{}/experiments/{}/ephemeraltoken'.format(
API_V1,
self.project.user.username,
self.project.name,
self.experiment.id)
self.other_url = '/{}/{}/{}/experiments/{}/ephemeraltoken'.format(
API_V1,
self.project.user.username,
self.project.name,
self.other_experiment.id)
@staticmethod
def create_ephemeral_token(experiment, **kwargs):
scope = RedisEphemeralTokens.get_scope(user=experiment.user.id,
model='experiment',
object_id=experiment.id)
return RedisEphemeralTokens.generate(scope=scope, **kwargs)
def test_is_forbidden_for_non_running_or_scheduled_experiment(self):
ephemeral_token = self.create_ephemeral_token(self.experiment)
token = RedisEphemeralTokens.create_header_token(ephemeral_token)
ephemeral_client = EphemeralClient(token=token)
resp = ephemeral_client.post(self.url)
assert resp.status_code == status.HTTP_403_FORBIDDEN
self.assertEqual(ephemeral_token.get_state(), None)
def test_using_other_experiment_token(self):
ephemeral_token = self.create_ephemeral_token(self.other_experiment)
token = RedisEphemeralTokens.create_header_token(ephemeral_token)
ephemeral_client = EphemeralClient(token=token)
resp = ephemeral_client.post(self.url)
assert resp.status_code == status.HTTP_403_FORBIDDEN
self.assertEqual(ephemeral_token.get_state(), None)
def test_using_timed_out_experiment_token(self):
self.experiment.set_status(status=JobLifeCycle.RUNNING)
ephemeral_token = self.create_ephemeral_token(self.experiment, ttl=1)
token = RedisEphemeralTokens.create_header_token(ephemeral_token)
ephemeral_client = EphemeralClient(token=token)
time.sleep(1.1)
resp = ephemeral_client.post(self.url)
assert resp.status_code == status.HTTP_401_UNAUTHORIZED
self.assertEqual(ephemeral_token.get_state(), None)
def test_using_used_experiment_token(self):
self.experiment.set_status(status=JobLifeCycle.RUNNING)
ephemeral_token = self.create_ephemeral_token(self.experiment)
token = RedisEphemeralTokens.create_header_token(ephemeral_token)
ephemeral_token.clear()
ephemeral_client = EphemeralClient(token=token)
resp = ephemeral_client.post(self.url)
assert resp.status_code == status.HTTP_401_UNAUTHORIZED
self.assertEqual(ephemeral_token.get_state(), None)
def test_using_scheduled_experiment_token(self):
self.experiment.set_status(status=ExperimentLifeCycle.SCHEDULED)
ephemeral_token = self.create_ephemeral_token(self.experiment)
token = RedisEphemeralTokens.create_header_token(ephemeral_token)
ephemeral_client = EphemeralClient(token=token)
resp = ephemeral_client.post(self.url)
assert resp.status_code == status.HTTP_200_OK
assert resp.data == {'token': self.experiment.user.tokens.last().key}
self.assertEqual(ephemeral_token.get_state(), None)
def test_using_starting_experiment_token(self):
self.experiment.set_status(status=ExperimentLifeCycle.STARTING)
ephemeral_token = self.create_ephemeral_token(self.experiment)
token = RedisEphemeralTokens.create_header_token(ephemeral_token)
ephemeral_client = EphemeralClient(token=token)
resp = ephemeral_client.post(self.url)
assert resp.status_code == status.HTTP_200_OK
assert resp.data == {'token': self.experiment.user.tokens.last().key}
self.assertEqual(ephemeral_token.get_state(), None)
def test_using_running_experiment_token(self):
self.experiment.set_status(status=ExperimentLifeCycle.RUNNING)
ephemeral_token = self.create_ephemeral_token(self.experiment)
token = RedisEphemeralTokens.create_header_token(ephemeral_token)
ephemeral_client = EphemeralClient(token=token)
resp = ephemeral_client.post(self.url)
assert resp.status_code == status.HTTP_200_OK
assert resp.data == {'token': self.experiment.user.tokens.last().key}
self.assertEqual(ephemeral_token.get_state(), None)
@pytest.mark.experiments_mark
class TestExperimentChartViewListViewV1(BaseViewTest):
serializer_class = ExperimentChartViewSerializer
model_class = ExperimentChartView
factory_class = ExperimentChartViewFactory
num_objects = 3
HAS_AUTH = True
def setUp(self):
super().setUp()
project = ProjectFactory(user=self.auth_client.user)
self.experiment = ExperimentFactory(project=project)
self.url = '/{}/{}/{}/experiments/{}/chartviews/'.format(API_V1,
project.user.username,
project.name,
self.experiment.id)
self.objects = [self.factory_class(experiment=self.experiment, name='view{}'.format(i))
for i in range(self.num_objects)]
self.queryset = self.model_class.objects.all()
self.queryset = self.queryset.order_by('created_at')
def test_get(self):
resp = self.auth_client.get(self.url)
assert resp.status_code == status.HTTP_200_OK
assert resp.data['next'] is None
assert resp.data['count'] == len(self.objects)
data = resp.data['results']
assert len(data) == self.queryset.count()
assert data == self.serializer_class(self.queryset, many=True).data
def test_pagination(self):
limit = self.num_objects - 1
resp = self.auth_client.get("{}?limit={}".format(self.url, limit))
assert resp.status_code == status.HTTP_200_OK
next_page = resp.data.get('next')
assert next_page is not None
assert resp.data['count'] == self.queryset.count()
data = resp.data['results']
assert len(data) == limit
assert data == self.serializer_class(self.queryset[:limit], many=True).data
resp = self.auth_client.get(next_page)
assert resp.status_code == status.HTTP_200_OK
assert resp.data['next'] is None
data = resp.data['results']
assert len(data) == 1
assert data == self.serializer_class(self.queryset[limit:], many=True).data
def test_create(self):
data = {}
resp = self.auth_client.post(self.url, data)
assert resp.status_code == status.HTTP_400_BAD_REQUEST
data = {'charts': [{'id': '1'}, {'id': '2'}]}
resp = self.auth_client.post(self.url, data)
assert resp.status_code == status.HTTP_201_CREATED
assert self.model_class.objects.count() == self.num_objects + 1
last_object = self.model_class.objects.last()
assert last_object.experiment == self.experiment
assert last_object.charts == data['charts']
@pytest.mark.experiments_mark
class TestExperimentChartViewDetailViewV1(BaseViewTest):
serializer_class = ExperimentChartViewSerializer
model_class = ExperimentChartView
factory_class = ExperimentChartViewFactory
HAS_AUTH = True
def setUp(self):
super().setUp()
self.project = ProjectFactory(user=self.auth_client.user)
self.experiment = ExperimentFactory(project=self.project)
self.object = self.factory_class(experiment=self.experiment)
self.url = '/{}/{}/{}/experiments/{}/chartviews/{}/'.format(
API_V1,
self.experiment.project.user.username,
self.experiment.project.name,
self.experiment.id,
self.object.id)
self.queryset = self.model_class.objects.all()
def test_get(self):
resp = self.auth_client.get(self.url)
assert resp.status_code == status.HTTP_200_OK
assert resp.data == self.serializer_class(self.object).data
def test_patch(self):
data = {'charts': [{'uuid': 'id22'}, {'uuid': 'id23'}, {'uuid': 'id24'}, {'uuid': 'id25'}]}
resp = self.auth_client.patch(self.url, data=data)
assert resp.status_code == status.HTTP_200_OK
assert resp.data['charts'] == data['charts']
def test_delete(self):
assert self.model_class.objects.count() == 1
resp = self.auth_client.delete(self.url)
assert resp.status_code == status.HTTP_204_NO_CONTENT
assert self.model_class.objects.count() == 0
@pytest.mark.experiments_mark
class TestExperimentHeartBeatViewV1(BaseViewTest):
HAS_AUTH = True
HAS_INTERNAL = True
INTERNAL_SERVICE = InternalServices.SIDECAR
def setUp(self):
super().setUp()
project = ProjectFactory(user=self.auth_client.user)
self.experiment = ExperimentFactory(project=project)
self.url = '/{}/{}/{}/experiments/{}/_heartbeat'.format(
API_V1,
project.user.username,
project.name,
self.experiment.id)
def test_post_experiment_heartbeat(self):
self.assertEqual(RedisHeartBeat.experiment_is_alive(self.experiment.id), False)
resp = self.auth_client.post(self.url)
assert resp.status_code == status.HTTP_200_OK
self.assertEqual(RedisHeartBeat.experiment_is_alive(self.experiment.id), True)
def test_post_internal_experiment_heartbeat(self):
self.assertEqual(RedisHeartBeat.experiment_is_alive(self.experiment.id), False)
resp = self.internal_client.post(self.url)
assert resp.status_code == status.HTTP_200_OK
self.assertEqual(RedisHeartBeat.experiment_is_alive(self.experiment.id), True)
@pytest.mark.experiments_mark
class TestExperimentJobReconcileViewV1(BaseViewTest):
HAS_AUTH = True
HAS_INTERNAL = True
INTERNAL_SERVICE = InternalServices.SIDECAR
def setUp(self):
super().setUp()
project = ProjectFactory(user=self.auth_client.user)
self.experiment = ExperimentFactory(project=project)
self.object = ExperimentJobFactory(experiment=self.experiment)
self.url = '/{}/{}/{}/experiments/{}/jobs/{}/_reconcile/'.format(
API_V1,
project.user.username,
project.name,
self.experiment.id,
self.object.uuid.hex)
def _reconcile(self, client):
with patch('k8s_events_handlers.tasks.'
'k8s_events_reconcile_experiment_job_statuses.apply_async') as mock_fct:
resp = client.post(self.url, data={'status': 'succeeded'})
assert resp.status_code == status.HTTP_200_OK
assert mock_fct.call_count == 1
def _reconcile_done(self, client):
ExperimentJobStatusFactory(job=self.object, status='failed')
with patch('k8s_events_handlers.tasks.'
'k8s_events_reconcile_experiment_job_statuses.apply_async') as mock_fct:
resp = client.post(self.url, data={'status': 'succeeded'})
assert mock_fct.call_count == 0
assert resp.status_code == status.HTTP_200_OK
def test_reconcile(self):
self._reconcile(self.auth_client)
def test_reconcile_done(self):
self._reconcile(self.auth_client)
def test_reconcile_internal(self):
self._reconcile(self.internal_client)
def test_reconcile_done_internal(self):
self._reconcile(self.internal_client)
del BaseEntityCodeReferenceViewTest
| 40.914036
| 100
| 0.638826
| 12,815
| 107,563
| 5.16348
| 0.038939
| 0.044582
| 0.044219
| 0.056823
| 0.869684
| 0.850159
| 0.831162
| 0.821007
| 0.808176
| 0.797658
| 0
| 0.012917
| 0.252206
| 107,563
| 2,628
| 101
| 40.929604
| 0.809735
| 0.016623
| 0
| 0.770277
| 0
| 0
| 0.075708
| 0.0367
| 0
| 0
| 0
| 0
| 0.323488
| 1
| 0.067511
| false
| 0.000469
| 0.01594
| 0.000469
| 0.157056
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
030c7de9f7e1401c045078e46717850748e35111
| 5,250
|
py
|
Python
|
tests/unit/language/ast/test_interface_type_extension.py
|
matt-koevort/tartiflette
|
5777866b133d846ce4f8aa03f735fa81832896cd
|
[
"MIT"
] | 530
|
2019-06-04T11:45:36.000Z
|
2022-03-31T09:29:56.000Z
|
tests/unit/language/ast/test_interface_type_extension.py
|
matt-koevort/tartiflette
|
5777866b133d846ce4f8aa03f735fa81832896cd
|
[
"MIT"
] | 242
|
2019-06-04T11:53:08.000Z
|
2022-03-28T07:06:27.000Z
|
tests/unit/language/ast/test_interface_type_extension.py
|
matt-koevort/tartiflette
|
5777866b133d846ce4f8aa03f735fa81832896cd
|
[
"MIT"
] | 36
|
2019-06-21T06:40:27.000Z
|
2021-11-04T13:11:16.000Z
|
import pytest
from tartiflette.language.ast import InterfaceTypeExtensionNode
def test_interfacetypeextensionnode__init__():
interface_type_extension_node = InterfaceTypeExtensionNode(
name="interfaceTypeExtensionName",
directives="interfaceTypeExtensionDirectives",
fields="interfaceTypeExtensionFields",
location="interfaceTypeExtensionLocation",
)
assert interface_type_extension_node.name == "interfaceTypeExtensionName"
assert (
interface_type_extension_node.directives
== "interfaceTypeExtensionDirectives"
)
assert (
interface_type_extension_node.fields == "interfaceTypeExtensionFields"
)
assert (
interface_type_extension_node.location
== "interfaceTypeExtensionLocation"
)
@pytest.mark.parametrize(
"interface_type_extension_node,other,expected",
[
(
InterfaceTypeExtensionNode(
name="interfaceTypeExtensionName",
directives="interfaceTypeExtensionDirectives",
fields="interfaceTypeExtensionFields",
location="interfaceTypeExtensionLocation",
),
Ellipsis,
False,
),
(
InterfaceTypeExtensionNode(
name="interfaceTypeExtensionName",
directives="interfaceTypeExtensionDirectives",
fields="interfaceTypeExtensionFields",
location="interfaceTypeExtensionLocation",
),
InterfaceTypeExtensionNode(
name="interfaceTypeExtensionNameBis",
directives="interfaceTypeExtensionDirectives",
fields="interfaceTypeExtensionFields",
location="interfaceTypeExtensionLocation",
),
False,
),
(
InterfaceTypeExtensionNode(
name="interfaceTypeExtensionName",
directives="interfaceTypeExtensionDirectives",
fields="interfaceTypeExtensionFields",
location="interfaceTypeExtensionLocation",
),
InterfaceTypeExtensionNode(
name="interfaceTypeExtensionName",
directives="interfaceTypeExtensionDirectivesBis",
fields="interfaceTypeExtensionFields",
location="interfaceTypeExtensionLocation",
),
False,
),
(
InterfaceTypeExtensionNode(
name="interfaceTypeExtensionName",
directives="interfaceTypeExtensionDirectives",
fields="interfaceTypeExtensionFields",
location="interfaceTypeExtensionLocation",
),
InterfaceTypeExtensionNode(
name="interfaceTypeExtensionName",
directives="interfaceTypeExtensionDirectives",
fields="interfaceTypeExtensionFieldsBis",
location="interfaceTypeExtensionLocation",
),
False,
),
(
InterfaceTypeExtensionNode(
name="interfaceTypeExtensionName",
directives="interfaceTypeExtensionDirectives",
fields="interfaceTypeExtensionFields",
location="interfaceTypeExtensionLocation",
),
InterfaceTypeExtensionNode(
name="interfaceTypeExtensionName",
directives="interfaceTypeExtensionDirectives",
fields="interfaceTypeExtensionFields",
location="interfaceTypeExtensionLocationBis",
),
False,
),
(
InterfaceTypeExtensionNode(
name="interfaceTypeExtensionName",
directives="interfaceTypeExtensionDirectives",
fields="interfaceTypeExtensionFields",
location="interfaceTypeExtensionLocation",
),
InterfaceTypeExtensionNode(
name="interfaceTypeExtensionName",
directives="interfaceTypeExtensionDirectives",
fields="interfaceTypeExtensionFields",
location="interfaceTypeExtensionLocation",
),
True,
),
],
)
def test_interfacetypeextensionnode__eq__(
interface_type_extension_node, other, expected
):
assert (interface_type_extension_node == other) is expected
@pytest.mark.parametrize(
"interface_type_extension_node,expected",
[
(
InterfaceTypeExtensionNode(
name="interfaceTypeExtensionName",
directives="interfaceTypeExtensionDirectives",
fields="interfaceTypeExtensionFields",
location="interfaceTypeExtensionLocation",
),
"InterfaceTypeExtensionNode("
"name='interfaceTypeExtensionName', "
"directives='interfaceTypeExtensionDirectives', "
"fields='interfaceTypeExtensionFields', "
"location='interfaceTypeExtensionLocation')",
)
],
)
def test_interfacetypeextensionnode__repr__(
interface_type_extension_node, expected
):
assert interface_type_extension_node.__repr__() == expected
| 36.206897
| 78
| 0.616571
| 228
| 5,250
| 13.969298
| 0.162281
| 0.131868
| 0.228571
| 0.269388
| 0.83956
| 0.789639
| 0.740031
| 0.710518
| 0.710518
| 0.657143
| 0
| 0
| 0.31181
| 5,250
| 144
| 79
| 36.458333
| 0.881539
| 0
| 0
| 0.671533
| 0
| 0
| 0.363429
| 0.362857
| 0
| 0
| 0
| 0
| 0.043796
| 1
| 0.021898
| false
| 0
| 0.014599
| 0
| 0.036496
| 0
| 0
| 0
| 1
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
03153f7cf1c50ca8cd8cf5e6dbf17444f5fb56d1
| 22,597
|
py
|
Python
|
tests/data_namespace_test.py
|
XD-embedded/xd-build-core
|
357e4d78d35456d6906aa30151ddc989781227ab
|
[
"MIT"
] | 1
|
2020-11-27T23:34:53.000Z
|
2020-11-27T23:34:53.000Z
|
tests/data_namespace_test.py
|
XD-embedded/xd-build-core
|
357e4d78d35456d6906aa30151ddc989781227ab
|
[
"MIT"
] | 6
|
2015-10-30T12:22:56.000Z
|
2016-08-25T09:38:48.000Z
|
tests/data_namespace_test.py
|
XD-embedded/xd-build-core
|
357e4d78d35456d6906aa30151ddc989781227ab
|
[
"MIT"
] | null | null | null |
from xd.build.core.data.namespace import *
from xd.build.core.data.expr import Expression
from xd.build.core.data.string import String
from xd.build.core.data.list import List
from xd.build.core.data.dict import Dict
from xd.build.core.data.func import Function
from xd.build.core.data.num import *
import unittest
class tests(unittest.case.TestCase):
def setUp(self):
self.ns = Namespace()
def test_set_get_1(self):
self.ns['FOO'] = 'foo'
self.assertEqual(self.ns['FOO'].get(), 'foo')
def test_set_get_2(self):
self.ns['FOO'] = String('foo')
self.assertEqual(self.ns['FOO'].get(), 'foo')
def test_set2_get_1(self):
self.ns['FOO'] = 'foo'
self.ns['FOO'] = 'bar'
self.assertEqual(self.ns['FOO'].get(), 'bar')
def test_set_variable(self):
self.ns['FOO'] = 'foo'
self.ns['BAR'] = 'bar'
self.ns['BAR'] = self.ns['FOO']
self.ns['FOO'] = 'hello world'
self.assertEqual(self.ns['FOO'].get(), 'hello world')
self.assertEqual(self.ns['BAR'].get(), 'hello world')
def test_set_get_bool(self):
self.ns['FOO'] = True
self.assertEqual(self.ns['FOO'].get(), True)
def test_set_get_int(self):
self.ns['FOO'] = 42
self.assertEqual(self.ns['FOO'].get(), 42)
def test_set_get_float(self):
self.ns['FOO'] = 3.14
self.assertEqual(self.ns['FOO'].get(), 3.14)
def test_set_bad_type(self):
self.ns['FOO'] = 'foo'
with self.assertRaises(TypeError):
self.ns['FOO'] = 42
def test_get_keyerror(self):
with self.assertRaises(KeyError):
self.ns['FOO']
def test_get_typeerror(self):
self.ns['FOO'] = String()
self.ns['I'] = 42
self.ns['FOO'] = Expression('I')
with self.assertRaises(TypeError):
self.ns['FOO'].get()
def test_del(self):
self.ns['FOO'] = 'foo'
del self.ns['FOO']
with self.assertRaises(KeyError):
self.ns['FOO']
def test_eval_source_1(self):
self.ns['FOO'] = 'foo'
self.ns['BAR'] = 'bar'
self.assertEqual(self.ns.eval('FOO+BAR'), 'foobar')
def test_eval_source_2(self):
self.ns['FOO'] = 'foo'
with self.assertRaises(NameError):
self.ns.eval('FOO+BAR')
def test_eval_expression_1(self):
self.ns['FOO'] = 'foo'
self.ns['BAR'] = 'bar'
expr = Expression('FOO+BAR')
self.assertEqual(self.ns.eval(expr), 'foobar')
def test_eval_expression_2(self):
self.ns['FOO'] = 'foo'
expr = Expression('FOO+BAR')
with self.assertRaises(NameError):
self.ns.eval(expr)
def test_eval_globals(self):
self.ns['FOO'] = 'foo'
BAR = 'bar'
expr = Expression('FOO+BAR')
self.assertEqual(self.ns.eval(expr, g={'BAR': BAR}), 'foobar')
def test_append_variable(self):
self.ns['FOO'] = 'foo'
self.ns['BAR'] = 'bar'
self.ns['FOO'].append(self.ns['BAR'])
self.assertEqual(self.ns['FOO'].get(), 'foobar')
def test_append_to_expr(self):
self.ns['FOO'] = 'foo'
self.ns['FOOBAR'] = String(Expression('FOO'))
self.ns['FOOBAR'].append('bar')
self.assertEqual(self.ns['FOO'].get(), 'foo')
self.assertEqual(self.ns['FOOBAR'].get(), 'foobar')
def test_append_expr(self):
self.ns['FOO'] = 'foo'
self.ns['BAR'] = 'bar'
self.ns['FOO'].append(Expression('BAR'))
self.assertEqual(self.ns['FOO'].get(), 'foobar')
def test_append_expr_none_1(self):
self.ns['FOO'] = 'foo'
self.ns['BAR'] = String()
self.ns['FOO'].append(Expression('BAR'))
self.assertEqual(self.ns['FOO'].get(), 'foo')
def test_append_expr_none_2(self):
self.ns['FOO'] = String()
self.ns['BAR'] = 'bar'
self.ns['FOO'].append(Expression('BAR'))
self.assertEqual(self.ns['FOO'].get(), 'bar')
def test_append_expr_typeerror(self):
self.ns['FOO'] = String()
self.ns['BAR'] = 42
self.ns['FOO'].append(Expression('BAR'))
with self.assertRaises(TypeError):
self.ns['FOO'].get()
def test_prepend_variable(self):
self.ns['FOO'] = 'foo'
self.ns['BAR'] = 'bar'
self.ns['FOO'].prepend(self.ns['BAR'])
self.assertEqual(self.ns['FOO'].get(), 'barfoo')
def test_prepend_expr(self):
self.ns['FOO'] = 'foo'
self.ns['BAR'] = 'bar'
self.ns['FOO'].prepend(Expression('BAR'))
self.assertEqual(self.ns['FOO'].get(), 'barfoo')
def test_prepend_expr_none_1(self):
self.ns['FOO'] = 'foo'
self.ns['BAR'] = String()
self.ns['FOO'].prepend(Expression('BAR'))
self.assertEqual(self.ns['FOO'].get(), 'foo')
def test_prepend_expr_none_2(self):
self.ns['FOO'] = String()
self.ns['BAR'] = 'bar'
self.ns['FOO'].prepend(Expression('BAR'))
self.assertEqual(self.ns['FOO'].get(), 'bar')
def test_prepend_expr_typeerror(self):
self.ns['FOO'] = String()
self.ns['BAR'] = 42
self.ns['FOO'].prepend(Expression('BAR'))
with self.assertRaises(TypeError):
self.ns['FOO'].get()
def test_multibinding(self):
FOO = self.ns['FOO'] = 'foo'
with self.assertRaises(MultiBinding):
self.ns['BAR'] = self.ns['FOO']
def test_expr_as_init(self):
FOO = self.ns['FOO'] = 'foo'
self.ns['BAR'] = Expression('FOO')
self.assertEqual(self.ns['FOO'].get(), 'foo')
self.assertEqual(self.ns['BAR'].get(), 'foo')
def test_init_with_unsupported(self):
with self.assertRaises(TypeError):
self.ns['BAR'] = set()
def test_init_with_other_variable(self):
self.ns['FOO'] = 'foo'
FOO = String(self.ns['FOO'])
self.ns['BAR'] = FOO
self.ns['FOO'] = 'bar'
self.assertEqual(self.ns['FOO'].get(), 'bar')
self.assertEqual(self.ns['BAR'].get(), 'bar')
def test_str_set_if_1(self):
self.ns['FOOBAR'] = 'foo'
self.ns['BAR'] = 'b'
self.ns['FOOBAR'].set_if(Expression('BAR'), 'bar')
self.assertEqual(self.ns['FOOBAR'].get(), 'bar')
def test_str_set_if_2(self):
self.ns['FOOBAR'] = 'foo'
self.ns['BAR'] = ''
self.ns['FOOBAR'].set_if(Expression('BAR'), 'bar')
self.assertEqual(self.ns['FOOBAR'].get(), 'foo')
def test_str_set_if_3(self):
self.ns['FOOBAR'] = 'foo'
self.ns['BAR'] = String()
self.ns['FOOBAR'].set_if(Expression('BAR'), 'bar')
self.assertEqual(self.ns['FOOBAR'].get(), 'foo')
def test_str_set_if_4(self):
self.ns['FOOBAR'] = 'foo'
self.ns['FOOBAR'].set_if(Expression('BAR'), 'bar')
self.assertEqual(self.ns['FOOBAR'].get(), 'foo')
def test_str_set_if_5(self):
self.ns['FOOBAR'] = 'hello world'
self.ns['FOO'] = 'f'
self.ns['BAR'] = 'b'
self.ns['FOOBAR'].set_if(Expression('FOO'), 'foo')
self.ns['FOOBAR'].set_if(Expression('BAR'), 'bar')
self.assertEqual(self.ns['FOOBAR'].get(), 'bar')
def test_str_set_if_6(self):
self.ns['FOOBAR'] = 'hello world'
self.ns['FOO'] = 'f'
self.ns['BAR'] = 'b'
self.ns['FOOBAR'].set_if(Expression('BAR'), 'bar')
self.ns['FOOBAR'].set_if(Expression('FOO'), 'foo')
self.assertEqual(self.ns['FOOBAR'].get(), 'foo')
def test_str_set_if_7(self):
self.ns['FOOBAR'] = 'foo'
self.ns['BAR'] = 'b'
self.ns['FOOBAR'].set_if(self.ns['BAR'], 'bar')
self.assertEqual(self.ns['FOOBAR'].get(), 'bar')
def test_str_set_if_8(self):
self.ns['FOOBAR'] = 'hello world'
self.ns['BAR'] = 'bar'
self.ns['FOO'] = 'foo'
self.ns['FOOBAR'].set_if(Expression('BAR'), Expression('FOO'))
self.assertEqual(self.ns['FOOBAR'].get(), 'foo')
def test_str_set_if_9(self):
self.ns['FOOBAR'] = 'hello world'
self.ns['BAR'] = 'bar'
self.ns['FOO'] = 'foo'
self.ns['FOOBAR'].set_if(Expression('BAR'), self.ns['FOO'])
self.assertEqual(self.ns['FOOBAR'].get(), 'foo')
def test_str_set_if_typeerror_1(self):
self.ns['FOOBAR'] = 'hello world'
self.ns['BAR'] = True
with self.assertRaises(TypeError):
self.ns['FOOBAR'].set_if(Expression('BAR'), 42)
def test_str_set_if_typeerror_2(self):
self.ns['FOOBAR'] = 'hello world'
self.ns['BAR'] = True
self.ns['FOO'] = 42
self.ns['FOOBAR'].set_if(Expression('BAR'), Expression('FOO'))
with self.assertRaises(TypeError):
self.ns['FOOBAR'].get()
# --- String.append_if tests: conditional string concatenation ---
# NOTE(review): indentation reconstructed from a whitespace-flattened dump;
# verify nesting against the upstream source.

def test_str_append_if_1(self):
    """A truthy (non-empty) condition appends the suffix."""
    self.ns['FOO'] = 'foo'
    self.ns['BAR'] = 'b'
    self.ns['FOO'].append_if(Expression('BAR'), 'bar')
    self.assertEqual(self.ns['FOO'].get(), 'foobar')

def test_str_append_if_2(self):
    """An empty-string condition is falsy: nothing is appended."""
    self.ns['FOO'] = 'foo'
    self.ns['BAR'] = ''
    self.ns['FOO'].append_if(Expression('BAR'), 'bar')
    self.assertEqual(self.ns['FOO'].get(), 'foo')

def test_str_append_if_3(self):
    """An unset String() condition is falsy: nothing is appended."""
    self.ns['FOO'] = 'foo'
    self.ns['BAR'] = String()
    self.ns['FOO'].append_if(Expression('BAR'), 'bar')
    self.assertEqual(self.ns['FOO'].get(), 'foo')

def test_str_append_if_4(self):
    """An undefined condition name leaves the value unchanged."""
    self.ns['FOO'] = 'foo'
    self.ns['FOO'].append_if(Expression('BAR'), 'bar')
    self.assertEqual(self.ns['FOO'].get(), 'foo')

def test_str_append_if_5(self):
    """A namespace entry can serve directly as the (truthy) condition."""
    self.ns['FOO'] = 'foo'
    self.ns['BAR'] = 'b'
    BAR = self.ns['BAR']
    self.ns['FOO'].append_if(BAR, 'bar')
    self.assertEqual(self.ns['FOO'].get(), 'foobar')

def test_str_append_if_6(self):
    """A namespace entry holding an unset String() is a falsy condition."""
    self.ns['FOO'] = 'foo'
    self.ns['BAR'] = String()
    BAR = self.ns['BAR']
    self.ns['FOO'].append_if(BAR, 'bar')
    self.assertEqual(self.ns['FOO'].get(), 'foo')

def test_str_append_if_7(self):
    """The appended value may be another namespace entry."""
    self.ns['FOO'] = 'foo'
    self.ns['B'] = 'b'
    self.ns['BAR'] = 'bar'
    BAR = self.ns['BAR']
    self.ns['FOO'].append_if(Expression('B'), BAR)
    self.assertEqual(self.ns['FOO'].get(), 'foobar')

def test_str_append_if_8(self):
    """An undefined condition skips the append even with a valid value entry."""
    self.ns['FOO'] = 'foo'
    self.ns['BAR'] = 'bar'
    BAR = self.ns['BAR']
    self.ns['FOO'].append_if(Expression('B'), BAR)
    self.assertEqual(self.ns['FOO'].get(), 'foo')

def test_str_append_if_9(self):
    """Multiple append_if calls apply in order, skipping falsy conditions."""
    self.ns['FOO'] = 'foo'
    self.ns['X'] = 'x'
    self.ns['Y'] = ''
    self.ns['Z'] = 'z'
    self.ns['FOO'].append_if(Expression('X'), 'xxx')
    self.ns['FOO'].append_if(Expression('Y'), 'yyy')
    self.ns['FOO'].append_if(Expression('Z'), 'zzz')
    self.assertEqual(self.ns['FOO'].get(), 'fooxxxzzz')

def test_str_append_if_typeerror_1(self):
    """Appending a non-string literal (42) raises at append_if time."""
    self.ns['FOO'] = 'foo'
    self.ns['b'] = True
    with self.assertRaises(TypeError):
        self.ns['FOO'].append_if(Expression('b'), 42)

def test_str_append_if_typeerror_2(self):
    """A type mismatch routed through an Expression is only detected at get()."""
    self.ns['FOO'] = 'foo'
    self.ns['I'] = 42
    self.ns['FOO'].append_if(Expression('I'), Expression('I'))
    with self.assertRaises(TypeError):
        self.ns['FOO'].get()
# --- String.prepend_if tests: mirror of append_if, prefixing instead ---
# NOTE(review): indentation reconstructed from a whitespace-flattened dump;
# verify nesting against the upstream source.

def test_str_prepend_if_1(self):
    """A truthy (non-empty) condition prepends the prefix."""
    self.ns['FOO'] = 'foo'
    self.ns['BAR'] = 'b'
    self.ns['FOO'].prepend_if(Expression('BAR'), 'bar')
    self.assertEqual(self.ns['FOO'].get(), 'barfoo')

def test_str_prepend_if_2(self):
    """An empty-string condition is falsy: nothing is prepended."""
    self.ns['FOO'] = 'foo'
    self.ns['BAR'] = ''
    self.ns['FOO'].prepend_if(Expression('BAR'), 'bar')
    self.assertEqual(self.ns['FOO'].get(), 'foo')

def test_str_prepend_if_3(self):
    """An unset String() condition is falsy: nothing is prepended."""
    self.ns['FOO'] = 'foo'
    self.ns['BAR'] = String()
    self.ns['FOO'].prepend_if(Expression('BAR'), 'bar')
    self.assertEqual(self.ns['FOO'].get(), 'foo')

def test_str_prepend_if_4(self):
    """An undefined condition name leaves the value unchanged."""
    self.ns['FOO'] = 'foo'
    self.ns['FOO'].prepend_if(Expression('BAR'), 'bar')
    self.assertEqual(self.ns['FOO'].get(), 'foo')

def test_str_prepend_if_5(self):
    """A namespace entry can serve directly as the (truthy) condition."""
    self.ns['FOO'] = 'foo'
    self.ns['BAR'] = 'b'
    BAR = self.ns['BAR']
    self.ns['FOO'].prepend_if(BAR, 'bar')
    self.assertEqual(self.ns['FOO'].get(), 'barfoo')

def test_str_prepend_if_6(self):
    """A namespace entry holding an unset String() is a falsy condition."""
    self.ns['FOO'] = 'foo'
    self.ns['BAR'] = String()
    BAR = self.ns['BAR']
    self.ns['FOO'].prepend_if(BAR, 'bar')
    self.assertEqual(self.ns['FOO'].get(), 'foo')

def test_str_prepend_if_7(self):
    """The prepended value may be another namespace entry."""
    self.ns['FOO'] = 'foo'
    self.ns['B'] = 'b'
    self.ns['BAR'] = 'bar'
    BAR = self.ns['BAR']
    self.ns['FOO'].prepend_if(Expression('B'), BAR)
    self.assertEqual(self.ns['FOO'].get(), 'barfoo')

def test_str_prepend_if_8(self):
    """An undefined condition skips the prepend even with a valid value entry."""
    self.ns['FOO'] = 'foo'
    self.ns['BAR'] = 'bar'
    BAR = self.ns['BAR']
    self.ns['FOO'].prepend_if(Expression('B'), BAR)
    self.assertEqual(self.ns['FOO'].get(), 'foo')

def test_str_prepend_if_9(self):
    """Each prepend goes to the front, so later calls end up leftmost."""
    self.ns['FOO'] = 'foo'
    self.ns['X'] = 'x'
    self.ns['Y'] = ''
    self.ns['Z'] = 'z'
    self.ns['FOO'].prepend_if(Expression('X'), 'xxx')
    self.ns['FOO'].prepend_if(Expression('Y'), 'yyy')
    self.ns['FOO'].prepend_if(Expression('Z'), 'zzz')
    self.assertEqual(self.ns['FOO'].get(), 'zzzxxxfoo')

def test_str_prepend_if_typeerror_1(self):
    """Prepending a non-string literal (42) raises at prepend_if time."""
    self.ns['FOO'] = 'foo'
    self.ns['b'] = True
    with self.assertRaises(TypeError):
        self.ns['FOO'].prepend_if(Expression('b'), 42)

def test_str_prepend_if_typeerror_2(self):
    """A type mismatch routed through an Expression is only detected at get()."""
    self.ns['FOO'] = 'foo'
    self.ns['I'] = 42
    self.ns['FOO'].prepend_if(Expression('I'), Expression('I'))
    with self.assertRaises(TypeError):
        self.ns['FOO'].get()
# --- str() tests: each entry renders as "<TypeName>(<entry name>)" ---
# NOTE(review): indentation reconstructed from a whitespace-flattened dump.

def test_str_string(self):
    """A string-valued entry stringifies as String(<name>)."""
    self.ns['FOO'] = ''
    self.assertEqual(str(self.ns['FOO']), 'String(FOO)')

def test_str_bool(self):
    """A bool-valued entry stringifies as Bool(<name>)."""
    self.ns['FOO'] = True
    self.assertEqual(str(self.ns['FOO']), 'Bool(FOO)')

def test_str_int(self):
    """An int-valued entry stringifies as Int(<name>)."""
    self.ns['FOO'] = 42
    self.assertEqual(str(self.ns['FOO']), 'Int(FOO)')

def test_str_float(self):
    """A float-valued entry stringifies as Float(<name>)."""
    self.ns['FOO'] = 3.14
    self.assertEqual(str(self.ns['FOO']), 'Float(FOO)')
# --- List tests: conditional set/prepend/append/remove/extend and items ---
# NOTE(review): indentation reconstructed from a whitespace-flattened dump;
# verify nesting against the upstream source.

def test_list_set_if_1(self):
    """A truthy condition replaces the whole list."""
    self.ns['FOOBAR'] = ['foo']
    self.ns['BAR'] = True
    self.ns['FOOBAR'].set_if(Expression('BAR'), ['bar'])
    self.assertEqual(self.ns['FOOBAR'].get(), ['bar'])

def test_list_set_if_2(self):
    """A falsy condition leaves the list unchanged."""
    self.ns['FOOBAR'] = ['foo']
    self.ns['BAR'] = False
    self.ns['FOOBAR'].set_if(Expression('BAR'), ['bar'])
    self.assertEqual(self.ns['FOOBAR'].get(), ['foo'])

def test_list_set_if_3(self):
    """An undefined condition name leaves the list unchanged."""
    self.ns['FOOBAR'] = ['foo']
    self.ns['FOOBAR'].set_if(Expression('BAR'), ['bar'])
    self.assertEqual(self.ns['FOOBAR'].get(), ['foo'])

def test_list_prepend_if_1(self):
    """A truthy condition inserts the element at the front."""
    self.ns['FOO'] = ['foo']
    self.ns['BAR'] = True
    self.ns['FOO'].prepend_if(Expression('BAR'), 'bar')
    self.assertEqual(self.ns['FOO'].get(), ['bar', 'foo'])

def test_list_prepend_if_2(self):
    """A falsy condition skips the prepend."""
    self.ns['FOO'] = ['foo']
    self.ns['BAR'] = False
    self.ns['FOO'].prepend_if(Expression('BAR'), 'bar')
    self.assertEqual(self.ns['FOO'].get(), ['foo'])

def test_list_prepend_if_3(self):
    """An undefined condition skips the prepend."""
    self.ns['FOO'] = ['foo']
    self.ns['FOO'].prepend_if(Expression('BAR'), 'bar')
    self.assertEqual(self.ns['FOO'].get(), ['foo'])

def test_list_append_if_1(self):
    """A truthy condition appends the element."""
    self.ns['FOO'] = ['foo']
    self.ns['BAR'] = True
    self.ns['FOO'].append_if(Expression('BAR'), 'bar')
    self.assertEqual(self.ns['FOO'].get(), ['foo', 'bar'])

def test_list_append_if_2(self):
    """A falsy condition skips the append."""
    self.ns['FOO'] = ['foo']
    self.ns['BAR'] = False
    self.ns['FOO'].append_if(Expression('BAR'), 'bar')
    self.assertEqual(self.ns['FOO'].get(), ['foo'])

def test_list_append_if_3(self):
    """An undefined condition skips the append."""
    self.ns['FOO'] = ['foo']
    self.ns['FOO'].append_if(Expression('BAR'), 'bar')
    self.assertEqual(self.ns['FOO'].get(), ['foo'])

def test_list_remove_1(self):
    """remove() accepts an Expression resolving to the element to drop."""
    self.ns['L'] = ['foo', 'bar']
    self.ns['BAR'] = 'bar'
    self.ns['L'].remove(Expression('BAR'))
    self.assertEqual(self.ns['L'].get(), ['foo'])

def test_list_remove_2(self):
    """remove() also accepts a namespace entry as the element to drop."""
    self.ns['L'] = ['foo', 'bar']
    self.ns['BAR'] = 'bar'
    self.ns['L'].remove(self.ns['BAR'])
    self.assertEqual(self.ns['L'].get(), ['foo'])

def test_list_remove_if_1(self):
    """A truthy condition removes the element."""
    self.ns['L'] = ['foo', 'bar']
    self.ns['BAR'] = True
    self.ns['L'].remove_if(Expression('BAR'), 'bar')
    self.assertEqual(self.ns['L'].get(), ['foo'])

def test_list_remove_if_2(self):
    """A falsy condition skips the removal."""
    self.ns['L'] = ['foo', 'bar']
    self.ns['BAR'] = False
    self.ns['L'].remove_if(Expression('BAR'), 'bar')
    self.assertEqual(self.ns['L'].get(), ['foo', 'bar'])

def test_list_remove_if_3(self):
    """An undefined condition skips the removal."""
    self.ns['L'] = ['foo', 'bar']
    self.ns['L'].remove_if(Expression('BAR'), 'bar')
    self.assertEqual(self.ns['L'].get(), ['foo', 'bar'])

def test_list_extend_if_1(self):
    """A truthy condition extends the list with all given elements."""
    self.ns['L'] = ['foo', 'bar']
    self.ns['BAR'] = True
    self.ns['L'].extend_if(Expression('BAR'), ['hello', 'world'])
    self.assertEqual(self.ns['L'].get(), ['foo', 'bar', 'hello', 'world'])

def test_list_extend_if_2(self):
    """A falsy condition skips the extend."""
    self.ns['L'] = ['foo', 'bar']
    self.ns['BAR'] = False
    self.ns['L'].extend_if(Expression('BAR'), ['hello', 'world'])
    self.assertEqual(self.ns['L'].get(), ['foo', 'bar'])

def test_list_extend_if_3(self):
    """An undefined condition skips the extend."""
    self.ns['L'] = ['foo', 'bar']
    self.ns['L'].extend_if(Expression('BAR'), ['hello', 'world'])
    self.assertEqual(self.ns['L'].get(), ['foo', 'bar'])

def test_list_item_invalid(self):
    """A list item resolving to an unsupported type (Function) fails at get()."""
    self.ns['l'] = []

    def foo():
        return 42

    self.ns['f'] = Function(foo)
    self.ns['l'].append(Expression('f'))
    with self.assertRaises(TypeError):
        self.ns['l'].get()
# --- Dict tests: conditional update and typed item access ---
# NOTE(review): indentation reconstructed from a whitespace-flattened dump;
# verify nesting against the upstream source.

def test_dict_update_if_1(self):
    """A truthy condition merges the new keys into the dict."""
    self.ns['D'] = {'foo': 42}
    self.ns['BAR'] = True
    self.ns['D'].update_if(Expression('BAR'), {'bar': 43})
    self.assertEqual(self.ns['D'].get(), {'foo': 42, 'bar': 43})

def test_dict_update_if_2(self):
    """A falsy condition skips the merge."""
    self.ns['D'] = {'foo': 42}
    self.ns['BAR'] = False
    self.ns['D'].update_if(Expression('BAR'), {'bar': 43})
    self.assertEqual(self.ns['D'].get(), {'foo': 42})

def test_dict_update_if_3(self):
    """An undefined condition skips the merge."""
    self.ns['D'] = {'foo': 42}
    self.ns['D'].update_if(Expression('BAR'), {'bar': 43})
    self.assertEqual(self.ns['D'].get(), {'foo': 42})

def test_dict_update_if_4(self):
    """The merge payload may itself be an Expression naming another Dict."""
    self.ns['D'] = {'foo': 42}
    self.ns['E'] = Dict()
    self.ns['BAR'] = False
    self.ns['D'].update_if(Expression('BAR'), Expression('E'))
    self.assertEqual(self.ns['D'].get(), {'foo': 42})

def test_dict_item_1(self):
    """Assigning an int to a dict item wraps it in an Int entry."""
    self.ns['D'] = {}
    self.ns['D']['i'] = 42
    self.assertIsInstance(self.ns['D']['i'], Int)
    self.assertEqual(self.ns['D']['i'].get(), 42)

def test_dict_item_2(self):
    """set_if on an item with an undefined condition keeps the old value."""
    self.ns['D'] = {}
    self.ns['D']['i'] = 42
    self.ns['D']['i'].set_if(Expression('FOO'), 43)
    self.assertIsInstance(self.ns['D']['i'], Int)
    self.assertEqual(self.ns['D']['i'].get(), 42)

def test_dict_item_3(self):
    """Defining the condition AFTER set_if still triggers the replacement."""
    self.ns['D'] = {}
    self.ns['D']['i'] = 42
    self.ns['D']['i'].set_if(Expression('FOO'), 43)
    self.ns['FOO'] = True
    self.assertIsInstance(self.ns['D']['i'], Int)
    self.assertEqual(self.ns['D']['i'].get(), 43)

def test_dict_item_4(self):
    """List items support append_if with a later-defined condition."""
    self.ns['D'] = {}
    self.ns['D']['i'] = [42]
    self.ns['D']['i'].append_if(Expression('FOO'), 43)
    self.ns['FOO'] = True
    self.assertIsInstance(self.ns['D']['i'], List)
    self.assertEqual(self.ns['D']['i'].get(), [42, 43])

def test_dict_item_5(self):
    """Dict items support update_if with a later-defined condition."""
    self.ns['D'] = {}
    self.ns['D']['i'] = {'foo': 42}
    self.ns['D']['i'].update_if(Expression('FOO'), {'bar': 43})
    self.ns['FOO'] = True
    self.assertIsInstance(self.ns['D']['i'], Dict)
    self.assertEqual(self.ns['D']['i'].get(), {'foo': 42, 'bar': 43})

def test_dict_item_6(self):
    """update_if overwrites keys that already exist."""
    self.ns['D'] = {}
    self.ns['D']['i'] = {'foo': 42}
    self.ns['D']['i'].update_if(Expression('FOO'), {'foo': 43})
    self.ns['FOO'] = True
    self.assertIsInstance(self.ns['D']['i'], Dict)
    self.assertEqual(self.ns['D']['i'].get(), {'foo': 43})

def test_dict_item_implicit_expr_1(self):
    """Storing an entry inside a dict keeps a live reference, not a copy."""
    self.ns['D'] = {}
    self.ns['d'] = {'foo': 42}
    self.ns['D']['i'] = self.ns['d']
    self.ns['d']['foo'] = 43
    self.assertEqual(self.ns['D'].get()['i'], {'foo': 43})

def test_dict_item_bad(self):
    """Storing the namespace itself as a dict item raises immediately."""
    self.ns['D'] = {}
    with self.assertRaises(TypeError):
        self.ns['D']['i'] = self.ns

def test_dict_item_invalid(self):
    """An item resolving to an unsupported type (Function) fails at get()."""
    self.ns['D'] = {}

    def foo():
        return 42

    self.ns['f'] = Function(foo)
    self.ns['D']['i'] = Expression('f')
    with self.assertRaises(TypeError):
        self.ns['D'].get()
# --- Nested-scope tests: expressions inside nested containers resolve
# --- against the namespace the container is eventually attached to ---
# NOTE(review): indentation reconstructed from a whitespace-flattened dump.

def test_nested_scope_1(self):
    """A set_if payload built before attachment resolves names lazily."""
    D = Dict({'foo': Dict({'bar': 'baah'})})
    D['foo'].set_if(Expression('BAR'),
                    {'bar': String(Expression('hello'))})
    self.ns['D'] = D
    self.ns['hello'] = 'booh'
    # Condition 'BAR' is still undefined: original value is kept.
    self.assertEqual(self.ns['D'].get()['foo']['bar'], 'baah')
    self.ns['BAR'] = True
    # Now the payload applies, resolving Expression('hello') to 'booh'.
    self.assertEqual(self.ns['D'].get()['foo']['bar'], 'booh')

def test_nested_scope_2(self):
    """Unconditional update on a nested dict applies before attachment."""
    D = Dict({'foo': Dict({'bar': 42})})
    D['foo'].update({'bar': 43})
    self.ns['D'] = D
    self.assertEqual(self.ns['D'].get()['foo']['bar'], 43)

def test_nested_scope_3(self):
    """update_if payload values (Float(Expression)) resolve lazily too."""
    D = Dict({'foo': Dict({'bar': 42})})
    D['foo'].update_if(Expression('BAR'),
                       {'bar': Float(Expression('pi'))})
    self.ns['D'] = D
    self.ns['BAR'] = True
    self.ns['pi'] = 3.14
    self.assertEqual(self.ns['D'].get()['foo']['bar'], 3.14)
| 34.764615
| 78
| 0.538523
| 3,118
| 22,597
| 3.768121
| 0.033034
| 0.207337
| 0.134054
| 0.15014
| 0.919568
| 0.87999
| 0.822879
| 0.775555
| 0.716061
| 0.690952
| 0
| 0.01149
| 0.241227
| 22,597
| 649
| 79
| 34.818182
| 0.673743
| 0
| 0
| 0.630515
| 0
| 0
| 0.106563
| 0
| 0
| 0
| 0
| 0
| 0.207721
| 1
| 0.193015
| false
| 0
| 0.014706
| 0.003676
| 0.213235
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
0317d33db00641c8632076d25e16d90972c26ea2
| 82
|
py
|
Python
|
cmake-build-debug/devel/lib/python2.7/dist-packages/kinect2_tracker/msg/__init__.py
|
myboyhood/intention_recognize
|
30e2b7e4e8c8a5df59989500ff92a0b807cf15f2
|
[
"MIT"
] | null | null | null |
cmake-build-debug/devel/lib/python2.7/dist-packages/kinect2_tracker/msg/__init__.py
|
myboyhood/intention_recognize
|
30e2b7e4e8c8a5df59989500ff92a0b807cf15f2
|
[
"MIT"
] | null | null | null |
cmake-build-debug/devel/lib/python2.7/dist-packages/kinect2_tracker/msg/__init__.py
|
myboyhood/intention_recognize
|
30e2b7e4e8c8a5df59989500ff92a0b807cf15f2
|
[
"MIT"
] | null | null | null |
from ._bounding_box import *
from ._user_IDs import *
from ._user_points import *
| 20.5
| 28
| 0.780488
| 12
| 82
| 4.833333
| 0.583333
| 0.344828
| 0.482759
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.146341
| 82
| 3
| 29
| 27.333333
| 0.828571
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
0636d326475e08bd9d33f6ec4c15e278d333f1e5
| 4,554
|
py
|
Python
|
tests/core/test_tile_generator.py
|
virtualritz/tessagon
|
af1b1b12a6e92b226f76bfd616bde90d5e15b07c
|
[
"Apache-2.0"
] | 199
|
2017-10-27T12:13:08.000Z
|
2022-03-24T09:54:48.000Z
|
tests/core/test_tile_generator.py
|
virtualritz/tessagon
|
af1b1b12a6e92b226f76bfd616bde90d5e15b07c
|
[
"Apache-2.0"
] | 4
|
2018-03-23T03:15:37.000Z
|
2019-11-06T15:58:24.000Z
|
tests/core/test_tile_generator.py
|
virtualritz/tessagon
|
af1b1b12a6e92b226f76bfd616bde90d5e15b07c
|
[
"Apache-2.0"
] | 20
|
2017-10-27T14:41:08.000Z
|
2021-12-28T10:12:59.000Z
|
from core_tests_base import CoreTestsBase, FakeTessagon, FakeTileSubClass
from tessagon.core.tile_generator import TileGenerator
class TestTileGenerator(CoreTestsBase):
    """Neighbor wiring of a 2x3 tile grid under the four cyclicity modes.

    Each test builds the same 2x3 grid (u_num=2, v_num=3) over the same
    u/v ranges and only varies u_cyclic/v_cyclic, then checks the corner
    tiles' left/right/bottom/top neighbors (None at a non-cyclic edge,
    wrap-around at a cyclic one).
    NOTE(review): indentation reconstructed from a whitespace-flattened
    dump; verify nesting against the upstream source.
    """

    def test_non_cyclic(self):
        """No wrap-around: both edges of both axes have None neighbors."""
        tessagon = FakeTessagon()
        tile_generator = TileGenerator(tessagon,
                                       u_range=[0.5, 1.0], v_range=[2.5, 4.0],
                                       u_num=2, v_num=3,
                                       u_cyclic=False, v_cyclic=False)
        tiles = tile_generator.initialize_tiles(FakeTileSubClass)
        assert len(tiles) == 2
        assert len(tiles[0]) == 3
        assert len(tiles[1]) == 3
        tile_generator.initialize_neighbors(tiles)
        assert(tiles[0][0].get_neighbor_tile(['left']) is None)
        assert(tiles[0][0].get_neighbor_tile(['bottom']) is None)
        assert(tiles[0][0].get_neighbor_tile(['right']) is tiles[1][0])
        assert(tiles[0][0].get_neighbor_tile(['top']) is tiles[0][1])
        assert(tiles[1][2].get_neighbor_tile(['left']) is tiles[0][2])
        assert(tiles[1][2].get_neighbor_tile(['bottom']) is tiles[1][1])
        assert(tiles[1][2].get_neighbor_tile(['right']) is None)
        assert(tiles[1][2].get_neighbor_tile(['top']) is None)

    def test_u_cyclic(self):
        """u wraps: left/right neighbors wrap across the u edge only."""
        tessagon = FakeTessagon()
        tile_generator = TileGenerator(tessagon,
                                       u_range=[0.5, 1.0], v_range=[2.5, 4.0],
                                       u_num=2, v_num=3,
                                       u_cyclic=True, v_cyclic=False)
        tiles = tile_generator.initialize_tiles(FakeTileSubClass)
        assert len(tiles) == 2
        assert len(tiles[0]) == 3
        assert len(tiles[1]) == 3
        tile_generator.initialize_neighbors(tiles)
        assert(tiles[0][0].get_neighbor_tile(['left']) is tiles[1][0])
        assert(tiles[0][0].get_neighbor_tile(['bottom']) is None)
        assert(tiles[0][0].get_neighbor_tile(['right']) is tiles[1][0])
        assert(tiles[0][0].get_neighbor_tile(['top']) is tiles[0][1])
        assert(tiles[1][2].get_neighbor_tile(['left']) is tiles[0][2])
        assert(tiles[1][2].get_neighbor_tile(['bottom']) is tiles[1][1])
        assert(tiles[1][2].get_neighbor_tile(['right']) is tiles[0][2])
        assert(tiles[1][2].get_neighbor_tile(['top']) is None)

    def test_v_cyclic(self):
        """v wraps: top/bottom neighbors wrap across the v edge only."""
        tessagon = FakeTessagon()
        tile_generator = TileGenerator(tessagon,
                                       u_range=[0.5, 1.0], v_range=[2.5, 4.0],
                                       u_num=2, v_num=3,
                                       u_cyclic=False, v_cyclic=True)
        tiles = tile_generator.initialize_tiles(FakeTileSubClass)
        assert len(tiles) == 2
        assert len(tiles[0]) == 3
        assert len(tiles[1]) == 3
        tile_generator.initialize_neighbors(tiles)
        assert(tiles[0][0].get_neighbor_tile(['left']) is None)
        assert(tiles[0][0].get_neighbor_tile(['bottom']) is tiles[0][2])
        assert(tiles[0][0].get_neighbor_tile(['right']) is tiles[1][0])
        assert(tiles[0][0].get_neighbor_tile(['top']) is tiles[0][1])
        assert(tiles[1][2].get_neighbor_tile(['left']) is tiles[0][2])
        assert(tiles[1][2].get_neighbor_tile(['bottom']) is tiles[1][1])
        assert(tiles[1][2].get_neighbor_tile(['right']) is None)
        assert(tiles[1][2].get_neighbor_tile(['top']) is tiles[1][0])

    def test_u_v_cyclic(self):
        """Both axes wrap: every edge neighbor wraps around."""
        tessagon = FakeTessagon()
        tile_generator = TileGenerator(tessagon,
                                       u_range=[0.5, 1.0], v_range=[2.5, 4.0],
                                       u_num=2, v_num=3,
                                       u_cyclic=True, v_cyclic=True)
        tiles = tile_generator.initialize_tiles(FakeTileSubClass)
        assert len(tiles) == 2
        assert len(tiles[0]) == 3
        assert len(tiles[1]) == 3
        tile_generator.initialize_neighbors(tiles)
        assert(tiles[0][0].get_neighbor_tile(['left']) is tiles[1][0])
        assert(tiles[0][0].get_neighbor_tile(['bottom']) is tiles[0][2])
        assert(tiles[0][0].get_neighbor_tile(['right']) is tiles[1][0])
        assert(tiles[0][0].get_neighbor_tile(['top']) is tiles[0][1])
        assert(tiles[1][2].get_neighbor_tile(['left']) is tiles[0][2])
        assert(tiles[1][2].get_neighbor_tile(['bottom']) is tiles[1][1])
        assert(tiles[1][2].get_neighbor_tile(['right']) is tiles[0][2])
        assert(tiles[1][2].get_neighbor_tile(['top']) is tiles[1][0])
| 48.967742
| 78
| 0.572244
| 627
| 4,554
| 3.964912
| 0.070175
| 0.077233
| 0.193081
| 0.083669
| 0.933226
| 0.933226
| 0.933226
| 0.933226
| 0.933226
| 0.933226
| 0
| 0.051482
| 0.266359
| 4,554
| 92
| 79
| 49.5
| 0.692607
| 0
| 0
| 0.860759
| 0
| 0
| 0.031621
| 0
| 0
| 0
| 0
| 0
| 0.556962
| 1
| 0.050633
| false
| 0
| 0.025316
| 0
| 0.088608
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
064ad9d1f980f75038d7cfdfdcbb95549772aa8a
| 92
|
py
|
Python
|
src/auth/__init__.py
|
MarkStefanovic/todo-api
|
fb6198511712df853e693787839533f0c9956178
|
[
"MIT"
] | null | null | null |
src/auth/__init__.py
|
MarkStefanovic/todo-api
|
fb6198511712df853e693787839533f0c9956178
|
[
"MIT"
] | null | null | null |
src/auth/__init__.py
|
MarkStefanovic/todo-api
|
fb6198511712df853e693787839533f0c9956178
|
[
"MIT"
] | null | null | null |
from src.auth.adapter import *
from src.auth.domain import *
from src.auth.service import *
| 23
| 30
| 0.771739
| 15
| 92
| 4.733333
| 0.466667
| 0.295775
| 0.464789
| 0.478873
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.130435
| 92
| 3
| 31
| 30.666667
| 0.8875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
065a10f929ff09c5fb1f252b5c8f9281c467c8ab
| 13,477
|
py
|
Python
|
reinforcement_learning/tabular_RL/algorithms/td_zero.py
|
EliorBenYosef/reinforcement-learning
|
c694d07044e12c92e08ca8c2ef06b073ca1704d4
|
[
"MIT"
] | 16
|
2019-09-01T14:12:45.000Z
|
2022-03-07T03:10:38.000Z
|
reinforcement_learning/tabular_RL/algorithms/td_zero.py
|
EliorBenYosef/reinforcement-learning
|
c694d07044e12c92e08ca8c2ef06b073ca1704d4
|
[
"MIT"
] | 5
|
2020-11-13T19:08:40.000Z
|
2022-02-10T04:13:29.000Z
|
reinforcement_learning/tabular_RL/algorithms/td_zero.py
|
EliorBenYosef/reinforcement-learning
|
c694d07044e12c92e08ca8c2ef06b073ca1704d4
|
[
"MIT"
] | 1
|
2022-03-07T03:10:39.000Z
|
2022-03-07T03:10:39.000Z
|
import numpy as np
from gym import wrappers
from reinforcement_learning.utils.utils import decrement_eps, EPS_DEC_LINEAR, pickle_save
from reinforcement_learning.tabular_RL.utils import init_v, init_q, init_q1_q2, \
max_action_q, max_action_q1_q2, eps_greedy_q, eps_greedy_q1_q2, print_v
class TD0PredictionModel:
    """TD(0) policy evaluation over a tabular (discretized) state space.

    NOTE(review): indentation reconstructed from a whitespace-flattened
    dump; verify nesting (especially the render/close placement) against
    the upstream source.
    """

    def __init__(self, custom_env, episodes=50000, alpha=0.1, gamma=None):
        self.custom_env = custom_env
        self.env = custom_env.env
        self.action_space_size = self.env.action_space.n
        self.states = custom_env.states
        self.episodes = episodes
        # Per-episode bookkeeping arrays.
        self.totalSteps = np.zeros(episodes)
        self.totalScores = np.zeros(episodes)
        self.totalAccumulatedScores = np.zeros(episodes)
        self.ALPHA = alpha
        # Discount factor: explicit arg > env default > hard-coded 0.9.
        if gamma is not None:
            self.GAMMA = gamma
        elif custom_env.GAMMA is not None:
            self.GAMMA = custom_env.GAMMA
        else:
            self.GAMMA = 0.9

    def perform_td0_policy_evaluation(self, policy, print_info=False, visualize=False, record=False):
        """Estimate V(s) for the given policy via the TD(0) update rule.

        Returns (V, totalScores, totalAccumulatedScores).
        """
        if record:
            # Record only the first and last episodes.
            self.env = wrappers.Monitor(
                self.env, 'recordings/TD0-PE/', force=True,
                video_callable=lambda episode_id: episode_id == 0 or episode_id == (self.episodes - 1)
            )
        V = init_v(self.states)
        accumulated_scores = 0
        print('\n', 'Game Started', '\n')
        for i in range(self.episodes):
            done = False
            ep_steps = 0
            ep_score = 0
            observation = self.env.reset()
            s = self.custom_env.get_state(observation)
            if visualize and i == self.episodes - 1:
                self.env.render()
            while not done:
                a = policy(s)
                # print(observation, s, a)  # for debugging purposes
                observation_, reward, done, info = self.env.step(a)
                ep_steps += 1
                ep_score += reward
                accumulated_scores += reward
                s_ = self.custom_env.get_state(observation_)
                # TD(0) update: move V[s] toward the bootstrapped target.
                V[s] += self.ALPHA * (reward + self.GAMMA * V[s_] - V[s])
                # option: instead of the (V[s] += ...) line:
                #   value = weights.dot(s)
                #   value_ = weights.dot(s_)
                #   weights += self.ALPHA / dt * (reward + self.GAMMA * value_ - value) * s
                observation, s = observation_, s_
                if visualize and i == self.episodes - 1:
                    self.env.render()
            # Progress report roughly 10 times over the run.
            if self.episodes < 10 or (i + 1) % (self.episodes // 10) == 0:
                print('episode %d - score: %d, steps: %d' % (i + 1, ep_score, ep_steps))
            self.totalSteps[i] = ep_steps
            self.totalScores[i] = ep_score
            self.totalAccumulatedScores[i] = accumulated_scores
            if visualize and i == self.episodes - 1:
                self.env.close()
        if print_info:
            print_v(V)
        print('\n', 'Game Ended', '\n')
        return V, self.totalScores, self.totalAccumulatedScores
class TD0ControlModel:
    """
    On-policy:
        SARSA
        Expected SARSA

    Off-policy:
        Q Learning
        Double Q Learning

    NOTE(review): indentation reconstructed from a whitespace-flattened
    dump; verify nesting (especially the render/close placement) against
    the upstream source.
    """

    def __init__(self, custom_env, episodes=50000, alpha=0.1, gamma=None,
                 eps_max=1.0, eps_min=None, eps_dec=None, eps_dec_type=EPS_DEC_LINEAR):
        self.custom_env = custom_env
        self.env = custom_env.env
        self.action_space_size = self.env.action_space.n
        self.states = custom_env.states
        self.episodes = episodes
        # Per-episode bookkeeping arrays.
        self.totalSteps = np.zeros(episodes)
        self.totalScores = np.zeros(episodes)
        self.totalAccumulatedScores = np.zeros(episodes)
        self.ALPHA = alpha
        # Discount factor: explicit arg > env default > hard-coded 0.9.
        if gamma is not None:
            self.GAMMA = gamma
        elif custom_env.GAMMA is not None:
            self.GAMMA = custom_env.GAMMA
        else:
            self.GAMMA = 0.9
        # Epsilon-greedy exploration schedule.
        self.EPS = eps_max
        self.eps_max = eps_max
        if eps_min is not None:
            self.eps_min = eps_min
        elif custom_env.EPS_MIN is not None:
            self.eps_min = custom_env.EPS_MIN
        else:
            self.eps_min = 0.0
        if eps_dec is not None:
            self.eps_dec = eps_dec
        else:
            # will arrive to eps_min after half the episodes:
            self.eps_dec = (self.eps_max - self.eps_min) * 2 / self.episodes
        self.eps_dec_type = eps_dec_type

    def perform_sarsa(self, visualize=False, record=False, pickle=False):
        """On-policy SARSA control. Returns (Q, totalScores, totalAccumulatedScores)."""
        if record:
            self.env = wrappers.Monitor(
                self.env, 'recordings/SARSA/', force=True,
                video_callable=lambda episode_id: episode_id == 0 or episode_id == (self.episodes - 1)
            )
        Q = init_q(self.states, self.action_space_size, self.custom_env.file_name, pickle)
        accumulated_scores = 0
        print('\n', 'Game Started', '\n')
        for i in range(self.episodes):
            done = False
            ep_steps = 0
            ep_score = 0
            observation = self.env.reset()
            s = self.custom_env.get_state(observation)
            # SARSA selects the first action before stepping.
            a = eps_greedy_q(Q, s, self.action_space_size, self.EPS, self.env)
            if visualize and i == self.episodes - 1:
                self.env.render()
            while not done:
                observation_, reward, done, info = self.env.step(a)
                ep_steps += 1
                ep_score += reward
                accumulated_scores += reward
                s_ = self.custom_env.get_state(observation_)
                # Next action is drawn from the same eps-greedy behavior policy.
                a_ = eps_greedy_q(Q, s_, self.action_space_size, self.EPS, self.env)
                Q[s, a] += self.ALPHA * (reward + self.GAMMA * Q[s_, a_] - Q[s, a])
                observation, s, a = observation_, s_, a_
                if visualize and i == self.episodes - 1:
                    self.env.render()
            if self.episodes < 10 or (i + 1) % (self.episodes // 10) == 0:
                print('episode %d - eps: %.2f, score: %d, steps: %d' % (i + 1, self.EPS, ep_score, ep_steps))
            self.EPS = decrement_eps(self.EPS, self.eps_min, self.eps_dec, self.eps_dec_type)
            self.totalSteps[i] = ep_steps
            self.totalScores[i] = ep_score
            self.totalAccumulatedScores[i] = accumulated_scores
            if visualize and i == self.episodes - 1:
                self.env.close()
        print('\n', 'Game Ended', '\n')
        if pickle:
            pickle_save(Q, self.custom_env.file_name + '-q-table')
        return Q, self.totalScores, self.totalAccumulatedScores

    def perform_expected_sarsa(self, visualize=False, record=False, pickle=False):
        """Expected SARSA: bootstrap on the mean action value of the next state."""
        if record:
            self.env = wrappers.Monitor(
                self.env, 'recordings/E-SARSA/', force=True,
                video_callable=lambda episode_id: episode_id == 0 or episode_id == (self.episodes - 1)
            )
        Q = init_q(self.states, self.action_space_size, self.custom_env.file_name, pickle)
        accumulated_scores = 0
        print('\n', 'Game Started', '\n')
        for i in range(self.episodes):
            done = False
            ep_steps = 0
            ep_score = 0
            observation = self.env.reset()
            s = self.custom_env.get_state(observation)
            if visualize and i == self.episodes - 1:
                self.env.render()
            while not done:
                a = eps_greedy_q(Q, s, self.action_space_size, self.EPS, self.env)
                observation_, reward, done, info = self.env.step(a)
                ep_steps += 1
                ep_score += reward
                accumulated_scores += reward
                s_ = self.custom_env.get_state(observation_)
                # Uniform average over next-state action values
                # (NOTE(review): a true eps-greedy expectation would weight
                # by the policy's action probabilities — confirm intent).
                expected_value = np.mean(np.array([Q[s_, a] for a in range(self.action_space_size)]))
                Q[s, a] += self.ALPHA * (reward + self.GAMMA * expected_value - Q[s, a])
                observation, s = observation_, s_
                if visualize and i == self.episodes - 1:
                    self.env.render()
            if self.episodes < 10 or (i + 1) % (self.episodes // 10) == 0:
                print('episode %d - eps: %.2f, score: %d, steps: %d' % (i + 1, self.EPS, ep_score, ep_steps))
            self.EPS = decrement_eps(self.EPS, self.eps_min, self.eps_dec, self.eps_dec_type)
            self.totalSteps[i] = ep_steps
            self.totalScores[i] = ep_score
            self.totalAccumulatedScores[i] = accumulated_scores
            if visualize and i == self.episodes - 1:
                self.env.close()
        print('\n', 'Game Ended', '\n')
        if pickle:
            pickle_save(Q, self.custom_env.file_name + '-q-table')
        return Q, self.totalScores, self.totalAccumulatedScores

    def perform_q_learning(self, visualize=False, record=False, pickle=False):
        """Off-policy Q-learning: bootstrap on the max next-state action value."""
        if record:
            self.env = wrappers.Monitor(
                self.env, 'recordings/Q-L/', force=True,
                video_callable=lambda episode_id: episode_id == 0 or episode_id == (self.episodes - 1)
            )
        Q = init_q(self.states, self.action_space_size, self.custom_env.file_name, pickle)
        accumulated_scores = 0
        print('\n', 'Game Started', '\n')
        for i in range(self.episodes):
            done = False
            ep_steps = 0
            ep_score = 0
            observation = self.env.reset()
            s = self.custom_env.get_state(observation)
            if visualize and i == self.episodes - 1:
                self.env.render()
            while not done:
                a = eps_greedy_q(Q, s, self.action_space_size, self.EPS, self.env)
                observation_, reward, done, info = self.env.step(a)
                ep_steps += 1
                ep_score += reward
                accumulated_scores += reward
                s_ = self.custom_env.get_state(observation_)
                a_ = max_action_q(Q, s_, self.action_space_size)
                Q[s, a] += self.ALPHA * (reward + self.GAMMA * Q[s_, a_] - Q[s, a])
                # Q[s, a] += self.ALPHA * (reward + self.GAMMA * np.max(Q[s_, :]) - Q[s, a])  # if Q is a numpy.ndarray
                observation, s = observation_, s_
                if visualize and i == self.episodes - 1:
                    self.env.render()
            if self.episodes < 10 or (i + 1) % (self.episodes // 10) == 0:
                print('episode %d - eps: %.2f, score: %d, steps: %d' % (i + 1, self.EPS, ep_score, ep_steps))
            self.EPS = decrement_eps(self.EPS, self.eps_min, self.eps_dec, self.eps_dec_type)
            self.totalSteps[i] = ep_steps
            self.totalScores[i] = ep_score
            self.totalAccumulatedScores[i] = accumulated_scores
            if visualize and i == self.episodes - 1:
                self.env.close()
        print('\n', 'Game Ended', '\n')
        if pickle:
            pickle_save(Q, self.custom_env.file_name + '-q-table')
        return Q, self.totalScores, self.totalAccumulatedScores

    def perform_double_q_learning(self, visualize=False, record=False):
        """Double Q-learning with two tables updated on a coin flip each step."""
        if record:
            self.env = wrappers.Monitor(
                self.env, 'recordings/D-Q-L/', force=True,
                video_callable=lambda episode_id: episode_id == 0 or episode_id == (self.episodes - 1)
            )
        Q1, Q2 = init_q1_q2(self.states, self.action_space_size)
        accumulated_scores = 0
        print('\n', 'Game Started', '\n')
        for i in range(self.episodes):
            done = False
            ep_steps = 0
            ep_score = 0
            observation = self.env.reset()
            s = self.custom_env.get_state(observation)
            if visualize and i == self.episodes - 1:
                self.env.render()
            while not done:
                a = eps_greedy_q1_q2(Q1, Q2, s, self.action_space_size, self.EPS, self.env)
                observation_, reward, done, info = self.env.step(a)
                ep_steps += 1
                ep_score += reward
                accumulated_scores += reward
                s_ = self.custom_env.get_state(observation_)
                rand = np.random.random()
                # NOTE(review): max_action_q1_q2 is called with the SAME table
                # twice (Q1, Q1 / Q2, Q2), though a_ is then evaluated on the
                # OTHER table — confirm this matches the intended algorithm.
                if rand <= 0.5:
                    a_ = max_action_q1_q2(Q1, Q1, s_, self.action_space_size)
                    Q1[s, a] += self.ALPHA * (reward + self.GAMMA * Q2[s_, a_] - Q1[s, a])
                else:  # elif rand > 0.5
                    a_ = max_action_q1_q2(Q2, Q2, s_, self.action_space_size)
                    Q2[s, a] += self.ALPHA * (reward + self.GAMMA * Q1[s_, a_] - Q2[s, a])
                observation, s = observation_, s_
                if visualize and i == self.episodes - 1:
                    self.env.render()
            if self.episodes < 10 or (i + 1) % (self.episodes // 10) == 0:
                print('episode %d - eps: %.2f, score: %d, steps: %d' % (i + 1, self.EPS, ep_score, ep_steps))
            self.EPS = decrement_eps(self.EPS, self.eps_min, self.eps_dec, self.eps_dec_type)
            self.totalSteps[i] = ep_steps
            self.totalScores[i] = ep_score
            self.totalAccumulatedScores[i] = accumulated_scores
            if visualize and i == self.episodes - 1:
                self.env.close()
        print('\n', 'Game Ended', '\n')
        return Q1, Q2, self.totalScores, self.totalAccumulatedScores
| 34.205584
| 119
| 0.549974
| 1,697
| 13,477
| 4.176193
| 0.08132
| 0.04346
| 0.036687
| 0.040214
| 0.851559
| 0.830394
| 0.816989
| 0.806124
| 0.789615
| 0.789615
| 0
| 0.01632
| 0.340729
| 13,477
| 393
| 120
| 34.292621
| 0.781317
| 0.034726
| 0
| 0.779923
| 0
| 0
| 0.036177
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.027027
| false
| 0
| 0.015444
| 0
| 0.069498
| 0.073359
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
0672274e210ffb823f4cb0faec6bba2fb13a9739
| 2,491
|
py
|
Python
|
008.py
|
ThomasB123/Project-Euler
|
ca6786513f210e79fe55417ed43797ffb24610af
|
[
"MIT"
] | null | null | null |
008.py
|
ThomasB123/Project-Euler
|
ca6786513f210e79fe55417ed43797ffb24610af
|
[
"MIT"
] | null | null | null |
008.py
|
ThomasB123/Project-Euler
|
ca6786513f210e79fe55417ed43797ffb24610af
|
[
"MIT"
] | null | null | null |
# Largest product in a series (Project Euler problem 8)
'''
The four adjacent digits in the 1000-digit number that have the greatest
product are 9 x 9 x 8 x 9 = 5832.

Find the thirteen adjacent digits in the 1000-digit number that have the
greatest product. What is the value of this product?
'''
# Answer = 23514624000

# The 1000-digit number, kept as 20 rows of 50 digits for readability;
# whitespace is stripped when joining.
number = ''.join('''
73167176531330624919225119674426574742355349194934
96983520312774506326239578318016984801869478851843
85861560789112949495459501737958331952853208805511
12540698747158523863050715693290963295227443043557
66896648950445244523161731856403098711121722383113
62229893423380308135336276614282806444486645238749
30358907296290491560440772390713810515859307960866
70172427121883998797908792274921901699720888093776
65727333001053367881220235421809751254540594752243
52584907711670556013604839586446706324415722155397
53697817977846174064955149290862569321978468622482
83972241375657056057490261407972968652414535100474
82166370484403199890008895243450658541227588666881
16427171479924442928230863465674813919123162824586
17866458359124566529476545682848912883142607690042
24219022671055626321111109370544217506941658960408
07198403850962455444362981230987879927244284909188
84580156166097919133875499200524063689912560717606
05886116467109405077541002256983155200055935729725
71636269561882670428252483600823257530420752963450
'''.split())

WINDOW = 13  # number of adjacent digits per product

greatest = 1
# Slide a WINDOW-digit window across the number; the bound is derived
# from the data instead of the original hard-coded 988.
for start in range(len(number) - WINDOW + 1):
    product = 1
    for digit in number[start:start + WINDOW]:
        product *= int(digit)
    if product > greatest:
        greatest = product
print(greatest)
| 67.324324
| 1,011
| 0.934163
| 99
| 2,491
| 23.535354
| 0.606061
| 0.019313
| 0.013734
| 0.016309
| 0.051502
| 0.051502
| 0.051502
| 0.051502
| 0.051502
| 0.051502
| 0
| 0.85859
| 0.048976
| 2,491
| 37
| 1,012
| 67.324324
| 0.123681
| 0.523485
| 0
| 0
| 0
| 0
| 0.848176
| 0.848176
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.125
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
069dc6e8933ab9b9913ed9fdacb63aac7e39388b
| 3,830
|
py
|
Python
|
c4/test.py
|
duilio/c4
|
6dcde8316603192b0bc713d1bedb94290d123a9d
|
[
"MIT"
] | 16
|
2016-03-19T16:34:58.000Z
|
2021-11-07T08:59:53.000Z
|
c4/test.py
|
duilio/c4
|
6dcde8316603192b0bc713d1bedb94290d123a9d
|
[
"MIT"
] | 1
|
2017-08-27T10:18:39.000Z
|
2018-02-24T20:55:27.000Z
|
c4/test.py
|
duilio/c4
|
6dcde8316603192b0bc713d1bedb94290d123a9d
|
[
"MIT"
] | 9
|
2017-02-23T23:14:17.000Z
|
2020-12-25T12:26:47.000Z
|
import unittest
import numpy as np
from c4.board import Board, PLAYER1
class TestBoard(unittest.TestCase):
    """End-of-game detection for diagonal connect-four wins on Board."""

    # Positions in which player 1 has completed a left-to-right diagonal.
    # The right-to-left test reuses these grids mirrored top-to-bottom.
    _DIAG_LR_WINS = [
        [[1, 0, 0, 0, 0, 0, 0],
         [2, 1, 0, 0, 0, 0, 0],
         [2, 2, 1, 0, 0, 0, 0],
         [1, 1, 2, 1, 0, 0, 0],
         [0, 0, 0, 0, 0, 0, 0],
         [0, 0, 0, 0, 0, 0, 0],
         [0, 0, 0, 0, 0, 0, 0],
         [0, 0, 0, 0, 0, 0, 0]],
        [[1, 2, 1, 2, 1, 2, 1],
         [1, 2, 1, 2, 1, 1, 0],
         [1, 2, 1, 2, 1, 0, 0],
         [2, 1, 2, 1, 0, 0, 0],
         [0, 0, 0, 0, 0, 0, 0],
         [0, 0, 0, 0, 0, 0, 0],
         [0, 0, 0, 0, 0, 0, 0],
         [0, 0, 0, 0, 0, 0, 0]],
        [[1, 0, 0, 0, 0, 0, 0],
         [0, 0, 0, 0, 0, 0, 0],
         [0, 0, 0, 0, 0, 0, 0],
         [0, 0, 0, 0, 0, 0, 0],
         [1, 1, 2, 1, 0, 0, 0],
         [2, 1, 0, 0, 0, 0, 0],
         [2, 2, 1, 0, 0, 0, 0],
         [1, 1, 2, 1, 0, 0, 0]],
        [[1, 0, 0, 0, 0, 0, 0],
         [0, 0, 0, 0, 0, 0, 0],
         [0, 0, 0, 0, 0, 0, 0],
         [0, 0, 0, 0, 0, 0, 0],
         [1, 1, 2, 1, 0, 0, 0],
         [2, 1, 2, 1, 1, 0, 0],
         [1, 2, 1, 2, 2, 1, 0],
         [1, 2, 1, 1, 1, 2, 1]],
    ]

    def test_end_diag_lr(self):
        """Left-to-right diagonal wins are reported as a PLAYER1 end state."""
        for rows in self._DIAG_LR_WINS:
            b = Board(np.array(rows))
            self.assertTrue(b.end == PLAYER1)

    def test_end_diag_rl(self):
        """The same wins flipped top-to-bottom become right-to-left diagonals."""
        for rows in self._DIAG_LR_WINS:
            b = Board(np.array(rows)[::-1])
            self.assertTrue(b.end == PLAYER1)
| 42.555556
| 58
| 0.232376
| 552
| 3,830
| 1.601449
| 0.04529
| 0.628959
| 0.848416
| 1.022624
| 0.864253
| 0.864253
| 0.806561
| 0.806561
| 0.806561
| 0.806561
| 0
| 0.28839
| 0.581723
| 3,830
| 89
| 59
| 43.033708
| 0.263421
| 0
| 0
| 0.871795
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.102564
| 1
| 0.025641
| false
| 0
| 0.038462
| 0
| 0.076923
| 0
| 0
| 0
| 1
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 14
|
2347b9234fc5c7c0d69316595f595a34f0ab7e85
| 2,988
|
py
|
Python
|
app/test/test_s3.py
|
troydieter/aws-auto-cleanup
|
523bae5cc57b81d3a2f0d43c87b9f1ef5390e3a4
|
[
"MIT"
] | 322
|
2019-04-15T01:59:57.000Z
|
2022-03-09T00:06:55.000Z
|
app/test/test_s3.py
|
troydieter/aws-auto-cleanup
|
523bae5cc57b81d3a2f0d43c87b9f1ef5390e3a4
|
[
"MIT"
] | 70
|
2019-04-15T01:27:21.000Z
|
2022-03-02T00:39:29.000Z
|
app/test/test_s3.py
|
troydieter/aws-auto-cleanup
|
523bae5cc57b81d3a2f0d43c87b9f1ef5390e3a4
|
[
"MIT"
] | 49
|
2019-04-15T06:36:42.000Z
|
2022-01-17T11:37:32.000Z
|
import datetime
import logging
import moto
import pytest
from .. import s3_cleanup
class TestBucketsMoreThanTTL:
    """With ttl=-1, every bucket is older than its TTL and must be removed."""

    @pytest.fixture
    def test_class(self):
        # Mocked S3 backend so no real AWS calls are made.
        with moto.mock_s3():
            settings = {
                "general": {"dry_run": False},
                "services": {"s3": {"buckets": {"clean": True, "ttl": -1}}},
            }
            cleaner = s3_cleanup.S3Cleanup(logging, {}, settings, {"AWS": {}})
            yield cleaner

    def test(self, test_class):
        # Create a bucket and confirm it exists before cleanup.
        test_class.client_s3.create_bucket(Bucket="test")
        listed = test_class.client_s3.list_buckets()
        assert listed["Buckets"][0]["Name"] == "test"
        # Run the cleanup: the expired bucket should be gone afterwards.
        test_class.buckets()
        listed = test_class.client_s3.list_buckets()
        assert listed["Buckets"] == []
class TestBucketsLessThanTTL:
    """With a large ttl (5000), a fresh bucket is younger than its TTL and kept."""

    @pytest.fixture
    def test_class(self):
        # Mocked S3 backend so no real AWS calls are made.
        with moto.mock_s3():
            settings = {
                "general": {"dry_run": False},
                "services": {"s3": {"buckets": {"clean": True, "ttl": 5000}}},
            }
            cleaner = s3_cleanup.S3Cleanup(logging, {}, settings, {"AWS": {}})
            yield cleaner

    def test(self, test_class):
        # Create a bucket and confirm it exists before cleanup.
        test_class.client_s3.create_bucket(Bucket="test")
        listed = test_class.client_s3.list_buckets()
        assert listed["Buckets"][0]["Name"] == "test"
        # Run the cleanup: the bucket is within its TTL and must survive.
        test_class.buckets()
        listed = test_class.client_s3.list_buckets()
        assert listed["Buckets"][0]["Name"] == "test"
class TestBucketsWhitelist:
    """A whitelisted bucket survives cleanup even when expired (ttl=-1)."""

    @pytest.fixture
    def test_class(self):
        # Mocked S3 backend so no real AWS calls are made.
        with moto.mock_s3():
            whitelist = {"s3": {"bucket": ["test"]}}
            settings = {
                "general": {"dry_run": False},
                "services": {"s3": {"buckets": {"clean": True, "ttl": -1}}},
            }
            cleaner = s3_cleanup.S3Cleanup(logging, whitelist, settings, {"AWS": {}})
            yield cleaner

    def test(self, test_class):
        # Create a bucket and confirm it exists before cleanup.
        test_class.client_s3.create_bucket(Bucket="test")
        listed = test_class.client_s3.list_buckets()
        assert listed["Buckets"][0]["Name"] == "test"
        # Run the cleanup: whitelisting must protect the expired bucket.
        test_class.buckets()
        listed = test_class.client_s3.list_buckets()
        assert listed["Buckets"][0]["Name"] == "test"
| 28.730769
| 78
| 0.558568
| 297
| 2,988
| 5.424242
| 0.164983
| 0.139665
| 0.083799
| 0.094972
| 0.898821
| 0.898821
| 0.898821
| 0.898821
| 0.898821
| 0.898821
| 0
| 0.016699
| 0.318608
| 2,988
| 103
| 79
| 29.009709
| 0.774558
| 0.091031
| 0
| 0.75
| 0
| 0
| 0.085831
| 0
| 0
| 0
| 0
| 0
| 0.088235
| 1
| 0.088235
| false
| 0
| 0.073529
| 0
| 0.205882
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
23568ef84806142d79d34cfa3458b41993b9107e
| 3,902
|
py
|
Python
|
python/researchDev/boot.py
|
jzadeh/aktaion
|
485488908e88212e615cd8bde04c6b1b63403cd0
|
[
"Apache-2.0"
] | 112
|
2017-07-26T00:30:29.000Z
|
2021-11-09T14:02:12.000Z
|
python/researchDev/boot.py
|
jzadeh/aktaion
|
485488908e88212e615cd8bde04c6b1b63403cd0
|
[
"Apache-2.0"
] | null | null | null |
python/researchDev/boot.py
|
jzadeh/aktaion
|
485488908e88212e615cd8bde04c6b1b63403cd0
|
[
"Apache-2.0"
] | 38
|
2017-07-28T03:09:01.000Z
|
2021-05-07T03:21:32.000Z
|
import os
def boot():
    """Print the ASCII-art banner and pause until the user presses a key.

    Side effects only: writes the banner to stdout and blocks on a shell
    `read` for a single keypress.
    """
    # Bug fix: a bare `print` is a no-op expression in Python 3 (it was the
    # Python 2 print statement); call it to emit the intended blank line.
    print()
    print (' _____ _____ _____ _____ _____ _______ _____ ')
    print (' /\\ \\ /\\ \\ /\\ \\ /\\ \\ /\\ \\ /::\\ \\ /\\ \\ ')
    print (' /::\\ \\ /::\\____\\ /::\\ \\ /::\\ \\ /::\\ \\ /::::\\ \\ /::\\___ \\ ')
    print (' /::::\\ \\ /:::/ / \\:::\\ \\ /::::\\ \\ \\:::\\ \\ /::::::\\ \\ /::::| | ')
    print (' /::::::\\ \\ /:::/ / \\:::\\ \\ /::::::\\ \\ \\:::\\ \\ /::::::::\\ \\ /:::::| | ')
    print (' /:::/\\:::\\ \\ /:::/ / \\:::\\ \\ /:::/\\:::\\ \\ \\:::\\ \\ /:::/~~\\:::\\ \\ /::::::| | ')
    print (' /:::/__\\:::\\ \\ /:::/____/ \\:::\\ \\ /:::/__\\:::\\ \\ \\:::\\ \\ /:::/ \\:::\\ \\ /:::/|::| | ')
    print (' /::::\\ \\:::\\ \\ /::::\\ \\ /::::\\ \\ /::::\\ \\:::\\ \\ /::::\\ \\ /:::/ / \\:::\\ \\ /:::/ |::| | ')
    print (' /::::::\\ \\:::\\ \\ /::::::\\____\\________ /::::::\\ \\ /::::::\\ \\:::\\ \\ ____ /::::::\\ \\ /:::/____/ \\:::\\____\\ /:::/ |::| | _____ ')
    print (' /:::/\\:::\\ \\:::\\ \\ /:::/\\:::::::::::\\ \\ /:::/\\:::\\ \\ /:::/\\:::\\ \\:::\\ \\ /\\ \\ /:::/\\:::\\ \\ |:::| | |:::| | /:::/ |::| |/\\ \\ ')
    print ('/:::/ \\:::\\ \\:::\\____\\/:::/ |:::::::::::\\____\\ /:::/ \\:::\\____\\/:::/ \\:::\\ \\:::\\____\\/::\\ \\/:::/ \\:::\\____\\|:::|____| |:::| |/:: / |::| /::\\___ \\ ')
    print ('\\::/ \\:::\\ /:::/ /\\::/ |::|~~~|~~~~~ /:::/ \\::/ /\\::/ \\:::\\ /:::/ /\\:::\\ /:::/ \\::/ / \\:::\\ \\ /:::/ / \\::/ /|::| /:::/ / ')
    print (' \\/____/ \\:::\\/:::/ / \\/____|::| | /:::/ / \\/____/ \\/____/ \\:::\\/:::/ / \\:::\\/:::/ / \\/____/ \\:::\\ \\ /:::/ / \\/____/ |::| /:::/ / ')
    print (' \\::::::/ / |::| | /:::/ / \\::::::/ / \\::::::/ / \\:::\\ /:::/ / |::|/:::/ / ')
    print (' \\::::/ / |::| | /:::/ / \\::::/ / \\::::/____/ \\:::\\__/:::/ / |::::::/ / ')
    print (' /:::/ / |::| | \\::/ / /:::/ / \\:::\\ \\ \\::::::::/ / |:::::/ / ')
    print (' /:::/ / |::| | \\/____/ /:::/ / \\:::\\ \\ \\::::::/ / |::::/ / ')
    print (' /:::/ / |::| | /:::/ / \\:::\\ \\ \\::::/ / /:::/ / ')
    print (' /:::/ / \\::| | /:::/ / \\:::\\____\\ \\::/____/ /:::/ / ')
    print (' \\::/ / \\:| | \\::/ / \\::/ / ~~ \\::/ / ')
    print (' \\/____/ \\|___| \\/____/ \\/____/ \\/____/ ')
    # NOTE(review): `read -s -n 1` is a bash-ism; os.system runs /bin/sh, so
    # this may not pause on shells like dash — confirm target platform.
    os.system('read -s -n 1 -p "Press any key to continue..."')
    print()
| 121.9375
| 182
| 0.098155
| 48
| 3,902
| 4.458333
| 0.458333
| 0.981308
| 1.401869
| 1.775701
| 0.514019
| 0.514019
| 0.514019
| 0.514019
| 0.514019
| 0.514019
| 0
| 0.000617
| 0.584572
| 3,902
| 32
| 183
| 121.9375
| 0.1314
| 0.017171
| 0
| 0.076923
| 0
| 0.192308
| 0.924582
| 0.00548
| 0
| 0
| 0
| 0
| 0
| 1
| 0.038462
| true
| 0
| 0.038462
| 0
| 0.076923
| 0.884615
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 9
|
88debece08c1e96c24b4f8a33cbf44d5d6611a9b
| 1,655
|
py
|
Python
|
Malaria-Cell-Analyzer/mysite/core/gen_dataset_completed.py
|
suryabranwal/Malaria-Cell-Analyzer
|
eadbad1e0b5a51eeeb43fa75367d8b4d9eabe033
|
[
"MIT"
] | 1
|
2019-12-20T18:04:40.000Z
|
2019-12-20T18:04:40.000Z
|
Malaria-Cell-Analyzer/mysite/core/gen_dataset_completed.py
|
suryabranwal/Malaria-Cell-Analyzer
|
eadbad1e0b5a51eeeb43fa75367d8b4d9eabe033
|
[
"MIT"
] | null | null | null |
Malaria-Cell-Analyzer/mysite/core/gen_dataset_completed.py
|
suryabranwal/Malaria-Cell-Analyzer
|
eadbad1e0b5a51eeeb43fa75367d8b4d9eabe033
|
[
"MIT"
] | 1
|
2019-11-26T14:06:03.000Z
|
2019-11-26T14:06:03.000Z
|
import cv2, os
import numpy as np
import csv
import glob
def _append_contour_features(label):
    """Append one CSV row per image of cell class *label* to csv/dataset.csv.

    Each row is: label, areas of the first 5 detected contours ("0" padding
    when an image has fewer than 5 contours).
    """
    dir_list = glob.glob("cell_images/" + label + "/*.png")
    # `with` guarantees the file is flushed and closed (the original opened
    # the handle twice and never closed it); also avoids shadowing `file`.
    with open("csv/dataset.csv", "a") as out:
        for img_path in dir_list:
            im = cv2.imread(img_path)
            im = cv2.GaussianBlur(im, (5, 5), 2)
            im_gray = cv2.cvtColor(im, cv2.COLOR_BGR2GRAY)
            ret, thresh = cv2.threshold(im_gray, 127, 255, 0)
            contours, _ = cv2.findContours(thresh, 1, 2)
            # Draw all contours once (the original redundantly redrew the
            # full contour list once per contour).
            cv2.drawContours(im_gray, contours, -1, (0, 255, 0), 3)
            fields = [label]
            for i in range(5):
                # Only an out-of-range contour index is expected here; the
                # original bare `except` could hide unrelated errors.
                try:
                    fields.append(str(cv2.contourArea(contours[i])))
                except IndexError:
                    fields.append("0")
            # Consistency fix: the original wrote a trailing comma for the
            # first label but not the second; emit uniform rows for both.
            out.write(",".join(fields) + "\n")
    # NOTE(review): without an imshow window this returns immediately;
    # kept from the original — confirm whether it is still needed.
    cv2.waitKey(19000)


_append_contour_features("Parasitized")
_append_contour_features("Uninfected")
| 18.388889
| 63
| 0.575227
| 228
| 1,655
| 4.096491
| 0.263158
| 0.115632
| 0.03212
| 0.040685
| 0.914347
| 0.914347
| 0.914347
| 0.914347
| 0.841542
| 0.841542
| 0
| 0.062706
| 0.267674
| 1,655
| 89
| 64
| 18.595506
| 0.707921
| 0.041088
| 0
| 0.857143
| 0
| 0
| 0.062857
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.081633
| 0
| 0.081633
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
88eac0a7976441ef42ccc1c8a624876cda4f745b
| 7,908
|
py
|
Python
|
sims/ch4-effective-g1-duration/run-well-mixed-effective-g1-duration.py
|
ThomasPak/cell-competition
|
bb058d67e297d95c4c8ff2a0aea5b1fe5a82be09
|
[
"BSD-3-Clause"
] | null | null | null |
sims/ch4-effective-g1-duration/run-well-mixed-effective-g1-duration.py
|
ThomasPak/cell-competition
|
bb058d67e297d95c4c8ff2a0aea5b1fe5a82be09
|
[
"BSD-3-Clause"
] | null | null | null |
sims/ch4-effective-g1-duration/run-well-mixed-effective-g1-duration.py
|
ThomasPak/cell-competition
|
bb058d67e297d95c4c8ff2a0aea5b1fe5a82be09
|
[
"BSD-3-Clause"
] | null | null | null |
import numpy as np
import pandas as pd
from scipy.stats import expon, uniform
import sys
sys.path.append('../../well_mixed')
from well_mixed_death_clock import (WellMixedSimulator,
WellMixedSimulationData, exponential_ccm, uniform_ccm,
base_rate_death_signal)
# Exponential cell cycle model
tG1 = 50  # mean G1 duration (used as the exponential scale below)
tG2 = 50  # fixed G2 duration passed to the simulator

# Constant base rate death signal
f = base_rate_death_signal
base_rate = 1
# Death threshold scales linearly with eta: Tdeath = eta * base_rate * tG1.
Tdeath_fun = lambda eta: eta * base_rate * tG1

# Simulation parameters
tstart = 0
tend = np.inf  # no fixed end time; runs until another stop condition fires
max_cell_count = 1000  # population-size cap passed to WellMixedSimulator
initial_cell_count = 64
num_eta = 10   # number of eta values per sweep
num_iter = 100  # repeat simulations per parameter combination

# Arguments to f and ccm
f_args = (base_rate,)
ccm_args = (tG1,)
# Helper function
def run_g1_truncation_exponential_simulation(eta, seed=None):
    """Run one well-mixed simulation with the exponential cell cycle model.

    Parameters
    ----------
    eta : float
        Death-clock threshold multiplier; Tdeath = eta * base_rate * tG1.
    seed : int or None
        Simulation seed. seed + 1 seeds the initial-condition sampling so
        the two random streams are decorrelated.

    Returns
    -------
    WellMixedSimulationData
        Post-processed data from the completed simulation.
    """
    # We create a random_state seeded with seed + 1 to sample the initial
    # conditions in order to avoid correlations with the simulation.
    if seed is not None:  # idiomatic form of the original `not seed is None`
        random_state = np.random.RandomState(seed + 1)
    else:
        random_state = None

    ccm = exponential_ccm
    Tdeath = Tdeath_fun(eta)

    # Initialise simulator
    simulator = WellMixedSimulator(f, ccm, Tdeath, tG2, tstart, tend,
            f_args, ccm_args, max_cell_count)

    # Generate initial conditions: all cells start at age 0, with
    # exponentially distributed G1 durations; each cell founds its own clone.
    tau_0 = np.zeros(initial_cell_count)
    tbirth_0 = np.zeros(initial_cell_count)
    tG1_0 = expon.rvs(scale=tG1, size=initial_cell_count, random_state=random_state)
    clone_0 = np.arange(initial_cell_count)

    # Run simulation
    data = simulator.run(tau_0, tbirth_0, tG1_0, clone_0, seed=seed)

    # Return processed data
    return WellMixedSimulationData(data)
if __name__ == '__main__':
    # Exponential ccm parameter sweep: num_eta values from 4/num_eta to 4.0.
    etas = np.arange(4 / num_eta, 4 + 4 / num_eta, 4 / num_eta)

    # Generate parameters: each eta is repeated num_iter times.
    eta_data = []
    for eta in etas:
        for i in range(num_iter):
            eta_data.append(eta)

    # If initial seed is given as command-line arguments, create seeds in
    # increments of 2 to avoid correlations between simulations because seed +
    # 1 is used for initial conditions.
    if len(sys.argv) == 2:
        initial_seed = int(sys.argv[1])
        seed_data = np.arange(initial_seed, initial_seed + 2 * len(eta_data), 2)
    else:
        seed_data = [None] * len(eta_data)

    # Run simulations and postprocess data; one entry per simulation is
    # accumulated in each of these parallel lists.
    status_data = []
    final_timestep_data = []
    final_cell_count_data = []
    num_divisions_data = []
    num_deaths_data = []
    average_time_in_G1_data = []
    effective_g1_sample_size_data = []
    for eta, seed in zip(eta_data, seed_data):
        sim_data = run_g1_truncation_exponential_simulation(eta, seed)

        status = sim_data.get_status()
        t_events = sim_data.get_t_events()
        cell_count = sim_data.get_cell_count()
        num_divisions = sim_data.get_num_divisions()
        num_deaths = sim_data.get_num_deaths()
        effective_time_in_G1 = sim_data.get_effective_time_in_G1()

        # NOTE(review): presumably status == 0 means the run completed
        # normally, so the last event time is valid; otherwise the final
        # event is discarded and the previous one used — confirm against
        # WellMixedSimulator's status convention.
        if status == 0:
            final_timestep = t_events[-1]
        else:
            final_timestep = t_events[-2]
        final_cell_count = cell_count[-1]
        average_time_in_G1 = np.mean(effective_time_in_G1)
        effective_g1_sample_size = len(effective_time_in_G1)

        status_data.append(status)
        final_timestep_data.append(final_timestep)
        final_cell_count_data.append(final_cell_count)
        num_divisions_data.append(num_divisions)
        num_deaths_data.append(num_deaths)
        average_time_in_G1_data.append(average_time_in_G1)
        effective_g1_sample_size_data.append(effective_g1_sample_size)

    # Create and write dataframe (one row per simulation, indexed by id).
    df = pd.DataFrame({
        'eta' : eta_data,
        'seed' : seed_data,
        'status' : status_data,
        'final_timestep' : final_timestep_data,
        'final_cell_count' : final_cell_count_data,
        'num_divisions' : num_divisions_data,
        'num_deaths' : num_deaths_data,
        'average_time_in_G1' : average_time_in_G1_data,
        'effective_g1_sample_size' : effective_g1_sample_size_data,
        })
    df.to_csv('exponential-effective-g1-duration-data.csv', index_label='simulation_id')
# Uniform ccm
# Support width of the uniform G1 distribution: with r = 2 * alpha * tG1 the
# sampling below draws from [tG1 * (1 - alpha), tG1 * (1 + alpha)].
r_fun = lambda alpha: 2 * alpha * tG1
# Helper function
def run_g1_truncation_uniform_simulation(alpha, eta, seed=None):
    """Run one well-mixed simulation with the uniform cell cycle model.

    Parameters
    ----------
    alpha : float
        Relative half-width of the uniform G1 distribution; the support is
        [tG1 * (1 - alpha), tG1 * (1 + alpha)] via r = r_fun(alpha).
    eta : float
        Death-clock threshold multiplier; Tdeath = eta * base_rate * tG1.
    seed : int or None
        Simulation seed. seed + 1 seeds the initial-condition sampling so
        the two random streams are decorrelated.

    Returns
    -------
    WellMixedSimulationData
        Post-processed data from the completed simulation.
    """
    # We create a random_state seeded with seed + 1 to sample the initial
    # conditions in order to avoid correlations with the simulation.
    if seed is not None:  # idiomatic form of the original `not seed is None`
        random_state = np.random.RandomState(seed + 1)
    else:
        random_state = None

    ccm = uniform_ccm
    r = r_fun(alpha)
    Tdeath = Tdeath_fun(eta)
    # Shadows the module-level ccm_args: the uniform ccm also needs r.
    ccm_args = (tG1,r)

    # Initialise simulator
    simulator = WellMixedSimulator(f, ccm, Tdeath, tG2, tstart, tend,
            f_args, ccm_args, max_cell_count)

    # Generate initial conditions: all cells start at age 0, with G1
    # durations uniform on [tG1 - r/2, tG1 + r/2]; each founds its own clone.
    tau_0 = np.zeros(initial_cell_count)
    tbirth_0 = np.zeros(initial_cell_count)
    tG1_0 = uniform.rvs(loc=tG1 - 0.5 * r, scale=r, size=initial_cell_count,
            random_state=random_state)
    clone_0 = np.arange(initial_cell_count)

    # Run simulation
    data = simulator.run(tau_0, tbirth_0, tG1_0, clone_0, seed=seed)

    # Return processed data
    return WellMixedSimulationData(data)
if __name__ == '__main__':
    # Uniform ccm parameter sweep over (alpha, eta) combinations;
    # etas are num_eta values from 2/num_eta to 2.0.
    alphas = [0.3, 0.5, 0.7, 1.0]
    etas = np.arange(2 / num_eta, 2 + 2 / num_eta, 2 / num_eta)

    # Generate parameters: every (alpha, eta) pair is repeated num_iter times.
    alpha_data = []
    eta_data = []
    for alpha in alphas:
        for eta in etas:
            for i in range(num_iter):
                alpha_data.append(alpha)
                eta_data.append(eta)

    # If initial seed is given as command-line arguments, create seeds in
    # increments of 2 to avoid correlations between simulations because seed +
    # 1 is used for initial conditions.
    if len(sys.argv) == 2:
        initial_seed = int(sys.argv[1])
        seed_data = np.arange(initial_seed, initial_seed + 2 * len(eta_data), 2)
    else:
        seed_data = [None] * len(eta_data)

    # Run simulations and postprocess data; one entry per simulation is
    # accumulated in each of these parallel lists.
    status_data = []
    final_timestep_data = []
    final_cell_count_data = []
    num_divisions_data = []
    num_deaths_data = []
    average_time_in_G1_data = []
    effective_g1_sample_size_data = []
    for alpha, eta, seed in zip(alpha_data, eta_data, seed_data):
        sim_data = run_g1_truncation_uniform_simulation(alpha, eta, seed)

        status = sim_data.get_status()
        t_events = sim_data.get_t_events()
        cell_count = sim_data.get_cell_count()
        num_divisions = sim_data.get_num_divisions()
        num_deaths = sim_data.get_num_deaths()
        effective_time_in_G1 = sim_data.get_effective_time_in_G1()

        # NOTE(review): presumably status == 0 means the run completed
        # normally, so the last event time is valid; otherwise the final
        # event is discarded and the previous one used — confirm against
        # WellMixedSimulator's status convention.
        if status == 0:
            final_timestep = t_events[-1]
        else:
            final_timestep = t_events[-2]
        final_cell_count = cell_count[-1]
        average_time_in_G1 = np.mean(effective_time_in_G1)
        effective_g1_sample_size = len(effective_time_in_G1)

        status_data.append(status)
        final_timestep_data.append(final_timestep)
        final_cell_count_data.append(final_cell_count)
        num_divisions_data.append(num_divisions)
        num_deaths_data.append(num_deaths)
        average_time_in_G1_data.append(average_time_in_G1)
        effective_g1_sample_size_data.append(effective_g1_sample_size)

    # Create and write dataframe (one row per simulation, indexed by id).
    df = pd.DataFrame({
        'alpha' : alpha_data,
        'eta' : eta_data,
        'seed' : seed_data,
        'status' : status_data,
        'final_timestep' : final_timestep_data,
        'final_cell_count' : final_cell_count_data,
        'num_divisions' : num_divisions_data,
        'num_deaths' : num_deaths_data,
        'average_time_in_G1' : average_time_in_G1_data,
        'effective_g1_sample_size' : effective_g1_sample_size_data,
        })
    df.to_csv('uniform-effective-g1-duration-data.csv', index_label='simulation_id')
| 31.759036
| 88
| 0.682473
| 1,106
| 7,908
| 4.517179
| 0.136528
| 0.054043
| 0.032026
| 0.036029
| 0.818255
| 0.818255
| 0.818255
| 0.798839
| 0.78703
| 0.753002
| 0
| 0.021108
| 0.233182
| 7,908
| 248
| 89
| 31.887097
| 0.80277
| 0.146813
| 0
| 0.731707
| 0
| 0
| 0.053478
| 0.019067
| 0
| 0
| 0
| 0
| 0
| 1
| 0.012195
| false
| 0
| 0.030488
| 0
| 0.054878
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
002d0e1349cf357ca18a823230bd151a88d70277
| 70,946
|
py
|
Python
|
stackchat/parse/html/transcript_day_test.py
|
jeremybanks/ChatExchange
|
e350de944d0f221a9b2afc545bf60ae309e402b6
|
[
"Apache-2.0"
] | 3
|
2017-12-27T02:40:06.000Z
|
2018-04-21T00:28:31.000Z
|
stackchat/parse/html/transcript_day_test.py
|
jeremybanks/ChatExchange
|
e350de944d0f221a9b2afc545bf60ae309e402b6
|
[
"Apache-2.0"
] | 1
|
2017-12-11T22:45:13.000Z
|
2020-09-04T17:49:41.000Z
|
stackchat/parse/html/transcript_day_test.py
|
jeremybanks/ChatExchange
|
e350de944d0f221a9b2afc545bf60ae309e402b6
|
[
"Apache-2.0"
] | 1
|
2018-05-08T22:17:58.000Z
|
2018-05-08T22:17:58.000Z
|
from .transcript_day import *
def test():
    """Parse EXAMPLE_DATA and spot-check the extracted transcript fields."""
    day = TranscriptDay(EXAMPLE_DATA)
    print(day)

    # Room metadata.
    assert day.room_id == 11540
    assert day.room_name == "Charcoal HQ"

    # Day-navigation links.
    assert day.first_day == datetime.date(2013, 11, 16)
    assert day.previous_day == datetime.date(2017, 11, 16)
    assert day.next_day == datetime.date(2017, 11, 18)
    assert day.last_day == datetime.date(2017, 11, 22)

    # First message and overall message count.
    first = day.messages[0]
    assert first.id == 41197805
    assert first.parent_message_id is None
    assert first.owner_user_id == 205533
    assert first.owner_user_name == "Videonauth"
    assert len(day.messages) == 61
EXAMPLE_DATA = r'''
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
<html xmlns="http://www.w3.org/1999/xhtml" >
<head>
<title>Charcoal HQ - 2017-11-17 (page 2 of 4)</title>
<link rel="shortcut icon" href="//cdn.sstatic.net/stackexchange/img/favicon.ico?v=da"><link rel="apple-touch-icon" href="//cdn.sstatic.net/stackexchange/img/apple-touch-icon.png?v=da"><link rel="search" type="application/opensearchdescription+xml" title="Chat for chat.stackexchange.com" href="/opensearch.xml">
<link rel="canonical" href="/transcript/11540/2017/11/17/1-2" />
<script type="text/javascript" src="//ajax.googleapis.com/ajax/libs/jquery/1.12.4/jquery.min.js"></script>
<script type="text/javascript" src="//cdn-chat.sstatic.net/chat/Js/master-chat.js?v=f1e5ed9ea207"></script>
<link rel="stylesheet" href="//cdn-chat.sstatic.net/chat/css/chat.stackexchange.com.css?v=7d154b0411cf">
<script type="text/javascript">
function IMAGE(f) { return ("//cdn-chat.sstatic.net/chat/img/" + f); }
</script>
<script type="text/javascript">
$(function() {
initTranscript(true,
1251, true,
true, 11540,
true);
popupDismisser();
});
</script>
</head>
<body id="transcript-body">
<div id="container">
<div id="main">
<a href="/transcript/11540/2013/11/16" class="button noprint" title="2013-11-16">« first day (1461 days earlier)</a>
<a href="/transcript/11540/2017/11/16" class="button noprint" rel="prev" title="2017-11-16">← previous day</a>
<link rel="prev" title="2017-11-16" href="/transcript/11540/2017/11/16" />
<a href="/transcript/11540/2017/11/18" class="button noprint" rel="next" title="2017-11-18">next day →</a>
<link rel="next" title="2017-11-18" href="/transcript/11540/2017/11/18" />
<a href="/transcript/11540/2017/11/22" class="button noprint" title="2017-11-22"> last day (5 days later) »</a>
<div class="clear-both"></div>
<div class="clear-both"></div><div class="pager"><a href="/transcript/11540/2017/11/17/0-1"><span class="page-numbers">00:00 - 01:00</span></a><span class="page-numbers current">01:00 - 02:00</span><a href="/transcript/11540/2017/11/17/2-13"><span class="page-numbers">02:00 - 13:00</span></a><a href="/transcript/11540/2017/11/17/13-24"><span class="page-numbers">13:00 - 00:00</span></a></div><div class="clear-both"></div>
<br/>
<div id="transcript">
<div class="monologue user-205533">
<div class="signature"><div class="tiny-signature">
<div class="avatar avatar-16">
<img width="16" height="16" src="https://i.stack.imgur.com/idNpq.jpg?s=16&g=1" alt="Videonauth" />
</div>
<div class="username"><a href="/users/205533/videonauth" title="Videonauth">Videonauth</a></div>
</div></div>
<div class="messages">
<div class="timestamp">1:00 AM</div>
<div class="message" id="message-41197805">
<a name="41197805" href="/transcript/11540?m=41197805#41197805"><span style="display:inline-block;" class="action-link"><span class="img"> </span></span></a>
<div class="content">
<div class="onebox ob-image"><a rel="nofollow noopener noreferrer" href="//i.stack.imgur.com/mdGKA.jpg"><img src="//i.stack.imgur.com/mdGKA.jpg" class="user-image" alt="user image" /></a></div>
</div>
<span class="flash">
</span>
</div>
<div class="message" id="message-41197807">
<a name="41197807" href="/transcript/11540?m=41197807#41197807"><span style="display:inline-block;" class="action-link"><span class="img"> </span></span></a>
<div class="content">
and this :)
</div>
<span class="flash">
</span>
</div>
</div>
<div class="clear-both" style="height:0"> </div>
</div>
<div class="monologue user-137388">
<div class="signature"><div class="tiny-signature">
<div class="avatar avatar-16">
<img width="16" height="16" src="https://i.stack.imgur.com/Ma6sp.jpg?s=16&g=1" alt="QPaysTaxes" />
</div>
<div class="username"><a href="/users/137388/qpaystaxes" title="QPaysTaxes">QPaysTaxes</a></div>
</div></div>
<div class="messages">
<div class="message" id="message-41197831">
<a name="41197831" href="/transcript/11540?m=41197831#41197831"><span style="display:inline-block;" class="action-link"><span class="img"> </span></span></a>
<div class="content">
Mhm
</div>
<span class="flash">
</span>
</div>
</div>
<div class="clear-both" style="height:0"> </div>
</div>
<div class="monologue user-120914">
<div class="signature"><div class="tiny-signature">
<div class="avatar avatar-16">
<img width="16" height="16" src="https://i.stack.imgur.com/WyV1l.png?s=16&g=1" alt="SmokeDetector" />
</div>
<div class="username"><a href="/users/120914/smokedetector" title="SmokeDetector">SmokeDetector</a></div>
</div></div>
<div class="messages">
<div class="message" id="message-41197902">
<a name="41197902" href="/transcript/11540?m=41197902#41197902"><span style="display:inline-block;" class="action-link"><span class="img"> </span></span></a>
<div class="content">
Merged SmokeDetector <a href="https://github.com/Charcoal-SE/SmokeDetector/pull/1236" rel="nofollow noopener noreferrer">#1236</a>.
</div>
<span class="flash">
</span>
</div>
</div>
<div class="clear-both" style="height:0"> </div>
</div>
<div class="monologue user-167070">
<div class="signature"><div class="tiny-signature">
<div class="avatar avatar-16">
<img width="16" height="16" src="https://i.stack.imgur.com/AYXNm.png?s=16&g=1" alt="quartata" />
</div>
<div class="username"><a href="/users/167070/quartata" title="quartata">quartata</a></div>
</div></div>
<div class="messages">
<div class="message" id="message-41197912">
<a name="41197912" href="/transcript/11540?m=41197912#41197912"><span style="display:inline-block;" class="action-link"><span class="img"> </span></span></a>
<div class="content">
sorry for the delay
</div>
<span class="flash">
</span>
</div>
</div>
<div class="clear-both" style="height:0"> </div>
</div>
<div class="monologue user-120914">
<div class="signature"><div class="tiny-signature">
<div class="avatar avatar-16">
<img width="16" height="16" src="https://i.stack.imgur.com/WyV1l.png?s=16&g=1" alt="SmokeDetector" />
</div>
<div class="username"><a href="/users/120914/smokedetector" title="SmokeDetector">SmokeDetector</a></div>
</div></div>
<div class="messages">
<div class="message" id="message-41197915">
<a name="41197915" href="/transcript/11540?m=41197915#41197915"><span style="display:inline-block;" class="action-link"><span class="img"> </span></span></a>
<div class="content">
<a href="https://codecov.io/gh/Charcoal-SE/SmokeDetector/compare/8c1cd7633587085fff94743dcc9096c646c7344a...afbbeba682ba4094d33c8d9dd2a522b2d713b665" rel="nofollow noopener noreferrer">CI</a> on <a href="https://github.com/Charcoal-SE/SmokeDetector/commit/afbbeba" rel="nofollow noopener noreferrer"><code>afbbeba</code></a> succeeded. Message contains 'autopull', pulling...
</div>
<span class="flash">
</span>
</div>
<div class="message" id="message-41197923">
<a name="41197923" href="/transcript/11540?m=41197923#41197923"><span style="display:inline-block;" class="action-link"><span class="img"> </span></span></a>
<div class="content">
[ <a href="//github.com/Charcoal-SE/SmokeDetector" rel="nofollow noopener noreferrer">SmokeDetector</a> ] SmokeDetector started at <a href="//github.com/Charcoal-SE/SmokeDetector/commit/6ad928a" rel="nofollow noopener noreferrer">rev 6ad928a (metasmoke: <i>Merge pull request #1236 from Charcoal-SE/auto-blacklist-1510879822.8478458</i>)</a> (running on Henders/EC2)
</div>
<span class="flash">
</span>
</div>
<div class="message" id="message-41197925">
<a name="41197925" href="/transcript/11540?m=41197925#41197925"><span style="display:inline-block;" class="action-link"><span class="img"> </span></span></a>
<div class="content">
Restart: API quota is 18014.
</div>
<span class="flash">
</span>
</div>
<div class="message" id="message-41197984">
<a name="41197984" href="/transcript/11540?m=41197984#41197984"><span style="display:inline-block;" class="action-link"><span class="img"> </span></span></a>
<div class="content">
[ <a href="//goo.gl/eLDYqh" rel="nofollow noopener noreferrer">SmokeDetector</a> | <a href="//m.erwaysoftware.com/posts/by-url?url=//askubuntu.com/a/977247" rel="nofollow noopener noreferrer">MS</a> ] Potentially bad keyword in answer, blacklisted user: <a href="//askubuntu.com/a/977247">viewer for X.509 certificate</a> by <a href="//askubuntu.com/users/760491">vite11</a> on <code>askubuntu.com</code>
</div>
<span class="flash">
</span>
</div>
<div class="message" id="message-41198028">
<a name="41198028" href="/transcript/11540?m=41198028#41198028"><span style="display:inline-block;" class="action-link"><span class="img"> </span></span></a>
<div class="content">
[ <a href="//goo.gl/eLDYqh" rel="nofollow noopener noreferrer">SmokeDetector</a> | <a href="//m.erwaysoftware.com/posts/by-url?url=//es.stackoverflow.com/questions/118122" rel="nofollow noopener noreferrer">MS</a> ] Mostly dots in body: <a href="//es.stackoverflow.com/questions/118122">Por qué el organo varonil se llama pene?</a> by <a href="//es.stackoverflow.com/users/66574">Escroto Y Pene Gratis</a> on <code>es.stackoverflow.com</code>
</div>
<span class="flash">
</span>
</div>
</div>
<div class="clear-both" style="height:0"> </div>
</div>
<div class="monologue user-137388">
<div class="signature"><div class="tiny-signature">
<div class="avatar avatar-16">
<img width="16" height="16" src="https://i.stack.imgur.com/Ma6sp.jpg?s=16&g=1" alt="QPaysTaxes" />
</div>
<div class="username"><a href="/users/137388/qpaystaxes" title="QPaysTaxes">QPaysTaxes</a></div>
</div></div>
<div class="messages">
<div class="timestamp">1:15 AM</div>
<div class="message" id="message-41198159">
<a name="41198159" href="/transcript/11540?m=41198159#41198159"><span style="display:inline-block;" class="action-link"><span class="img"> </span></span></a>
<div class="content">
sd k
</div>
<span class="flash">
</span>
</div>
<div class="message" id="message-41198165">
<a name="41198165" href="/transcript/11540?m=41198165#41198165"><span style="display:inline-block;" class="action-link"><span class="img"> </span></span></a>
<div class="content">
sd - k
</div>
<span class="flash">
</span>
</div>
</div>
<div class="clear-both" style="height:0"> </div>
</div>
<div class="monologue user-120914">
<div class="signature"><div class="tiny-signature">
<div class="avatar avatar-16">
<img width="16" height="16" src="https://i.stack.imgur.com/WyV1l.png?s=16&g=1" alt="SmokeDetector" />
</div>
<div class="username"><a href="/users/120914/smokedetector" title="SmokeDetector">SmokeDetector</a></div>
</div></div>
<div class="messages">
<div class="message" id="message-41198166">
<a name="41198166" href="/transcript/11540?m=41198166#41198166"><span style="display:inline-block;" class="action-link"><span class="img"> </span></span></a>
<div class="content">
Conflicting feedback across revisions: <a href="//metasmoke.erwaysoftware.com/post/93853" rel="nofollow noopener noreferrer">current</a>, <a href="//metasmoke.erwaysoftware.com/post/93852" rel="nofollow noopener noreferrer">#1</a>
</div>
<span class="flash">
</span>
</div>
<div class="message" id="message-41198171">
<a name="41198171" href="/transcript/11540?m=41198171#41198171"><span style="display:inline-block;" class="action-link"><span class="img"> </span></span></a>
<div class="content">
tpu by QPaysTaxes on <a href="//askubuntu.com/a/977247">viewer for X.509 certificate</a> [<a href="http://metasmoke.erwaysoftware.com/post/93852" rel="nofollow noopener noreferrer">MS</a>]
</div>
<span class="flash">
</span>
</div>
<div class="message" id="message-41198235">
<a name="41198235" href="/transcript/11540?m=41198235#41198235"><span style="display:inline-block;" class="action-link"><span class="img"> </span></span></a>
<div class="content">
[ <a href="//goo.gl/eLDYqh" rel="nofollow noopener noreferrer">SmokeDetector</a> | <a href="//m.erwaysoftware.com/posts/by-url?url=//es.stackoverflow.com/a/118124" rel="nofollow noopener noreferrer">MS</a> ] Blacklisted user: <a href="//es.stackoverflow.com/a/118124">Como mandar un registro de una celda DataGridView a un textbox de otro formulario?</a> by <a href="//es.stackoverflow.com/users/66574">Escroto Y Pene Gratis</a> on <code>es.stackoverflow.com</code>
</div>
<span class="flash">
</span>
</div>
<div class="message" id="message-41198237">
<a name="41198237" href="/transcript/11540?m=41198237#41198237"><span style="display:inline-block;" class="action-link"><span class="img"> </span></span></a>
<div class="content">
[ <a href="//goo.gl/eLDYqh" rel="nofollow noopener noreferrer">SmokeDetector</a> | <a href="//m.erwaysoftware.com/posts/by-url?url=//es.stackoverflow.com/a/118127" rel="nofollow noopener noreferrer">MS</a> ] Blacklisted user: <a href="//es.stackoverflow.com/a/118127">Como mandar un registro de una celda DataGridView a un textbox de otro formulario?</a> by <a href="//es.stackoverflow.com/users/66574">Escroto Y Pene Gratis</a> on <code>es.stackoverflow.com</code>
</div>
<span class="flash">
</span>
</div>
<div class="message" id="message-41198240">
<a name="41198240" href="/transcript/11540?m=41198240#41198240"><span style="display:inline-block;" class="action-link"><span class="img"> </span></span></a>
<div class="content">
[ <a href="//goo.gl/eLDYqh" rel="nofollow noopener noreferrer">SmokeDetector</a> | <a href="//m.erwaysoftware.com/posts/by-url?url=//es.stackoverflow.com/questions/118125" rel="nofollow noopener noreferrer">MS</a> ] Blacklisted user: <a href="//es.stackoverflow.com/questions/118125">¿Cómo puedo hacer este código funcional para mi website-blog de tecnología?</a> by <a href="//es.stackoverflow.com/users/66574">Escroto Y Pene Gratis</a> on <code>es.stackoverflow.com</code>
</div>
<span class="flash">
</span>
</div>
<div class="message" id="message-41198242">
<a name="41198242" href="/transcript/11540?m=41198242#41198242"><span style="display:inline-block;" class="action-link"><span class="img"> </span></span></a>
<div class="content">
[ <a href="//goo.gl/eLDYqh" rel="nofollow noopener noreferrer">SmokeDetector</a> | <a href="//m.erwaysoftware.com/posts/by-url?url=//es.stackoverflow.com/questions/118122" rel="nofollow noopener noreferrer">MS</a> ] Blacklisted user: <a href="//es.stackoverflow.com/questions/118122">Por que el organo varonil se le denomina pene</a> by <a href="//es.stackoverflow.com/users/66574">Escroto Y Pene Gratis</a> on <code>es.stackoverflow.com</code>
</div>
<span class="flash">
</span>
</div>
</div>
<div class="clear-both" style="height:0"> </div>
</div>
<div class="monologue user-155243">
<div class="signature"><div class="tiny-signature">
<div class="avatar avatar-16">
<img width="16" height="16" src="https://i.stack.imgur.com/fY1pd.jpg?s=16&g=1" alt="Nisse Engström" />
</div>
<div class="username"><a href="/users/155243/nisse-engstrom" title="Nisse Engström">Nisse Engström</a></div>
</div></div>
<div class="messages">
<div class="message" id="message-41198272">
<a name="41198272" href="/transcript/11540?m=41198272#41198272"><span style="display:inline-block;" class="action-link"><span class="img"> </span></span></a>
<a class="reply-info" href="/transcript/11540?m=41198242#41198242"> </a>
<div class="content">
@SmokeDetector tpu-
</div>
<span class="flash">
</span>
</div>
<div class="message" id="message-41198287">
<a name="41198287" href="/transcript/11540?m=41198287#41198287"><span style="display:inline-block;" class="action-link"><span class="img"> </span></span></a>
<a class="reply-info" href="/transcript/11540?m=41198240#41198240"> </a>
<div class="content">
@SmokeDetector tpu-
</div>
<span class="flash">
</span>
</div>
<div class="message" id="message-41198300">
<a name="41198300" href="/transcript/11540?m=41198300#41198300"><span style="display:inline-block;" class="action-link"><span class="img"> </span></span></a>
<a class="reply-info" href="/transcript/11540?m=41198237#41198237"> </a>
<div class="content">
@SmokeDetector tpu-
</div>
<span class="flash">
</span>
</div>
</div>
<div class="clear-both" style="height:0"> </div>
</div>
<div class="monologue user-205533">
<div class="signature"><div class="tiny-signature">
<div class="avatar avatar-16">
<img width="16" height="16" src="https://i.stack.imgur.com/idNpq.jpg?s=16&g=1" alt="Videonauth" />
</div>
<div class="username"><a href="/users/205533/videonauth" title="Videonauth">Videonauth</a></div>
</div></div>
<div class="messages">
<div class="message" id="message-41198304">
<a name="41198304" href="/transcript/11540?m=41198304#41198304"><span style="display:inline-block;" class="action-link"><span class="img"> </span></span></a>
<div class="content">
question: can anyone in here do: <code>!!/repor t &lt;link&gt;</code> when a case linke above happens?
</div>
<span class="flash">
</span>
</div>
</div>
<div class="clear-both" style="height:0"> </div>
</div>
<div class="monologue user-155243">
<div class="signature"><div class="tiny-signature">
<div class="avatar avatar-16">
<img width="16" height="16" src="https://i.stack.imgur.com/fY1pd.jpg?s=16&g=1" alt="Nisse Engström" />
</div>
<div class="username"><a href="/users/155243/nisse-engstrom" title="Nisse Engström">Nisse Engström</a></div>
</div></div>
<div class="messages">
<div class="message" id="message-41198305">
<a name="41198305" href="/transcript/11540?m=41198305#41198305"><span style="display:inline-block;" class="action-link"><span class="img"> </span></span></a>
<a class="reply-info" href="/transcript/11540?m=41198235#41198235"> </a>
<div class="content">
@SmokeDetector tpu-
</div>
<span class="flash">
</span>
</div>
<div class="message" id="message-41198345">
<a name="41198345" href="/transcript/11540?m=41198345#41198345"><span style="display:inline-block;" class="action-link"><span class="img"> </span></span></a>
<a class="reply-info" href="/transcript/11540?m=41198304#41198304"> </a>
<div class="content">
@Videonauth You need to be a <a href="https://charcoal-se.org/smokey/Commands#privileged-commands" rel="nofollow noopener noreferrer">privileged user</a>.
</div>
<span class="flash">
</span>
</div>
</div>
<div class="clear-both" style="height:0"> </div>
</div>
<div class="monologue user-120914">
<div class="signature"><div class="tiny-signature">
<div class="avatar avatar-16">
<img width="16" height="16" src="https://i.stack.imgur.com/WyV1l.png?s=16&g=1" alt="SmokeDetector" />
</div>
<div class="username"><a href="/users/120914/smokedetector" title="SmokeDetector">SmokeDetector</a></div>
</div></div>
<div class="messages">
<div class="message" id="message-41198349">
<a name="41198349" href="/transcript/11540?m=41198349#41198349"><span style="display:inline-block;" class="action-link"><span class="img"> </span></span></a>
<div class="content">
Conflicting feedback across revisions: <a href="//metasmoke.erwaysoftware.com/post/93848" rel="nofollow noopener noreferrer">current</a>, <a href="//metasmoke.erwaysoftware.com/post/93847" rel="nofollow noopener noreferrer">#1</a>
</div>
<span class="flash">
</span>
</div>
</div>
<div class="clear-both" style="height:0"> </div>
</div>
<div class="monologue user-205533">
<div class="signature"><div class="tiny-signature">
<div class="avatar avatar-16">
<img width="16" height="16" src="https://i.stack.imgur.com/idNpq.jpg?s=16&g=1" alt="Videonauth" />
</div>
<div class="username"><a href="/users/205533/videonauth" title="Videonauth">Videonauth</a></div>
</div></div>
<div class="messages">
<div class="timestamp">1:28 AM</div>
<div class="message" id="message-41198350">
<a name="41198350" href="/transcript/11540?m=41198350#41198350"><span style="display:inline-block;" class="action-link"><span class="img"> </span></span></a>
<div class="content">
ah ok :)
</div>
<span class="flash">
</span>
</div>
</div>
<div class="clear-both" style="height:0"> </div>
</div>
<div class="monologue user-155243">
<div class="signature"><div class="tiny-signature">
<div class="avatar avatar-16">
<img width="16" height="16" src="https://i.stack.imgur.com/fY1pd.jpg?s=16&g=1" alt="Nisse Engström" />
</div>
<div class="username"><a href="/users/155243/nisse-engstrom" title="Nisse Engström">Nisse Engström</a></div>
</div></div>
<div class="messages">
<div class="message" id="message-41198423">
<a name="41198423" href="/transcript/11540?m=41198423#41198423"><span style="display:inline-block;" class="action-link"><span class="img"> </span></span></a>
<a class="reply-info" href="/transcript/11540?m=41198350#41198350"> </a>
<div class="content">
@Videonauth There's a <a href="https://charcoal-se.org/pings/mods" rel="nofollow noopener noreferrer">list of mods to ping</a> when things get out of hand.
</div>
<span class="flash">
</span>
</div>
</div>
<div class="clear-both" style="height:0"> </div>
</div>
<div class="monologue user-205533">
<div class="signature"><div class="tiny-signature">
<div class="avatar avatar-16">
<img width="16" height="16" src="https://i.stack.imgur.com/idNpq.jpg?s=16&g=1" alt="Videonauth" />
</div>
<div class="username"><a href="/users/205533/videonauth" title="Videonauth">Videonauth</a></div>
</div></div>
<div class="messages">
<div class="message" id="message-41198457">
<a name="41198457" href="/transcript/11540?m=41198457#41198457"><span style="display:inline-block;" class="action-link"><span class="img"> </span></span></a>
<div class="content">
oh yes i know a few i would ping then otherwise i drop you guys here a line if i stumble on a missed one
</div>
<span class="flash">
</span>
</div>
</div>
<div class="clear-both" style="height:0"> </div>
</div>
<div class="monologue user-155243">
<div class="signature"><div class="tiny-signature">
<div class="avatar avatar-16">
<img width="16" height="16" src="https://i.stack.imgur.com/fY1pd.jpg?s=16&g=1" alt="Nisse Engström" />
</div>
<div class="username"><a href="/users/155243/nisse-engstrom" title="Nisse Engström">Nisse Engström</a></div>
</div></div>
<div class="messages">
<div class="message" id="message-41198542">
<a name="41198542" href="/transcript/11540?m=41198542#41198542"><span style="display:inline-block;" class="action-link"><span class="img"> </span></span></a>
<a class="reply-info" href="/transcript/11540?m=41198457#41198457"> </a>
<div class="content">
@Videonauth Do you understand Spanish?
</div>
<span class="flash">
</span>
</div>
</div>
<div class="clear-both" style="height:0"> </div>
</div>
<div class="monologue user-205533">
<div class="signature"><div class="tiny-signature">
<div class="avatar avatar-16">
<img width="16" height="16" src="https://i.stack.imgur.com/idNpq.jpg?s=16&g=1" alt="Videonauth" />
</div>
<div class="username"><a href="/users/205533/videonauth" title="Videonauth">Videonauth</a></div>
</div></div>
<div class="messages">
<div class="message" id="message-41198550">
<a name="41198550" href="/transcript/11540?m=41198550#41198550"><span style="display:inline-block;" class="action-link"><span class="img"> </span></span></a>
<div class="content">
nope only english and german (native)
</div>
<span class="flash">
</span>
</div>
</div>
<div class="clear-both" style="height:0"> </div>
</div>
<div class="monologue user-155243">
<div class="signature"><div class="tiny-signature">
<div class="avatar avatar-16">
<img width="16" height="16" src="https://i.stack.imgur.com/fY1pd.jpg?s=16&g=1" alt="Nisse Engström" />
</div>
<div class="username"><a href="/users/155243/nisse-engstrom" title="Nisse Engström">Nisse Engström</a></div>
</div></div>
<div class="messages">
<div class="message" id="message-41198594">
<a name="41198594" href="/transcript/11540?m=41198594#41198594"><span style="display:inline-block;" class="action-link"><span class="img"> </span></span></a>
<div class="content">
I have no idea what <a href="https://es.stackoverflow.com/a/118124/">this</a> means. It should probably be reported, but I don't know. It sort of looks like Italian though.
</div>
<span class="flash">
</span>
</div>
<div class="message" id="message-41198607">
<a name="41198607" href="/transcript/11540?m=41198607#41198607"><span style="display:inline-block;" class="action-link"><span class="img"> </span></span></a>
<div class="content">
No wait, it's already caught above.
</div>
<span class="flash">
</span>
</div>
</div>
<div class="clear-both" style="height:0"> </div>
</div>
<div class="monologue user-205533">
<div class="signature"><div class="tiny-signature">
<div class="avatar avatar-16">
<img width="16" height="16" src="https://i.stack.imgur.com/idNpq.jpg?s=16&g=1" alt="Videonauth" />
</div>
<div class="username"><a href="/users/205533/videonauth" title="Videonauth">Videonauth</a></div>
</div></div>
<div class="messages">
<div class="timestamp">1:45 AM</div>
<div class="message" id="message-41198629">
<a name="41198629" href="/transcript/11540?m=41198629#41198629"><span style="display:inline-block;" class="action-link"><span class="img"> </span></span></a>
<div class="content">
<span class="deleted">(removed)</span>
</div>
<span class="flash">
</span>
</div>
<div class="message" id="message-41198635">
<a name="41198635" href="/transcript/11540?m=41198635#41198635"><span style="display:inline-block;" class="action-link edits"><span class="img"> </span></span></a>
<div class="content">
use google translate, cant let this stand here :) at least not without getting a time out for naughtyness
</div>
<span class="flash">
</span>
</div>
</div>
<div class="clear-both" style="height:0"> </div>
</div>
<div class="monologue user-137388">
<div class="signature"><div class="tiny-signature">
<div class="avatar avatar-16">
<img width="16" height="16" src="https://i.stack.imgur.com/Ma6sp.jpg?s=16&g=1" alt="QPaysTaxes" />
</div>
<div class="username"><a href="/users/137388/qpaystaxes" title="QPaysTaxes">QPaysTaxes</a></div>
</div></div>
<div class="messages">
<div class="message" id="message-41198641">
<a name="41198641" href="/transcript/11540?m=41198641#41198641"><span style="display:inline-block;" class="action-link"><span class="img"> </span></span></a>
<a class="reply-info" href="/transcript/11540?m=41198594#41198594"> </a>
<div class="content">
@NisseEngström It's spam.
</div>
<span class="flash">
</span>
</div>
</div>
<div class="clear-both" style="height:0"> </div>
</div>
<div class="monologue user-155243">
<div class="signature"><div class="tiny-signature">
<div class="avatar avatar-16">
<img width="16" height="16" src="https://i.stack.imgur.com/fY1pd.jpg?s=16&g=1" alt="Nisse Engström" />
</div>
<div class="username"><a href="/users/155243/nisse-engstrom" title="Nisse Engström">Nisse Engström</a></div>
</div></div>
<div class="messages">
<div class="message" id="message-41198709">
<a name="41198709" href="/transcript/11540?m=41198709#41198709"><span style="display:inline-block;" class="action-link"><span class="img"> </span></span></a>
<a class="reply-info" href="/transcript/11540?m=41198635#41198635"> </a>
<div class="content">
@Videonauth Google Translate didn't work on that one.
</div>
<span class="flash">
</span>
</div>
<div class="message" id="message-41198714">
<a name="41198714" href="/transcript/11540?m=41198714#41198714"><span style="display:inline-block;" class="action-link"><span class="img"> </span></span></a>
<a class="reply-info" href="/transcript/11540?m=41198641#41198641"> </a>
<div class="content">
@QPaysTaxes Thanks.
</div>
<span class="flash">
</span>
</div>
</div>
<div class="clear-both" style="height:0"> </div>
</div>
<div class="monologue user-205533">
<div class="signature"><div class="tiny-signature">
<div class="avatar avatar-16">
<img width="16" height="16" src="https://i.stack.imgur.com/idNpq.jpg?s=16&g=1" alt="Videonauth" />
</div>
<div class="username"><a href="/users/205533/videonauth" title="Videonauth">Videonauth</a></div>
</div></div>
<div class="messages">
<div class="message" id="message-41198715">
<a name="41198715" href="/transcript/11540?m=41198715#41198715"><span style="display:inline-block;" class="action-link"><span class="img"> </span></span></a>
<div class="content">
it did
</div>
<span class="flash">
</span>
</div>
<div class="message" id="message-41198719">
<a name="41198719" href="/transcript/11540?m=41198719#41198719"><span style="display:inline-block;" class="action-link"><span class="img"> </span></span></a>
<div class="content">
dont know if you can see deleted messages
</div>
<span class="flash">
</span>
</div>
</div>
<div class="clear-both" style="height:0"> </div>
</div>
<div class="monologue user-155243">
<div class="signature"><div class="tiny-signature">
<div class="avatar avatar-16">
<img width="16" height="16" src="https://i.stack.imgur.com/fY1pd.jpg?s=16&g=1" alt="Nisse Engström" />
</div>
<div class="username"><a href="/users/155243/nisse-engstrom" title="Nisse Engström">Nisse Engström</a></div>
</div></div>
<div class="messages">
<div class="message" id="message-41198731">
<a name="41198731" href="/transcript/11540?m=41198731#41198731"><span style="display:inline-block;" class="action-link"><span class="img"> </span></span></a>
<a class="reply-info" href="/transcript/11540?m=41198719#41198719"> </a>
<div class="content">
@Videonauth Nope.
</div>
<span class="flash">
</span>
</div>
</div>
<div class="clear-both" style="height:0"> </div>
</div>
<div class="monologue user-205533">
<div class="signature"><div class="tiny-signature">
<div class="avatar avatar-16">
<img width="16" height="16" src="https://i.stack.imgur.com/idNpq.jpg?s=16&g=1" alt="Videonauth" />
</div>
<div class="username"><a href="/users/205533/videonauth" title="Videonauth">Videonauth</a></div>
</div></div>
<div class="messages">
<div class="message" id="message-41198769">
<a name="41198769" href="/transcript/11540?m=41198769#41198769"><span style="display:inline-block;" class="action-link"><span class="img"> </span></span></a>
<div class="content">
its spanish, autodetection did work to english: will post it in a few seconds againhere for short please dont flagbann me
</div>
<span class="flash">
</span>
</div>
<div class="message" id="message-41198776">
<a name="41198776" href="/transcript/11540?m=41198776#41198776"><span style="display:inline-block;" class="action-link"><span class="img"> </span></span></a>
<div class="content">
<span class="deleted">(removed)</span>
</div>
<span class="flash">
</span>
</div>
<div class="message" id="message-41198787">
<a name="41198787" href="/transcript/11540?m=41198787#41198787"><span style="display:inline-block;" class="action-link"><span class="img"> </span></span></a>
<div class="content">
seen?
</div>
<span class="flash">
</span>
</div>
</div>
<div class="clear-both" style="height:0"> </div>
</div>
<div class="monologue user-155243">
<div class="signature"><div class="tiny-signature">
<div class="avatar avatar-16">
<img width="16" height="16" src="https://i.stack.imgur.com/fY1pd.jpg?s=16&g=1" alt="Nisse Engström" />
</div>
<div class="username"><a href="/users/155243/nisse-engstrom" title="Nisse Engström">Nisse Engström</a></div>
</div></div>
<div class="messages">
<div class="timestamp">1:54 AM</div>
<div class="message" id="message-41198799">
<a name="41198799" href="/transcript/11540?m=41198799#41198799"><span style="display:inline-block;" class="action-link"><span class="img"> </span></span></a>
<a class="reply-info" href="/transcript/11540?m=41198787#41198787"> </a>
<div class="content">
@Videonauth Yes, but that's not the one I linked to.
</div>
<span class="flash">
</span>
</div>
</div>
<div class="clear-both" style="height:0"> </div>
</div>
<div class="monologue user-205533">
<div class="signature"><div class="tiny-signature">
<div class="avatar avatar-16">
<img width="16" height="16" src="https://i.stack.imgur.com/idNpq.jpg?s=16&g=1" alt="Videonauth" />
</div>
<div class="username"><a href="/users/205533/videonauth" title="Videonauth">Videonauth</a></div>
</div></div>
<div class="messages">
<div class="message" id="message-41198835">
<a name="41198835" href="/transcript/11540?m=41198835#41198835"><span style="display:inline-block;" class="action-link"><span class="img"> </span></span></a>
<div class="content">
ah the one above mhmmm yes doesnt work
</div>
<span class="flash">
</span>
</div>
</div>
<div class="clear-both" style="height:0"> </div>
</div>
<div class="monologue user-137388">
<div class="signature"><div class="tiny-signature">
<div class="avatar avatar-16">
<img width="16" height="16" src="https://i.stack.imgur.com/Ma6sp.jpg?s=16&g=1" alt="QPaysTaxes" />
</div>
<div class="username"><a href="/users/137388/qpaystaxes" title="QPaysTaxes">QPaysTaxes</a></div>
</div></div>
<div class="messages">
<div class="message" id="message-41198853">
<a name="41198853" href="/transcript/11540?m=41198853#41198853"><span style="display:inline-block;" class="action-link"><span class="img"> </span></span></a>
<a class="reply-info" href="/transcript/11540?m=41198769#41198769"> </a>
<div class="content">
@Videonauth No one here flag-bans people.
</div>
<span class="flash">
</span>
</div>
<div class="message" id="message-41198854">
<a name="41198854" href="/transcript/11540?m=41198854#41198854"><span style="display:inline-block;" class="action-link"><span class="img"> </span></span></a>
<div class="content">
You get flag banned if you flag too many things that get declined by mods.
</div>
<span class="flash">
</span>
</div>
</div>
<div class="clear-both" style="height:0"> </div>
</div>
<div class="monologue user-205533">
<div class="signature"><div class="tiny-signature">
<div class="avatar avatar-16">
<img width="16" height="16" src="https://i.stack.imgur.com/idNpq.jpg?s=16&g=1" alt="Videonauth" />
</div>
<div class="username"><a href="/users/205533/videonauth" title="Videonauth">Videonauth</a></div>
</div></div>
<div class="messages">
<div class="message" id="message-41198858">
<a name="41198858" href="/transcript/11540?m=41198858#41198858"><span style="display:inline-block;" class="action-link"><span class="img"> </span></span></a>
<div class="content">
its gibberish tho talking about a garden party at an uncles place
</div>
<span class="flash">
</span>
</div>
</div>
<div class="clear-both" style="height:0"> </div>
</div>
<div class="monologue user-137388">
<div class="signature"><div class="tiny-signature">
<div class="avatar avatar-16">
<img width="16" height="16" src="https://i.stack.imgur.com/Ma6sp.jpg?s=16&g=1" alt="QPaysTaxes" />
</div>
<div class="username"><a href="/users/137388/qpaystaxes" title="QPaysTaxes">QPaysTaxes</a></div>
</div></div>
<div class="messages">
<div class="message" id="message-41198860">
<a name="41198860" href="/transcript/11540?m=41198860#41198860"><span style="display:inline-block;" class="action-link"><span class="img"> </span></span></a>
<div class="content">
Gibberish is a perfectly good reason to red-flag.
</div>
<span class="flash">
</span>
</div>
<div class="message" id="message-41198864">
<a name="41198864" href="/transcript/11540?m=41198864#41198864"><span style="display:inline-block;" class="action-link"><span class="img"> </span></span></a>
<div class="content">
Whether you flag as spam or r/a doesn't really matter.
</div>
<span class="flash">
</span>
</div>
</div>
<div class="clear-both" style="height:0"> </div>
</div>
<div class="monologue user-205533">
<div class="signature"><div class="tiny-signature">
<div class="avatar avatar-16">
<img width="16" height="16" src="https://i.stack.imgur.com/idNpq.jpg?s=16&g=1" alt="Videonauth" />
</div>
<div class="username"><a href="/users/205533/videonauth" title="Videonauth">Videonauth</a></div>
</div></div>
<div class="messages">
<div class="message" id="message-41198869">
<a name="41198869" href="/transcript/11540?m=41198869#41198869"><span style="display:inline-block;" class="action-link"><span class="img"> </span></span></a>
<div class="content">
got it translated via leo.org
</div>
<span class="flash">
</span>
</div>
</div>
<div class="clear-both" style="height:0"> </div>
</div>
<div class="monologue user-155243">
<div class="signature"><div class="tiny-signature">
<div class="avatar avatar-16">
<img width="16" height="16" src="https://i.stack.imgur.com/fY1pd.jpg?s=16&g=1" alt="Nisse Engström" />
</div>
<div class="username"><a href="/users/155243/nisse-engstrom" title="Nisse Engström">Nisse Engström</a></div>
</div></div>
<div class="messages">
<div class="timestamp">1:58 AM</div>
<div class="message" id="message-41198872">
<a name="41198872" href="/transcript/11540?m=41198872#41198872"><span style="display:inline-block;" class="action-link"><span class="img"> </span></span></a>
<div class="content">
!!/report <a href="https://es.stackoverflow.com/a/118116/" rel="nofollow noopener noreferrer">es.stackoverflow.com/a/118116</a> <a href="https://es.stackoverflow.com/a/118114/" rel="nofollow noopener noreferrer">es.stackoverflow.com/a/118114</a> <a href="https://es.stackoverflow.com/a/118120/" rel="nofollow noopener noreferrer">es.stackoverflow.com/a/118120</a> <a href="https://es.stackoverflow.com/a/118119/" rel="nofollow noopener noreferrer">es.stackoverflow.com/a/118119</a>
</div>
<span class="flash">
</span>
</div>
</div>
<div class="clear-both" style="height:0"> </div>
</div>
<div class="monologue user-120914">
<div class="signature"><div class="tiny-signature">
<div class="avatar avatar-16">
<img width="16" height="16" src="https://i.stack.imgur.com/WyV1l.png?s=16&g=1" alt="SmokeDetector" />
</div>
<div class="username"><a href="/users/120914/smokedetector" title="SmokeDetector">SmokeDetector</a></div>
</div></div>
<div class="messages">
<div class="message" id="message-41198873">
<a name="41198873" href="/transcript/11540?m=41198873#41198873"><span style="display:inline-block;" class="action-link"><span class="img"> </span></span></a>
<div class="content">
[ <a href="//goo.gl/eLDYqh" rel="nofollow noopener noreferrer">SmokeDetector</a> | <a href="//m.erwaysoftware.com/posts/by-url?url=//es.stackoverflow.com/a/118116" rel="nofollow noopener noreferrer">MS</a> ] Manually reported answer (batch report: post 1 out of 4): <a href="//es.stackoverflow.com/a/118116">No logro entender porque me da este error ArrayIndexOutOfBoundsException: 6</a> by <a href="//es.stackoverflow.com/users/66574">Escroto Y Pene Gratis</a> on <code>es.stackoverflow.com</code>
</div>
<span class="flash">
</span>
</div>
<div class="message" id="message-41198876">
<a name="41198876" href="/transcript/11540?m=41198876#41198876"><span style="display:inline-block;" class="action-link"><span class="img"> </span></span></a>
<div class="content">
[ <a href="//goo.gl/eLDYqh" rel="nofollow noopener noreferrer">SmokeDetector</a> | <a href="//m.erwaysoftware.com/posts/by-url?url=//es.stackoverflow.com/a/118114" rel="nofollow noopener noreferrer">MS</a> ] Manually reported answer (batch report: post 2 out of 4): <a href="//es.stackoverflow.com/a/118114">Asignar valores a un combobox html con JSOUP</a> by <a href="//es.stackoverflow.com/users/66574">Escroto Y Pene Gratis</a> on <code>es.stackoverflow.com</code>
</div>
<span class="flash">
</span>
</div>
</div>
<div class="clear-both" style="height:0"> </div>
</div>
<div class="monologue user-137388">
<div class="signature"><div class="tiny-signature">
<div class="avatar avatar-16">
<img width="16" height="16" src="https://i.stack.imgur.com/Ma6sp.jpg?s=16&g=1" alt="QPaysTaxes" />
</div>
<div class="username"><a href="/users/137388/qpaystaxes" title="QPaysTaxes">QPaysTaxes</a></div>
</div></div>
<div class="messages">
<div class="message" id="message-41198880">
<a name="41198880" href="/transcript/11540?m=41198880#41198880"><span style="display:inline-block;" class="action-link"><span class="img"> </span></span></a>
<a class="reply-info" href="/transcript/11540?m=41198873#41198873"> </a>
<div class="content">
@SmokeDetector k
</div>
<span class="flash">
</span>
</div>
</div>
<div class="clear-both" style="height:0"> </div>
</div>
<div class="monologue user-120914">
<div class="signature"><div class="tiny-signature">
<div class="avatar avatar-16">
<img width="16" height="16" src="https://i.stack.imgur.com/WyV1l.png?s=16&g=1" alt="SmokeDetector" />
</div>
<div class="username"><a href="/users/120914/smokedetector" title="SmokeDetector">SmokeDetector</a></div>
</div></div>
<div class="messages">
<div class="message" id="message-41198879">
<a name="41198879" href="/transcript/11540?m=41198879#41198879"><span style="display:inline-block;" class="action-link"><span class="img"> </span></span></a>
<div class="content">
[ <a href="//goo.gl/eLDYqh" rel="nofollow noopener noreferrer">SmokeDetector</a> | <a href="//m.erwaysoftware.com/posts/by-url?url=//es.stackoverflow.com/a/118120" rel="nofollow noopener noreferrer">MS</a> ] Manually reported answer (batch report: post 3 out of 4): <a href="//es.stackoverflow.com/a/118120">Crystal Reports Arroja "E_NOINTERFACE" cuando reporte.SetDataSource(ds);</a> by <a href="//es.stackoverflow.com/users/66574">Escroto Y Pene Gratis</a> on <code>es.stackoverflow.com</code>
</div>
<span class="flash">
</span>
</div>
<div class="message" id="message-41198882">
<a name="41198882" href="/transcript/11540?m=41198882#41198882"><span style="display:inline-block;" class="action-link"><span class="img"> </span></span></a>
<div class="content">
[ <a href="//goo.gl/eLDYqh" rel="nofollow noopener noreferrer">SmokeDetector</a> | <a href="//m.erwaysoftware.com/posts/by-url?url=//es.stackoverflow.com/a/118119" rel="nofollow noopener noreferrer">MS</a> ] Manually reported answer (batch report: post 4 out of 4): <a href="//es.stackoverflow.com/a/118119">Descargar archivos desde la terminal de Mac</a> by <a href="//es.stackoverflow.com/users/66574">Escroto Y Pene Gratis</a> on <code>es.stackoverflow.com</code>
</div>
<span class="flash">
</span>
</div>
</div>
<div class="clear-both" style="height:0"> </div>
</div>
<div class="monologue user-137388">
<div class="signature"><div class="tiny-signature">
<div class="avatar avatar-16">
<img width="16" height="16" src="https://i.stack.imgur.com/Ma6sp.jpg?s=16&g=1" alt="QPaysTaxes" />
</div>
<div class="username"><a href="/users/137388/qpaystaxes" title="QPaysTaxes">QPaysTaxes</a></div>
</div></div>
<div class="messages">
<div class="message" id="message-41198884">
<a name="41198884" href="/transcript/11540?m=41198884#41198884"><span style="display:inline-block;" class="action-link"><span class="img"> </span></span></a>
<a class="reply-info" href="/transcript/11540?m=41198876#41198876"> </a>
<div class="content">
@SmokeDetector k
</div>
<span class="flash">
</span>
</div>
</div>
<div class="clear-both" style="height:0"> </div>
</div>
<div class="monologue user-205533">
<div class="signature"><div class="tiny-signature">
<div class="avatar avatar-16">
<img width="16" height="16" src="https://i.stack.imgur.com/idNpq.jpg?s=16&g=1" alt="Videonauth" />
</div>
<div class="username"><a href="/users/205533/videonauth" title="Videonauth">Videonauth</a></div>
</div></div>
<div class="messages">
<div class="message" id="message-41198888">
<a name="41198888" href="/transcript/11540?m=41198888#41198888"><span style="display:inline-block;" class="action-link"><span class="img"> </span></span></a>
<a class="reply-info" href="/transcript/11540?m=41198853#41198853"> </a>
<div class="content">
@QPaysTaxes well i posted the translation of that other spanish post which was not really PG friendly <i>coughs coughs</i>
</div>
<span class="flash">
</span>
</div>
</div>
<div class="clear-both" style="height:0"> </div>
</div>
<div class="monologue user-137388">
<div class="signature"><div class="tiny-signature">
<div class="avatar avatar-16">
<img width="16" height="16" src="https://i.stack.imgur.com/Ma6sp.jpg?s=16&g=1" alt="QPaysTaxes" />
</div>
<div class="username"><a href="/users/137388/qpaystaxes" title="QPaysTaxes">QPaysTaxes</a></div>
</div></div>
<div class="messages">
<div class="timestamp">1:59 AM</div>
<div class="message" id="message-41198889">
<a name="41198889" href="/transcript/11540?m=41198889#41198889"><span style="display:inline-block;" class="action-link"><span class="img"> </span></span></a>
<a class="reply-info" href="/transcript/11540?m=41198879#41198879"> </a>
<div class="content">
@SmokeDetector k
</div>
<span class="flash">
</span>
</div>
<div class="message" id="message-41198893">
<a name="41198893" href="/transcript/11540?m=41198893#41198893"><span style="display:inline-block;" class="action-link"><span class="img"> </span></span></a>
<a class="reply-info" href="/transcript/11540?m=41198882#41198882"> </a>
<div class="content">
@SmokeDetector k
</div>
<span class="flash">
</span>
</div>
</div>
<div class="clear-both" style="height:0"> </div>
</div>
</div>
<div class="clear-both"></div><div class="pager"><a href="/transcript/11540/2017/11/17/0-1"><span class="page-numbers">00:00 - 01:00</span></a><span class="page-numbers current">01:00 - 02:00</span><a href="/transcript/11540/2017/11/17/2-13"><span class="page-numbers">02:00 - 13:00</span></a><a href="/transcript/11540/2017/11/17/13-24"><span class="page-numbers">13:00 - 00:00</span></a></div><div class="clear-both"></div>
<br/>
<a href="/transcript/11540/2013/11/16" class="button noprint" title="2013-11-16">« first day (1461 days earlier)</a>
<a href="/transcript/11540/2017/11/16" class="button noprint" rel="prev" title="2017-11-16">← previous day</a>
<link rel="prev" title="2017-11-16" href="/transcript/11540/2017/11/16" />
<a href="/transcript/11540/2017/11/18" class="button noprint" rel="next" title="2017-11-18">next day →</a>
<link rel="next" title="2017-11-18" href="/transcript/11540/2017/11/18" />
<a href="/transcript/11540/2017/11/22" class="button noprint" title="2017-11-22"> last day (5 days later) »</a>
<div class="clear-both"></div>
<div id="sidebar">
<div id="sidebar-content">
<div id="info">
<form method="get" action="/search">
<input type="text" id="searchbox" name="q"/>
<input type="hidden" name="room" value="11540" />
</form>
<div style="padding-top:3px;"><a href="/" class="button">all rooms</a></div>
<br clear=left />
<h2>Transcript for</h2>
<a class="calendar-small-link" href="/transcript/11540/2017/11/16">
<div class="icon" title="2017-11-16"><div class="calendar-small"><span class="weekday-small">Nov</span>16</div></div>
</a>
<div class="icon" title="2017-11-17"><div class="calendar"><span class="weekday">Nov</span>17</div></div>
<a class="calendar-small-link" href="/transcript/11540/2017/11/18">
<div class="icon" title="2017-11-18"><div class="calendar-small"><span class="weekday-small">Nov</span>18</div></div>
</a>
<br clear=left />
<div class="room-mini"><div class="room-mini-header"><h3><span class="room-name"><a rel="noreferrer noopener" href="/rooms/11540/charcoal-hq">Charcoal HQ</a></span></h3><div title="Where diamonds are made, smoke is detected, and we break things by developing on production. 76,000 true positives and counting. [Recursive] oneboxes are awesome. Handy links: http://charcoal-se.org, https://github.com/Charcoal-SE, http://charcoal-se.org/blaze/" class="room-mini-description">Where diamonds are made, smoke is detected, and we break thing...<a href="http://charcoal-se.org" rel="nofollow noopener noreferrer"></a><a href="https://github.com/Charcoal-SE" rel="nofollow noopener noreferrer"></a><a href="http://charcoal-se.org/blaze/" rel="nofollow noopener noreferrer"></a></div></div><div class="room-current-user-count" title="current users"><a rel="noopener noreferrer" href="/rooms/info/11540/charcoal-hq">33</a></div><div class="room-message-count" title="messages in the last 2h"><a rel="noopener noreferrer" href="/transcript/11540">75</a></div><div class="mspark" style="height:25px;width:205px">
<div class="mspbar" style="width:8px;height:6px;left:0px;"></div><div class="mspbar" style="width:8px;height:7px;left:8px;"></div><div class="mspbar" style="width:8px;height:9px;left:16px;"></div><div class="mspbar" style="width:8px;height:9px;left:24px;"></div><div class="mspbar" style="width:8px;height:16px;left:32px;"></div><div class="mspbar" style="width:8px;height:20px;left:40px;"></div><div class="mspbar" style="width:8px;height:21px;left:48px;"></div><div class="mspbar" style="width:8px;height:25px;left:56px;"></div><div class="mspbar" style="width:8px;height:21px;left:64px;"></div><div class="mspbar" style="width:8px;height:25px;left:72px;"></div><div class="mspbar" style="width:8px;height:20px;left:80px;"></div><div class="mspbar" style="width:8px;height:20px;left:88px;"></div><div class="mspbar" style="width:8px;height:17px;left:96px;"></div><div class="mspbar" style="width:8px;height:13px;left:104px;"></div><div class="mspbar" style="width:8px;height:10px;left:112px;"></div><div class="mspbar" style="width:8px;height:9px;left:120px;"></div><div class="mspbar" style="width:8px;height:13px;left:128px;"></div><div class="mspbar" style="width:8px;height:12px;left:136px;"></div><div class="mspbar" style="width:8px;height:17px;left:144px;"></div><div class="mspbar" style="width:8px;height:15px;left:152px;"></div><div class="mspbar" style="width:8px;height:12px;left:160px;"></div><div class="mspbar" style="width:8px;height:9px;left:168px;"></div><div class="mspbar" style="width:8px;height:7px;left:176px;"></div><div class="mspbar" style="width:8px;height:4px;left:184px;"></div><div class="mspbar now" style="height:25px;left:166px;"></div></div>
<div class="clear-both"></div></div>
<div><a rel="noopener noreferrer" class="tag" href="http://stackexchange.com/tags/best-bad-practices/info">best-bad-practices</a> <a rel="noopener noreferrer" class="tag" href="http://stackexchange.com/tags/dev-on-prod/info">dev-on-prod</a> <a rel="noopener noreferrer" class="tag" href="http://stackexchange.com/tags/panic-driven-development/info">panic-driven-development</a> <a rel="noopener noreferrer" class="tag" href="http://stackexchange.com/tags/plastic-knives/info">plastic-knives</a></div>
<br class="clear-both" />
<div class="noprint">
<div id="transcript-links">
<a id="join-room" href="/rooms/11540/charcoal-hq" class="button">join this room</a><br />
<a href="/rooms/info/11540/charcoal-hq" class="button">about this room</a><br />
<a class="button" href="#" id="bookmark-button">bookmark a conversation</a><br />
</div>
<br />
<div class="mspark" style="height:300px;width:200px">
<div class="mspbar" style="height:12px;width:57px;top:0px;"></div><div class="msplab" style="top:0px;">00:00</div><div class="mspbar" style="height:12px;width:182px;top:12px;"></div><div class="mspbar" style="height:12px;width:54px;top:24px;"></div><div class="mspbar" style="height:12px;width:12px;top:36px;"></div><div class="mspbar" style="height:12px;width:131px;top:48px;"></div><div class="mspbar" style="height:12px;width:110px;top:60px;"></div><div class="mspbar" style="height:12px;width:161px;top:72px;"></div><div class="msplab" style="top:72px;">06:00</div><div class="mspbar" style="height:12px;width:99px;top:84px;"></div><div class="mspbar" style="height:12px;width:113px;top:96px;"></div><div class="mspbar" style="height:12px;width:200px;top:108px;"></div><div class="mspbar" style="height:12px;width:99px;top:120px;"></div><div class="mspbar" style="height:12px;width:110px;top:132px;"></div><div class="mspbar" style="height:12px;width:90px;top:144px;"></div><div class="msplab" style="top:144px;">12:00</div><div class="mspbar" style="height:12px;width:191px;top:156px;"></div><div class="mspbar" style="height:12px;width:18px;top:168px;"></div><div class="mspbar" style="height:12px;width:15px;top:180px;"></div><div class="mspbar" style="height:12px;width:6px;top:192px;"></div><div class="mspbar" style="height:12px;width:75px;top:204px;"></div><div class="mspbar" style="height:12px;width:90px;top:216px;"></div><div class="msplab" style="top:216px;">18:00</div><div class="mspbar" style="height:12px;width:45px;top:228px;"></div><div class="mspbar" style="height:12px;width:57px;top:240px;"></div><div class="mspbar" style="height:12px;width:36px;top:252px;"></div><div class="mspbar" style="height:12px;width:54px;top:264px;"></div><div class="mspbar" style="height:12px;width:27px;top:276px;"></div><a href="/transcript/11540/2017/11/17/0-1"><div class="msparea" style="top:0px;width:200px;height:12px" title="19 messages"></div></a><a 
href="/transcript/11540/2017/11/17/1-2"><div class="msparea now" style="top:12px;width:200px;height:12px" title="61 messages"></div></a><a href="/transcript/11540/2017/11/17/2-13"><div class="msparea" style="top:24px;width:200px;height:132px" title="395 messages"></div></a><a href="/transcript/11540/2017/11/17/13-24"><div class="msparea" style="top:156px;width:200px;height:132px" title="205 messages"></div></a></div>
<div class="msg-small">
all times are UTC
</div>
<br />
</div>
<br /><br /><div id="transcript-logo"><a rel="noreferrer noopener" href="http://stackexchange.com" title="The Stack Exchange Network"><img style="max-width:150px" src="//cdn-chat.sstatic.net/chat/img/se-logo-white.png?v=da" alt="The Stack Exchange Network"/></a>
</div>
<br class="clear-both" /><br />
<div id="copyright">
site design / logo © 2017 Stack Exchange Inc; <a rel="noopener noreferrer" href="http://stackexchange.com/legal">legal</a>
<br /><br />
<a href="#" class="mobile-on">mobile</a>
</div>
</div>
</div>
</div>
</div>
</div> <input id="fkey" name="fkey" type="hidden" value="64f0ae1fdde80a7b92d9281473795fde" />
</body>
</html>'''
| 43.956629
| 2,383
| 0.536746
| 8,370
| 70,946
| 4.54767
| 0.088172
| 0.0971
| 0.06849
| 0.040458
| 0.791798
| 0.76353
| 0.722362
| 0.706888
| 0.683113
| 0.664775
| 0
| 0.099848
| 0.28626
| 70,946
| 1,613
| 2,384
| 43.983881
| 0.651855
| 0
| 0
| 0.729941
| 0
| 0.208415
| 0.990359
| 0.330477
| 0
| 0
| 0
| 0
| 0.010763
| 1
| 0.000978
| false
| 0
| 0.000978
| 0
| 0.001957
| 0.009785
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
003db226f92b023c219ed5b17f0367bc583465db
| 124,728
|
py
|
Python
|
ros_bt_py/test/unittest/test_tree_manager.py
|
fzi-forschungszentrum-informatik/ros_bt_py
|
ed65e2b2f0a03411101f455c0ab38401ba50bada
|
[
"MIT",
"Apache-2.0",
"BSD-3-Clause"
] | 4
|
2022-03-11T14:30:43.000Z
|
2022-03-31T07:21:35.000Z
|
ros_bt_py/test/unittest/test_tree_manager.py
|
fzi-forschungszentrum-informatik/ros_bt_py
|
ed65e2b2f0a03411101f455c0ab38401ba50bada
|
[
"MIT",
"Apache-2.0",
"BSD-3-Clause"
] | null | null | null |
ros_bt_py/test/unittest/test_tree_manager.py
|
fzi-forschungszentrum-informatik/ros_bt_py
|
ed65e2b2f0a03411101f455c0ab38401ba50bada
|
[
"MIT",
"Apache-2.0",
"BSD-3-Clause"
] | null | null | null |
# -------- BEGIN LICENSE BLOCK --------
# Copyright 2022 FZI Forschungszentrum Informatik
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# * Neither the name of the {copyright_holder} nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
# -------- END LICENSE BLOCK --------
import unittest
try:
import unittest.mock as mock
except ImportError:
import mock
import sys
import time
from ros_bt_py_msgs.msg import Node as NodeMsg, Message, Package
from ros_bt_py_msgs.msg import NodeData, NodeDataWiring, NodeDataLocation, Tree
from ros_bt_py_msgs.srv import (WireNodeDataRequest, AddNodeRequest, RemoveNodeRequest,
ControlTreeExecutionRequest, GetAvailableNodesRequest,
SetExecutionModeRequest, SetOptionsRequest, ContinueRequest,
LoadTreeRequest, MoveNodeRequest, ReplaceNodeRequest,
MorphNodeRequest, ClearTreeRequest, LoadTreeFromPathRequest,
SetExecutionModeResponse, ModifyBreakpointsRequest,
GetSubtreeRequest, ReloadTreeRequest, WireNodeDataResponse,
RemoveNodeResponse, GenerateSubtreeRequest, AddNodeAtIndexRequest,
ChangeTreeNameRequest)
from ros_bt_py.node import Node, Leaf, FlowControl, define_bt_node
from ros_bt_py.node_config import NodeConfig
from ros_bt_py.nodes.sequence import Sequence
from ros_bt_py.nodes.mock_nodes import MockLeaf
from ros_bt_py.exceptions import BehaviorTreeException, MissingParentError, TreeTopologyError
from ros_bt_py.tree_manager import TreeManager
from ros_bt_py.tree_manager import (get_success as tm_get_success,
get_error_message as tm_get_error_message)
from ros_bt_py.helpers import json_encode, json_decode
from ros_bt_py.ros_helpers import LoggerLevel
# Python 2/3 compatibility: Python 3 has no builtin `unicode`,
# so alias it to `str` when the lookup fails.
try:
    unicode
except NameError:
    unicode = str
@define_bt_node(NodeConfig(
    options={},
    inputs={},
    outputs={},
    max_children=0))
class LongRunningNode(Leaf):
    """Leaf that sleeps for one second on every tick, then succeeds.

    Used to exercise tree-manager code paths that must cope with a tick
    taking a long time (e.g. stopping a periodically ticking tree).
    """

    def _do_setup(self):
        pass

    def _do_tick(self):
        # Simulate long-running work: each tick blocks for ~1 second.
        time.sleep(1.0)
        return NodeMsg.SUCCEEDED

    def _do_shutdown(self):
        pass

    def _do_reset(self):
        return NodeMsg.IDLE

    def _do_untick(self):
        return NodeMsg.IDLE
class TestTreeManager(unittest.TestCase):
def setUp(self):
self.tree_msg = None
self.debug_info_msg = None
def set_tree_msg(msg):
self.tree_msg = msg
def set_debug_info_msg(msg):
self.debug_info_msg = msg
self.manager = TreeManager(publish_tree_callback=set_tree_msg,
publish_debug_info_callback=set_debug_info_msg)
self.node_msg = NodeMsg(
module='ros_bt_py.nodes.passthrough_node',
node_class='PassthroughNode',
inputs=[NodeData(key='in',
serialized_value=json_encode(42))],
options=[NodeData(key='passthrough_type',
serialized_value=json_encode(int))])
self.constant_msg = NodeMsg(
module='ros_bt_py.nodes.constant',
node_class='Constant',
options=[NodeData(key='constant_type',
serialized_value=json_encode(int)),
NodeData(key='constant_value',
serialized_value=json_encode(42))])
self.sequence_msg = NodeMsg(
module='ros_bt_py.nodes.sequence',
node_class='Sequence')
self.memory_sequence_msg = NodeMsg(
module='ros_bt_py.nodes.sequence',
node_class='MemorySequence')
self.succeeder_msg = NodeMsg(
module='ros_bt_py.nodes.mock_nodes',
node_class='MockLeaf',
options=[NodeData(key='output_type',
serialized_value=json_encode(str)),
NodeData(key='state_values',
serialized_value=json_encode([NodeMsg.SUCCEEDED])),
NodeData(key='output_values',
serialized_value=json_encode(['Yay!']))])
def testEnsureTickFrequencyGreaterZero(self):
manager = TreeManager(tick_frequency_hz=0)
self.assertNotEquals(manager.tree_msg.tick_frequency_hz, 0)
def testTickFrequencyTooHigh(self):
tick_frequency_hz = 10000000000000.0
sleep_duration_sec = (1.0 / tick_frequency_hz)
manager = TreeManager(tick_frequency_hz=tick_frequency_hz)
add_request = AddNodeRequest(node=self.node_msg,
allow_rename=True)
self.assertTrue(manager.add_node(add_request).success)
execution_request = ControlTreeExecutionRequest()
execution_request.command = ControlTreeExecutionRequest.TICK_PERIODICALLY
start_time = time.time()
self.assertTrue(get_success(manager.control_execution(execution_request)))
tick_duration = time.time() - start_time
self.assertGreater(tick_duration, sleep_duration_sec)
time.sleep(0.1)
manager.tree_msg.state = Tree.STOP_REQUESTED
manager._tick_thread.join(0.1)
self.assertFalse(manager._tick_thread.is_alive())
def testLoadNodeModule(self):
manager = TreeManager(module_list=['ros_bt_py.nodes.sequence'])
self.assertIn('ros_bt_py.nodes.sequence', sys.modules)
def testCycle(self):
node = self.manager.instantiate_node_from_msg(self.node_msg, allow_rename=True)
self.manager.nodes[node.name].parent = node.name
self.assertRaises(TreeTopologyError, self.manager.find_root)
def testOrphan(self):
node = self.manager.instantiate_node_from_msg(self.node_msg, allow_rename=True)
node2 = self.manager.instantiate_node_from_msg(self.node_msg, allow_rename=True)
self.manager.nodes[node.name].parent = node2
self.manager.remove_node(RemoveNodeRequest(node_name=node2.name,
remove_children=False))
self.assertRaises(MissingParentError, self.manager.tick, True)
    def testNoNodes(self):
        """Ticking a tree with no nodes is a no-op that leaves it EDITABLE."""
        self.manager.tick(once=True)
        self.assertEqual(self.manager.tree_msg.state, Tree.EDITABLE)
def testGetSuccessErrorMessageDict(self):
message = {'success': False,
'error_message': 'error'}
self.assertFalse(tm_get_success(message))
self.assertEqual(tm_get_error_message(message), 'error')
def testGenerateSubtreeService(self):
generate_request = GenerateSubtreeRequest()
generate_response = self.manager.generate_subtree(generate_request)
self.assertFalse(get_success(generate_response))
add_request = AddNodeRequest(node=self.sequence_msg,
allow_rename=True)
response = self.manager.add_node(add_request)
self.assertEqual(len(self.manager.nodes), 1)
self.assertTrue(get_success(response))
add_request = AddNodeRequest(node=self.sequence_msg,
allow_rename=True,
parent_name=response.actual_node_name)
response = self.manager.add_node(add_request)
self.assertEqual(len(self.manager.nodes), 2)
self.assertTrue(get_success(response))
seq_2_name = response.actual_node_name
add_request = AddNodeRequest(node=self.succeeder_msg,
allow_rename=True,
parent_name=seq_2_name)
response = self.manager.add_node(add_request)
self.assertEqual(len(self.manager.nodes), 3)
self.assertTrue(get_success(response))
generate_request = GenerateSubtreeRequest()
generate_request.nodes = [response.actual_node_name]
generate_response = self.manager.generate_subtree(generate_request)
self.assertTrue(get_success(generate_response))
def testGetSubtreeService(self):
add_request = AddNodeRequest(node=self.sequence_msg,
allow_rename=True)
response = self.manager.add_node(add_request)
self.assertEqual(len(self.manager.nodes), 1)
self.assertTrue(get_success(response))
add_request = AddNodeRequest(node=self.sequence_msg,
allow_rename=True,
parent_name=response.actual_node_name)
response = self.manager.add_node(add_request)
self.assertEqual(len(self.manager.nodes), 2)
self.assertTrue(get_success(response))
seq_2_name = response.actual_node_name
add_request = AddNodeRequest(node=self.succeeder_msg,
allow_rename=True,
parent_name=seq_2_name)
response = self.manager.add_node(add_request)
self.assertEqual(len(self.manager.nodes), 3)
self.assertTrue(get_success(response))
add_request = AddNodeRequest(node=self.succeeder_msg,
allow_rename=True,
parent_name=seq_2_name)
response = self.manager.add_node(add_request)
self.assertEqual(len(self.manager.nodes), 4)
self.assertTrue(get_success(response))
subtree_request = GetSubtreeRequest(subtree_root_name=seq_2_name)
subtree_response = self.manager.get_subtree(subtree_request)
self.assertTrue(get_success(subtree_response))
self.assertEqual(len(subtree_response.subtree.nodes), 3)
subtree_request = GetSubtreeRequest(subtree_root_name='not_in_tree')
subtree_response = self.manager.get_subtree(subtree_request)
self.assertFalse(get_success(subtree_response))
def testGetSubtreeServiceWirings(self):
load_request = LoadTreeRequest(tree=Tree(
name='from_file',
path='package://ros_bt_py/test/testdata/trees/get_subtree.yaml'))
self.assertTrue(get_success(self.manager.load_tree(load_request)))
wire_request = WireNodeDataRequest()
wire_request.wirings.append(NodeDataWiring(
source=NodeDataLocation(node_name='PassthroughNode_2',
data_key='out',
data_kind=NodeDataLocation.OUTPUT_DATA),
target=NodeDataLocation(node_name='PassthroughNode',
data_key='in',
data_kind=NodeDataLocation.INPUT_DATA)))
wire_request.wirings.append(NodeDataWiring(
source=NodeDataLocation(node_name='PassthroughNode_2',
data_key='in',
data_kind=NodeDataLocation.INPUT_DATA),
target=NodeDataLocation(node_name='PassthroughNode',
data_key='out',
data_kind=NodeDataLocation.OUTPUT_DATA)))
wire_request.wirings.append(NodeDataWiring(
source=NodeDataLocation(node_name='PassthroughNode_2',
data_key='in',
data_kind=NodeDataLocation.INPUT_DATA),
target=NodeDataLocation(node_name='PassthroughNode',
data_key='in',
data_kind=NodeDataLocation.INPUT_DATA)))
wire_request.wirings.append(NodeDataWiring(
source=NodeDataLocation(node_name='PassthroughNode_2',
data_key='out',
data_kind=NodeDataLocation.OUTPUT_DATA),
target=NodeDataLocation(node_name='PassthroughNode',
data_key='out',
data_kind=NodeDataLocation.OUTPUT_DATA)))
wire_request.wirings.append(NodeDataWiring(
source=NodeDataLocation(node_name='PassthroughNode',
data_key='out',
data_kind=NodeDataLocation.OUTPUT_DATA),
target=NodeDataLocation(node_name='PassthroughNode_2',
data_key='in',
data_kind=NodeDataLocation.INPUT_DATA)))
wire_request.wirings.append(NodeDataWiring(
source=NodeDataLocation(node_name='PassthroughNode',
data_key='in',
data_kind=NodeDataLocation.INPUT_DATA),
target=NodeDataLocation(node_name='PassthroughNode_2',
data_key='out',
data_kind=NodeDataLocation.OUTPUT_DATA)))
wire_request.wirings.append(NodeDataWiring(
source=NodeDataLocation(node_name='PassthroughNode',
data_key='in',
data_kind=NodeDataLocation.INPUT_DATA),
target=NodeDataLocation(node_name='PassthroughNode_2',
data_key='in',
data_kind=NodeDataLocation.INPUT_DATA)))
wire_request.wirings.append(NodeDataWiring(
source=NodeDataLocation(node_name='PassthroughNode',
data_key='out',
data_kind=NodeDataLocation.OUTPUT_DATA),
target=NodeDataLocation(node_name='PassthroughNode_2',
data_key='out',
data_kind=NodeDataLocation.OUTPUT_DATA)))
response = self.manager.wire_data(wire_request)
self.assertTrue(get_success(response), get_error_message(response))
subtree_request = GetSubtreeRequest(subtree_root_name='Sequence')
subtree_response = self.manager.get_subtree(subtree_request)
self.assertTrue(get_success(subtree_response))
self.assertEqual(len(subtree_response.subtree.nodes), 4)
response = self.manager.unwire_data(wire_request)
self.assertTrue(get_success(response), get_error_message(response))
def testGetSubtreeServiceSubscriptions(self):
add_request = AddNodeRequest(node=self.sequence_msg,
allow_rename=True)
response = self.manager.add_node(add_request)
self.assertEqual(len(self.manager.nodes), 1)
self.assertTrue(get_success(response))
node = self.manager.nodes[response.actual_node_name]
node.subscriptions.append(NodeDataWiring(
source=NodeDataLocation(node_name=node.name,
data_key='out',
data_kind=NodeDataLocation.OUTPUT_DATA),
target=NodeDataLocation(node_name='also_missing',
data_key='out',
data_kind=NodeDataLocation.OUTPUT_DATA)))
subtree_request = GetSubtreeRequest(subtree_root_name=response.actual_node_name)
subtree_response = self.manager.get_subtree(subtree_request)
self.assertTrue(get_success(subtree_response))
node.subscriptions.append(NodeDataWiring(
source=NodeDataLocation(node_name='missing',
data_key='out',
data_kind=NodeDataLocation.OUTPUT_DATA),
target=NodeDataLocation(node_name='also_missing',
data_key='out',
data_kind=NodeDataLocation.OUTPUT_DATA)))
subtree_request = GetSubtreeRequest(subtree_root_name=response.actual_node_name)
subtree_response = self.manager.get_subtree(subtree_request)
self.assertFalse(get_success(subtree_response))
def testTickExceptionHandling(self):
@define_bt_node(NodeConfig(
options={},
inputs={},
outputs={},
max_children=0))
class ExceptionNode(Leaf):
def _do_setup(self):
pass
def _do_tick(self):
raise BehaviorTreeException
def _do_shutdown(self):
pass
def _do_reset(self):
return NodeMsg.IDLE
def _do_untick(self):
return NodeMsg.IDLE
node = ExceptionNode()
manager = TreeManager(show_traceback_on_exception=False)
manager.nodes[node.name] = node
self.assertEqual(manager.tree_msg.state, Tree.EDITABLE)
manager.tick_report_exceptions()
self.assertEqual(manager.tree_msg.state, Tree.ERROR)
manager = TreeManager(show_traceback_on_exception=True)
manager.nodes[node.name] = node
self.assertEqual(manager.tree_msg.state, Tree.EDITABLE)
manager.tick_report_exceptions()
self.assertEqual(manager.tree_msg.state, Tree.ERROR)
def testLoadNode(self):
_ = self.manager.instantiate_node_from_msg(self.node_msg, allow_rename=True)
_ = self.manager.instantiate_node_from_msg(self.node_msg, allow_rename=True)
self.node_msg.name = 'Test Node'
_ = self.manager.instantiate_node_from_msg(self.node_msg, allow_rename=True)
self.assertIn('PassthroughNode', self.manager.nodes)
self.assertIn('PassthroughNode_2', self.manager.nodes)
self.assertIn('Test Node', self.manager.nodes)
def testWireData(self):
root = self.manager.instantiate_node_from_msg(
NodeMsg(
module='ros_bt_py.nodes.sequence',
node_class='Sequence',
name='root'),
allow_rename=False)
self.node_msg.name = 'source_node'
source = self.manager.instantiate_node_from_msg(self.node_msg, allow_rename=True)
root.add_child(source)
self.node_msg.name = 'target_node'
target = self.manager.instantiate_node_from_msg(self.node_msg, allow_rename=True)
root.add_child(target)
self.assertIn('source_node', self.manager.nodes)
self.assertIn('target_node', self.manager.nodes)
valid_request = WireNodeDataRequest()
valid_request.wirings.append(NodeDataWiring(
source=NodeDataLocation(node_name='source_node',
data_key='out',
data_kind=NodeDataLocation.OUTPUT_DATA),
target=NodeDataLocation(node_name='target_node',
data_key='in',
data_kind=NodeDataLocation.INPUT_DATA)))
response = self.manager.wire_data(valid_request)
self.assertTrue(get_success(response), get_error_message(response))
self.assertEqual(len(self.manager.nodes['source_node'].outputs.callbacks), 1)
self.assertEqual(len(self.manager.tree_msg.data_wirings), 1)
def testWireWithInvalidKey(self):
self.node_msg.name = 'source_node'
self.manager.instantiate_node_from_msg(self.node_msg, allow_rename=True)
self.node_msg.name = 'target_node'
self.manager.instantiate_node_from_msg(self.node_msg, allow_rename=True)
self.assertIn('source_node', self.manager.nodes)
self.assertIn('target_node', self.manager.nodes)
invalid_key_request = WireNodeDataRequest()
invalid_key_request.wirings.append(NodeDataWiring(
source=NodeDataLocation(node_name='source_node',
# PassthroghNode does not have this key!
data_key='wrong',
data_kind=NodeDataLocation.OUTPUT_DATA),
target=NodeDataLocation(node_name='target_node',
data_key='in',
data_kind=NodeDataLocation.INPUT_DATA)))
response = self.manager.wire_data(invalid_key_request)
self.assertFalse(get_success(response))
def testWireWithInvalidNodeName(self):
self.node_msg.name = 'source_node'
self.manager.instantiate_node_from_msg(self.node_msg, allow_rename=True)
self.node_msg.name = 'target_node'
self.manager.instantiate_node_from_msg(self.node_msg, allow_rename=True)
self.assertIn('source_node', self.manager.nodes)
self.assertIn('target_node', self.manager.nodes)
invalid_node_request = WireNodeDataRequest()
invalid_node_request.wirings.append(NodeDataWiring(
# Wrong node name for source node
source=NodeDataLocation(node_name='fantasy_node',
data_key='out',
data_kind=NodeDataLocation.OUTPUT_DATA),
target=NodeDataLocation(node_name='target_node',
data_key='in',
data_kind=NodeDataLocation.INPUT_DATA)))
response = self.manager.wire_data(invalid_node_request)
self.assertFalse(get_success(response))
def testMultiWireWithOneInvalid(self):
"""WireNodeData supports wiring multiple pairs of NodeData at once.
If there's an error while handling any pair, none of the wirings must be applied!
"""
self.node_msg.name = 'source_node'
self.manager.instantiate_node_from_msg(self.node_msg, allow_rename=True)
self.node_msg.name = 'target_node'
self.manager.instantiate_node_from_msg(self.node_msg, allow_rename=True)
self.assertIn('source_node', self.manager.nodes)
self.assertIn('target_node', self.manager.nodes)
invalid_multi_request = WireNodeDataRequest()
# This is fine and should work
invalid_multi_request.wirings.append(NodeDataWiring(
source=NodeDataLocation(node_name='source_node',
data_key='out',
data_kind=NodeDataLocation.OUTPUT_DATA),
target=NodeDataLocation(node_name='target_node',
data_key='in',
data_kind=NodeDataLocation.INPUT_DATA)))
invalid_multi_request.wirings.append(NodeDataWiring(
# Wrong node name for source node
source=NodeDataLocation(node_name='fantasy_node',
data_key='out',
data_kind=NodeDataLocation.OUTPUT_DATA),
target=NodeDataLocation(node_name='target_node',
data_key='in',
data_kind=NodeDataLocation.INPUT_DATA)))
response = self.manager.wire_data(invalid_multi_request)
self.assertFalse(get_success(response))
# The first half should not have been applied -> no callbacks for
# source_node
self.assertEqual(len(self.manager.nodes['source_node'].outputs.callbacks), 0)
def testWireWithError(self):
root = self.manager.instantiate_node_from_msg(
NodeMsg(
module='ros_bt_py.nodes.sequence',
node_class='Sequence',
name='root'),
allow_rename=False)
self.node_msg.name = 'source_node'
source = self.manager.instantiate_node_from_msg(self.node_msg, allow_rename=True)
root.add_child(source)
self.node_msg.name = 'target_node'
target = self.manager.instantiate_node_from_msg(self.node_msg, allow_rename=True)
root.add_child(target)
self.assertIn('source_node', self.manager.nodes)
self.assertIn('target_node', self.manager.nodes)
valid_request = WireNodeDataRequest()
valid_request.wirings.append(NodeDataWiring(
source=NodeDataLocation(node_name='source_node',
data_key='out',
data_kind=NodeDataLocation.OUTPUT_DATA),
target=NodeDataLocation(node_name='target_node',
data_key='in',
data_kind=NodeDataLocation.INPUT_DATA)))
response = self.manager.wire_data(valid_request)
self.assertTrue(get_success(response))
valid_request.wirings.append(NodeDataWiring(
source=NodeDataLocation(node_name='source_node',
data_key='out',
data_kind=NodeDataLocation.OUTPUT_DATA),
target=NodeDataLocation(node_name='target_node_does_not_exist',
data_key='in',
data_kind=NodeDataLocation.INPUT_DATA)))
self.manager.nodes['target_node'].wire_data = mock.MagicMock()
self.manager.nodes['target_node'].wire_data.side_effect = BehaviorTreeException()
response = self.manager.unwire_data(valid_request)
self.assertFalse(get_success(response))
def testUnwire(self):
root = self.manager.instantiate_node_from_msg(
NodeMsg(
module='ros_bt_py.nodes.sequence',
node_class='Sequence',
name='root'),
allow_rename=False)
wire_request = WireNodeDataRequest()
wire_request.wirings.append(NodeDataWiring(
# Wrong node name for source node
source=NodeDataLocation(node_name='source_node',
data_key='out',
data_kind=NodeDataLocation.OUTPUT_DATA),
target=NodeDataLocation(node_name='target_node',
data_key='in',
data_kind=NodeDataLocation.INPUT_DATA)))
response = self.manager.unwire_data(wire_request)
# Our manager has no nodes at all, so unwiring anything won't work
self.assertFalse(get_success(response))
self.node_msg.name = 'source_node'
source = self.manager.instantiate_node_from_msg(self.node_msg, allow_rename=True)
root.add_child(source)
self.node_msg.name = 'target_node'
target = self.manager.instantiate_node_from_msg(self.node_msg, allow_rename=True)
root.add_child(target)
response = self.manager.unwire_data(wire_request)
# The nodes and keys exist. There aren't any callbacks to remove, but
# the unwire operation still succeeds (after running it, the two data
# values are unconnected).
self.assertTrue(
get_success(response),
get_error_message(response) + "\n" + str(self.manager.nodes))
response = self.manager.wire_data(wire_request)
self.assertTrue(get_success(response))
self.assertEqual(len(self.manager.tree_msg.data_wirings), 1)
response = self.manager.unwire_data(wire_request)
self.assertTrue(get_success(response))
self.assertEqual(len(self.manager.tree_msg.data_wirings), 0)
def testUnwireWithError(self):
    """wire_data fails cleanly when a partially-applied request must be undone.

    The second wiring targets a nonexistent node, so wire_data must fail.
    The mocked ``unwire_data`` raising BehaviorTreeException exercises the
    error path hit while undoing already-applied wirings — presumably the
    rollback inside TreeManager.wire_data; TODO confirm against that code.
    """
    root = self.manager.instantiate_node_from_msg(
        NodeMsg(
            module='ros_bt_py.nodes.sequence',
            node_class='Sequence',
            name='root'),
        allow_rename=False)
    self.node_msg.name = 'source_node'
    source = self.manager.instantiate_node_from_msg(self.node_msg, allow_rename=True)
    root.add_child(source)
    self.node_msg.name = 'target_node'
    target = self.manager.instantiate_node_from_msg(self.node_msg, allow_rename=True)
    root.add_child(target)
    self.assertIn('source_node', self.manager.nodes)
    self.assertIn('target_node', self.manager.nodes)
    # Make any unwire attempt on the target node raise.
    self.manager.nodes['target_node'].unwire_data = mock.MagicMock()
    self.manager.nodes['target_node'].unwire_data.side_effect = BehaviorTreeException()
    valid_request = WireNodeDataRequest()
    valid_request.wirings.append(NodeDataWiring(
        source=NodeDataLocation(node_name='source_node',
                                data_key='out',
                                data_kind=NodeDataLocation.OUTPUT_DATA),
        target=NodeDataLocation(node_name='target_node',
                                data_key='in',
                                data_kind=NodeDataLocation.INPUT_DATA)))
    # This second wiring points at a node that does not exist, so the
    # request as a whole must fail.
    valid_request.wirings.append(NodeDataWiring(
        source=NodeDataLocation(node_name='source_node',
                                data_key='out',
                                data_kind=NodeDataLocation.OUTPUT_DATA),
        target=NodeDataLocation(node_name='target_node_does_not_exist',
                                data_key='in',
                                data_kind=NodeDataLocation.INPUT_DATA)))
    response = self.manager.wire_data(valid_request)
    self.assertFalse(get_success(response))
def testClearTree(self):
    """clear() is refused while the tree is running and succeeds after SHUTDOWN."""
    # Adding a node to the tree and marking it as running
    add_request = AddNodeRequest(node=self.succeeder_msg)
    response = self.manager.add_node(add_request)
    self.assertEqual(len(self.manager.nodes), 1)
    self.assertTrue(get_success(response))
    self.manager.nodes['MockLeaf'].state = NodeMsg.RUNNING
    # Clear will fail until the tree is shutdown
    clear_request = ClearTreeRequest()
    response = self.manager.clear(clear_request)
    self.assertFalse(get_success(response))
    self.assertEqual(len(self.manager.nodes), 1)
    # Fixed: the request was originally constructed with TICK_ONCE and the
    # command immediately overwritten — construct it with SHUTDOWN directly.
    execution_request = ControlTreeExecutionRequest(
        command=ControlTreeExecutionRequest.SHUTDOWN)
    self.assertTrue(self.manager.control_execution(execution_request).success)
    # after shutdown clear works again
    response = self.manager.clear(clear_request)
    self.assertTrue(get_success(response))
    self.assertEqual(len(self.manager.nodes), 0)
    # even a tree with multiple nodes (and no root) is cleared
    add_request = AddNodeRequest(node=self.succeeder_msg, allow_rename=True)
    response = self.manager.add_node(add_request)
    self.assertEqual(len(self.manager.nodes), 1)
    self.assertTrue(get_success(response))
    response = self.manager.add_node(add_request)
    self.assertEqual(len(self.manager.nodes), 2)
    self.assertTrue(get_success(response))
    response = self.manager.clear(clear_request)
    self.assertTrue(get_success(response))
    self.assertEqual(len(self.manager.nodes), 0)
def testAddNode(self):
    """A valid node message is added; a bogus module/class is rejected."""
    # A well-formed node message should be accepted and registered.
    good_response = self.manager.add_node(AddNodeRequest(node=self.node_msg))
    self.assertEqual(len(self.manager.nodes), 1)
    self.assertTrue(get_success(good_response))
    # A node message pointing at a nonexistent module/class must fail.
    bogus_msg = NodeMsg(module='asdf',
                        node_class='foo')
    bad_response = self.manager.add_node(AddNodeRequest(node=bogus_msg))
    self.assertFalse(get_success(bad_response))
def testAddWithMissingParent(self):
    """Adding a node under a nonexistent parent must fail."""
    orphan_request = AddNodeRequest(node=self.node_msg, parent_name='foo')
    self.assertFalse(self.manager.add_node(orphan_request).success)
def testAddMultiple(self):
    """A node can be added as a child of a previously added node."""
    parent_response = self.manager.add_node(AddNodeRequest(node=self.sequence_msg))
    self.assertEqual(len(self.manager.nodes), 1)
    self.assertTrue(get_success(parent_response))
    # Use the name reported for the first node as the parent of the second.
    child_response = self.manager.add_node(
        AddNodeRequest(node=self.node_msg,
                       parent_name=parent_response.actual_node_name))
    self.assertEqual(len(self.manager.nodes), 2)
    self.assertTrue(get_success(child_response))
def testAddRenaming(self):
    """Duplicate names are rejected unless allow_rename is True."""
    add_request = AddNodeRequest(node=self.sequence_msg)
    response = self.manager.add_node(add_request)
    self.assertEqual(len(self.manager.nodes), 1)
    self.assertTrue(get_success(response))
    # Add the same node again - since allow_rename should default
    # to false, this will fail.
    response = self.manager.add_node(add_request)
    self.assertEqual(len(self.manager.nodes), 1)
    self.assertFalse(get_success(response))
    # Same with allow_rename set to False explicitly
    add_request.allow_rename = False
    response = self.manager.add_node(add_request)
    self.assertEqual(len(self.manager.nodes), 1)
    self.assertFalse(get_success(response))
    # But it should work if we set allow_rename to True
    add_request.allow_rename = True
    response = self.manager.add_node(add_request)
    self.assertEqual(len(self.manager.nodes), 2)
    self.assertTrue(get_success(response))
def testAddWithChild(self):
    """A node added with child_names adopts the named existing node as its child."""
    add_request = AddNodeRequest(node=self.sequence_msg)
    response = self.manager.add_node(add_request)
    self.assertTrue(get_success(response))
    self.assertEqual(len(self.manager.nodes), 1)
    # Declare the first node as a child of the node we add next.
    self.sequence_msg.child_names.append(response.actual_node_name)
    add_request = AddNodeRequest(node=self.sequence_msg,
                                 allow_rename=True)
    response = self.manager.add_node(add_request)
    self.assertTrue(get_success(response))
    self.assertEqual(len(self.manager.nodes), 2)
    root = self.manager.find_root()
    # The newly inserted second node should be the root of the tree, since
    # the other one is its child
    self.assertEqual(response.actual_node_name, root.name)
    self.assertEqual(len(root.children), 1)
def testAddWithMissingChild(self):
    """A node message referencing an unknown child must not be added."""
    self.sequence_msg.child_names.append('imaginary_node')
    response = self.manager.add_node(AddNodeRequest(node=self.sequence_msg))
    # Don't add nodes with missing children to the tree.
    self.assertFalse(get_success(response))
    self.assertEqual(len(self.manager.nodes), 0)
def testAddWithInvalidOption(self):
    """An option whose serialized value has the wrong type is rejected."""
    # passthrough_type must be a type, not an int.
    bad_option = NodeData(key='passthrough_type',
                          serialized_value=json_encode(42))
    self.node_msg.options = [bad_option]
    response = self.manager.add_node(AddNodeRequest(node=self.node_msg))
    self.assertFalse(get_success(response))
def testBuildCycle(self):
    """Adding a node that is simultaneously parent and child of another is rejected."""
    add_request = AddNodeRequest(node=self.sequence_msg)
    response = self.manager.add_node(add_request)
    self.assertTrue(get_success(response))
    self.assertEqual(len(self.manager.nodes), 1)
    # The new node both claims the existing node as its child AND asks to
    # be attached under that same node - that would form a cycle.
    self.sequence_msg.child_names.append(response.actual_node_name)
    add_request = AddNodeRequest(parent_name=response.actual_node_name,
                                 node=self.sequence_msg,
                                 allow_rename=True)
    response = self.manager.add_node(add_request)
    self.assertFalse(get_success(response))
    # The failed add must not leave a partial node behind.
    self.assertEqual(len(self.manager.nodes), 1)
def testRemoveNode(self):
    """A node can be removed exactly once; removing it again fails."""
    instance = self.manager.instantiate_node_from_msg(self.node_msg, allow_rename=True)
    self.assertEqual(len(self.manager.nodes), 1)
    remove_request = RemoveNodeRequest(node_name=instance.name)
    first_response = self.manager.remove_node(remove_request)
    self.assertTrue(get_success(first_response))
    self.assertEqual(len(self.manager.nodes), 0)
    # Second remove fails - there's nothing left to remove.
    second_response = self.manager.remove_node(remove_request)
    self.assertFalse(get_success(second_response))
def testRemoveParent(self):
    """Removing a parent with remove_children=False keeps the child in the tree."""
    add_response = self.manager.add_node(
        AddNodeRequest(node=self.sequence_msg))
    self.manager.add_node(
        AddNodeRequest(node=self.node_msg,
                       parent_name=add_response.actual_node_name))
    self.assertEqual(len(self.manager.nodes), 2)
    remove_response = self.manager.remove_node(
        RemoveNodeRequest(node_name=add_response.actual_node_name,
                          remove_children=False))
    self.assertTrue(get_success(remove_response))
    # Only the parent is gone; the orphaned child remains.
    self.assertEqual(len(self.manager.nodes), 1)
    self.manager.tick(once=True)
    # The remaining node is now the root and should tick successfully.
    root_node = [node for node in self.tree_msg.nodes
                 if node.name == self.tree_msg.root_name][0]
    self.assertEqual(root_node.state, NodeMsg.SUCCEEDED)
def testRemoveParentAndChildren(self):
    """remove_children=True removes the node together with its whole subtree."""
    add_response = self.manager.add_node(AddNodeRequest(node=self.sequence_msg))
    parent_name = add_response.actual_node_name
    self.manager.add_node(AddNodeRequest(node=self.node_msg,
                                         parent_name=parent_name))
    self.assertEqual(len(self.manager.nodes), 2)
    remove_response = self.manager.remove_node(
        RemoveNodeRequest(node_name=parent_name,
                          remove_children=True))
    self.assertTrue(get_success(remove_response), get_error_message(remove_response))
    # Parent and child are both gone.
    self.assertEqual(len(self.manager.nodes), 0)
def testRemoveParentAndChildrenWithBrokenChildren(self):
    """Recursive removal fails if a child references a node the manager doesn't know."""
    add_response = self.manager.add_node(
        AddNodeRequest(node=self.sequence_msg))
    child_response = self.manager.add_node(
        AddNodeRequest(node=self.node_msg,
                       parent_name=add_response.actual_node_name))
    self.assertEqual(len(self.manager.nodes), 2)
    # Corrupt the tree: attach a child that was never registered with the
    # manager, so recursive removal cannot resolve it.
    self.manager.nodes[child_response.actual_node_name].children.append(
        Sequence(name='not_in_tree'))
    remove_response = self.manager.remove_node(
        RemoveNodeRequest(node_name=add_response.actual_node_name,
                          remove_children=True))
    self.assertFalse(get_success(remove_response), get_error_message(remove_response))
    # The failed removal must leave both registered nodes in place.
    self.assertEqual(len(self.manager.nodes), 2)
def testRemoveParentAndChildrenWithIdenticalChildren(self):
    """Recursive removal copes with two children carrying the same name."""
    add_response = self.manager.add_node(
        AddNodeRequest(node=self.sequence_msg))
    child_response = self.manager.add_node(
        AddNodeRequest(node=self.succeeder_msg,
                       parent_name=add_response.actual_node_name))
    first_child_name = child_response.actual_node_name
    child_response = self.manager.add_node(
        AddNodeRequest(node=self.succeeder_msg,
                       parent_name=add_response.actual_node_name,
                       allow_rename=True))
    self.assertEqual(len(self.manager.nodes), 3)
    # Force a name collision: rename the second child to the first child's
    # name directly on the node object, bypassing the manager's checks.
    self.manager.nodes[add_response.actual_node_name].children[1].name = first_child_name
    remove_response = self.manager.remove_node(
        RemoveNodeRequest(node_name=add_response.actual_node_name,
                          remove_children=True))
    self.assertTrue(get_success(remove_response))
    # The duplicate name means one node entry survives the removal.
    self.assertEqual(len(self.manager.nodes), 1)
def testRemoveParentAndChildrenWithParentThatDoesNotShutdownItsChildren(self):
    """Recursive removal works even if the parent's shutdown ignores its children.

    Defines an ad-hoc FlowControl node whose _do_shutdown does nothing, so
    the manager itself must take care of shutting down the children.
    """
    @define_bt_node(NodeConfig(
        options={},
        inputs={},
        outputs={},
        max_children=None))
    class FlowControlNode(FlowControl):
        # Setup is forwarded to the children...
        def _do_setup(self):
            for child in self.children:
                child.setup()

        def _do_tick(self):
            return NodeMsg.SUCCEEDED

        # ...but shutdown deliberately is NOT.
        def _do_shutdown(self):
            pass

        def _do_reset(self):
            return NodeMsg.IDLE

        def _do_untick(self):
            return NodeMsg.IDLE
    # Register the parent directly, bypassing add_node.
    parent = FlowControlNode()
    self.manager.nodes[parent.name] = parent
    child = self.manager.instantiate_node_from_msg(self.node_msg, allow_rename=True)
    parent.add_child(child)
    self.assertEqual(self.manager.nodes[parent.name].state, NodeMsg.UNINITIALIZED)
    self.assertEqual(self.manager.nodes[child.name].state, NodeMsg.UNINITIALIZED)
    self.manager.nodes[parent.name].setup()
    self.assertEqual(self.manager.nodes[parent.name].state, NodeMsg.IDLE)
    self.assertEqual(self.manager.nodes[child.name].state, NodeMsg.IDLE)
    remove_response = self.manager.remove_node(
        RemoveNodeRequest(node_name=parent.name,
                          remove_children=True))
    self.assertTrue(get_success(remove_response))
    self.assertEqual(len(self.manager.nodes), 0)
def testMoveNode(self):
    """move_node rejects invalid targets and reorders children on valid moves."""
    self.sequence_msg.name = 'outer_seq'
    self.assertTrue(get_success(self.manager.add_node(
        AddNodeRequest(node=self.sequence_msg))))
    self.sequence_msg.name = "inner_seq"
    self.assertTrue(get_success(self.manager.add_node(
        AddNodeRequest(node=self.sequence_msg,
                       parent_name='outer_seq'))))
    self.succeeder_msg.name = 'A'
    self.assertTrue(get_success(self.manager.add_node(
        AddNodeRequest(node=self.succeeder_msg,
                       parent_name='outer_seq'))))
    self.succeeder_msg.name = 'B'
    self.assertTrue(get_success(self.manager.add_node(
        AddNodeRequest(node=self.succeeder_msg,
                       parent_name='inner_seq'))))
    self.assertEqual(len(self.manager.nodes), 4)

    def move(name, parent, index):
        # Tiny local helper to keep the request plumbing out of the asserts.
        return self.manager.move_node(MoveNodeRequest(
            node_name=name,
            new_parent_name=parent,
            new_child_index=index))

    # "A" is a MockLeaf, which can't have children.
    self.assertFalse(get_success(move('B', 'A', 0)))
    # "asdf" is not in the tree (as a parent).
    self.assertFalse(get_success(move('B', 'asdf', 0)))
    # "asdf" is not in the tree (as the node to move).
    self.assertFalse(get_success(move('asdf', 'outer_seq', 0)))
    # Index -1 means "first from the back", so "A" ends up after "B".
    self.assertTrue(get_success(move('A', 'inner_seq', -1)))
    self.assertIn('inner_seq', [node.name for node in self.tree_msg.nodes])
    for node in self.tree_msg.nodes:
        if node.name == 'outer_seq':
            # After moving A into inner_seq, outer_seq has only one child.
            self.assertEqual(len(node.child_names), 1)
        if node.name == 'inner_seq':
            positions = {name: index
                         for index, name in enumerate(node.child_names)}
            self.assertIsNotNone(positions.get('A'),
                                 'Node A is not a child of inner_seq!')
            self.assertIsNotNone(positions.get('B'),
                                 'Node B is not a child of inner_seq!')
            # A must appear *after* B in inner_seq's children.
            self.assertGreater(positions['A'], positions['B'])
def testMoveToNoParent(self):
    """Moving a node to an empty parent name detaches it from its parent."""
    self.sequence_msg.name = 'seq'
    self.assertTrue(get_success(self.manager.add_node(
        AddNodeRequest(node=self.sequence_msg))))
    self.succeeder_msg.name = 'A'
    self.assertTrue(get_success(self.manager.add_node(
        AddNodeRequest(node=self.succeeder_msg,
                       parent_name='seq'))))
    self.assertEqual(len(self.tree_msg.nodes), 2)
    detach_request = MoveNodeRequest(node_name='A', new_parent_name='')
    self.assertTrue(get_success(self.manager.move_node(detach_request)))
    # With A removed from seq's children, no node should have any children.
    self.assertTrue(all(not node.child_names for node in self.tree_msg.nodes))
def testMoveWithinSameParent(self):
    """Moving a node to a new index under its current parent reorders the children."""
    self.sequence_msg.name = 'seq'
    self.assertTrue(get_success(self.manager.add_node(
        AddNodeRequest(node=self.sequence_msg))))
    for child_name in ('A', 'B', 'C'):
        self.succeeder_msg.name = child_name
        self.assertTrue(get_success(self.manager.add_node(
            AddNodeRequest(node=self.succeeder_msg,
                           parent_name='seq'))))
    self.assertEqual(len(self.tree_msg.nodes), 4)
    # Confirm the initial order of the three succeeders.
    seq_msg = next((node for node in self.tree_msg.nodes if node.name == 'seq'),
                   None)
    self.assertIsNotNone(seq_msg, 'Failed to find sequence in tree message')
    self.assertEqual(seq_msg.child_names, ['A', 'B', 'C'])
    # Shift A from position 0 to position 1 within the same parent.
    self.assertTrue(get_success(self.manager.move_node(
        MoveNodeRequest(
            node_name='A',
            new_parent_name='seq',
            new_child_index=1
        ))))
    seq_msg = next((node for node in self.tree_msg.nodes if node.name == 'seq'),
                   None)
    self.assertIsNotNone(seq_msg, 'Failed to find sequence in tree message')
    self.assertEqual(seq_msg.child_names, ['B', 'A', 'C'])
def testMoveToOwnChild(self):
    """Moving a node under its own child is rejected (it would create a cycle)."""
    self.sequence_msg.name = 'seq'
    self.assertTrue(get_success(self.manager.add_node(
        AddNodeRequest(node=self.sequence_msg))))
    self.sequence_msg.name = 'seq_2'
    self.assertTrue(get_success(self.manager.add_node(
        AddNodeRequest(node=self.sequence_msg,
                       parent_name='seq'))))
    # seq under seq_2 would make the graph circular - must be impossible.
    cyclic_move = MoveNodeRequest(node_name='seq',
                                  new_parent_name='seq_2',
                                  new_child_index=0)
    self.assertFalse(get_success(self.manager.move_node(cyclic_move)))
def testMorphNode(self):
    """morph_node fails for unknown names and succeeds for an existing node."""
    self.sequence_msg.name = 'seq'
    self.assertTrue(get_success(self.manager.add_node(
        AddNodeRequest(node=self.sequence_msg))))
    self.succeeder_msg.name = 'A'
    self.assertTrue(get_success(self.manager.add_node(
        AddNodeRequest(node=self.succeeder_msg,
                       parent_name='seq'))))
    self.succeeder_msg.name = 'B'
    self.assertTrue(get_success(self.manager.add_node(
        AddNodeRequest(node=self.succeeder_msg,
                       parent_name='seq'))))
    self.assertEqual(len(self.tree_msg.nodes), 3)
    # Morphing a node that isn't in the tree must fail.
    self.assertFalse(get_success(self.manager.morph_node(
        MorphNodeRequest(node_name='node_not_in_tree',
                         new_node=self.memory_sequence_msg)
    )))
    # Morphing the existing sequence into a memory sequence succeeds.
    self.assertTrue(get_success(self.manager.morph_node(
        MorphNodeRequest(node_name='seq',
                         new_node=self.memory_sequence_msg)
    )))
def testMorphNodeWithParent(self):
    """Morphing a node that has a parent keeps it attached to that parent."""
    self.sequence_msg.name = 'outer_seq'
    self.assertTrue(get_success(self.manager.add_node(
        AddNodeRequest(node=self.sequence_msg))))
    self.sequence_msg.name = "inner_seq"
    self.assertTrue(get_success(self.manager.add_node(
        AddNodeRequest(node=self.sequence_msg,
                       parent_name='outer_seq'))))
    morph_request = MorphNodeRequest(node_name='inner_seq',
                                     new_node=self.memory_sequence_msg)
    self.assertTrue(get_success(self.manager.morph_node(morph_request)))
def testMorphNodeWithBrokenParent(self):
    """morph_node fails when the parent cannot re-accept the morphed node."""
    self.sequence_msg.name = 'outer_seq'
    self.assertTrue(get_success(self.manager.add_node(
        AddNodeRequest(node=self.sequence_msg))))
    self.sequence_msg.name = "inner_seq"
    self.assertTrue(get_success(self.manager.add_node(
        AddNodeRequest(node=self.sequence_msg,
                       parent_name='outer_seq'))))
    # break parent node: with max_children = 0 the parent can no longer
    # take the morphed node back as a child.
    self.manager.nodes['outer_seq'].node_config.max_children = 0
    self.assertFalse(get_success(self.manager.morph_node(
        MorphNodeRequest(node_name='inner_seq',
                         new_node=self.memory_sequence_msg)
    )))
    # Restore max_children but empty the parent's child list, so the
    # parent no longer knows about inner_seq - morphing must still fail.
    self.manager.nodes['outer_seq'].node_config.max_children = None
    self.manager.nodes['outer_seq'].children = []
    self.assertFalse(get_success(self.manager.morph_node(
        MorphNodeRequest(node_name='inner_seq',
                         new_node=self.memory_sequence_msg)
    )))
def testMorphNodeWithAnotherBrokenParent(self):
    """Morphing fails if the parent no longer lists the node among its children."""
    self.sequence_msg.name = 'outer_seq'
    self.assertTrue(get_success(self.manager.add_node(
        AddNodeRequest(node=self.sequence_msg))))
    self.sequence_msg.name = "inner_seq"
    self.assertTrue(get_success(self.manager.add_node(
        AddNodeRequest(node=self.sequence_msg,
                       parent_name='outer_seq'))))
    # Break the parent by dropping its child list.
    self.manager.nodes['outer_seq'].children = []
    morph_request = MorphNodeRequest(node_name='inner_seq',
                                     new_node=self.memory_sequence_msg)
    self.assertFalse(get_success(self.manager.morph_node(morph_request)))
def testMorphNodeBrokenMessage(self):
    """morph_node fails on an undeserializable new_node and on broken wirings."""
    self.sequence_msg.name = 'seq'
    self.assertTrue(get_success(self.manager.add_node(
        AddNodeRequest(node=self.sequence_msg))))
    # 'definitely_not_a_type' cannot be deserialized into a type, so
    # morphing into this message must fail.
    msg = NodeMsg(
        module="ros_bt_py.nodes.passthrough_node",
        node_class="PassthroughNode",
        options=[NodeData(key='passthrough_type',
                          serialized_value='definitely_not_a_type')])
    self.assertFalse(get_success(self.manager.morph_node(
        MorphNodeRequest(node_name='seq',
                         new_node=msg)
    )))
    # intentionally break wiring: reference a target node that does not
    # exist, so re-wiring after the morph cannot succeed.
    self.manager.tree_msg.data_wirings.append(
        NodeDataWiring(
            source=NodeDataLocation(
                node_name='seq'
            ),
            target=NodeDataLocation(
                node_name='missing'
            )
        ))
    self.assertFalse(get_success(self.manager.morph_node(
        MorphNodeRequest(node_name='seq',
                         new_node=self.memory_sequence_msg)
    )))
def testMorphNodeWithParentError(self):
    """morph_node fails if re-attaching the morphed node to its parent raises."""
    self.sequence_msg.name = 'outer_seq'
    self.assertTrue(get_success(self.manager.add_node(
        AddNodeRequest(node=self.sequence_msg))))
    self.sequence_msg.name = "inner_seq"
    self.assertTrue(get_success(self.manager.add_node(
        AddNodeRequest(node=self.sequence_msg,
                       parent_name='outer_seq'))))
    # First add_child call (re-attaching the morphed node) raises; a second
    # call (presumably restoring the old node) succeeds.
    self.manager.nodes['outer_seq'].add_child = mock.MagicMock()
    self.manager.nodes['outer_seq'].add_child.side_effect = [BehaviorTreeException(), None]
    self.assertFalse(get_success(self.manager.morph_node(
        MorphNodeRequest(node_name='inner_seq',
                         new_node=self.memory_sequence_msg)
    )))
def testMorphNodeWithParentWireError(self):
    """morph_node still reports success when re-wiring data fails.

    NOTE(review): wire_data is mocked to fail, yet the morph is asserted to
    succeed - wiring failures during morph are apparently tolerated; confirm
    this is the intended TreeManager contract.
    """
    self.sequence_msg.name = 'outer_seq'
    self.assertTrue(get_success(self.manager.add_node(
        AddNodeRequest(node=self.sequence_msg))))
    self.sequence_msg.name = "inner_seq"
    self.assertTrue(get_success(self.manager.add_node(
        AddNodeRequest(node=self.sequence_msg,
                       parent_name='outer_seq'))))
    # Every wire_data call now reports failure.
    self.manager.wire_data = mock.MagicMock()
    self.manager.wire_data.return_value = WireNodeDataResponse(success=False)
    self.assertTrue(get_success(self.manager.morph_node(
        MorphNodeRequest(node_name='inner_seq',
                         new_node=self.memory_sequence_msg)
    )))
def testReplaceNode(self):
    """replace_node swaps nodes, removing the old one and handing over children."""
    self.sequence_msg.name = 'seq'
    self.assertTrue(get_success(self.manager.add_node(
        AddNodeRequest(node=self.sequence_msg))))
    for leaf_name in ('A', 'B'):
        self.succeeder_msg.name = leaf_name
        self.assertTrue(get_success(self.manager.add_node(
            AddNodeRequest(node=self.succeeder_msg,
                           parent_name='seq'))))
    self.assertEqual(len(self.tree_msg.nodes), 3)
    # Unknown old node name must fail.
    self.assertFalse(get_success(self.manager.replace_node(
        ReplaceNodeRequest(
            old_node_name="asdf",
            new_node_name="A"))))
    # Unknown new node name must fail as well.
    self.assertFalse(get_success(self.manager.replace_node(
        ReplaceNodeRequest(
            old_node_name="B",
            new_node_name="asdf"))))
    # Replacing B with A removes B from the tree.
    self.assertTrue(get_success(self.manager.replace_node(
        ReplaceNodeRequest(
            old_node_name="B",
            new_node_name="A"))))
    self.assertEqual(len(self.tree_msg.nodes), 2)
    # B was overwritten by A
    self.assertNotIn("B", [node.name for node in self.tree_msg.nodes])
    self.sequence_msg.name = 'new_seq'
    self.assertTrue(get_success(self.manager.add_node(
        AddNodeRequest(node=self.sequence_msg))))
    self.assertEqual(len(self.tree_msg.nodes), 3)
    # Replacing seq with new_seq hands seq's children over to new_seq.
    self.assertTrue(get_success(self.manager.replace_node(
        ReplaceNodeRequest(
            old_node_name="seq",
            new_node_name="new_seq"))))
    self.assertEqual(len(self.tree_msg.nodes), 2)
    remaining_names = [node.name for node in self.tree_msg.nodes]
    self.assertNotIn("seq", remaining_names)
    self.assertIn("new_seq", remaining_names)
    for node in self.tree_msg.nodes:
        if node.name == 'new_seq':
            self.assertIn("A", node.child_names)
def testReplaceParent(self):
    """Replacing a node with its own child must not create a cycle."""
    self.sequence_msg.name = 'seq'
    self.assertTrue(get_success(self.manager.add_node(
        AddNodeRequest(node=self.sequence_msg))))
    self.sequence_msg.name = 'seq_2'
    self.assertTrue(get_success(self.manager.add_node(
        AddNodeRequest(node=self.sequence_msg,
                       parent_name='seq'))))
    # seq_2 replaces seq, but to avoid a cycle it cannot inherit all of
    # seq's children (which would include itself).
    replace_request = ReplaceNodeRequest(old_node_name='seq',
                                         new_node_name='seq_2')
    self.assertTrue(get_success(self.manager.replace_node(replace_request)))
    self.assertEqual(len(self.tree_msg.nodes), 1)
    # seq's only child was seq_2 itself, so seq_2 ends up childless.
    self.assertEqual(len(self.tree_msg.nodes[0].child_names), 0)
def testReplaceOrder(self):
    """Replacing a sibling keeps the replacement at the replaced node's position."""
    self.sequence_msg.name = 'seq'
    self.assertTrue(get_success(self.manager.add_node(
        AddNodeRequest(node=self.sequence_msg))))
    for child_name in ('A', 'B', 'C'):
        self.succeeder_msg.name = child_name
        self.assertTrue(get_success(self.manager.add_node(
            AddNodeRequest(node=self.succeeder_msg,
                           parent_name='seq'))))
    self.assertEqual(len(self.tree_msg.nodes), 4)
    # Confirm the initial order of the three succeeders.
    seq_msg = next((node for node in self.tree_msg.nodes if node.name == 'seq'),
                   None)
    self.assertIsNotNone(seq_msg, 'Failed to find sequence in tree message')
    self.assertEqual(seq_msg.child_names, ['A', 'B', 'C'])
    # A takes B's slot; B disappears.
    self.assertTrue(get_success(self.manager.replace_node(
        ReplaceNodeRequest(
            new_node_name='A',
            old_node_name='B'
        ))))
    seq_msg = next((node for node in self.tree_msg.nodes if node.name == 'seq'),
                   None)
    self.assertIsNotNone(seq_msg, 'Failed to find sequence in tree message')
    self.assertEqual(seq_msg.child_names, ['A', 'C'])
def testReplaceBrokenNode(self):
    """replace_node fails when the tree structure has been corrupted by hand."""
    self.succeeder_msg.name = 'A'
    self.assertTrue(get_success(self.manager.add_node(
        AddNodeRequest(node=self.succeeder_msg))))
    self.succeeder_msg.name = 'B'
    self.assertTrue(get_success(self.manager.add_node(
        AddNodeRequest(node=self.succeeder_msg))))
    self.assertEqual(len(self.tree_msg.nodes), 2)
    # Break the node: give leaf 'A' a child list it should not have.
    # (Fixed: the original chained this assignment into an unused local
    # `node`.)
    self.manager.nodes['A'].children = [self.manager.nodes['B']]
    self.assertFalse(get_success(self.manager.replace_node(
        ReplaceNodeRequest(
            old_node_name="A",
            new_node_name="B"))))
def testReplaceNodeNotSuccessful(self):
    """replace_node reports failure when removing the old node fails."""
    self.sequence_msg.name = 'seq'
    self.assertTrue(get_success(self.manager.add_node(
        AddNodeRequest(node=self.sequence_msg))))
    self.succeeder_msg.name = 'A'
    self.assertTrue(get_success(self.manager.add_node(
        AddNodeRequest(node=self.succeeder_msg,
                       parent_name='seq'))))
    self.succeeder_msg.name = 'B'
    self.assertTrue(get_success(self.manager.add_node(
        AddNodeRequest(node=self.succeeder_msg,
                       parent_name='seq'))))
    self.assertEqual(len(self.tree_msg.nodes), 3)
    # Force the internal remove_node call to report failure.
    self.manager.remove_node = mock.MagicMock()
    self.manager.remove_node.return_value = RemoveNodeResponse(success=False)
    self.assertFalse(get_success(self.manager.replace_node(
        ReplaceNodeRequest(
            old_node_name="B",
            new_node_name="A"))))
def testTick(self):
    """A single tick propagates the input value to the output and publishes tree info."""
    add_request = AddNodeRequest(node=self.node_msg)
    add_request.node.inputs.append(NodeData(key='in',
                                            serialized_value=json_encode(42)))
    add_response = self.manager.add_node(add_request)
    self.assertTrue(get_success(add_response))
    self.manager.tick(once=True)
    node_name = add_response.actual_node_name
    # The passthrough node copies its input to its output.
    self.assertEqual(self.manager.nodes[node_name].outputs['out'], 42)
    # After finishing the tick, the TreeManager should have called the tree
    # and debug info callbacks, setting these values.
    self.assertIsNotNone(self.tree_msg)
    self.assertIsNotNone(self.debug_info_msg)
    published_names = [node.name for node in self.tree_msg.nodes]
    self.assertIn(node_name, published_names)
    node_msg = next(node for node in self.tree_msg.nodes
                    if node.name == node_name)
    self.assertEqual(json_decode(node_msg.inputs[0].serialized_value), 42)
    self.assertEqual(json_decode(node_msg.outputs[0].serialized_value), 42)
def testControlTree(self):
    """Walks the execution state machine: tick once, tick periodically, stop,
    reset, shutdown, DO_NOTHING, and an unknown command."""
    add_request = AddNodeRequest(node=self.node_msg)
    add_request.node.name = 'passthrough'
    add_request.node.inputs.append(NodeData(key='in',
                                            serialized_value=json_encode(42)))
    self.assertTrue(self.manager.add_node(add_request).success)
    self.assertEqual(self.manager.nodes['passthrough'].inputs['in'], 42)
    self.assertIsNone(self.manager.nodes['passthrough'].outputs['out'])
    # TICK_ONCE runs a single tick and leaves the tree waiting.
    execution_request = ControlTreeExecutionRequest(
        command=ControlTreeExecutionRequest.TICK_ONCE)
    response = self.manager.control_execution(execution_request)
    self.assertTrue(get_success(response))
    self.assertEqual(response.tree_state, Tree.WAITING_FOR_TICK)
    self.assertEqual(self.manager.nodes['passthrough'].outputs['out'], 42)
    # Start, then stop, continuous execution
    execution_request.command = ControlTreeExecutionRequest.TICK_PERIODICALLY
    execution_request.tick_frequency_hz = 2
    response = self.manager.control_execution(execution_request)
    self.assertTrue(get_success(response))
    self.assertEqual(response.tree_state, Tree.TICKING)
    # Trying to start ticking while the tree already is ticking should fail
    self.assertFalse(self.manager.control_execution(execution_request).success)
    execution_request.command = ControlTreeExecutionRequest.TICK_ONCE
    self.assertFalse(self.manager.control_execution(execution_request).success)
    # Stopping should put the tree back in the IDLE state
    execution_request.command = ControlTreeExecutionRequest.STOP
    response = self.manager.control_execution(execution_request)
    self.assertTrue(get_success(response))
    self.assertEqual(response.tree_state, Tree.IDLE)
    # stopping a stopped tree is fine
    self.assertTrue(self.manager.control_execution(execution_request).success)
    # After resetting, output should be None again
    execution_request.command = ControlTreeExecutionRequest.RESET
    self.assertIsNotNone(self.manager.nodes['passthrough'].outputs['out'])
    self.assertTrue(self.manager.control_execution(execution_request).success)
    self.assertIsNone(self.manager.nodes['passthrough'].outputs['out'])
    # SHUTDOWN moves the node into the SHUTDOWN state.
    execution_request.command = ControlTreeExecutionRequest.SHUTDOWN
    self.assertTrue(self.manager.control_execution(execution_request).success)
    self.assertEqual(self.manager.nodes['passthrough'].state, NodeMsg.SHUTDOWN)
    # test DO_NOTHING and an unknown command
    execution_request.command = ControlTreeExecutionRequest.DO_NOTHING
    self.assertTrue(self.manager.control_execution(execution_request).success)
    execution_request.command = 42
    self.assertFalse(self.manager.control_execution(execution_request).success)
def testControlBrokenTree(self):
    """Every tick/reset command fails when the tree has no single root."""
    add_request = AddNodeRequest(node=self.node_msg,
                                 allow_rename=True)
    # Two unparented nodes mean the manager cannot find a unique root.
    self.assertTrue(self.manager.add_node(add_request).success)
    self.assertTrue(self.manager.add_node(add_request).success)
    execution_request = ControlTreeExecutionRequest()
    # All of these need a root node to operate on, so all must fail.
    for command in (ControlTreeExecutionRequest.TICK_ONCE,
                    ControlTreeExecutionRequest.TICK_PERIODICALLY,
                    ControlTreeExecutionRequest.RESET):
        execution_request.command = command
        self.assertFalse(get_success(self.manager.control_execution(execution_request)))
def testControlTreeWithUnsetInputNode(self):
    """Ticking a loaded tree with unset inputs fails, and keeps failing on retry."""
    tree = Tree(
        name='from_file',
        path='package://ros_bt_py/test/testdata/trees/subtree_compare.yaml')
    self.assertTrue(get_success(self.manager.load_tree(LoadTreeRequest(tree=tree))))
    execution_request = ControlTreeExecutionRequest(
        command=ControlTreeExecutionRequest.TICK_ONCE)
    # Both the first attempt and the retry must fail.
    self.assertFalse(get_success(self.manager.control_execution(execution_request)))
    self.assertFalse(get_success(self.manager.control_execution(execution_request)))
def testControlSetupAndShutdown(self):
    """SETUP_AND_SHUTDOWN is rejected while ticking and on topology errors."""
    add_request = AddNodeRequest(node=self.node_msg,
                                 allow_rename=True)
    self.assertTrue(self.manager.add_node(add_request).success)
    execution_request = ControlTreeExecutionRequest()
    # SETUP_AND_SHUTDOWN does not work when ticking
    execution_request.command = ControlTreeExecutionRequest.TICK_PERIODICALLY
    self.assertTrue(get_success(self.manager.control_execution(execution_request)))
    execution_request.command = ControlTreeExecutionRequest.SETUP_AND_SHUTDOWN
    self.assertFalse(get_success(self.manager.control_execution(execution_request)))
    # After SHUTDOWN the command is accepted.
    execution_request.command = ControlTreeExecutionRequest.SHUTDOWN
    self.assertTrue(get_success(self.manager.control_execution(execution_request)))
    execution_request.command = ControlTreeExecutionRequest.SETUP_AND_SHUTDOWN
    self.assertTrue(get_success(self.manager.control_execution(execution_request)))
    # SETUP fails on a TreeTopologyError: a second unparented node means
    # there is no unique root.
    self.assertTrue(self.manager.add_node(add_request).success)
    execution_request.command = ControlTreeExecutionRequest.SETUP_AND_SHUTDOWN
    self.assertFalse(get_success(self.manager.control_execution(execution_request)))
def testControlSetupAndShutdownFails(self):
    """SETUP_AND_SHUTDOWN reports failure when a node raises during setup."""
    # min > max makes RandomInt invalid, so its setup raises a
    # BehaviorTreeException.
    random_int_msg = NodeMsg(
        module='ros_bt_py.nodes.random_number',
        node_class='RandomInt',
        options=[NodeData(key='min',
                          serialized_value=json_encode(1)),
                 NodeData(key='max',
                          serialized_value=json_encode(0))])
    self.assertTrue(self.manager.add_node(
        AddNodeRequest(node=random_int_msg,
                       allow_rename=True)).success)
    execution_request = ControlTreeExecutionRequest(
        command=ControlTreeExecutionRequest.SETUP_AND_SHUTDOWN)
    self.assertFalse(get_success(self.manager.control_execution(execution_request)))
def testControlTickPeriodicallyNoNodes(self):
    """TICK_PERIODICALLY, SHUTDOWN and RESET all succeed on an empty tree."""
    execution_request = ControlTreeExecutionRequest()
    for command in (ControlTreeExecutionRequest.TICK_PERIODICALLY,
                    ControlTreeExecutionRequest.SHUTDOWN,
                    ControlTreeExecutionRequest.RESET):
        execution_request.command = command
        self.assertTrue(get_success(self.manager.control_execution(execution_request)))
def testControlTickPeriodically0Hz(self):
    """A tick frequency of 0 Hz still allows periodic ticking and shutdown."""
    self.assertTrue(get_success(self.manager.add_node(
        AddNodeRequest(node=self.sequence_msg))))
    # Force a degenerate tick frequency directly on the tree message.
    self.manager.tree_msg.tick_frequency_hz = 0
    execution_request = ControlTreeExecutionRequest()
    # RESET fails before any setup has happened.
    execution_request.command = ControlTreeExecutionRequest.RESET
    self.assertFalse(get_success(self.manager.control_execution(execution_request)))
    execution_request.command = ControlTreeExecutionRequest.TICK_PERIODICALLY
    self.assertTrue(get_success(self.manager.control_execution(execution_request)))
    # Give the tick thread a moment to run.
    time.sleep(0.01)
    # RESET is still refused while the tree is ticking.
    execution_request.command = ControlTreeExecutionRequest.RESET
    self.assertFalse(get_success(self.manager.control_execution(execution_request)))
    execution_request.command = ControlTreeExecutionRequest.SHUTDOWN
    self.assertTrue(get_success(self.manager.control_execution(execution_request)))
def testControlLongRunningTreeNode(self):
    """STOP on a tree whose node never finishes ticking raises."""
    long_runner = LongRunningNode()
    self.manager.nodes[long_runner.name] = long_runner
    self.assertTrue(get_success(self.manager.control_execution(
        ControlTreeExecutionRequest(
            command=ControlTreeExecutionRequest.TICK_PERIODICALLY))))
    time.sleep(0.1)
    with self.assertRaises(BehaviorTreeException):
        self.manager.control_execution(ControlTreeExecutionRequest(
            command=ControlTreeExecutionRequest.STOP))
def testControlLongRunningTreeNodetickOnce(self):
    """TICK_ONCE and a subsequent STOP both raise for a never-finishing node."""
    long_runner = LongRunningNode()
    self.manager.nodes[long_runner.name] = long_runner
    with self.assertRaises(BehaviorTreeException):
        self.manager.control_execution(ControlTreeExecutionRequest(
            command=ControlTreeExecutionRequest.TICK_ONCE))
    time.sleep(0.1)
    with self.assertRaises(BehaviorTreeException):
        self.manager.control_execution(ControlTreeExecutionRequest(
            command=ControlTreeExecutionRequest.STOP))
def testControlLongRunningTreeNodeDebugging(self):
    """In single-step mode, a long-running node can be stopped cleanly."""
    long_runner = LongRunningNode()
    self.manager.nodes[long_runner.name] = long_runner
    mode_request = SetExecutionModeRequest(single_step=True,
                                           collect_performance_data=False,
                                           publish_subtrees=False)
    self.assertEqual(self.manager.set_execution_mode(mode_request),
                     SetExecutionModeResponse())
    self.assertTrue(get_success(self.manager.control_execution(
        ControlTreeExecutionRequest(
            command=ControlTreeExecutionRequest.TICK_PERIODICALLY))))
    time.sleep(0.1)
    self.assertTrue(get_success(self.manager.control_execution(
        ControlTreeExecutionRequest(
            command=ControlTreeExecutionRequest.STOP))))
def testControlLongRunningTreeNodeDebuggingTickOnce(self):
    """In single-step mode, TICK_ONCE and STOP both succeed for a long runner."""
    long_runner = LongRunningNode()
    self.manager.nodes[long_runner.name] = long_runner
    mode_request = SetExecutionModeRequest(single_step=True,
                                           collect_performance_data=False,
                                           publish_subtrees=False)
    self.assertEqual(self.manager.set_execution_mode(mode_request),
                     SetExecutionModeResponse())
    self.assertTrue(get_success(self.manager.control_execution(
        ControlTreeExecutionRequest(
            command=ControlTreeExecutionRequest.TICK_ONCE))))
    time.sleep(0.1)
    self.assertTrue(get_success(self.manager.control_execution(
        ControlTreeExecutionRequest(
            command=ControlTreeExecutionRequest.STOP))))
def testControlDebugRaceCondition(self):
    """Toggling single-step mode while the tree ticks must not break execution."""
    self.assertTrue(get_success(self.manager.add_node(
        AddNodeRequest(node=self.constant_msg))))
    self.assertTrue(get_success(self.manager.control_execution(
        ControlTreeExecutionRequest(
            command=ControlTreeExecutionRequest.TICK_PERIODICALLY))))
    # Turn single-step on, then immediately off again, while ticking.
    for single_step in (True, False):
        mode_request = SetExecutionModeRequest(
            single_step=single_step,
            collect_performance_data=False,
            publish_subtrees=False)
        self.assertEqual(self.manager.set_execution_mode(mode_request),
                         SetExecutionModeResponse())
    self.assertTrue(get_success(self.manager.control_execution(
        ControlTreeExecutionRequest(
            command=ControlTreeExecutionRequest.SHUTDOWN))))
def testControlTickExceptionNode(self):
    """STOP reports failure after a node raised inside _do_tick."""
    @define_bt_node(NodeConfig(
        options={},
        inputs={},
        outputs={},
        max_children=0))
    class ExceptionNode(Leaf):
        # Minimal leaf whose tick always raises.
        def _do_setup(self):
            pass

        def _do_tick(self):
            raise BehaviorTreeException

        def _do_shutdown(self):
            pass

        def _do_reset(self):
            return NodeMsg.IDLE

        def _do_untick(self):
            return NodeMsg.IDLE

    raising_node = ExceptionNode()
    self.manager.nodes[raising_node.name] = raising_node
    self.assertTrue(get_success(self.manager.control_execution(
        ControlTreeExecutionRequest(
            command=ControlTreeExecutionRequest.TICK_PERIODICALLY))))
    time.sleep(0.1)
    self.assertFalse(get_success(self.manager.control_execution(
        ControlTreeExecutionRequest(
            command=ControlTreeExecutionRequest.STOP))))
def testControlUntickExceptionNode(self):
    """STOP reports failure after a node raised inside _do_untick."""
    @define_bt_node(NodeConfig(
        options={},
        inputs={},
        outputs={},
        max_children=0))
    class ExceptionNode(Leaf):
        # Minimal leaf that ticks fine but raises on untick.
        def _do_setup(self):
            pass

        def _do_tick(self):
            return NodeMsg.SUCCEEDED

        def _do_shutdown(self):
            pass

        def _do_reset(self):
            return NodeMsg.IDLE

        def _do_untick(self):
            raise BehaviorTreeException

    raising_node = ExceptionNode()
    self.manager.nodes[raising_node.name] = raising_node
    self.assertTrue(get_success(self.manager.control_execution(
        ControlTreeExecutionRequest(
            command=ControlTreeExecutionRequest.TICK_PERIODICALLY))))
    time.sleep(0.1)
    self.assertFalse(get_success(self.manager.control_execution(
        ControlTreeExecutionRequest(
            command=ControlTreeExecutionRequest.STOP))))
def testControlUntickNoNodes(self):
    """Stopping an empty tree in WAITING_FOR_TICK resets it to IDLE."""
    self.manager.tree_msg.state = Tree.WAITING_FOR_TICK
    response = self.manager.control_execution(ControlTreeExecutionRequest(
        command=ControlTreeExecutionRequest.STOP))
    self.assertTrue(get_success(response))
    self.assertEqual(response.tree_state, Tree.IDLE)
def testControlStopTopologyError(self):
    """STOP fails when the tree contains a cycle."""
    cyclic = self.manager.instantiate_node_from_msg(self.node_msg,
                                                    allow_rename=True)
    # Build a cycle by making the node its own parent.
    self.manager.nodes[cyclic.name].parent = cyclic.name
    self.manager.tree_msg.state = Tree.WAITING_FOR_TICK
    response = self.manager.control_execution(ControlTreeExecutionRequest(
        command=ControlTreeExecutionRequest.STOP))
    self.assertFalse(get_success(response))
def testControlShutdownNotRunningTopologyError(self):
    """SHUTDOWN on a non-running tree fails when the tree contains a cycle."""
    cyclic = self.manager.instantiate_node_from_msg(self.node_msg,
                                                    allow_rename=True)
    # Build a cycle by making the node its own parent.
    self.manager.nodes[cyclic.name].parent = cyclic.name
    response = self.manager.control_execution(ControlTreeExecutionRequest(
        command=ControlTreeExecutionRequest.SHUTDOWN))
    self.assertFalse(get_success(response))
def testControlTickNoNodes(self):
    """TICK_ONCE on an empty tree succeeds."""
    response = self.manager.control_execution(ControlTreeExecutionRequest(
        command=ControlTreeExecutionRequest.TICK_ONCE))
    self.assertTrue(get_success(response))
def testControlTickThreadAlive(self):
    """If the tick thread is still alive while the state claims IDLE,
    control_execution raises a BehaviorTreeException.
    """
    add_request = AddNodeRequest(node=self.node_msg)
    add_request.node.name = 'passthrough'
    add_request.node.inputs.append(
        NodeData(key='in', serialized_value=json_encode(42)))
    self.assertTrue(self.manager.add_node(add_request).success)
    self.assertEqual(self.manager.nodes['passthrough'].inputs['in'], 42)
    self.assertIsNone(self.manager.nodes['passthrough'].outputs['out'])
    tick_request = ControlTreeExecutionRequest(
        command=ControlTreeExecutionRequest.TICK_ONCE)
    response = self.manager.control_execution(tick_request)
    self.assertTrue(get_success(response))
    self.assertEqual(response.tree_state, Tree.WAITING_FOR_TICK)
    # Pretend the tick thread never finished although the state looks idle.
    self.manager._tick_thread.is_alive = mock.MagicMock(return_value=True)
    self.manager.get_state = mock.MagicMock(return_value=Tree.IDLE)
    self.assertRaises(BehaviorTreeException,
                      self.manager.control_execution, tick_request)
def testControlTreeStateNotIdle(self):
    """STOP fails if the tree never leaves STOP_REQUESTED."""
    add_request = AddNodeRequest(node=self.node_msg)
    add_request.node.name = 'passthrough'
    add_request.node.inputs.append(
        NodeData(key='in', serialized_value=json_encode(42)))
    self.assertTrue(self.manager.add_node(add_request).success)
    self.assertEqual(self.manager.nodes['passthrough'].inputs['in'], 42)
    self.assertIsNone(self.manager.nodes['passthrough'].outputs['out'])
    response = self.manager.control_execution(ControlTreeExecutionRequest(
        command=ControlTreeExecutionRequest.TICK_PERIODICALLY))
    self.assertTrue(get_success(response))
    self.assertEqual(response.tree_state, Tree.TICKING)
    # Make get_state report that the stop request is never honored.
    self.manager.get_state = mock.MagicMock(
        side_effect=[Tree.TICKING, Tree.TICKING,
                     Tree.STOP_REQUESTED, Tree.STOP_REQUESTED])
    response = self.manager.control_execution(ControlTreeExecutionRequest(
        command=ControlTreeExecutionRequest.STOP))
    self.assertFalse(get_success(response))
def testControlTreeStateNotIdleOrPaused(self):
    """STOP fails when the root node stays FAILED (neither idle nor paused)."""
    add_request = AddNodeRequest(node=self.node_msg)
    add_request.node.name = 'passthrough'
    add_request.node.inputs.append(
        NodeData(key='in', serialized_value=json_encode(42)))
    self.assertTrue(self.manager.add_node(add_request).success)
    self.assertEqual(self.manager.nodes['passthrough'].inputs['in'], 42)
    self.assertIsNone(self.manager.nodes['passthrough'].outputs['out'])
    response = self.manager.control_execution(ControlTreeExecutionRequest(
        command=ControlTreeExecutionRequest.TICK_ONCE))
    self.assertTrue(get_success(response))
    self.assertEqual(response.tree_state, Tree.WAITING_FOR_TICK)
    # Make find_root return a mock leaf stuck in FAILED whose untick is a no-op.
    failed_root = MockLeaf(name='error',
                           options={'output_type': int,
                                    'state_values': [NodeMsg.FAILED],
                                    'output_values': [1]})
    failed_root.state = NodeMsg.FAILED
    failed_root.untick = mock.MagicMock()
    self.manager.find_root = mock.MagicMock(return_value=failed_root)
    response = self.manager.control_execution(ControlTreeExecutionRequest(
        command=ControlTreeExecutionRequest.STOP))
    self.assertFalse(get_success(response))
def testControlTreeStateTickOnceIdle(self):
    """TICK_ONCE fails while the tree reports STOP_REQUESTED."""
    add_request = AddNodeRequest(node=self.node_msg)
    add_request.node.name = 'passthrough'
    add_request.node.inputs.append(
        NodeData(key='in', serialized_value=json_encode(42)))
    self.assertTrue(self.manager.add_node(add_request).success)
    self.assertEqual(self.manager.nodes['passthrough'].inputs['in'], 42)
    self.assertIsNone(self.manager.nodes['passthrough'].outputs['out'])
    self.manager.get_state = mock.MagicMock(return_value=Tree.STOP_REQUESTED)
    response = self.manager.control_execution(ControlTreeExecutionRequest(
        command=ControlTreeExecutionRequest.TICK_ONCE))
    self.assertFalse(get_success(response))
def testGetAvailableNodes(self):
    """get_available_nodes lists node classes for known modules and fails
    for modules that do not exist.
    """
    response = self.manager.get_available_nodes(GetAvailableNodesRequest(
        node_modules=['ros_bt_py.nodes.passthrough_node']))
    self.assertTrue(get_success(response), get_error_message(response))
    self.assertGreaterEqual(len(response.available_nodes), 1)
    available_classes = [node.node_class for node in response.available_nodes]
    self.assertIn("PassthroughNode", available_classes)
    response = self.manager.get_available_nodes(GetAvailableNodesRequest(
        node_modules=['ros_bt_py.tests.node_does_not_exist']))
    self.assertFalse(get_success(response))
def testSetOptions(self):
    """Exercise failure and success paths of the set_options service."""
    def set_option(node_name, key, serialized_value):
        # Helper wrapping a single-option SetOptionsRequest without rename.
        return self.manager.set_options(SetOptionsRequest(
            node_name=node_name,
            rename_node=False,
            options=[NodeData(key=key,
                              serialized_value=serialized_value)]))

    self.assertTrue(get_success(self.manager.add_node(
        AddNodeRequest(node=self.node_msg))))
    # There's only one node, and it only has one option.
    self.assertEqual(self.tree_msg.nodes[0].options[0].serialized_value,
                     json_encode(int))
    # A node that is not in the tree should fail.
    self.assertFalse(get_success(set_option(
        'not_in_tree', 'passthrough_type', json_encode(str))))
    # Unparseable values should fail.
    self.assertFalse(get_success(set_option(
        'PassthroughNode', 'passthrough_type', 'invalid_value')))
    # Assigning values to invalid keys should fail too.
    self.assertFalse(get_success(set_option(
        'PassthroughNode', 'invalid_key', json_encode(str))))
    # Assigning values of the wrong type should also fail.
    self.assertFalse(get_success(set_option(
        'PassthroughNode', 'passthrough_type',
        json_encode('I am not a type, but a string!'))))
    # Finally, this is valid :)
    self.assertTrue(get_success(set_option(
        'PassthroughNode', 'passthrough_type', json_encode(str))))
    self.assertEqual(self.tree_msg.nodes[0].options[0].serialized_value,
                     json_encode(str))
def testSetSomeOptions(self):
    """Setting only constant_value keeps the previous constant_type.

    We expect the old value of constant_type (int) to be preserved -
    if it weren't, Node.__init__() would raise an error!
    """
    self.assertTrue(get_success(self.manager.add_node(
        AddNodeRequest(node=self.constant_msg))))
    response = self.manager.set_options(SetOptionsRequest(
        node_name='Constant',
        rename_node=False,
        options=[NodeData(key='constant_value',
                          serialized_value=json_encode(23))]))
    self.assertTrue(get_success(response))
def testRename(self):
    """Renaming a node works; renaming to an already-taken name fails."""
    self.sequence_msg.name = 'foo'
    self.assertTrue(get_success(self.manager.add_node(
        AddNodeRequest(node=self.sequence_msg))))
    self.constant_msg.name = 'const'
    self.assertTrue(get_success(self.manager.add_node(
        AddNodeRequest(parent_name=self.sequence_msg.name,
                       node=self.constant_msg))))
    rename_response = self.manager.set_options(SetOptionsRequest(
        node_name=self.constant_msg.name,
        rename_node=True,
        new_name='bar'))
    self.assertTrue(get_success(rename_response),
                    get_error_message(rename_response))
    current_names = [node.name for node in self.tree_msg.nodes]
    self.assertIn('bar', current_names)
    self.assertNotIn('const', current_names)
    # 'foo' is already taken, so this shouldn't succeed.
    rename_response = self.manager.set_options(SetOptionsRequest(
        node_name='bar',
        rename_node=True,
        new_name='foo'))
    self.assertFalse(get_success(rename_response))
def testSetOptionsWithWirings(self):
    """set_options must keep (or restore, on failure) data wirings."""
    def set_child1_type(type_value):
        # Helper: set child1's passthrough_type option.
        return self.manager.set_options(SetOptionsRequest(
            node_name='child1',
            options=[NodeData(key='passthrough_type',
                              serialized_value=json_encode(type_value))]))

    def rename(old_name, new_name):
        # Helper: rename a node via set_options.
        return self.manager.set_options(SetOptionsRequest(
            node_name=old_name,
            rename_node=True,
            new_name=new_name))

    # Add a Sequence with two children.
    self.assertTrue(get_success(self.manager.add_node(
        AddNodeRequest(node=self.sequence_msg))))
    for child_name in ('child1', 'child2'):
        self.node_msg.name = child_name
        self.assertTrue(get_success(self.manager.add_node(
            AddNodeRequest(parent_name='Sequence', node=self.node_msg))))
    # Wire child1.out -> child2.in.
    wire_request = WireNodeDataRequest()
    wire_request.wirings.append(NodeDataWiring(
        source=NodeDataLocation(node_name='child1',
                                data_kind=NodeDataLocation.OUTPUT_DATA,
                                data_key='out'),
        target=NodeDataLocation(node_name='child2',
                                data_kind=NodeDataLocation.INPUT_DATA,
                                data_key='in')))
    self.assertTrue(get_success(self.manager.wire_data(wire_request)))
    # Should work - the new value is the same as the old one, so
    # it definitely works.
    self.assertTrue(get_success(set_child1_type(int)))
    # Should fail because the wiring cannot be re-established
    # (child1.out is now a str, but child2.in still expects an int).
    self.assertFalse(get_success(set_child1_type(str)))
    # The failed attempt should reset everything to the way it was
    # before, so this must still work.
    retry_res = set_child1_type(int)
    self.assertTrue(get_success(retry_res), get_error_message(retry_res))
    # Renaming any of the three nodes should work.
    for old_name, new_name in (('child1', 'child_new_name1'),
                               ('child2', 'child_new_name2'),
                               ('Sequence', 'Sequence_new_name')):
        rename_res = rename(old_name, new_name)
        self.assertTrue(get_success(rename_res),
                        get_error_message(rename_res))
def testSetOptionsChangeTypeWithOptionWirings(self):
    """constant_type and constant_value can be changed together.

    OptionWirings allow a semantic relationship between option fields:
    for the Constant node, constant_type is the source and
    constant_value the target of such a wiring.
    """
    add_response = self.manager.add_node(
        AddNodeRequest(node=self.constant_msg))
    self.assertTrue(get_success(add_response))
    node_name = add_response.actual_node_name

    def set_constant(value, value_type):
        # Helper: change value and type of the Constant node at once.
        return self.manager.set_options(SetOptionsRequest(
            node_name=node_name,
            options=[NodeData(key='constant_value',
                              serialized_value=json_encode(value)),
                     NodeData(key='constant_type',
                              serialized_value=json_encode(value_type))]))

    def assert_constant_options(value, value_type):
        # The node object is replaced by set_options, so always fetch a
        # fresh reference before checking its options.
        node = self.manager.nodes[node_name]
        self.assertEqual(node.options.get_serialized('constant_value'),
                         json_encode(value))
        self.assertEqual(node.options.get_serialized('constant_type'),
                         json_encode(value_type))

    assert_constant_options(42, int)
    # Changing type and value at the same time should work.
    self.assertTrue(get_success(set_constant('foo', str)))
    assert_constant_options('foo', str)
    # str and unicode are considered equal.
    self.assertTrue(get_success(set_constant('bar', unicode)))
    assert_constant_options('bar', unicode)
    # Changing type and value also works with ROS Messages.
    tree_msg = Tree(name='test')
    self.assertTrue(get_success(set_constant(tree_msg, Tree)))
    assert_constant_options(tree_msg, Tree)
def testSetOptionsChangeTypeWithOptionWiringsBroken(self):
    """With a broken data wiring in the tree, set_options must fail."""
    add_response = self.manager.add_node(
        AddNodeRequest(node=self.constant_msg))
    self.assertTrue(get_success(add_response))
    node = self.manager.nodes[add_response.actual_node_name]
    # Intentionally break the wiring: the target node does not exist.
    self.manager.tree_msg.data_wirings.append(NodeDataWiring(
        source=NodeDataLocation(node_name='Constant'),
        target=NodeDataLocation(node_name='missing')))
    # With broken wirings, changing options should not work.
    set_options_response = self.manager.set_options(SetOptionsRequest(
        node_name=add_response.actual_node_name,
        options=[NodeData(key='constant_value',
                          serialized_value=json_encode('foo')),
                 NodeData(key='constant_type',
                          serialized_value=json_encode(str))]))
    self.assertFalse(get_success(set_options_response))
def testSetOptionsBrokenNodes(self):
    """A node with an inconsistent parent pointer makes set_options fail."""
    add_response = self.manager.add_node(
        AddNodeRequest(node=self.constant_msg))
    self.assertTrue(get_success(add_response))
    constant_node = self.manager.nodes[add_response.actual_node_name]
    add_response = self.manager.add_node(
        AddNodeRequest(node=self.sequence_msg))
    self.assertTrue(get_success(add_response))
    sequence_node = self.manager.nodes[add_response.actual_node_name]
    self.assertIsNone(constant_node.parent)
    self.assertEqual(len(sequence_node.children), 0)
    # Deliberately break the constant node: it claims the sequence as its
    # parent, although the sequence has no children.
    constant_node.parent = sequence_node
    set_options_response = self.manager.set_options(SetOptionsRequest(
        node_name='Constant',
        rename_node=False,
        options=[NodeData(key='constant_value',
                          serialized_value=json_encode(23))]))
    self.assertFalse(get_success(set_options_response))
def testSetOptionsErrorOnRemove(self):
    """set_options fails when removing the child from its parent errors out.

    The original test never asserted anything about the response, so it
    could not fail on this error path; now the failure is checked.
    """
    self.sequence_msg.name = 'outer_seq'
    self.assertTrue(get_success(self.manager.add_node(
        AddNodeRequest(node=self.sequence_msg))))
    self.sequence_msg.name = "inner_seq"
    self.assertTrue(get_success(self.manager.add_node(
        AddNodeRequest(node=self.sequence_msg,
                       parent_name='outer_seq'))))
    # Make remove_child raise and re-wiring fail.
    self.manager.nodes['outer_seq'].remove_child = mock.MagicMock()
    self.manager.nodes['outer_seq'].remove_child.side_effect = KeyError()
    self.manager.wire_data = mock.MagicMock()
    self.manager.wire_data.return_value = WireNodeDataResponse(success=False)
    set_options_response = self.manager.set_options(
        SetOptionsRequest(node_name="inner_seq",
                          rename_node=True,
                          new_name='bar'))
    # The rename cannot complete, so the service must report failure.
    self.assertFalse(get_success(set_options_response))
def testSetOptionsErrorOnAdd(self):
    """set_options fails when re-adding the child errors once and
    re-wiring fails.

    The original test never asserted anything about the response, so it
    could not fail on this error path; now the failure is checked.
    """
    self.sequence_msg.name = 'outer_seq'
    self.assertTrue(get_success(self.manager.add_node(
        AddNodeRequest(node=self.sequence_msg))))
    self.sequence_msg.name = "inner_seq"
    self.assertTrue(get_success(self.manager.add_node(
        AddNodeRequest(node=self.sequence_msg,
                       parent_name='outer_seq'))))
    # First add_child call raises, the second succeeds; re-wiring fails.
    self.manager.nodes['outer_seq'].add_child = mock.MagicMock()
    self.manager.nodes['outer_seq'].add_child.side_effect = [
        BehaviorTreeException(), None]
    self.manager.wire_data = mock.MagicMock()
    self.manager.wire_data.return_value = WireNodeDataResponse(success=False)
    set_options_response = self.manager.set_options(
        SetOptionsRequest(node_name="inner_seq",
                          rename_node=True,
                          new_name='bar'))
    # The rename cannot complete, so the service must report failure.
    self.assertFalse(get_success(set_options_response))
def testSetOptionsErrorOnAddException(self):
    """set_options fails when every add_child call raises.

    The original test never asserted anything about the response, so it
    could not fail on this error path; now the failure is checked.
    """
    self.sequence_msg.name = 'outer_seq'
    self.assertTrue(get_success(self.manager.add_node(
        AddNodeRequest(node=self.sequence_msg))))
    self.sequence_msg.name = "inner_seq"
    self.assertTrue(get_success(self.manager.add_node(
        AddNodeRequest(node=self.sequence_msg,
                       parent_name='outer_seq'))))
    # Every add_child call raises; re-wiring fails too.
    self.manager.nodes['outer_seq'].add_child = mock.MagicMock()
    self.manager.nodes['outer_seq'].add_child.side_effect = \
        BehaviorTreeException()
    self.manager.wire_data = mock.MagicMock()
    self.manager.wire_data.return_value = WireNodeDataResponse(success=False)
    set_options_response = self.manager.set_options(
        SetOptionsRequest(node_name="inner_seq",
                          rename_node=True,
                          new_name='bar'))
    # The rename cannot complete, so the service must report failure.
    self.assertFalse(get_success(set_options_response))
def testSetOptionsErrorOnAddRewire(self):
    """set_options fails when the second remove_child call raises and
    re-wiring fails.

    The original test never asserted anything about the response, so it
    could not fail on this error path; now the failure is checked.
    """
    self.sequence_msg.name = 'outer_seq'
    self.assertTrue(get_success(self.manager.add_node(
        AddNodeRequest(node=self.sequence_msg))))
    self.sequence_msg.name = "inner_seq"
    self.assertTrue(get_success(self.manager.add_node(
        AddNodeRequest(node=self.sequence_msg,
                       parent_name='outer_seq'))))
    # First remove_child call succeeds, the second raises; re-wiring fails.
    self.manager.nodes['outer_seq'].remove_child = mock.MagicMock()
    self.manager.nodes['outer_seq'].remove_child.side_effect = [
        None, BehaviorTreeException()]
    self.manager.wire_data = mock.MagicMock()
    self.manager.wire_data.return_value = WireNodeDataResponse(success=False)
    set_options_response = self.manager.set_options(
        SetOptionsRequest(node_name="inner_seq",
                          rename_node=True,
                          new_name='bar'))
    # The rename cannot complete, so the service must report failure.
    self.assertFalse(get_success(set_options_response))
def testSetOptionsErrorOnReAddChildren(self):
    """set_options fails when re-adding children to the renamed parent
    errors out.

    The original test never asserted anything about the response, so it
    could not fail on this error path; now the failure is checked.
    """
    self.sequence_msg.name = 'outer_seq'
    self.assertTrue(get_success(self.manager.add_node(
        AddNodeRequest(node=self.sequence_msg))))
    self.sequence_msg.name = "inner_seq"
    self.assertTrue(get_success(self.manager.add_node(
        AddNodeRequest(node=self.sequence_msg,
                       parent_name='outer_seq'))))
    # remove_child on the parent always raises.
    self.manager.nodes['outer_seq'].remove_child = mock.MagicMock()
    self.manager.nodes['outer_seq'].remove_child.side_effect = \
        BehaviorTreeException()
    set_options_response = self.manager.set_options(
        SetOptionsRequest(node_name="outer_seq",
                          rename_node=True,
                          new_name='bar'))
    # The rename cannot complete, so the service must report failure.
    self.assertFalse(get_success(set_options_response))
def testEnforceEditable(self):
    """Editing services are rejected while the tree is not EDITABLE."""
    add_request = AddNodeRequest(node=self.node_msg)
    add_request.node.name = 'first'
    self.assertEqual(self.tree_msg.state, "EDITABLE")
    self.assertTrue(get_success(self.manager.add_node(add_request)))
    self.assertTrue(get_success(self.manager.control_execution(
        ControlTreeExecutionRequest(
            command=ControlTreeExecutionRequest.TICK_ONCE))))
    add_request.node.name = 'second'
    # The tree is not editable after ticking once...
    self.assertNotEqual(self.tree_msg.state, "EDITABLE")
    # ...neither by adding...
    self.assertFalse(get_success(self.manager.add_node(add_request)))
    # ...nor deleting a node...
    self.assertFalse(get_success(self.manager.remove_node(
        RemoveNodeRequest(node_name='first', remove_children=False))))
    # ...nor changing options.
    self.assertFalse(get_success(self.manager.set_options(
        SetOptionsRequest(node_name='first', options=[]))))
    # TODO(nberg): test other editing services here as they're implemented
    # But after shutting it down, we can edit it again.
    self.assertTrue(get_success(self.manager.control_execution(
        ControlTreeExecutionRequest(
            command=ControlTreeExecutionRequest.SHUTDOWN))))
    self.assertEqual(self.tree_msg.state, "EDITABLE")
    self.assertTrue(get_success(self.manager.add_node(add_request)))
    self.assertTrue(get_success(self.manager.remove_node(
        RemoveNodeRequest(node_name='first', remove_children=False))))
def testLoadTreeFromPath(self):
    """Loading a tree file via a package:// path succeeds."""
    response = self.manager.load_tree_from_path(LoadTreeFromPathRequest(
        path='package://ros_bt_py/test/testdata/trees/subtree_constant.yaml'))
    self.assertTrue(get_success(response))
def testLoadTreeFromPathBuiltins(self):
    """Builtin option types survive loading a tree from a path."""
    response = self.manager.load_tree_from_path(LoadTreeFromPathRequest(
        path='package://ros_bt_py/test/testdata/trees/builtins_constant.yaml'))
    self.assertTrue(get_success(response))
    constant = self.manager.nodes['Constant']
    self.assertEqual(constant.options.get_serialized('constant_value'),
                     json_encode(42))
    self.assertEqual(constant.options.get_serialized('constant_type'),
                     json_encode(int))
    self.assertEqual(constant.options.get_type('constant_value'), int)
    self.assertEqual(constant.options.get_type('constant_type'), type)
def testLoadWithAndWithoutName(self):
    """After loading, the tree name equals the file name in both cases
    (with and without an explicit name stored in the file).
    """
    for file_name in ('without_name.yaml', 'with_name.yaml'):
        load_request = LoadTreeRequest(tree=Tree(
            path='package://ros_bt_py/test/testdata/trees/' + file_name))
        response = self.manager.load_tree(load_request)
        self.assertTrue(get_success(response))
        self.assertEqual(self.manager.tree_msg.name, file_name)
def testLoadFromInvalidFiles(self):
    """Loading must fail for missing, empty, ambiguous, or broken tree files."""
    invalid_paths = [
        '/notareal.file',
        'file://',
        'package://ros_bt_py/etc/trees/notareal.file',
        'package://ros_bt_py/etc/trees/two_trees.yaml',
        'package://ros_bt_py/etc/trees/empty.yaml',
        'package://ros_bt_py/test/testdata/trees/broken_node_with_child.yaml',
        'package://ros_bt_py/test/testdata/trees/'
        'broken_node_with_missing_child.yaml',
        'package://ros_bt_py/test/testdata/trees/broken_wiring.yaml',
    ]
    for path in invalid_paths:
        load_request = LoadTreeRequest(tree=Tree(name='from_file',
                                                 path=path))
        self.assertFalse(get_success(self.manager.load_tree(load_request)))
def testLoadFromValidFile(self):
    """A valid tree file loads and the resulting tree can be ticked once."""
    response = self.manager.load_tree(LoadTreeRequest(tree=Tree(
        name='from_file',
        path='package://ros_bt_py/etc/trees/test.yaml')))
    self.assertTrue(get_success(response), get_error_message(response))
    # test.yaml contains a sequence, two succeeders, a fallback and a failer.
    self.assertEqual(len(self.manager.nodes), 5)
    self.assertTrue(get_success(self.manager.control_execution(
        ControlTreeExecutionRequest(
            command=ControlTreeExecutionRequest.TICK_ONCE))))
def testLoadPermissive(self):
    """A tree with a changed message definition loads only in permissive mode."""
    tree_path = ('package://ros_bt_py/test/testdata/trees/'
                 'permissive_changed_msg.yaml')
    response = self.manager.load_tree(LoadTreeRequest(
        tree=Tree(name='permissive_load', path=tree_path),
        permissive=False))
    self.assertFalse(get_success(response), get_error_message(response))
    response = self.manager.load_tree(LoadTreeRequest(
        tree=Tree(name='permissive_load', path=tree_path),
        permissive=True))
    self.assertTrue(get_success(response), get_error_message(response))
def testLoadPermissiveService(self):
    """A tree with a changed service definition loads only in permissive mode."""
    tree_path = ('package://ros_bt_py/test/testdata/trees/'
                 'permissive_changed_srv.yaml')
    response = self.manager.load_tree(LoadTreeRequest(
        tree=Tree(name='permissive_load', path=tree_path),
        permissive=False))
    self.assertFalse(get_success(response), get_error_message(response))
    response = self.manager.load_tree(LoadTreeRequest(
        tree=Tree(name='permissive_load', path=tree_path),
        permissive=True))
    self.assertTrue(get_success(response), get_error_message(response))
def testLoadFromValidFileWithEmptyObject(self):
    """Load a tree from a rostopic echo file that has "---" at the end"""
    response = self.manager.load_tree(LoadTreeRequest(tree=Tree(
        name='from_file',
        path='package://ros_bt_py/etc/trees/test_extra_empty.yaml')))
    self.assertTrue(get_success(response), get_error_message(response))
    # test.yaml contains a sequence, two succeeders, a fallback and a failer.
    self.assertEqual(len(self.manager.nodes), 5)
    self.assertTrue(get_success(self.manager.control_execution(
        ControlTreeExecutionRequest(
            command=ControlTreeExecutionRequest.TICK_ONCE))))
def testLoadWithoutNodesAndWithoutPath(self):
    """A tree message with neither nodes nor a path cannot be loaded."""
    # NOTE: the original bound the *response* to a variable named `request`.
    response = self.manager.load_tree(
        LoadTreeRequest(tree=Tree(name='broken')))
    self.assertFalse(get_success(response))
def testLoadFromFileWithIndirection(self):
    """Indirection should work as well (this yaml file refers to test.yaml)."""
    response = self.manager.load_tree(LoadTreeRequest(tree=Tree(
        name='from_file',
        path='package://ros_bt_py/etc/trees/indirection.yaml')))
    self.assertTrue(get_success(response), get_error_message(response))
def testLoadSubtree(self):
    """A subtree message extracted from a loaded tree can itself be loaded."""
    response = self.manager.load_tree(LoadTreeRequest(tree=Tree(
        name='from_file',
        path='package://ros_bt_py/etc/trees/test.yaml')))
    self.assertTrue(get_success(response), get_error_message(response))
    # Fallback is an inner node with 2 children.
    fallback = self.manager.find_root().find_node('fallback')
    self.assertIsNotNone(fallback)
    subtree, _, _ = fallback.get_subtree_msg()
    # Now load the subtree.
    response = self.manager.load_tree(LoadTreeRequest(tree=subtree))
    self.assertTrue(get_success(response), get_error_message(response))
def testSetExecutionMode(self):
    """Toggling publish_subtrees leaves the tree state EDITABLE."""
    enable_request = SetExecutionModeRequest(single_step=False,
                                             collect_performance_data=False,
                                             publish_subtrees=True)
    self.assertEqual(self.manager.set_execution_mode(enable_request),
                     SetExecutionModeResponse())
    self.assertEqual(self.manager.get_state(), Tree.EDITABLE)
    disable_request = SetExecutionModeRequest(single_step=False,
                                              collect_performance_data=False,
                                              publish_subtrees=False)
    self.assertEqual(self.manager.set_execution_mode(disable_request),
                     SetExecutionModeResponse())
def testDebugStep(self):
    """A debug step (continue) request succeeds."""
    self.assertTrue(self.manager.debug_step(ContinueRequest()).success)
def testModifyBreakpoints(self):
    """Added breakpoints are echoed back as the current breakpoint list."""
    breakpoints = ["first", "second", "third", "fourth"]
    modify_response = self.manager.modify_breakpoints(
        ModifyBreakpointsRequest(add=breakpoints))
    self.assertEqual(modify_response.current_breakpoints, breakpoints)
def testReloadTree(self):
# reload empty tree
reload_response = self.manager.reload_tree(request=ReloadTreeRequest())
self.assertFalse(get_success(reload_response))
# reload a valid tree
load_request = LoadTreeRequest(tree=Tree(name='from_file',
path='package://ros_bt_py/etc/trees/test.yaml'))
response = self.manager.load_tree(load_request)
self.assertTrue(get_success(response), get_error_message(response))
reload_response = self.manager.reload_tree(request=ReloadTreeRequest())
self.assertTrue(get_success(reload_response))
def testChangeTreeName(self):
change_response = self.manager.change_tree_name(request=ChangeTreeNameRequest(name='hi'))
self.assertTrue(get_success(change_response))
self.assertEqual(self.tree_msg.name, 'hi')
def testGenerateSubtree(self):
res = self.manager.generate_subtree(request=GenerateSubtreeRequest())
self.assertFalse(get_success(res))
self.sequence_msg.name = 'seq'
self.assertTrue(get_success(self.manager.add_node(
AddNodeRequest(node=self.sequence_msg))))
self.succeeder_msg.name = 'A'
self.assertTrue(get_success(self.manager.add_node(
AddNodeRequest(node=self.succeeder_msg,
parent_name='seq'))))
self.succeeder_msg.name = 'B'
self.assertTrue(get_success(self.manager.add_node(
AddNodeRequest(node=self.succeeder_msg,
parent_name='seq'))))
self.assertEqual(len(self.tree_msg.nodes), 3)
res = self.manager.generate_subtree(request=GenerateSubtreeRequest(nodes=['A']))
self.assertTrue(get_success(res))
res = self.manager.generate_subtree(
request=GenerateSubtreeRequest(nodes=[]))
self.assertTrue(get_success(res))
def testLoadFromFileWithPyYAMLgenpyMigration(self):
load_request = LoadTreeRequest(
tree=Tree(
name='migration',
path='package://ros_bt_py/test/testdata/trees/pyyaml_5_3_1_seq_multilayer.yaml'),
permissive=True)
response = self.manager.load_tree(load_request)
self.assertTrue(get_success(response), get_error_message(response))
load_request = LoadTreeRequest(
tree=Tree(
name='migration',
path='package://ros_bt_py/test/testdata/trees/pyyaml_5_3_1_1_child.yaml'),
permissive=True)
response = self.manager.load_tree(load_request)
self.assertTrue(get_success(response), get_error_message(response))
load_request = LoadTreeRequest(
tree=Tree(
name='migration',
path='package://ros_bt_py/test/testdata/trees/pyyaml_5_3_1_100_children.yaml'),
permissive=True)
response = self.manager.load_tree(load_request)
self.assertTrue(get_success(response), get_error_message(response))
load_request = LoadTreeRequest(
tree=Tree(
name='migration',
path='package://ros_bt_py/test/testdata/trees/pyyaml_5_3_1.yaml'),
permissive=True)
response = self.manager.load_tree(load_request)
self.assertTrue(get_success(response), get_error_message(response))
load_request = LoadTreeRequest(
tree=Tree(
name='migration',
path='package://ros_bt_py/test/testdata/trees/pyyaml_3_13.yaml'),
permissive=True)
response = self.manager.load_tree(load_request)
self.assertTrue(get_success(response), get_error_message(response))
load_request = LoadTreeRequest(
tree=Tree(
name='migration',
path='package://ros_bt_py/test/testdata/trees/pyyaml_3_13_100_children.yaml'),
permissive=True)
response = self.manager.load_tree(load_request)
self.assertTrue(get_success(response), get_error_message(response))
class TestWiringServices(unittest.TestCase):
def setUp(self):
self.tree_msg = None
self.debug_info_msg = None
def set_tree_msg(msg):
self.tree_msg = msg
def set_debug_info_msg(msg):
self.debug_info_msg = msg
self.manager = TreeManager(publish_tree_callback=set_tree_msg,
publish_debug_info_callback=set_debug_info_msg)
node_msg = NodeMsg(
module='ros_bt_py.nodes.passthrough_node',
node_class='PassthroughNode',
inputs=[NodeData(key='in',
serialized_value=json_encode(42))],
options=[NodeData(key='passthrough_type',
serialized_value=json_encode(int))])
self.sequence_msg = NodeMsg(
module='ros_bt_py.nodes.sequence',
node_class='Sequence')
response = self.manager.add_node(
AddNodeRequest(node=self.sequence_msg))
# Add three passthrough nodes that we can wire between
self.node_1_name = 'passthrough_1'
node_msg.name = self.node_1_name
self.manager.add_node(
AddNodeRequest(parent_name=response.actual_node_name,
node=node_msg))
self.node_2_name = 'passthrough_2'
node_msg.name = self.node_2_name
self.manager.add_node(
AddNodeRequest(parent_name=response.actual_node_name,
node=node_msg))
self.node_3_name = 'passthrough_3'
node_msg.name = self.node_3_name
self.manager.add_node(
AddNodeRequest(parent_name=response.actual_node_name,
node=node_msg))
def wiring(self, from_name, to_name):
return NodeDataWiring(
source=NodeDataLocation(node_name=from_name,
data_key='out',
data_kind=NodeDataLocation.OUTPUT_DATA),
target=NodeDataLocation(node_name=to_name,
data_key='in',
data_kind=NodeDataLocation.INPUT_DATA))
def testWireMultiple(self):
wire_request = WireNodeDataRequest()
wire_request.wirings.append(self.wiring(self.node_1_name, self.node_2_name))
wire_request.wirings.append(self.wiring(self.node_2_name, self.node_3_name))
response = self.manager.wire_data(wire_request)
self.assertTrue(get_success(response), get_error_message(response))
self.assertEqual(len(self.manager.tree_msg.data_wirings), 2)
response = self.manager.unwire_data(wire_request)
self.assertTrue(get_success(response), get_error_message(response))
self.assertEqual(len(self.manager.tree_msg.data_wirings), 0)
def testUndoWiringOnError(self):
wire_request = WireNodeDataRequest()
wire_request.wirings.append(self.wiring(self.node_1_name, self.node_2_name))
wire_request.wirings.append(
NodeDataWiring(
source=NodeDataLocation(node_name=self.node_2_name,
data_key='fake',
data_kind=NodeDataLocation.OUTPUT_DATA),
target=NodeDataLocation(node_name=self.node_3_name,
data_key='invalid',
data_kind=NodeDataLocation.INPUT_DATA)))
response = self.manager.wire_data(wire_request)
self.assertFalse(get_success(response))
# Even though the first wiring was valid, it should be undone if
# another in the same request is invalid
self.assertEqual(len(self.manager.tree_msg.data_wirings), 0)
def testRewireAfterUnwire(self):
wire_request = WireNodeDataRequest()
wire_request.wirings.append(self.wiring(self.node_1_name, self.node_2_name))
response = self.manager.wire_data(wire_request)
self.assertTrue(get_success(response), get_error_message(response))
self.assertEqual(len(self.manager.tree_msg.data_wirings), 1)
response = self.manager.unwire_data(wire_request)
self.assertTrue(get_success(response), get_error_message(response))
self.assertEqual(len(self.manager.tree_msg.data_wirings), 0)
response = self.manager.wire_data(wire_request)
self.assertTrue(get_success(response), get_error_message(response))
self.assertEqual(len(self.manager.tree_msg.data_wirings), 1)
def testRedoWiringOnError(self):
wire_request = WireNodeDataRequest()
wire_request.wirings.append(self.wiring(self.node_1_name, self.node_2_name))
wire_request.wirings.append(self.wiring(self.node_2_name, self.node_3_name))
unwire_request = WireNodeDataRequest()
unwire_request.wirings.append(self.wiring(self.node_1_name, self.node_2_name))
unwire_request.wirings.append(
NodeDataWiring(
source=NodeDataLocation(node_name=self.node_2_name,
data_key='fake',
data_kind=NodeDataLocation.OUTPUT_DATA),
target=NodeDataLocation(node_name='missing',
data_key='invalid',
data_kind=NodeDataLocation.INPUT_DATA)))
wire_response = self.manager.wire_data(wire_request)
self.assertTrue(get_success(wire_response), get_error_message(wire_response))
self.assertEqual(len(self.manager.tree_msg.data_wirings), 2)
# Number of wirings should stay the same, since the unwire operation failed
unwire_response = self.manager.unwire_data(unwire_request)
self.assertFalse(get_success(unwire_response))
self.assertEqual(len(self.manager.tree_msg.data_wirings), 2)
unwire_request = WireNodeDataRequest()
unwire_request.wirings.append(self.wiring(self.node_1_name, self.node_2_name))
unwire_request.wirings.append(
NodeDataWiring(
source=NodeDataLocation(node_name='missing',
data_key='fake',
data_kind=NodeDataLocation.OUTPUT_DATA),
target=NodeDataLocation(node_name=self.node_2_name,
data_key='invalid',
data_kind=NodeDataLocation.INPUT_DATA)))
# Number of wirings should be reduced by one since the second unwire request
# was ignore but the first was performed
unwire_response = self.manager.unwire_data(unwire_request)
self.assertTrue(get_success(unwire_response)) # unwire is forgiving with wrong sources
self.assertEqual(len(self.manager.tree_msg.data_wirings), 1)
def testWiringWithoutNodes(self):
manager = TreeManager()
wire_request = WireNodeDataRequest()
wire_response = manager.wire_data(wire_request)
self.assertFalse(get_success(wire_response))
def testWireAfterNodeRemoveAndAdd(self):
manager = TreeManager()
sequence_msg = NodeMsg(
module='ros_bt_py.nodes.sequence',
node_class='Sequence')
constant_msg = NodeMsg(
module='ros_bt_py.nodes.constant',
node_class='Constant',
options=[NodeData(key='constant_type',
serialized_value=json_encode(str)),
NodeData(key='constant_value',
serialized_value=json_encode('hello'))])
log_msg = NodeMsg(
module='ros_bt_py.nodes.log',
node_class='Log',
options=[NodeData(key='logger_level',
serialized_value=json_encode(
LoggerLevel(logger_level='info'))),
NodeData(key='log_type',
serialized_value=json_encode(str))])
add_response = manager.add_node(AddNodeRequest(node=sequence_msg))
self.assertTrue(get_success(add_response))
add_response = manager.add_node(
AddNodeRequest(node=constant_msg, parent_name='Sequence'))
self.assertTrue(get_success(add_response))
add_response = manager.add_node(
AddNodeRequest(node=log_msg, parent_name='Sequence'))
self.assertTrue(get_success(add_response))
# now that the nodes are added, wire constant.constant to log.in
wiring = NodeDataWiring(
source=NodeDataLocation(node_name='Constant',
data_key='constant',
data_kind=NodeDataLocation.OUTPUT_DATA),
target=NodeDataLocation(node_name='Log',
data_key='in',
data_kind=NodeDataLocation.INPUT_DATA))
wire_request = WireNodeDataRequest()
wire_request.wirings.append(wiring)
wire_response = manager.wire_data(wire_request)
self.assertTrue(get_success(wire_response))
remove_response = manager.remove_node(
RemoveNodeRequest(node_name='Constant'))
self.assertTrue(get_success(remove_response))
add_response = manager.add_node_at_index(
AddNodeAtIndexRequest(node=constant_msg, parent_name='Sequence',
new_child_index=0, allow_rename=False))
self.assertTrue(get_success(add_response))
# wiring should work because the old node got deleted
wire_request = WireNodeDataRequest()
wire_request.wirings.append(wiring)
wire_response = manager.wire_data(wire_request)
self.assertTrue(get_success(wire_response))
def get_success(response):
if isinstance(response, dict):
return response['success']
return response.success
def get_error_message(response):
if isinstance(response, dict):
return response['error_message']
return response.error_message
| 43.113723
| 99
| 0.641372
| 13,171
| 124,728
| 5.828031
| 0.052692
| 0.07552
| 0.044072
| 0.062219
| 0.817967
| 0.797423
| 0.782207
| 0.760829
| 0.740102
| 0.71126
| 0
| 0.003143
| 0.267919
| 124,728
| 2,892
| 100
| 43.128631
| 0.837511
| 0.057164
| 0
| 0.740161
| 0
| 0
| 0.053806
| 0.018114
| 0
| 0
| 0
| 0.000346
| 0.230441
| 1
| 0.069227
| false
| 0.034139
| 0.008535
| 0.00569
| 0.089142
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
cc646705f21a4dc7ee92c166bbcc05bcef77bd7a
| 2,524
|
py
|
Python
|
step_7_generate_pr_bins.py
|
byuccl/fiate
|
318f634badd69f241091277ead59d7ead64ee6aa
|
[
"Apache-2.0"
] | 1
|
2021-05-12T09:42:14.000Z
|
2021-05-12T09:42:14.000Z
|
step_7_generate_pr_bins.py
|
byuccl/fiate
|
318f634badd69f241091277ead59d7ead64ee6aa
|
[
"Apache-2.0"
] | null | null | null |
step_7_generate_pr_bins.py
|
byuccl/fiate
|
318f634badd69f241091277ead59d7ead64ee6aa
|
[
"Apache-2.0"
] | 3
|
2021-04-19T19:36:06.000Z
|
2021-08-17T01:46:23.000Z
|
from lib import *
generate_bins("./bld/bits/vex_pb0_partial.bit","./bld/bins/vex_pb0.bin",0x8D)
generate_bins("./bld/bits/vex_pb1_partial.bit","./bld/bins/vex_pb1.bin",0x8D)
generate_bins("./bld/bits/vex_pb2_partial.bit","./bld/bins/vex_pb2.bin",0x8D)
generate_bins("./bld/bits/vex_tmr_pb0_partial.bit","./bld/bins/vex_tmr_pb0.bin",0x8D)
generate_bins("./bld/bits/vex_tmr_pb1_partial.bit","./bld/bins/vex_tmr_pb1.bin",0x8D)
generate_bins("./bld/bits/vex_tmr_pb2_partial.bit","./bld/bins/vex_tmr_pb2.bin",0x8D)
generate_bins("./bld/bits/mb_pb0_partial.bit","./bld/bins/mb_pb0.bin",0x8D)
generate_bins("./bld/bits/mb_pb1_partial.bit","./bld/bins/mb_pb1.bin",0x8D)
generate_bins("./bld/bits/mb_pb2_partial.bit","./bld/bins/mb_pb2.bin",0x8D)
generate_bins("./bld/bits/mb_tmr_pb0_partial.bit","./bld/bins/mb_tmr_pb0.bin",0x8D)
generate_bins("./bld/bits/mb_tmr_pb1_partial.bit","./bld/bins/mb_tmr_pb1.bin",0x8D)
generate_bins("./bld/bits/mb_tmr_pb2_partial.bit","./bld/bins/mb_tmr_pb2.bin",0x8D)
generate_bins("./bld/bits/taiga_pb0_partial.bit","./bld/bins/taiga_pb0.bin",0x8D)
generate_bins("./bld/bits/taiga_pb1_partial.bit","./bld/bins/taiga_pb1.bin",0x8D)
generate_bins("./bld/bits/taiga_pb2_partial.bit","./bld/bins/taiga_pb2.bin",0x8D)
generate_bins("./bld/bits/taiga_tmr_pb0_partial.bit","./bld/bins/taiga_tmr_pb0.bin",0x8D)
generate_bins("./bld/bits/taiga_tmr_pb1_partial.bit","./bld/bins/taiga_tmr_pb1.bin",0x8D)
generate_bins("./bld/bits/taiga_tmr_pb2_partial.bit","./bld/bins/taiga_tmr_pb2.bin",0x8D)
generate_bins("./bld/bits/pico_pb0_partial.bit","./bld/bins/pico_pb0.bin",0x8D)
generate_bins("./bld/bits/pico_pb1_partial.bit","./bld/bins/pico_pb1.bin",0x8D)
generate_bins("./bld/bits/pico_pb2_partial.bit","./bld/bins/pico_pb2.bin",0x8D)
generate_bins("./bld/bits/pico_tmr_pb0_partial.bit","./bld/bins/pico_tmr_pb0.bin",0x8D)
generate_bins("./bld/bits/pico_tmr_pb1_partial.bit","./bld/bins/pico_tmr_pb1.bin",0x8D)
generate_bins("./bld/bits/pico_tmr_pb2_partial.bit","./bld/bins/pico_tmr_pb2.bin",0x8D)
generate_bins("./bld/bits/kron_pb0_partial.bit","./bld/bins/kron_pb0.bin",0x8D)
generate_bins("./bld/bits/kron_pb1_partial.bit","./bld/bins/kron_pb1.bin",0x8D)
generate_bins("./bld/bits/kron_pb2_partial.bit","./bld/bins/kron_pb2.bin",0x8D)
generate_bins("./bld/bits/kron_tmr_pb0_partial.bit","./bld/bins/kron_tmr_pb0.bin",0x8D)
generate_bins("./bld/bits/kron_tmr_pb1_partial.bit","./bld/bins/kron_tmr_pb1.bin",0x8D)
generate_bins("./bld/bits/kron_tmr_pb2_partial.bit","./bld/bins/kron_tmr_pb2.bin",0x8D)
| 60.095238
| 89
| 0.768225
| 453
| 2,524
| 3.949227
| 0.04415
| 0.20123
| 0.251537
| 0.318614
| 0.987144
| 0.974846
| 0.577418
| 0.575741
| 0
| 0
| 0
| 0.048387
| 0.017433
| 2,524
| 42
| 90
| 60.095238
| 0.672984
| 0
| 0
| 0
| 1
| 0
| 0.679604
| 0.679604
| 0
| 0
| 0.047525
| 0
| 0
| 1
| 0
| true
| 0
| 0.032258
| 0
| 0.032258
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
4e0fdb2ce2d3bb69607900f9e65b83e79e178cdd
| 201
|
py
|
Python
|
data_loader/__init__.py
|
kunato/style_swap_tensorflow
|
ab136c20fa5351852f1f4c986bed5b25eee3b890
|
[
"Apache-2.0"
] | null | null | null |
data_loader/__init__.py
|
kunato/style_swap_tensorflow
|
ab136c20fa5351852f1f4c986bed5b25eee3b890
|
[
"Apache-2.0"
] | null | null | null |
data_loader/__init__.py
|
kunato/style_swap_tensorflow
|
ab136c20fa5351852f1f4c986bed5b25eee3b890
|
[
"Apache-2.0"
] | null | null | null |
# data_loader
# __init__.py
from data_loader.image_data_loader import ImageDataLoader
from data_loader.coco_data_loader import COCODataLoader
from data_loader.tf_example_loader import TFExampleLoader
| 28.714286
| 57
| 0.885572
| 28
| 201
| 5.857143
| 0.464286
| 0.365854
| 0.256098
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.084577
| 201
| 6
| 58
| 33.5
| 0.891304
| 0.114428
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
9d68975702955b40381a81581441db52c47f67ab
| 43
|
py
|
Python
|
src/network/__init__.py
|
ThomasRanvier/faces_recognition_nn
|
b9177134169b6e05d9d9b6ea3206628bdb127a5e
|
[
"MIT"
] | null | null | null |
src/network/__init__.py
|
ThomasRanvier/faces_recognition_nn
|
b9177134169b6e05d9d9b6ea3206628bdb127a5e
|
[
"MIT"
] | null | null | null |
src/network/__init__.py
|
ThomasRanvier/faces_recognition_nn
|
b9177134169b6e05d9d9b6ea3206628bdb127a5e
|
[
"MIT"
] | null | null | null |
from .neural_network import Neural_network
| 21.5
| 42
| 0.883721
| 6
| 43
| 6
| 0.666667
| 0.722222
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.093023
| 43
| 1
| 43
| 43
| 0.923077
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
9daf2e07854e8ace58237146dcb7ca501dc5a1ae
| 111
|
py
|
Python
|
odata_query/django/__init__.py
|
itd-fsc/odata-query
|
7d5239b775633594ce52d4eda5754c2ad078eb75
|
[
"MIT"
] | 26
|
2021-06-11T07:42:08.000Z
|
2022-02-16T04:42:45.000Z
|
odata_query/django/__init__.py
|
itd-fsc/odata-query
|
7d5239b775633594ce52d4eda5754c2ad078eb75
|
[
"MIT"
] | 13
|
2021-08-07T21:38:22.000Z
|
2022-03-28T17:25:47.000Z
|
odata_query/django/__init__.py
|
itd-fsc/odata-query
|
7d5239b775633594ce52d4eda5754c2ad078eb75
|
[
"MIT"
] | 6
|
2021-07-28T04:46:14.000Z
|
2022-03-15T08:22:19.000Z
|
from .django_q import AstToDjangoQVisitor
from .django_q_ext import *
from .shorthand import apply_odata_query
| 27.75
| 41
| 0.855856
| 16
| 111
| 5.625
| 0.625
| 0.222222
| 0.244444
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.108108
| 111
| 3
| 42
| 37
| 0.909091
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
d1c48b5c347d67ffd046cdeddd8592dec9342813
| 1,926
|
py
|
Python
|
tests/unit_tests/crud/test_base.py
|
Georgi2704/vidpit-authentication
|
521505a3b4e77db512227688558dab5605822626
|
[
"Apache-2.0"
] | 16
|
2021-06-08T05:47:59.000Z
|
2022-01-29T22:39:16.000Z
|
tests/unit_tests/crud/test_base.py
|
Georgi2704/vidpit-authentication
|
521505a3b4e77db512227688558dab5605822626
|
[
"Apache-2.0"
] | 6
|
2021-09-30T14:45:37.000Z
|
2022-01-28T13:27:00.000Z
|
tests/unit_tests/crud/test_base.py
|
Georgi2704/vidpit-authentication
|
521505a3b4e77db512227688558dab5605822626
|
[
"Apache-2.0"
] | 3
|
2021-06-24T14:13:36.000Z
|
2022-01-29T22:39:32.000Z
|
from server import crud
def test_filter(product_1, product_2):
result, content_range = crud.product_crud.get_multi(filter_parameters=["name:duct 1"], sort_parameters=[])
assert len(result) == 1
assert content_range == "products 0-100/1"
# Will not filter but return all results
result, content_range = crud.product_crud.get_multi(filter_parameters=["NONEXISITANT:0"], sort_parameters=[])
assert len(result) == 2
assert content_range == "products 0-100/2"
# Wild card match on all tables
result, content_range = crud.product_crud.get_multi(filter_parameters=["Product 1"], sort_parameters=[])
assert len(result) == 1
assert content_range == "products 0-100/1"
def test_sort(product_1, product_2):
result, content_range = crud.product_crud.get_multi(filter_parameters=[], sort_parameters=["name:ASC"])
assert len(result) == 2
assert content_range == "products 0-100/2"
assert result[0].name == "Product 1"
result, content_range = crud.product_crud.get_multi(filter_parameters=[], sort_parameters=["name:DESC"])
assert len(result) == 2
assert content_range == "products 0-100/2"
assert result[0].name == "Product 2"
# No Sort order
result, content_range = crud.product_crud.get_multi(filter_parameters=[], sort_parameters=["name"])
assert len(result) == 2
assert content_range == "products 0-100/2"
assert result[0].name == "Product 1"
# Non existant column impossible to sort
result, content_range = crud.product_crud.get_multi(filter_parameters=[], sort_parameters=["NONTRUE"])
assert len(result) == 2
assert content_range == "products 0-100/2"
# Non existant column impossible to sort on nonexistant method
result, content_range = crud.product_crud.get_multi(filter_parameters=[], sort_parameters=["NONTRUE:NONTRUE"])
assert len(result) == 2
assert content_range == "products 0-100/2"
| 39.306122
| 114
| 0.711838
| 266
| 1,926
| 4.951128
| 0.180451
| 0.145786
| 0.109339
| 0.133637
| 0.873956
| 0.863326
| 0.813212
| 0.813212
| 0.813212
| 0.813212
| 0
| 0.037888
| 0.164071
| 1,926
| 48
| 115
| 40.125
| 0.780124
| 0.094496
| 0
| 0.6
| 0
| 0
| 0.133487
| 0
| 0
| 0
| 0
| 0
| 0.633333
| 1
| 0.066667
| false
| 0
| 0.033333
| 0
| 0.1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
d1d7dba700dc5f0d195179566af837041f1113d5
| 13,432
|
py
|
Python
|
dynaphopy/analysis/fitting/fitting_functions.py
|
faradaymahe/DynaPhopy
|
8519ff616386651acf71166bee02c1a2aef89312
|
[
"MIT"
] | 76
|
2015-02-24T02:55:09.000Z
|
2022-03-31T09:38:09.000Z
|
dynaphopy/analysis/fitting/fitting_functions.py
|
jianskerh/DynaPhoPy
|
e1201f6de62b4303c68a7808ed19175364409586
|
[
"MIT"
] | 14
|
2017-07-21T12:37:28.000Z
|
2021-09-15T08:50:55.000Z
|
dynaphopy/analysis/fitting/fitting_functions.py
|
jianskerh/DynaPhoPy
|
e1201f6de62b4303c68a7808ed19175364409586
|
[
"MIT"
] | 38
|
2015-07-02T01:17:27.000Z
|
2022-03-25T14:24:33.000Z
|
import numpy as np
from scipy.optimize import curve_fit, minimize_scalar
h_planck = 4.135667662e-3 # eV/ps
h_planck_bar = 6.58211951e-4 # eV/ps
kb_boltzmann = 8.6173324e-5 # eV/K
def get_standard_errors_from_covariance(covariance):
# return np.linalg.eigvals(covariance)
return np.sqrt(np.diag(covariance))
#return np.sqrt(np.trace(covariance))
class Lorentzian:
def __init__(self,
test_frequencies_range,
power_spectrum,
guess_position=None,
guess_height=None):
self.test_frequencies_range = test_frequencies_range
self.power_spectrum = power_spectrum
self.guess_pos = guess_position
self.guess_height = guess_height
self._fit_params = None
self._fit_covariances = None
self.curve_name = 'Lorentzian'
def _function(self, x, a, b, c, d):
"""Lorentzian function
x: frequency coordinate
a: peak position
b: half width
c: area proportional parameter
d: base line
"""
return c/(np.pi*b*(1.0+((x - a)/b)**2))+d
def get_fitting_parameters(self):
if self._fit_params is None:
if self.guess_pos is None or self.guess_height is None:
fit_params, fit_covariances = curve_fit(self._function,
self.test_frequencies_range,
self.power_spectrum)
else:
fit_params, fit_covariances = curve_fit(self._function,
self.test_frequencies_range,
self.power_spectrum,
p0=[self.guess_pos, 0.1, self.guess_height, 0.0])
self._fit_covariances = fit_covariances
self._fit_params = fit_params
return self._fit_params, self._fit_covariances
def get_fitting(self):
from scipy.integrate import quad
try:
fit_params, fit_covariances = self.get_fitting_parameters()
maximum = fit_params[2]/(fit_params[1]*np.pi)
width = 2.0*fit_params[1]
frequency = fit_params[0]
area = fit_params[2]
standard_errors = get_standard_errors_from_covariance(fit_covariances)
global_error = np.average(standard_errors[:2])/np.sqrt(area)
if np.isnan(global_error):
raise RuntimeError
#error = get_error_from_covariance(fit_covariances)
base_line = fit_params[3]
return {'maximum': maximum,
'width': width,
'peak_position': frequency,
'standard_errors': standard_errors,
'global_error': global_error,
'area': area,
'base_line': base_line,
'all_good': True}
except RuntimeError:
return {'all_good': False}
def get_curve(self, frequency_range):
return self._function(frequency_range, *self.get_fitting_parameters()[0])
class Lorentzian_asymmetric:
def __init__(self,
test_frequencies_range,
power_spectrum,
guess_position=None,
guess_height=None):
self.test_frequencies_range = test_frequencies_range
self.power_spectrum = power_spectrum
self.guess_pos = guess_position
self.guess_height = guess_height
self._fit_params = None
self._fit_covariances = None
self.curve_name = 'Assym. Lorentzian'
def _g_a (self, x, a, b, s):
"""Asymmetric width term
x: frequency coordinate
a: peak position
b: half width
s: asymmetry parameter
"""
return 2*b/(1.0+np.exp(s*(x-a)))
def _function(self, x, a, b, c, d, s):
"""Lorentzian asymmetric function
x: frequency coordinate
a: peak position
b: half width
c: area proportional parameter
d: base line
s: asymmetry parameter
"""
return c/(np.pi*self._g_a(x, a, b, s)*(1.0+((x-a)/(self._g_a(x, a, b, s)))**2))+d
def get_fitting_parameters(self):
if self._fit_params is None:
if self.guess_pos is None or self.guess_height is None:
fit_params, fit_covariances = curve_fit(self._function,
self.test_frequencies_range,
self.power_spectrum)
else:
fit_params, fit_covariances = curve_fit(self._function,
self.test_frequencies_range,
self.power_spectrum,
p0=[self.guess_pos, 0.1, self.guess_height, 0.0, 0.0])
self._fit_covariances = fit_covariances
self._fit_params = fit_params
return self._fit_params, self._fit_covariances
def get_fitting(self):
from scipy.integrate import quad
try:
fit_params, fit_covariances = self.get_fitting_parameters()
peak_pos = minimize_scalar(lambda x: -self._function(x, *fit_params), fit_params[0],
bounds=[self.test_frequencies_range[0], self.test_frequencies_range[-1]],
method='bounded')
frequency = peak_pos["x"]
maximum = -peak_pos["fun"]
width = 2.0 * self._g_a(frequency, fit_params[0], fit_params[1], fit_params[4])
asymmetry = fit_params[4]
area, error_integration = quad(self._function, 0, self.test_frequencies_range[-1],
args=tuple(fit_params),
epsabs=1e-8)
# area = fit_params[2]
standard_errors = get_standard_errors_from_covariance(fit_covariances)
global_error = np.average(standard_errors[:2])/np.sqrt(area)
if np.isnan(global_error):
raise RuntimeError
#error = get_error_from_covariance(fit_covariances)
base_line = fit_params[3]
return {'maximum': maximum,
'width': width,
'peak_position': frequency,
'global_error': global_error,
'area': area,
'base_line': base_line,
'asymmetry': asymmetry,
'all_good': True}
except RuntimeError:
return {'all_good': False}
def get_curve(self, frequency_range):
return self._function(frequency_range, *self.get_fitting_parameters()[0])
class Damped_harmonic:
def __init__(self,
test_frequencies_range,
power_spectrum,
guess_position=None,
guess_height=None):
self.test_frequencies_range = test_frequencies_range
self.power_spectrum = power_spectrum
self.guess_pos = guess_position
self.guess_height = guess_height
self._fit_params = None
self._fit_covariances = None
self.curve_name = 'Damped Harm. Osc.'
def _function(self, x, a, b, c, d):
"""Damped harmonic oscillator PS function
x: frequency coordinate
a: peak position
b: half width
c: area proportional parameter
d: base line
"""
return c/((a**2-x**2)**2 + (b*x)**2)+d
def get_fitting_parameters(self):
if self._fit_params is None:
if self.guess_pos is None or self.guess_height is None:
fit_params, fit_covariances = curve_fit(self._function,
self.test_frequencies_range,
self.power_spectrum)
else:
fit_params, fit_covariances = curve_fit(self._function,
self.test_frequencies_range,
self.power_spectrum,
p0=[self.guess_pos, 0.1, self.guess_height, 0.0])
self._fit_covariances = fit_covariances
self._fit_params = fit_params
return self._fit_params, self._fit_covariances
def get_fitting(self):
from scipy.integrate import quad
try:
fit_params, fit_covariances = self.get_fitting_parameters()
self._fit_params = fit_params
width = abs(fit_params[1])
maximum = fit_params[2]/(width*np.pi)
frequency = fit_params[0]
maximum = self.get_curve(frequency)
area, error_integration = quad(self._function, 0, self.test_frequencies_range[-1],
args=tuple(fit_params),
epsabs=1e-8)
# area = fit_params[2]*np.pi/(fit_params[0]**3*width)
standard_errors = get_standard_errors_from_covariance(fit_covariances)
global_error = np.average(standard_errors[:2])/np.sqrt(area)
if np.isnan(global_error):
raise RuntimeError
base_line = fit_params[3]
return {'maximum': maximum,
'width': width,
'peak_position': frequency,
'global_error': global_error,
'area': area,
'base_line': base_line,
'all_good': True}
except RuntimeError:
return {'all_good': False}
def get_curve(self, frequency_range):
return self._function(frequency_range, *self.get_fitting_parameters()[0])
class Gaussian_function:
def __init__(self,
test_frequencies_range,
power_spectrum,
guess_position=None,
guess_height=None):
self.test_frequencies_range = test_frequencies_range
self.power_spectrum = power_spectrum
self.guess_pos = guess_position
self.guess_height = guess_height
self._fit_params = None
self._fit_covariances = None
self.curve_name = 'Gaussian dist.'
def _function(self, x, a, b, c, d):
"""Gaussian PDF function
x: coordinate
a: peak position
b: deviation (sigma)
c: area proportional parameter
d: base line
"""
return c/b*np.sqrt(2*np.pi)*np.exp(-(x-a)**2/(2*b**2))+d
def get_fitting_parameters(self):
if self._fit_params is None:
if self.guess_pos is None or self.guess_height is None:
fit_params, fit_covariances = curve_fit(self._function,
self.test_frequencies_range,
self.power_spectrum)
else:
fit_params, fit_covariances = curve_fit(self._function,
self.test_frequencies_range,
self.power_spectrum,
p0=[self.guess_pos, 0.1, self.guess_height, 0.0])
self._fit_covariances = fit_covariances
self._fit_params = fit_params
return self._fit_params, self._fit_covariances
def get_fitting(self):
from scipy.integrate import quad
try:
fit_params, fit_covariances = self.get_fitting_parameters()
self._fit_params = fit_params
width = abs(fit_params[1])
frequency = fit_params[0]
maximum = self.get_curve(frequency)
area, error_integration = quad(self._function, 0, self.test_frequencies_range[-1],
args=tuple(fit_params),
epsabs=1e-8)
# area = fit_params[2]*np.pi/(fit_params[0]**3*width)
standard_errors = get_standard_errors_from_covariance(fit_covariances)
global_error = np.average(standard_errors[:2])/np.sqrt(area)
if np.isnan(global_error):
raise RuntimeError
base_line = fit_params[3]
return {'maximum': maximum,
'width': width,
'peak_position': frequency,
'global_error': global_error,
'area': area,
'base_line': base_line,
'all_good': True}
except RuntimeError:
return {'all_good': False}
def get_curve(self, frequency_range):
return self._function(frequency_range, *self.get_fitting_parameters()[0])
fitting_functions = {
0: Lorentzian,
1: Lorentzian_asymmetric,
2: Damped_harmonic,
}
# Test for automatic detection (order can change)
# import sys, inspect
# list_fitting = inspect.getmembers(sys.modules[__name__], inspect.isclass)
# Fitting_functions = {}
# for i, p in enumerate(list_fitting):
# Fitting_functions[i] = p[1]
| 34.979167
| 112
| 0.542064
| 1,441
| 13,432
| 4.748092
| 0.098543
| 0.084186
| 0.073078
| 0.073663
| 0.841421
| 0.826367
| 0.822567
| 0.815405
| 0.803712
| 0.791289
| 0
| 0.014899
| 0.375372
| 13,432
| 383
| 113
| 35.070496
| 0.800596
| 0.089413
| 0
| 0.829876
| 0
| 0
| 0.029887
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.091286
| false
| 0
| 0.024896
| 0.020747
| 0.224066
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
d1e715c85a2185a84c7545eb4958d65bd238b0ac
| 20,031
|
py
|
Python
|
dsi/tests/test_multi_analysis.py
|
mongodb/dsi
|
8cfc845156561d698fb01da93464392caca40644
|
[
"Apache-2.0"
] | 9
|
2020-05-19T21:39:44.000Z
|
2022-02-11T10:03:36.000Z
|
dsi/tests/test_multi_analysis.py
|
mongodb/dsi
|
8cfc845156561d698fb01da93464392caca40644
|
[
"Apache-2.0"
] | 1
|
2021-03-25T23:37:22.000Z
|
2021-03-25T23:37:22.000Z
|
dsi/tests/test_multi_analysis.py
|
mongodb/dsi
|
8cfc845156561d698fb01da93464392caca40644
|
[
"Apache-2.0"
] | 3
|
2020-03-05T10:49:10.000Z
|
2021-03-02T11:15:45.000Z
|
"""Unit tests for util/multi_analysis.py"""
import os
import unittest
from dsi.multi_analysis import MultiEvergreenAnalysis, main
from test_lib.fixture_files import FixtureFiles
from test_lib.test_requests_parent import TestRequestsParent
# Shared helper for resolving fixture files and repo-root paths in the tests.
FIXTURE_FILES = FixtureFiles()
class TestMultiEvergreenAnalysis(TestRequestsParent):
    """
    Test the MultiEvergreen client class.

    Covers option parsing, aggregation and flattening of Evergreen results,
    the YCSB result fix-up, and the top-level main() entry point.
    """
def test_parse_options(self):
    """MultiEvergreenAnalysis: parse options."""
    config_path = FIXTURE_FILES.repo_root_file_path("config.yml")
    cli_args = [
        "587773af3ff120ab9000946",
        "587773b03ff1220ab900094a",
        "--evergreen-config",
        config_path,
    ]
    analyzer = MultiEvergreenAnalysis(cli_args)
    analyzer.parse_options()
    # Defaults: csv output enabled, all other formats off, ids positional.
    self.assertEqual(
        analyzer.config,
        {
            "evergreen_config": config_path,
            "csv": True,
            "json": False,
            "json_array": False,
            "ycsbfix": False,
            "yml": False,
            "id": ["587773af3ff120ab9000946", "587773b03ff1220ab900094a"],
        },
    )
def test_parse_options2(self):
    """MultiEvergreenAnalysis: parse more advanced options."""
    continue_file = FIXTURE_FILES.fixture_file_path("multi_patch_builds.yml")
    config_path = FIXTURE_FILES.repo_root_file_path("config.yml")
    analyzer = MultiEvergreenAnalysis([
        "--json",
        "--out",
        "outfile.json",
        "--continue",
        continue_file,
        "--evergreen-config",
        config_path,
    ])
    analyzer.parse_options()
    self.assertEqual(
        analyzer.config,
        {
            "evergreen_config": config_path,
            "csv": False,
            "json": True,
            "json_array": False,
            "ycsbfix": False,
            "yml": False,
            "out": "outfile.json",
            "id": [],
            "continue": continue_file,
        },
    )
    # The --continue file seeds client.builds; spot-check the second entry.
    self.assertEqual(analyzer.builds[1]["ID"], "5873a2623ff1224e8e0003ee")
def test_aggregate_results(self):
    """MultiEvergreenAnalysis.aggregate_results()"""
    # Raw per-build Evergreen results: two builds, one variant/task/test,
    # thread levels keyed by the strings "32" and "64".
    data = [
        {
            "a_variant": {
                "a_task": {
                    "data": {
                        "results": [
                            {
                                "name": "a_test",
                                "results": {
                                    "32": {
                                        "ops_per_sec": 111.123,
                                        "ops_per_sec_values": [111.123, 123.111, 234.123],
                                    },
                                    "64": {
                                        "ops_per_sec": 222.234,
                                        "ops_per_sec_values": [222.234, 333.123, 444.111],
                                    },
                                },
                            }
                        ]
                    }
                }
            }
        },
        {
            "a_variant": {
                "a_task": {
                    "data": {
                        "results": [
                            {
                                "name": "a_test",
                                "results": {
                                    "32": {
                                        "ops_per_sec": 123,
                                        "ops_per_sec_values": [123.123, 234.234, 345.345],
                                    },
                                    "64": {
                                        "ops_per_sec": 234,
                                        "ops_per_sec_values": [234.234, 345.345, 456.456],
                                    },
                                },
                            }
                        ]
                    }
                }
            }
        },
    ]
    # Expected aggregate statistics per thread level.  Note that the string
    # thread-level keys in the input become int keys in the aggregate.
    expected = {
        "a_variant": {
            "a_task": {
                "a_test": {
                    32: {
                        "all_variance_to_mean": 44.10677573939485,
                        "it_range_to_median": [0.999098374637522, 0.9487179487179488],
                        "it_range_to_median_avg": 0.97390816167773542,
                        "it_range_to_median_max": 0.999098374637522,
                        "it_range_to_median_min": 0.9487179487179488,
                        "it_max": [234.123, 345.345],
                        "it_variance": [4599.396047999999, 12345.654321000002],
                        "it_range": [122.99999999999999, 222.22200000000004],
                        "all_min": 111.123,
                        "all_median": 178.623,
                        "it_median": [123.111, 234.234],
                        "min": 111.123,
                        "all_variance": 8608.6061151,
                        "all_range_to_median": 1.3112645068104334,
                        "max": 123,
                        "all_range": 234.22200000000004,
                        "variance": 70.53156449999994,
                        "variance_to_mean": 0.6025171768685686,
                        "it_min": [111.123, 123.123],
                        "all_max": 345.345,
                        "it_variance_to_mean": [29.46083467098815, 52.706500000000005],
                        "average": 117.0615,
                        "median": 117.0615,
                        "ops_per_sec": [111.123, 123],
                        "ops_per_sec_values": [
                            [111.123, 123.111, 234.123],
                            [123.123, 234.234, 345.345],
                        ],
                        "range": 11.876999999999995,
                        "it_average": [156.119, 234.234],
                        "range_to_median": 0.10145948924283386,
                        "all_average": 195.17650000000003,
                    },
                    64: {
                        "all_variance_to_mean": 29.198995681067522,
                        "it_range_to_median": [0.666051278356643, 0.6434782608695652],
                        "it_range_to_median_avg": 0.65476476961310404,
                        "it_range_to_median_max": 0.666051278356643,
                        "it_range_to_median_min": 0.6434782608695652,
                        "it_max": [444.111, 456.456],
                        "it_variance": [12307.351598999998, 12345.654321000002],
                        "it_range": [221.87699999999998, 222.222],
                        "all_min": 222.234,
                        "all_median": 339.23400000000004,
                        "it_median": [333.123, 345.345],
                        "min": 222.234,
                        "all_variance": 9905.773884299999,
                        "all_range_to_median": 0.6904437644811545,
                        "max": 234,
                        "all_range": 234.222,
                        "variance": 69.21937799999989,
                        "variance_to_mean": 0.30343805152619,
                        "it_min": [222.234, 234.234],
                        "all_max": 456.456,
                        "it_variance_to_mean": [36.9417077855419, 35.74875652173913],
                        "average": 228.11700000000002,
                        "median": 228.11700000000002,
                        "ops_per_sec": [222.234, 234],
                        "ops_per_sec_values": [
                            [222.234, 333.123, 444.111],
                            [234.234, 345.345, 456.456],
                        ],
                        "range": 11.765999999999991,
                        "it_average": [333.156, 345.345],
                        "range_to_median": 0.05157879509199222,
                        "all_average": 339.25050000000005,
                    },
                }
            }
        }
    }
    client = MultiEvergreenAnalysis()
    client.results = data
    client.aggregate_results()
    self.assertEqual(client.agg_results, expected)
def test_flatten(self):
    """MultiEvergreenAnalysis.flat_results()"""
    client = MultiEvergreenAnalysis()
    # Pre-populated aggregate (same shape aggregate_results() produces):
    # variant -> task -> test -> thread level -> statistics.
    client.agg_results = {
        "a_variant": {
            "a_task": {
                "a_test": {
                    32: {
                        "all_variance_to_mean": 44.10677573939485,
                        "it_range_to_median": [0.999098374637522, 0.9487179487179488],
                        "it_range_to_median_avg": 0.97390816167773542,
                        "it_range_to_median_max": 0.999098374637522,
                        "it_range_to_median_min": 0.9487179487179488,
                        "it_max": [234.123, 345.345],
                        "it_variance": [4599.396047999999, 12345.654321000002],
                        "it_range": [122.99999999999999, 222.22200000000004],
                        "all_min": 111.123,
                        "all_median": 178.623,
                        "it_median": [123.111, 234.234],
                        "min": 111.123,
                        "all_variance": 8608.6061151,
                        "all_range_to_median": 1.3112645068104334,
                        "max": 123,
                        "all_range": 234.22200000000004,
                        "variance": 70.53156449999994,
                        "variance_to_mean": 0.6025171768685686,
                        "it_min": [111.123, 123.123],
                        "all_max": 345.345,
                        "it_variance_to_mean": [29.46083467098815, 52.706500000000005],
                        "average": 117.0615,
                        "median": 117.0615,
                        "ops_per_sec": [111.123, 123],
                        "ops_per_sec_values": [
                            [111.123, 123.111, 234.123],
                            [123.123, 234.234, 345.345],
                        ],
                        "range": 11.876999999999995,
                        "it_average": [156.119, 234.234],
                        "range_to_median": 0.10145948924283386,
                        "all_average": 195.17650000000003,
                    },
                    64: {
                        "all_variance_to_mean": 29.198995681067522,
                        "it_range_to_median": [0.666051278356643, 0.6434782608695652],
                        "it_range_to_median_avg": 0.65476476961310404,
                        "it_range_to_median_max": 0.666051278356643,
                        "it_range_to_median_min": 0.6434782608695652,
                        "it_max": [444.111, 456.456],
                        "it_variance": [12307.351598999998, 12345.654321000002],
                        "it_range": [221.87699999999998, 222.222],
                        "all_min": 222.234,
                        "all_median": 339.23400000000004,
                        "it_median": [333.123, 345.345],
                        "min": 222.234,
                        "all_variance": 9905.773884299999,
                        "all_range_to_median": 0.6904437644811545,
                        "max": 234,
                        "all_range": 234.222,
                        "variance": 69.21937799999989,
                        "variance_to_mean": 0.30343805152619,
                        "it_min": [222.234, 234.234],
                        "all_max": 456.456,
                        "it_variance_to_mean": [36.9417077855419, 35.74875652173913],
                        "average": 228.11700000000002,
                        "median": 228.11700000000002,
                        "ops_per_sec": [222.234, 234],
                        "ops_per_sec_values": [
                            [222.234, 333.123, 444.111],
                            [234.234, 345.345, 456.456],
                        ],
                        "range": 11.765999999999991,
                        "it_average": [333.156, 345.345],
                        "range_to_median": 0.05157879509199222,
                        "all_average": 339.25050000000005,
                    },
                }
            }
        }
    }
    # Flattening produces one dict per (variant, task, test, thread level)
    # with the hierarchy keys hoisted into fields and extra labels merged in.
    expected = [
        {
            "thread_level": 32,
            "variant_name": "a_variant",
            "task_name": "a_task",
            "test_name": "a_test",
            "added_label": "added_label",
            "all_variance_to_mean": 44.10677573939485,
            "it_range_to_median": [0.999098374637522, 0.9487179487179488],
            "it_range_to_median_avg": 0.97390816167773542,
            "it_range_to_median_max": 0.999098374637522,
            "it_range_to_median_min": 0.9487179487179488,
            "it_max": [234.123, 345.345],
            "it_variance": [4599.396047999999, 12345.654321000002],
            "it_range": [122.99999999999999, 222.22200000000004],
            "all_min": 111.123,
            "all_median": 178.623,
            "it_median": [123.111, 234.234],
            "min": 111.123,
            "all_variance": 8608.6061151,
            "all_range_to_median": 1.3112645068104334,
            "max": 123,
            "all_range": 234.22200000000004,
            "variance": 70.53156449999994,
            "variance_to_mean": 0.6025171768685686,
            "it_min": [111.123, 123.123],
            "all_max": 345.345,
            "it_variance_to_mean": [29.46083467098815, 52.706500000000005],
            "average": 117.0615,
            "median": 117.0615,
            "ops_per_sec": [111.123, 123],
            "ops_per_sec_values": [[111.123, 123.111, 234.123], [123.123, 234.234, 345.345]],
            "range": 11.876999999999995,
            "it_average": [156.119, 234.234],
            "range_to_median": 0.10145948924283386,
            "all_average": 195.17650000000003,
        },
        {
            "thread_level": 64,
            "variant_name": "a_variant",
            "task_name": "a_task",
            "test_name": "a_test",
            "added_label": "added_label",
            "all_variance_to_mean": 29.198995681067522,
            "it_range_to_median": [0.666051278356643, 0.6434782608695652],
            "it_range_to_median_avg": 0.65476476961310404,
            "it_range_to_median_max": 0.666051278356643,
            "it_range_to_median_min": 0.6434782608695652,
            "it_max": [444.111, 456.456],
            "it_variance": [12307.351598999998, 12345.654321000002],
            "it_range": [221.87699999999998, 222.222],
            "all_min": 222.234,
            "all_median": 339.23400000000004,
            "it_median": [333.123, 345.345],
            "min": 222.234,
            "all_variance": 9905.773884299999,
            "all_range_to_median": 0.6904437644811545,
            "max": 234,
            "all_range": 234.222,
            "variance": 69.21937799999989,
            "variance_to_mean": 0.30343805152619,
            "it_min": [222.234, 234.234],
            "all_max": 456.456,
            "it_variance_to_mean": [36.9417077855419, 35.74875652173913],
            "average": 228.11700000000002,
            "median": 228.11700000000002,
            "ops_per_sec": [222.234, 234],
            "ops_per_sec_values": [[222.234, 333.123, 444.111], [234.234, 345.345, 456.456]],
            "range": 11.765999999999991,
            "it_average": [333.156, 345.345],
            "range_to_median": 0.05157879509199222,
            "all_average": 339.25050000000005,
        },
    ]
    flat_results = client.flat_results({"added_label": "added_label"})
    self.assertEqual(flat_results, expected)
def test_ycsb_fix(self):
    """Test MultiEvergreenAnalysis._ycsb_fix()

    Two raw entries share the test name "ycsb_load-wiredTiger";
    _ycsb_fix() should merge them into a single entry whose ops_per_sec
    is the average and whose ops_per_sec_values lists both raw values.
    """
    client = MultiEvergreenAnalysis()
    client.results = [
        {
            "a_variant": {
                "a_task": {
                    "build_id": "a_build_id",
                    "create_time": "2017-04-05T20:14:53.193Z",
                    "data": {
                        "results": [
                            {
                                "end": 1491482988,
                                "name": "ycsb_load-wiredTiger",
                                "results": {"32": {"ops_per_sec": 50915.97845235792}},
                                "start": 1491482887,
                                "workload": "ycsb",
                            },
                            {
                                "end": 1491483185,
                                "name": "ycsb_load-wiredTiger",
                                "results": {"32": {"ops_per_sec": 50418.98173824482}},
                                "start": 1491483084,
                                "workload": "ycsb",
                            },
                        ]
                    },
                }
            }
        }
    ]
    client._ycsb_fix()
    # (A leftover debug print of client.results was removed here.)
    expected_results = [
        {
            "a_variant": {
                "a_task": {
                    "build_id": "a_build_id",
                    "create_time": "2017-04-05T20:14:53.193Z",
                    "data": {
                        "results": [
                            {
                                "end": 1491482988,
                                "name": "ycsb_load-wiredTiger",
                                "results": {
                                    "32": {
                                        "ops_per_sec": 50667.480095301369,
                                        "ops_per_sec_values": [
                                            50915.97845235792,
                                            50418.98173824482,
                                        ],
                                    }
                                },
                                "start": 1491482887,
                                "workload": "ycsb",
                            }
                        ]
                    },
                }
            }
        }
    ]
    self.assertEqual(client.results, expected_results)
def test_main(self):
    """MultiEvergreenAnalysis: Fetch real Evergreen results and write output files."""
    main([
        "--evergreen-config",
        FIXTURE_FILES.repo_root_file_path("config.yml"),
        "--json",
        "--out",
        "test_outfile.json",
        "587773af3ff1220ab9000946",
        "587773b03ff1220ab900094a",
    ])
    # We only verify that main() completes without raising; the output file
    # contents are intentionally not inspected.
    os.remove("test_outfile.json")
# Support running this test module directly (outside a test runner).
if __name__ == "__main__":
    unittest.main()
| 45.421769
| 97
| 0.403924
| 1,507
| 20,031
| 5.086264
| 0.135368
| 0.032877
| 0.061057
| 0.046967
| 0.760861
| 0.753294
| 0.748598
| 0.739726
| 0.739726
| 0.734508
| 0
| 0.318849
| 0.493335
| 20,031
| 440
| 98
| 45.525
| 0.436398
| 0.022565
| 0
| 0.667476
| 0
| 0
| 0.171293
| 0.032364
| 0
| 0
| 0
| 0
| 0.014563
| 1
| 0.014563
| false
| 0
| 0.012136
| 0
| 0.029126
| 0.002427
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
ae149f58a8d124a1863b191cb6116f6a91fb3bc3
| 5,110
|
py
|
Python
|
test/test_package.py
|
TheJacksonLaboratory/chia_rep
|
fe774259bfa3a045cc5189c61110a07c8f5eaa26
|
[
"MIT"
] | 1
|
2019-09-14T02:44:40.000Z
|
2019-09-14T02:44:40.000Z
|
test/test_package.py
|
TheJacksonLaboratory/chia_rep
|
fe774259bfa3a045cc5189c61110a07c8f5eaa26
|
[
"MIT"
] | null | null | null |
test/test_package.py
|
TheJacksonLaboratory/chia_rep
|
fe774259bfa3a045cc5189c61110a07c8f5eaa26
|
[
"MIT"
] | 1
|
2021-07-10T12:00:05.000Z
|
2021-07-10T12:00:05.000Z
|
import sys
import os
import shutil

# Make the repository root importable so the in-tree chia_rep package is
# found when the tests are run from a checkout (it need not be installed).
sys.path.append('.')

import chia_rep
def test_filter_peaks():
    """After filtering, every sample keeps exactly 60 chr1 peaks."""
    samples = chia_rep.read_data('test/sample_input_file.txt',
                                 'test/test_files/hg38.chrom.sizes',
                                 output_dir='test/output')
    for name in samples:
        sample = samples[name]
        sample.filter_peaks(60, 'chr1')
        assert len(sample.peak_dict['chr1']) == 60
    # NOTE: a chr2 variant of this check was left disabled in the original:
    # if sample == 'sampleA1':
    #     print(len(sample_dict[sample].peak_dict['chr2']))
    #     assert len(sample_dict[sample].peak_dict['chr2']) == 30
def test_package():
    """End-to-end run driven by a compare-list file; asserts outputs exist.

    Fixes over the original: the ambiguous local name ``l`` is gone, the
    pointless f-string prefix on a constant path is removed, and the long
    copy-pasted assert list is collapsed into loops.
    """
    bin_size = 5000
    window_size = 3000000

    # Start from a clean output directory so stale files cannot satisfy
    # the existence checks below.
    shutil.rmtree('test/output')

    sample_dict = chia_rep.read_data('test/sample_input_file.txt',
                                     'test/test_files/hg38.chrom.sizes',
                                     output_dir='test/output')
    chia_rep.preprocess(sample_dict, output_dir='test/output')
    emd_scores, j_scores = chia_rep.compare(sample_dict, 'all',
                                            compare_list_file='test/pairs.txt',
                                            bin_size=bin_size,
                                            window_size=window_size,
                                            output_dir='test/output')
    chia_rep.output_to_csv(emd_scores, j_scores, window_size, bin_size, 'all',
                           output_dir='test/output')

    # Per-sample loop/peak dumps.
    for sample in ('sampleA1', 'sampleA2', 'sampleB1'):
        assert os.path.isfile(f'test/output/loops/{sample}.all.loops')
        assert os.path.isfile(f'test/output/peaks/{sample}.all.peaks')

    param = f'{window_size}.{bin_size}.all'
    assert os.path.isfile(f'test/output/{param}/scores/emd_complete.csv')
    assert os.path.isfile(f'test/output/{param}/scores/j_complete.csv')
    assert os.path.isfile(f'test/output/timings/comparison.{param}.txt')
    assert os.path.isfile('test/output/timings/read_data.txt')

    # Per-pair window and chromosome score files.
    for pair in ('sampleA1_sampleA2', 'sampleA1_sampleB1', 'sampleA2_sampleB1'):
        assert os.path.isfile(f'test/output/{param}/scores/windows/{pair}_chr1.txt')
        assert os.path.isfile(f'test/output/{param}/scores/chromosomes/{pair}.txt')
def test_package2():
    """End-to-end run driven by an explicit comparison list.

    Same pipeline as test_package(), but the pairs are passed via
    ``compare_list`` instead of a pairs file.  Fixes the ambiguous local
    name ``l``, the placeholder-free f-string, and DRYs the asserts.
    """
    bin_size = 5000
    window_size = 3000000

    # Start from a clean output directory so stale files cannot satisfy
    # the existence checks below.
    shutil.rmtree('test/output')

    sample_dict = chia_rep.read_data('test/sample_input_file.txt',
                                     'test/test_files/hg38.chrom.sizes',
                                     output_dir='test/output')
    chia_rep.preprocess(sample_dict, output_dir='test/output')
    comparison_list = [
        ['sampleA1', 'sampleA2'],
        ['sampleA1', 'sampleB1'],
        ['sampleA2', 'sampleB1'],
    ]
    emd_scores, j_scores = chia_rep.compare(sample_dict, 'all',
                                            compare_list=comparison_list,
                                            bin_size=bin_size,
                                            window_size=window_size,
                                            output_dir='test/output')
    chia_rep.output_to_csv(emd_scores, j_scores, window_size, bin_size, 'all',
                           output_dir='test/output')

    # Per-sample loop/peak dumps.
    for sample in ('sampleA1', 'sampleA2', 'sampleB1'):
        assert os.path.isfile(f'test/output/loops/{sample}.all.loops')
        assert os.path.isfile(f'test/output/peaks/{sample}.all.peaks')

    param = f'{window_size}.{bin_size}.all'
    assert os.path.isfile(f'test/output/{param}/scores/emd_complete.csv')
    assert os.path.isfile(f'test/output/{param}/scores/j_complete.csv')
    assert os.path.isfile(f'test/output/timings/comparison.{param}.txt')
    assert os.path.isfile('test/output/timings/read_data.txt')

    # Per-pair window and chromosome score files.
    for pair in ('sampleA1_sampleA2', 'sampleA1_sampleB1', 'sampleA2_sampleB1'):
        assert os.path.isfile(f'test/output/{param}/scores/windows/{pair}_chr1.txt')
        assert os.path.isfile(f'test/output/{param}/scores/chromosomes/{pair}.txt')
| 39.921875
| 79
| 0.626027
| 652
| 5,110
| 4.73773
| 0.102761
| 0.139204
| 0.124312
| 0.186468
| 0.910327
| 0.910327
| 0.910327
| 0.87763
| 0.87763
| 0.87763
| 0
| 0.022733
| 0.242466
| 5,110
| 127
| 80
| 40.23622
| 0.775252
| 0.027006
| 0
| 0.810526
| 0
| 0
| 0.384461
| 0.343398
| 0
| 0
| 0
| 0
| 0.347368
| 1
| 0.031579
| false
| 0
| 0.042105
| 0
| 0.073684
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
ae6515732de013312213bbfb2e08738b394327ad
| 139
|
py
|
Python
|
nlpatl/sampling/clustering/__init__.py
|
dumpmemory/nlpatl
|
59209242d1ac26714b11b86261070ac50cc90432
|
[
"MIT"
] | 18
|
2021-11-29T06:43:46.000Z
|
2022-03-29T09:58:32.000Z
|
nlpatl/sampling/clustering/__init__.py
|
dumpmemory/nlpatl
|
59209242d1ac26714b11b86261070ac50cc90432
|
[
"MIT"
] | null | null | null |
nlpatl/sampling/clustering/__init__.py
|
dumpmemory/nlpatl
|
59209242d1ac26714b11b86261070ac50cc90432
|
[
"MIT"
] | 1
|
2021-11-29T06:43:47.000Z
|
2021-11-29T06:43:47.000Z
|
from nlpatl.sampling.clustering.nearest_mean import NearestMeanSampling
from nlpatl.sampling.clustering.farthest import FarthestSampling
| 46.333333
| 72
| 0.884892
| 15
| 139
| 8.133333
| 0.666667
| 0.163934
| 0.295082
| 0.459016
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.071942
| 139
| 2
| 73
| 69.5
| 0.945736
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
88203bc1014682e00a112fd76476f7cca8c80dfe
| 6,825
|
py
|
Python
|
module4-acid-and-database-scalability-tradeoffs/titanic_queries.py
|
imdeja/DS-Unit-3-Sprint-2-SQL-and-Databases
|
100546c4c8acdecd3361661705f373a2bcd3e7c9
|
[
"MIT"
] | null | null | null |
module4-acid-and-database-scalability-tradeoffs/titanic_queries.py
|
imdeja/DS-Unit-3-Sprint-2-SQL-and-Databases
|
100546c4c8acdecd3361661705f373a2bcd3e7c9
|
[
"MIT"
] | null | null | null |
module4-acid-and-database-scalability-tradeoffs/titanic_queries.py
|
imdeja/DS-Unit-3-Sprint-2-SQL-and-Databases
|
100546c4c8acdecd3361661705f373a2bcd3e7c9
|
[
"MIT"
] | null | null | null |
import os
from dotenv import load_dotenv
import pandas as pd
import psycopg2
from psycopg2.extras import execute_values
import json
import numpy as np
# Connect using credentials taken from the environment (.env via python-dotenv).
load_dotenv()

DB_NAME = os.getenv("DB_NAME")
DB_USER = os.getenv("DB_USER")
DB_PASSWORD = os.getenv("DB_PASSWORD")
DB_HOST = os.getenv("DB_HOST")

conn = psycopg2.connect(dbname=DB_NAME, user=DB_USER, password=DB_PASSWORD, host=DB_HOST)
curs = conn.cursor()


def _fetch_scalar(query):
    """Execute *query* and return the single value of its first result row."""
    curs.execute(query)
    return curs.fetchone()[0]


def _print_avg_by_class(column, template):
    """Print the per-passenger-class (1-3) average of *column* using *template*."""
    for pclass in (1, 2, 3):
        avg = _fetch_scalar(f'select avg({column}) from passengers where pclass ={pclass}')
        print(template.format(pclass), avg)


def _print_avg_by_survival(column, template):
    """Print the average of *column* for non-survivors (0) then survivors (1)."""
    for survived, outcome in ((0, "died"), (1, "survived")):
        avg = _fetch_scalar(f'select avg({column}) from passengers where survived ={survived}')
        print(template.format(outcome), avg)


#- How many passengers survived, and how many died?
for survived, outcome in ((0, "died"), (1, "survived")):
    count = _fetch_scalar(f'SELECT count(survived) from passengers where survived = {survived}')
    print(count, f"passengers {outcome}.")

#- How many passengers were in each class?
for pclass in (1, 2, 3):
    count = _fetch_scalar(f'SELECT count(pclass) from passengers where pclass ={pclass}')
    print("There were", count, f"passengers in class {pclass}.")

#- How many passengers survived/died within each class?
for pclass in (1, 2, 3):
    for survived, outcome in ((0, "died"), (1, "survived")):
        count = _fetch_scalar(
            f'SELECT count(pclass) from passengers where survived = {survived} and pclass ={pclass}')
        print("There were", count, f"passengers who {outcome} in class {pclass}.")

#- What was the average age of survivors vs nonsurvivors?
_print_avg_by_survival('age', "The average age of passengers who {} was")

#- What was the average age of each passenger class?
_print_avg_by_class('age', "The average age of passengers in class {} was")

#- What was the average fare by passenger class? By survival?
_print_avg_by_class('fare', "The average fare of passengers in class {} was")
_print_avg_by_survival('fare', "The average fare of passengers who {} was")

#- How many siblings/spouses aboard on average, by passenger class? By survival?
_print_avg_by_class('sib_spouse_count',
                    "The average siblings/spouses aboard in class {} was")
_print_avg_by_survival('sib_spouse_count',
                       "The average siblings/spouses aboard of passengers who {} was")

#- How many parents/children aboard on average, by passenger class? By survival?
_print_avg_by_class('parent_child_count',
                    "The average parents/children aboard in class {} was")
_print_avg_by_survival('parent_child_count',
                       "The average parents/children aboard of passengers who {} was")

#- Do any passengers have the same name?
# NOTE(review): HAVING without GROUP BY treats the whole table as one group,
# so this yields the distinct-name count whenever the table has >1 row — it
# does not actually detect duplicate names.  Preserved as-is.
count = _fetch_scalar('SELECT count(distinct name) from passengers having count(*) >1')
print("All", count, "passengers have a different name.")
#nope!

# (Bonus! Hard, may require pulling and processing with Python) How many married
#couples were aboard the Titanic? Assume that two people (one `Mr.` and one
#`Mrs.`) with the same last name and with at least 1 sibling/spouse aboard are
#a married couple.
| 42.12963
| 89
| 0.74989
| 1,082
| 6,825
| 4.682994
| 0.109057
| 0.088415
| 0.088415
| 0.119992
| 0.808763
| 0.794553
| 0.789224
| 0.758832
| 0.746596
| 0.696862
| 0
| 0.022474
| 0.132894
| 6,825
| 162
| 90
| 42.12963
| 0.833728
| 0.111502
| 0
| 0.401408
| 0
| 0
| 0.546356
| 0.036358
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.422535
| 0.049296
| 0
| 0.049296
| 0.225352
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
889e61707ed575e870d03501921a624e597540da
| 158
|
py
|
Python
|
RecPatrones/__init__.py
|
riemannruiz/MediaMovil
|
282cf498551671f97436bff563c8b1a5c2dbf4ad
|
[
"MIT"
] | null | null | null |
RecPatrones/__init__.py
|
riemannruiz/MediaMovil
|
282cf498551671f97436bff563c8b1a5c2dbf4ad
|
[
"MIT"
] | 1
|
2019-03-21T21:16:00.000Z
|
2019-03-21T21:16:00.000Z
|
RecPatrones/__init__.py
|
riemannruiz/MediaMovil
|
282cf498551671f97436bff563c8b1a5c2dbf4ad
|
[
"MIT"
] | 3
|
2019-03-20T22:43:36.000Z
|
2019-03-27T19:16:38.000Z
|
from Simulacion import Optimizacion
from Simulacion import Graficos
from Simulacion import Genetico
from Simulacion import Model_close
from mylib import mylib
| 31.6
| 35
| 0.879747
| 21
| 158
| 6.571429
| 0.428571
| 0.405797
| 0.57971
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.120253
| 158
| 5
| 36
| 31.6
| 0.992806
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
88b1bbc1e2f422bb622061ec6f6d2f4ceb966b0b
| 22,360
|
bzl
|
Python
|
third_party/boringssl-with-bazel/BUILD.generated_tests.bzl
|
miyachu/grpc
|
a06ea3c3162c10ff90a1578bf82bbbff95dc799d
|
[
"BSD-3-Clause"
] | 91
|
2018-11-24T05:33:58.000Z
|
2022-03-16T05:58:05.000Z
|
third_party/boringssl-with-bazel/BUILD.generated_tests.bzl
|
miyachu/grpc
|
a06ea3c3162c10ff90a1578bf82bbbff95dc799d
|
[
"BSD-3-Clause"
] | 11
|
2019-06-02T23:50:17.000Z
|
2022-02-04T23:58:56.000Z
|
third_party/boringssl-with-bazel/BUILD.generated_tests.bzl
|
miyachu/grpc
|
a06ea3c3162c10ff90a1578bf82bbbff95dc799d
|
[
"BSD-3-Clause"
] | 18
|
2018-11-24T10:35:29.000Z
|
2021-04-22T07:22:10.000Z
|
# This file is created by generate_build_files.py. Do not edit manually.
# Internal headers plus the shared test-harness sources (file_test, test_util,
# and the ssl test bio/config helpers) compiled into every test target that
# create_tests() defines below.
test_support_sources = [
    "src/crypto/aes/internal.h",
    "src/crypto/asn1/asn1_locl.h",
    "src/crypto/bio/internal.h",
    "src/crypto/bn/internal.h",
    "src/crypto/bn/rsaz_exp.h",
    "src/crypto/bytestring/internal.h",
    "src/crypto/cipher/internal.h",
    "src/crypto/conf/conf_def.h",
    "src/crypto/conf/internal.h",
    "src/crypto/curve25519/internal.h",
    "src/crypto/des/internal.h",
    "src/crypto/digest/internal.h",
    "src/crypto/digest/md32_common.h",
    "src/crypto/ec/internal.h",
    "src/crypto/ec/p256-x86_64-table.h",
    "src/crypto/ec/p256-x86_64.h",
    "src/crypto/evp/internal.h",
    "src/crypto/internal.h",
    "src/crypto/modes/internal.h",
    "src/crypto/obj/obj_dat.h",
    "src/crypto/pkcs8/internal.h",
    "src/crypto/poly1305/internal.h",
    "src/crypto/pool/internal.h",
    "src/crypto/rand/internal.h",
    "src/crypto/rsa/internal.h",
    "src/crypto/test/file_test.cc",
    "src/crypto/test/file_test.h",
    "src/crypto/test/test_util.cc",
    "src/crypto/test/test_util.h",
    "src/crypto/x509/charmap.h",
    "src/crypto/x509/internal.h",
    "src/crypto/x509/vpm_int.h",
    "src/crypto/x509v3/ext_dat.h",
    "src/crypto/x509v3/pcy_int.h",
    "src/ssl/internal.h",
    "src/ssl/test/async_bio.h",
    "src/ssl/test/packeted_bio.h",
    "src/ssl/test/test_config.h",
]
def create_tests(copts, crypto, ssl):
native.cc_test(
name = "aes_test",
size = "small",
srcs = ["src/crypto/aes/aes_test.cc"] + test_support_sources,
args = [
"$(location src/crypto/aes/aes_tests.txt)",
],
copts = copts + ["-DBORINGSSL_SHARED_LIBRARY"],
data = [
"src/crypto/aes/aes_tests.txt",
],
deps = [crypto],
)
native.cc_test(
name = "asn1_test",
size = "small",
srcs = ["src/crypto/asn1/asn1_test.cc"] + test_support_sources,
copts = copts + ["-DBORINGSSL_SHARED_LIBRARY"],
deps = [crypto],
)
native.cc_test(
name = "base64_test",
size = "small",
srcs = ["src/crypto/base64/base64_test.cc"] + test_support_sources,
copts = copts + ["-DBORINGSSL_SHARED_LIBRARY"],
deps = [crypto],
)
native.cc_test(
name = "bio_test",
size = "small",
srcs = ["src/crypto/bio/bio_test.cc"] + test_support_sources,
copts = copts + ["-DBORINGSSL_SHARED_LIBRARY"],
deps = [crypto],
)
native.cc_test(
name = "bn_test",
size = "small",
srcs = ["src/crypto/bn/bn_test.cc"] + test_support_sources,
args = [
"$(location src/crypto/bn/bn_tests.txt)",
],
copts = copts + ["-DBORINGSSL_SHARED_LIBRARY"],
data = [
"src/crypto/bn/bn_tests.txt",
],
deps = [crypto],
)
native.cc_test(
name = "bytestring_test",
size = "small",
srcs = ["src/crypto/bytestring/bytestring_test.cc"] + test_support_sources,
copts = copts + ["-DBORINGSSL_SHARED_LIBRARY"],
deps = [crypto],
)
native.cc_test(
name = "chacha_test",
size = "small",
srcs = ["src/crypto/chacha/chacha_test.cc"] + test_support_sources,
copts = copts + ["-DBORINGSSL_SHARED_LIBRARY"],
deps = [crypto],
)
native.cc_test(
name = "aead_test_aes_128_gcm",
size = "small",
srcs = ["src/crypto/cipher/aead_test.cc"] + test_support_sources,
args = [
"aes-128-gcm",
"$(location src/crypto/cipher/test/aes_128_gcm_tests.txt)",
],
copts = copts + ["-DBORINGSSL_SHARED_LIBRARY"],
data = [
"src/crypto/cipher/test/aes_128_gcm_tests.txt",
],
deps = [crypto],
)
native.cc_test(
name = "aead_test_aes_256_gcm",
size = "small",
srcs = ["src/crypto/cipher/aead_test.cc"] + test_support_sources,
args = [
"aes-256-gcm",
"$(location src/crypto/cipher/test/aes_256_gcm_tests.txt)",
],
copts = copts + ["-DBORINGSSL_SHARED_LIBRARY"],
data = [
"src/crypto/cipher/test/aes_256_gcm_tests.txt",
],
deps = [crypto],
)
native.cc_test(
name = "aead_test_aes_128_gcm_siv",
size = "small",
srcs = ["src/crypto/cipher/aead_test.cc"] + test_support_sources,
args = [
"aes-128-gcm-siv",
"$(location src/crypto/cipher/test/aes_128_gcm_siv_tests.txt)",
],
copts = copts + ["-DBORINGSSL_SHARED_LIBRARY"],
data = [
"src/crypto/cipher/test/aes_128_gcm_siv_tests.txt",
],
deps = [crypto],
)
native.cc_test(
name = "aead_test_aes_256_gcm_siv",
size = "small",
srcs = ["src/crypto/cipher/aead_test.cc"] + test_support_sources,
args = [
"aes-256-gcm-siv",
"$(location src/crypto/cipher/test/aes_256_gcm_siv_tests.txt)",
],
copts = copts + ["-DBORINGSSL_SHARED_LIBRARY"],
data = [
"src/crypto/cipher/test/aes_256_gcm_siv_tests.txt",
],
deps = [crypto],
)
native.cc_test(
name = "aead_test_chacha20_poly1305",
size = "small",
srcs = ["src/crypto/cipher/aead_test.cc"] + test_support_sources,
args = [
"chacha20-poly1305",
"$(location src/crypto/cipher/test/chacha20_poly1305_tests.txt)",
],
copts = copts + ["-DBORINGSSL_SHARED_LIBRARY"],
data = [
"src/crypto/cipher/test/chacha20_poly1305_tests.txt",
],
deps = [crypto],
)
native.cc_test(
name = "aead_test_chacha20_poly1305_old",
size = "small",
srcs = ["src/crypto/cipher/aead_test.cc"] + test_support_sources,
args = [
"chacha20-poly1305-old",
"$(location src/crypto/cipher/test/chacha20_poly1305_old_tests.txt)",
],
copts = copts + ["-DBORINGSSL_SHARED_LIBRARY"],
data = [
"src/crypto/cipher/test/chacha20_poly1305_old_tests.txt",
],
deps = [crypto],
)
native.cc_test(
name = "aead_test_aes_128_cbc_sha1_tls",
size = "small",
srcs = ["src/crypto/cipher/aead_test.cc"] + test_support_sources,
args = [
"aes-128-cbc-sha1-tls",
"$(location src/crypto/cipher/test/aes_128_cbc_sha1_tls_tests.txt)",
],
copts = copts + ["-DBORINGSSL_SHARED_LIBRARY"],
data = [
"src/crypto/cipher/test/aes_128_cbc_sha1_tls_tests.txt",
],
deps = [crypto],
)
native.cc_test(
name = "aead_test_aes_128_cbc_sha1_tls_implicit_iv",
size = "small",
srcs = ["src/crypto/cipher/aead_test.cc"] + test_support_sources,
args = [
"aes-128-cbc-sha1-tls-implicit-iv",
"$(location src/crypto/cipher/test/aes_128_cbc_sha1_tls_implicit_iv_tests.txt)",
],
copts = copts + ["-DBORINGSSL_SHARED_LIBRARY"],
data = [
"src/crypto/cipher/test/aes_128_cbc_sha1_tls_implicit_iv_tests.txt",
],
deps = [crypto],
)
native.cc_test(
name = "aead_test_aes_128_cbc_sha256_tls",
size = "small",
srcs = ["src/crypto/cipher/aead_test.cc"] + test_support_sources,
args = [
"aes-128-cbc-sha256-tls",
"$(location src/crypto/cipher/test/aes_128_cbc_sha256_tls_tests.txt)",
],
copts = copts + ["-DBORINGSSL_SHARED_LIBRARY"],
data = [
"src/crypto/cipher/test/aes_128_cbc_sha256_tls_tests.txt",
],
deps = [crypto],
)
native.cc_test(
name = "aead_test_aes_256_cbc_sha1_tls",
size = "small",
srcs = ["src/crypto/cipher/aead_test.cc"] + test_support_sources,
args = [
"aes-256-cbc-sha1-tls",
"$(location src/crypto/cipher/test/aes_256_cbc_sha1_tls_tests.txt)",
],
copts = copts + ["-DBORINGSSL_SHARED_LIBRARY"],
data = [
"src/crypto/cipher/test/aes_256_cbc_sha1_tls_tests.txt",
],
deps = [crypto],
)
native.cc_test(
name = "aead_test_aes_256_cbc_sha1_tls_implicit_iv",
size = "small",
srcs = ["src/crypto/cipher/aead_test.cc"] + test_support_sources,
args = [
"aes-256-cbc-sha1-tls-implicit-iv",
"$(location src/crypto/cipher/test/aes_256_cbc_sha1_tls_implicit_iv_tests.txt)",
],
copts = copts + ["-DBORINGSSL_SHARED_LIBRARY"],
data = [
"src/crypto/cipher/test/aes_256_cbc_sha1_tls_implicit_iv_tests.txt",
],
deps = [crypto],
)
native.cc_test(
name = "aead_test_aes_256_cbc_sha256_tls",
size = "small",
srcs = ["src/crypto/cipher/aead_test.cc"] + test_support_sources,
args = [
"aes-256-cbc-sha256-tls",
"$(location src/crypto/cipher/test/aes_256_cbc_sha256_tls_tests.txt)",
],
copts = copts + ["-DBORINGSSL_SHARED_LIBRARY"],
data = [
"src/crypto/cipher/test/aes_256_cbc_sha256_tls_tests.txt",
],
deps = [crypto],
)
native.cc_test(
name = "aead_test_aes_256_cbc_sha384_tls",
size = "small",
srcs = ["src/crypto/cipher/aead_test.cc"] + test_support_sources,
args = [
"aes-256-cbc-sha384-tls",
"$(location src/crypto/cipher/test/aes_256_cbc_sha384_tls_tests.txt)",
],
copts = copts + ["-DBORINGSSL_SHARED_LIBRARY"],
data = [
"src/crypto/cipher/test/aes_256_cbc_sha384_tls_tests.txt",
],
deps = [crypto],
)
native.cc_test(
name = "aead_test_des_ede3_cbc_sha1_tls",
size = "small",
srcs = ["src/crypto/cipher/aead_test.cc"] + test_support_sources,
args = [
"des-ede3-cbc-sha1-tls",
"$(location src/crypto/cipher/test/des_ede3_cbc_sha1_tls_tests.txt)",
],
copts = copts + ["-DBORINGSSL_SHARED_LIBRARY"],
data = [
"src/crypto/cipher/test/des_ede3_cbc_sha1_tls_tests.txt",
],
deps = [crypto],
)
native.cc_test(
name = "aead_test_des_ede3_cbc_sha1_tls_implicit_iv",
size = "small",
srcs = ["src/crypto/cipher/aead_test.cc"] + test_support_sources,
args = [
"des-ede3-cbc-sha1-tls-implicit-iv",
"$(location src/crypto/cipher/test/des_ede3_cbc_sha1_tls_implicit_iv_tests.txt)",
],
copts = copts + ["-DBORINGSSL_SHARED_LIBRARY"],
data = [
"src/crypto/cipher/test/des_ede3_cbc_sha1_tls_implicit_iv_tests.txt",
],
deps = [crypto],
)
native.cc_test(
name = "aead_test_aes_128_cbc_sha1_ssl3",
size = "small",
srcs = ["src/crypto/cipher/aead_test.cc"] + test_support_sources,
args = [
"aes-128-cbc-sha1-ssl3",
"$(location src/crypto/cipher/test/aes_128_cbc_sha1_ssl3_tests.txt)",
],
copts = copts + ["-DBORINGSSL_SHARED_LIBRARY"],
data = [
"src/crypto/cipher/test/aes_128_cbc_sha1_ssl3_tests.txt",
],
deps = [crypto],
)
native.cc_test(
name = "aead_test_aes_256_cbc_sha1_ssl3",
size = "small",
srcs = ["src/crypto/cipher/aead_test.cc"] + test_support_sources,
args = [
"aes-256-cbc-sha1-ssl3",
"$(location src/crypto/cipher/test/aes_256_cbc_sha1_ssl3_tests.txt)",
],
copts = copts + ["-DBORINGSSL_SHARED_LIBRARY"],
data = [
"src/crypto/cipher/test/aes_256_cbc_sha1_ssl3_tests.txt",
],
deps = [crypto],
)
native.cc_test(
name = "aead_test_des_ede3_cbc_sha1_ssl3",
size = "small",
srcs = ["src/crypto/cipher/aead_test.cc"] + test_support_sources,
args = [
"des-ede3-cbc-sha1-ssl3",
"$(location src/crypto/cipher/test/des_ede3_cbc_sha1_ssl3_tests.txt)",
],
copts = copts + ["-DBORINGSSL_SHARED_LIBRARY"],
data = [
"src/crypto/cipher/test/des_ede3_cbc_sha1_ssl3_tests.txt",
],
deps = [crypto],
)
native.cc_test(
name = "aead_test_aes_128_ctr_hmac_sha256",
size = "small",
srcs = ["src/crypto/cipher/aead_test.cc"] + test_support_sources,
args = [
"aes-128-ctr-hmac-sha256",
"$(location src/crypto/cipher/test/aes_128_ctr_hmac_sha256.txt)",
],
copts = copts + ["-DBORINGSSL_SHARED_LIBRARY"],
data = [
"src/crypto/cipher/test/aes_128_ctr_hmac_sha256.txt",
],
deps = [crypto],
)
native.cc_test(
name = "aead_test_aes_256_ctr_hmac_sha256",
size = "small",
srcs = ["src/crypto/cipher/aead_test.cc"] + test_support_sources,
args = [
"aes-256-ctr-hmac-sha256",
"$(location src/crypto/cipher/test/aes_256_ctr_hmac_sha256.txt)",
],
copts = copts + ["-DBORINGSSL_SHARED_LIBRARY"],
data = [
"src/crypto/cipher/test/aes_256_ctr_hmac_sha256.txt",
],
deps = [crypto],
)
native.cc_test(
name = "cipher_test",
size = "small",
srcs = ["src/crypto/cipher/cipher_test.cc"] + test_support_sources,
args = [
"$(location src/crypto/cipher/test/cipher_tests.txt)",
],
copts = copts + ["-DBORINGSSL_SHARED_LIBRARY"],
data = [
"src/crypto/cipher/test/cipher_tests.txt",
],
deps = [crypto],
)
native.cc_test(
name = "cmac_test",
size = "small",
srcs = ["src/crypto/cmac/cmac_test.cc"] + test_support_sources,
copts = copts + ["-DBORINGSSL_SHARED_LIBRARY"],
deps = [crypto],
)
native.cc_test(
name = "constant_time_test",
size = "small",
srcs = ["src/crypto/constant_time_test.cc"] + test_support_sources,
copts = copts + ["-DBORINGSSL_SHARED_LIBRARY"],
deps = [crypto],
)
native.cc_test(
name = "ed25519_test",
size = "small",
srcs = ["src/crypto/curve25519/ed25519_test.cc"] + test_support_sources,
args = [
"$(location src/crypto/curve25519/ed25519_tests.txt)",
],
copts = copts + ["-DBORINGSSL_SHARED_LIBRARY"],
data = [
"src/crypto/curve25519/ed25519_tests.txt",
],
deps = [crypto],
)
native.cc_test(
name = "x25519_test",
size = "small",
srcs = ["src/crypto/curve25519/x25519_test.cc"] + test_support_sources,
copts = copts + ["-DBORINGSSL_SHARED_LIBRARY"],
deps = [crypto],
)
native.cc_test(
name = "spake25519_test",
size = "small",
srcs = ["src/crypto/curve25519/spake25519_test.cc"] + test_support_sources,
copts = copts + ["-DBORINGSSL_SHARED_LIBRARY"],
deps = [crypto],
)
native.cc_test(
name = "dh_test",
size = "small",
srcs = ["src/crypto/dh/dh_test.cc"] + test_support_sources,
copts = copts + ["-DBORINGSSL_SHARED_LIBRARY"],
deps = [crypto],
)
native.cc_test(
name = "digest_test",
size = "small",
srcs = ["src/crypto/digest/digest_test.cc"] + test_support_sources,
copts = copts + ["-DBORINGSSL_SHARED_LIBRARY"],
deps = [crypto],
)
native.cc_test(
name = "dsa_test",
size = "small",
srcs = ["src/crypto/dsa/dsa_test.cc"] + test_support_sources,
copts = copts + ["-DBORINGSSL_SHARED_LIBRARY"],
deps = [crypto],
)
native.cc_test(
name = "ec_test",
size = "small",
srcs = ["src/crypto/ec/ec_test.cc"] + test_support_sources,
copts = copts + ["-DBORINGSSL_SHARED_LIBRARY"],
deps = [crypto],
)
native.cc_test(
name = "example_mul",
size = "small",
srcs = ["src/crypto/ec/example_mul.c"] + test_support_sources,
copts = copts + ["-DBORINGSSL_SHARED_LIBRARY"],
deps = [crypto],
)
native.cc_test(
name = "p256-x86_64_test",
size = "small",
srcs = ["src/crypto/ec/p256-x86_64_test.cc"] + test_support_sources,
args = [
"$(location src/crypto/ec/p256-x86_64_tests.txt)",
],
copts = copts + ["-DBORINGSSL_SHARED_LIBRARY"],
data = [
"src/crypto/ec/p256-x86_64_tests.txt",
],
deps = [crypto],
)
native.cc_test(
name = "ecdh_test",
size = "small",
srcs = ["src/crypto/ecdh/ecdh_test.cc"] + test_support_sources,
args = [
"$(location src/crypto/ecdh/ecdh_tests.txt)",
],
copts = copts + ["-DBORINGSSL_SHARED_LIBRARY"],
data = [
"src/crypto/ecdh/ecdh_tests.txt",
],
deps = [crypto],
)
native.cc_test(
name = "ecdsa_sign_test",
size = "small",
srcs = ["src/crypto/ecdsa/ecdsa_sign_test.cc"] + test_support_sources,
args = [
"$(location src/crypto/ecdsa/ecdsa_sign_tests.txt)",
],
copts = copts + ["-DBORINGSSL_SHARED_LIBRARY"],
data = [
"src/crypto/ecdsa/ecdsa_sign_tests.txt",
],
deps = [crypto],
)
native.cc_test(
name = "ecdsa_test",
size = "small",
srcs = ["src/crypto/ecdsa/ecdsa_test.cc"] + test_support_sources,
copts = copts + ["-DBORINGSSL_SHARED_LIBRARY"],
deps = [crypto],
)
native.cc_test(
name = "ecdsa_verify_test",
size = "small",
srcs = ["src/crypto/ecdsa/ecdsa_verify_test.cc"] + test_support_sources,
args = [
"$(location src/crypto/ecdsa/ecdsa_verify_tests.txt)",
],
copts = copts + ["-DBORINGSSL_SHARED_LIBRARY"],
data = [
"src/crypto/ecdsa/ecdsa_verify_tests.txt",
],
deps = [crypto],
)
native.cc_test(
name = "err_test",
size = "small",
srcs = ["src/crypto/err/err_test.cc"] + test_support_sources,
copts = copts + ["-DBORINGSSL_SHARED_LIBRARY"],
deps = [crypto],
)
native.cc_test(
name = "evp_extra_test",
size = "small",
srcs = ["src/crypto/evp/evp_extra_test.cc"] + test_support_sources,
copts = copts + ["-DBORINGSSL_SHARED_LIBRARY"],
deps = [crypto],
)
native.cc_test(
name = "evp_test",
size = "small",
srcs = ["src/crypto/evp/evp_test.cc"] + test_support_sources,
args = [
"$(location src/crypto/evp/evp_tests.txt)",
],
copts = copts + ["-DBORINGSSL_SHARED_LIBRARY"],
data = [
"src/crypto/evp/evp_tests.txt",
],
deps = [crypto],
)
native.cc_test(
name = "pbkdf_test",
size = "small",
srcs = ["src/crypto/evp/pbkdf_test.cc"] + test_support_sources,
copts = copts + ["-DBORINGSSL_SHARED_LIBRARY"],
deps = [crypto],
)
native.cc_test(
name = "hkdf_test",
size = "small",
srcs = ["src/crypto/hkdf/hkdf_test.cc"] + test_support_sources,
copts = copts + ["-DBORINGSSL_SHARED_LIBRARY"],
deps = [crypto],
)
native.cc_test(
name = "hmac_test",
size = "small",
srcs = ["src/crypto/hmac/hmac_test.cc"] + test_support_sources,
args = [
"$(location src/crypto/hmac/hmac_tests.txt)",
],
copts = copts + ["-DBORINGSSL_SHARED_LIBRARY"],
data = [
"src/crypto/hmac/hmac_tests.txt",
],
deps = [crypto],
)
native.cc_test(
name = "lhash_test",
size = "small",
srcs = ["src/crypto/lhash/lhash_test.cc"] + test_support_sources,
copts = copts + ["-DBORINGSSL_SHARED_LIBRARY"],
deps = [crypto],
)
native.cc_test(
name = "gcm_test",
size = "small",
srcs = ["src/crypto/modes/gcm_test.cc"] + test_support_sources,
copts = copts + ["-DBORINGSSL_SHARED_LIBRARY"],
deps = [crypto],
)
native.cc_test(
name = "obj_test",
size = "small",
srcs = ["src/crypto/obj/obj_test.cc"] + test_support_sources,
copts = copts + ["-DBORINGSSL_SHARED_LIBRARY"],
deps = [crypto],
)
native.cc_test(
name = "pkcs12_test",
size = "small",
srcs = ["src/crypto/pkcs8/pkcs12_test.cc"] + test_support_sources,
copts = copts + ["-DBORINGSSL_SHARED_LIBRARY"],
deps = [crypto],
)
native.cc_test(
name = "pkcs8_test",
size = "small",
srcs = ["src/crypto/pkcs8/pkcs8_test.cc"] + test_support_sources,
copts = copts + ["-DBORINGSSL_SHARED_LIBRARY"],
deps = [crypto],
)
native.cc_test(
name = "poly1305_test",
size = "small",
srcs = ["src/crypto/poly1305/poly1305_test.cc"] + test_support_sources,
args = [
"$(location src/crypto/poly1305/poly1305_tests.txt)",
],
copts = copts + ["-DBORINGSSL_SHARED_LIBRARY"],
data = [
"src/crypto/poly1305/poly1305_tests.txt",
],
deps = [crypto],
)
native.cc_test(
name = "pool_test",
size = "small",
srcs = ["src/crypto/pool/pool_test.cc"] + test_support_sources,
copts = copts + ["-DBORINGSSL_SHARED_LIBRARY"],
deps = [crypto],
)
native.cc_test(
name = "refcount_test",
size = "small",
srcs = ["src/crypto/refcount_test.cc"] + test_support_sources,
copts = copts + ["-DBORINGSSL_SHARED_LIBRARY"],
deps = [crypto],
)
native.cc_test(
name = "rsa_test",
size = "small",
srcs = ["src/crypto/rsa/rsa_test.cc"] + test_support_sources,
copts = copts + ["-DBORINGSSL_SHARED_LIBRARY"],
deps = [crypto],
)
native.cc_test(
name = "thread_test",
size = "small",
srcs = ["src/crypto/thread_test.c"] + test_support_sources,
copts = copts + ["-DBORINGSSL_SHARED_LIBRARY"],
deps = [crypto],
)
native.cc_test(
name = "pkcs7_test",
size = "small",
srcs = ["src/crypto/x509/pkcs7_test.c"] + test_support_sources,
copts = copts + ["-DBORINGSSL_SHARED_LIBRARY"],
deps = [crypto],
)
native.cc_test(
name = "x509_test",
size = "small",
srcs = ["src/crypto/x509/x509_test.cc"] + test_support_sources,
copts = copts + ["-DBORINGSSL_SHARED_LIBRARY"],
deps = [crypto],
)
native.cc_test(
name = "tab_test",
size = "small",
srcs = ["src/crypto/x509v3/tab_test.c"] + test_support_sources,
copts = copts + ["-DBORINGSSL_SHARED_LIBRARY"],
deps = [crypto],
)
native.cc_test(
name = "v3name_test",
size = "small",
srcs = ["src/crypto/x509v3/v3name_test.c"] + test_support_sources,
copts = copts + ["-DBORINGSSL_SHARED_LIBRARY"],
deps = [crypto],
)
native.cc_test(
name = "ssl_test",
size = "small",
srcs = ["src/ssl/ssl_test.cc"] + test_support_sources,
copts = copts + ["-DBORINGSSL_SHARED_LIBRARY"],
deps = [
crypto,
ssl,
],
)
| 29.228758
| 91
| 0.590474
| 2,690
| 22,360
| 4.61487
| 0.046468
| 0.115273
| 0.094248
| 0.082488
| 0.900918
| 0.881908
| 0.816739
| 0.777751
| 0.734654
| 0.675447
| 0
| 0.03341
| 0.257066
| 22,360
| 764
| 92
| 29.267016
| 0.713882
| 0.003131
| 0
| 0.573066
| 1
| 0
| 0.432834
| 0.375942
| 0
| 0
| 0
| 0
| 0
| 1
| 0.001433
| false
| 0
| 0
| 0
| 0.001433
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
ee24a2a2d1d21059fb919ebc02526d5846c7d278
| 303
|
py
|
Python
|
ivory/layers/__init__.py
|
daizutabi/scratch
|
4c56fad47da0938eda89f3c2b6cb2f1919bee180
|
[
"MIT"
] | null | null | null |
ivory/layers/__init__.py
|
daizutabi/scratch
|
4c56fad47da0938eda89f3c2b6cb2f1919bee180
|
[
"MIT"
] | null | null | null |
ivory/layers/__init__.py
|
daizutabi/scratch
|
4c56fad47da0938eda89f3c2b6cb2f1919bee180
|
[
"MIT"
] | null | null | null |
from ivory.layers import (activation, affine, convolution, core, dropout,
embedding, loss, normalization, recurrent)
__all__ = [
"activation",
"affine",
"convolution",
"core",
"dropout",
"embedding",
"loss",
"normalization",
"recurrent",
]
| 20.2
| 73
| 0.574257
| 23
| 303
| 7.391304
| 0.608696
| 0.188235
| 0.317647
| 0.364706
| 0.858824
| 0.858824
| 0.858824
| 0.858824
| 0.858824
| 0
| 0
| 0
| 0.290429
| 303
| 14
| 74
| 21.642857
| 0.790698
| 0
| 0
| 0
| 0
| 0
| 0.240924
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.076923
| 0
| 0.076923
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
ee3b8a11298be38df1b9a7e27b57f8559c0985a3
| 60
|
py
|
Python
|
app/tests/__init__.py
|
victorlomi/News-Catchup
|
214b4e92b0cf90c7e4906c3b2316578918645dac
|
[
"Unlicense"
] | null | null | null |
app/tests/__init__.py
|
victorlomi/News-Catchup
|
214b4e92b0cf90c7e4906c3b2316578918645dac
|
[
"Unlicense"
] | null | null | null |
app/tests/__init__.py
|
victorlomi/News-Catchup
|
214b4e92b0cf90c7e4906c3b2316578918645dac
|
[
"Unlicense"
] | null | null | null |
from tests import test_article
from tests import test_source
| 30
| 30
| 0.883333
| 10
| 60
| 5.1
| 0.6
| 0.352941
| 0.588235
| 0.745098
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.116667
| 60
| 2
| 31
| 30
| 0.962264
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
c9ce4ccacd1980f9dcbf0a2c852bfa9e74a3ad5a
| 4,420
|
py
|
Python
|
config.py
|
NYU-DICE-Lab/graph_invnet
|
166db79ac9ab3bff0e67ab0ec978da7efea042e2
|
[
"MIT"
] | null | null | null |
config.py
|
NYU-DICE-Lab/graph_invnet
|
166db79ac9ab3bff0e67ab0ec978da7efea042e2
|
[
"MIT"
] | 4
|
2021-06-08T23:01:47.000Z
|
2022-03-12T00:53:53.000Z
|
config.py
|
NYU-DICE-Lab/graph_invnet
|
166db79ac9ab3bff0e67ab0ec978da7efea042e2
|
[
"MIT"
] | null | null | null |
""" Config class for training the InvNet """
import argparse
from dp_layer.graph_layer.edge_functions import edge_f_dict as d
def get_parser(name):
"""
:param name: String for Config Name
:return: parser
"""
parser = argparse.ArgumentParser(name, formatter_class=argparse.ArgumentDefaultsHelpFormatter)
return parser
class MNISTConfig(argparse.Namespace):
def build_parser(self):
parser = get_parser("InvNet config")
parser.add_argument('--dataset', default='mnist', help='circle / polycrystalline')
parser.add_argument('--lr',default=01e-04)
parser.add_argument('--output_path', default='./output_dir', help='output directory')
parser.add_argument('--data_dir', default='/data/MNIST')
parser.add_argument('--gpu', default=1, help='Selecting the gpu')
parser.add_argument('--data_size', default=64, type=int)
parser.add_argument('--batch_size', default=32,type=int, help='Batch size for training')
parser.add_argument('--hidden_size', default=32, type=int,help='Hidden size used for generator and discriminator')
parser.add_argument('--critic_iter', default=5, type=int,help='Number of iter for descriminator')
parser.add_argument('--proj_iter', default=3, type=int, help='Number of iteration for projection update.')
parser.add_argument('--end_iter', default=30000, help='How many iterations to train for.')
parser.add_argument('--lambda_gp', default=10, help='gradient penalty hyperparameter')
parser.add_argument('--restore_mode', default=False,
help='If True, it will load saved model from OUT_PATH and continue to train')
parser.add_argument('--max_op', default=False)
parser.add_argument('--edge_fn', default='diff_exp')
parser.add_argument('--make_pos', type=bool,default=True)
parser.add_argument('--proj_lambda',type=float,default=1)
parser.add_argument('--include_dp', type=int, default=True)
parser.add_argument('--top2bottom', dest='top2bottom', action='store_true')
parser.add_argument('--no-top2bottom', dest='top2bottom', action='store_false')
parser.set_defaults(top2bottom=False)
return parser
def __init__(self):
parser = self.build_parser()
args = parser.parse_args()
super().__init__(**vars(args))
class MicroStructureConfig(argparse.Namespace):
def build_parser(self):
parser = get_parser("MicroConfig config")
parser.add_argument('--lr',default=01e-04)
parser.add_argument('--output_path', default='./output_dir', help='output directory')
parser.add_argument('--data_dir', default='/data/datasets/two_phase_morph/')
parser.add_argument('--gpu', default=1, type= int,help='Selecting the gpu')
parser.add_argument('--data_size',default=64,type=int)
parser.add_argument('--batch_size', default=32,type=int, help='Batch size for training')
parser.add_argument('--hidden_size', default=32, type=int,help='Hidden size used for generator and discriminator')
parser.add_argument('--critic_iter', default=5, type=int,help='Number of iter for descriminator')
parser.add_argument('--proj_iter', default=1, type=int, help='Number of iteration for projection update.')
parser.add_argument('--end_iter', default=50000, help='How many iterations to train for.')
parser.add_argument('--lambda_gp', default=10, help='gradient penalty hyperparameter')
parser.add_argument('--restore_mode', default=False,
help='If True, it will load saved model from OUT_PATH and continue to train')
parser.add_argument('--max_op', default=False)
parser.add_argument('--edge_fn', choices=list(d.keys()),default='diff_exp')
parser.add_argument('--make_pos', type=bool, default=False)
parser.add_argument('--proj_lambda', type=float, default=1)
parser.add_argument('--include_dp',type=int,default=True)
parser.add_argument('--top2bottom', dest='top2bottom', action='store_true')
parser.add_argument('--no-top2bottom', dest='top2bottom', action='store_false')
parser.set_defaults(top2bottom=False)
return parser
def __init__(self):
parser = self.build_parser()
args = parser.parse_args()
super().__init__(**vars(args))
| 53.253012
| 122
| 0.682579
| 561
| 4,420
| 5.174688
| 0.231729
| 0.120909
| 0.228384
| 0.036169
| 0.851188
| 0.845332
| 0.826042
| 0.826042
| 0.826042
| 0.791595
| 0
| 0.01427
| 0.175566
| 4,420
| 82
| 123
| 53.902439
| 0.782382
| 0.01991
| 0
| 0.671875
| 0
| 0
| 0.294377
| 0.007203
| 0
| 0
| 0
| 0
| 0
| 1
| 0.078125
| false
| 0
| 0.03125
| 0
| 0.1875
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
c9e940d8a93717c521e40ddaeecaaa28cbc83b2f
| 403
|
py
|
Python
|
rllib/examples/gpu_test.py
|
anaskn/ray
|
81db5f8060cb093085470ffdc71d8fdecc7bf381
|
[
"Apache-2.0"
] | null | null | null |
rllib/examples/gpu_test.py
|
anaskn/ray
|
81db5f8060cb093085470ffdc71d8fdecc7bf381
|
[
"Apache-2.0"
] | null | null | null |
rllib/examples/gpu_test.py
|
anaskn/ray
|
81db5f8060cb093085470ffdc71d8fdecc7bf381
|
[
"Apache-2.0"
] | 1
|
2021-05-20T22:00:15.000Z
|
2021-05-20T22:00:15.000Z
|
import os
import ray
from ray import tune
@ray.remote(num_gpus=1)
def use_gpu():
print("ray.get_gpu_ids(): {}".format(ray.get_gpu_ids()))
print("CUDA_VISIBLE_DEVICES: {}".format(os.environ["CUDA_VISIBLE_DEVICES"]))
if __name__ == "__main__":
ray.init()
print("ray.get_gpu_ids(): {}".format(ray.get_gpu_ids()))
#print("CUDA_VISIBLE_DEVICES: {}".format(os.environ["CUDA_VISIBLE_DEVICES"]))
| 25.1875
| 80
| 0.707196
| 61
| 403
| 4.245902
| 0.393443
| 0.092664
| 0.138996
| 0.185328
| 0.702703
| 0.702703
| 0.702703
| 0.702703
| 0.702703
| 0.702703
| 0
| 0.002747
| 0.096774
| 403
| 15
| 81
| 26.866667
| 0.708791
| 0.188586
| 0
| 0.2
| 0
| 0
| 0.288344
| 0.064417
| 0
| 0
| 0
| 0
| 0
| 1
| 0.1
| true
| 0
| 0.3
| 0
| 0.4
| 0.3
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
4e85a767dc1a77745686895d8e0fa92531e23594
| 210
|
py
|
Python
|
bolinette/defaults/__init__.py
|
bolinette/bolinette
|
b35a7d828c7d9617da6a8d7ac066e3b675a65252
|
[
"MIT"
] | 4
|
2020-11-02T15:16:32.000Z
|
2022-01-11T11:19:24.000Z
|
bolinette/defaults/__init__.py
|
bolinette/bolinette
|
b35a7d828c7d9617da6a8d7ac066e3b675a65252
|
[
"MIT"
] | 14
|
2021-01-04T11:06:59.000Z
|
2022-03-23T17:01:49.000Z
|
bolinette/defaults/__init__.py
|
bolinette/bolinette
|
b35a7d828c7d9617da6a8d7ac066e3b675a65252
|
[
"MIT"
] | null | null | null |
import bolinette.defaults.models
import bolinette.defaults.mixins
import bolinette.defaults.services
import bolinette.defaults.middlewares
import bolinette.defaults.controllers
import bolinette.defaults.topics
| 30
| 37
| 0.885714
| 24
| 210
| 7.75
| 0.375
| 0.483871
| 0.741935
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.057143
| 210
| 6
| 38
| 35
| 0.939394
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
0952e905288174cbfdd1f35d6dcc51f31902d2fc
| 104
|
py
|
Python
|
janus/qm_wrapper/__init__.py
|
zhenglz/janus
|
f3f1ed3f2b6e377c51e958cae2d919069d221eda
|
[
"BSD-3-Clause"
] | 16
|
2019-04-18T15:45:02.000Z
|
2021-12-17T17:51:18.000Z
|
janus/qm_wrapper/__init__.py
|
zhenglz/janus
|
f3f1ed3f2b6e377c51e958cae2d919069d221eda
|
[
"BSD-3-Clause"
] | 2
|
2019-06-20T16:56:08.000Z
|
2020-08-28T16:09:16.000Z
|
janus/qm_wrapper/__init__.py
|
zhenglz/janus
|
f3f1ed3f2b6e377c51e958cae2d919069d221eda
|
[
"BSD-3-Clause"
] | 8
|
2018-11-16T17:00:58.000Z
|
2022-01-11T05:36:50.000Z
|
from janus.qm_wrapper.qm_wrapper import QMWrapper
from janus.qm_wrapper.psi4_wrapper import Psi4Wrapper
| 34.666667
| 53
| 0.884615
| 16
| 104
| 5.5
| 0.5
| 0.306818
| 0.25
| 0.409091
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.020833
| 0.076923
| 104
| 2
| 54
| 52
| 0.895833
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
117ccff29d39a8a303a7911cf64c178e84256843
| 681
|
py
|
Python
|
tests/test_provider_unicell_kind.py
|
mjuenema/python-terrascript
|
6d8bb0273a14bfeb8ff8e950fe36f97f7c6e7b1d
|
[
"BSD-2-Clause"
] | 507
|
2017-07-26T02:58:38.000Z
|
2022-01-21T12:35:13.000Z
|
tests/test_provider_unicell_kind.py
|
mjuenema/python-terrascript
|
6d8bb0273a14bfeb8ff8e950fe36f97f7c6e7b1d
|
[
"BSD-2-Clause"
] | 135
|
2017-07-20T12:01:59.000Z
|
2021-10-04T22:25:40.000Z
|
tests/test_provider_unicell_kind.py
|
mjuenema/python-terrascript
|
6d8bb0273a14bfeb8ff8e950fe36f97f7c6e7b1d
|
[
"BSD-2-Clause"
] | 81
|
2018-02-20T17:55:28.000Z
|
2022-01-31T07:08:40.000Z
|
# tests/test_provider_unicell_kind.py
# Automatically generated by tools/makecode.py (24-Sep-2021 15:20:21 UTC)
def test_provider_import():
import terrascript.provider.unicell.kind
def test_resource_import():
from terrascript.resource.unicell.kind import kind_cluster
# TODO: Shortcut imports without namespace for official and supported providers.
# TODO: This has to be moved into a required_providers block.
# def test_version_source():
#
# import terrascript.provider.unicell.kind
#
# t = terrascript.provider.unicell.kind.kind()
# s = str(t)
#
# assert 'https://github.com/unicell/terraform-provider-kind' in s
# assert '0.0.2-u2' in s
| 27.24
| 80
| 0.737151
| 96
| 681
| 5.114583
| 0.59375
| 0.112016
| 0.154786
| 0.183299
| 0.14664
| 0
| 0
| 0
| 0
| 0
| 0
| 0.027972
| 0.160059
| 681
| 24
| 81
| 28.375
| 0.83042
| 0.709251
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0.041667
| 0
| 1
| 0.5
| true
| 0
| 1
| 0
| 1.5
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.