hexsha
string
size
int64
ext
string
lang
string
max_stars_repo_path
string
max_stars_repo_name
string
max_stars_repo_head_hexsha
string
max_stars_repo_licenses
list
max_stars_count
int64
max_stars_repo_stars_event_min_datetime
string
max_stars_repo_stars_event_max_datetime
string
max_issues_repo_path
string
max_issues_repo_name
string
max_issues_repo_head_hexsha
string
max_issues_repo_licenses
list
max_issues_count
int64
max_issues_repo_issues_event_min_datetime
string
max_issues_repo_issues_event_max_datetime
string
max_forks_repo_path
string
max_forks_repo_name
string
max_forks_repo_head_hexsha
string
max_forks_repo_licenses
list
max_forks_count
int64
max_forks_repo_forks_event_min_datetime
string
max_forks_repo_forks_event_max_datetime
string
content
string
avg_line_length
float64
max_line_length
int64
alphanum_fraction
float64
qsc_code_num_words_quality_signal
int64
qsc_code_num_chars_quality_signal
float64
qsc_code_mean_word_length_quality_signal
float64
qsc_code_frac_words_unique_quality_signal
float64
qsc_code_frac_chars_top_2grams_quality_signal
float64
qsc_code_frac_chars_top_3grams_quality_signal
float64
qsc_code_frac_chars_top_4grams_quality_signal
float64
qsc_code_frac_chars_dupe_5grams_quality_signal
float64
qsc_code_frac_chars_dupe_6grams_quality_signal
float64
qsc_code_frac_chars_dupe_7grams_quality_signal
float64
qsc_code_frac_chars_dupe_8grams_quality_signal
float64
qsc_code_frac_chars_dupe_9grams_quality_signal
float64
qsc_code_frac_chars_dupe_10grams_quality_signal
float64
qsc_code_frac_chars_replacement_symbols_quality_signal
float64
qsc_code_frac_chars_digital_quality_signal
float64
qsc_code_frac_chars_whitespace_quality_signal
float64
qsc_code_size_file_byte_quality_signal
float64
qsc_code_num_lines_quality_signal
float64
qsc_code_num_chars_line_max_quality_signal
float64
qsc_code_num_chars_line_mean_quality_signal
float64
qsc_code_frac_chars_alphabet_quality_signal
float64
qsc_code_frac_chars_comments_quality_signal
float64
qsc_code_cate_xml_start_quality_signal
float64
qsc_code_frac_lines_dupe_lines_quality_signal
float64
qsc_code_cate_autogen_quality_signal
float64
qsc_code_frac_lines_long_string_quality_signal
float64
qsc_code_frac_chars_string_length_quality_signal
float64
qsc_code_frac_chars_long_word_length_quality_signal
float64
qsc_code_frac_lines_string_concat_quality_signal
float64
qsc_code_cate_encoded_data_quality_signal
float64
qsc_code_frac_chars_hex_words_quality_signal
float64
qsc_code_frac_lines_prompt_comments_quality_signal
float64
qsc_code_frac_lines_assert_quality_signal
float64
qsc_codepython_cate_ast_quality_signal
float64
qsc_codepython_frac_lines_func_ratio_quality_signal
float64
qsc_codepython_cate_var_zero_quality_signal
bool
qsc_codepython_frac_lines_pass_quality_signal
float64
qsc_codepython_frac_lines_import_quality_signal
float64
qsc_codepython_frac_lines_simplefunc_quality_signal
float64
qsc_codepython_score_lines_no_logic_quality_signal
float64
qsc_codepython_frac_lines_print_quality_signal
float64
qsc_code_num_words
int64
qsc_code_num_chars
int64
qsc_code_mean_word_length
int64
qsc_code_frac_words_unique
null
qsc_code_frac_chars_top_2grams
int64
qsc_code_frac_chars_top_3grams
int64
qsc_code_frac_chars_top_4grams
int64
qsc_code_frac_chars_dupe_5grams
int64
qsc_code_frac_chars_dupe_6grams
int64
qsc_code_frac_chars_dupe_7grams
int64
qsc_code_frac_chars_dupe_8grams
int64
qsc_code_frac_chars_dupe_9grams
int64
qsc_code_frac_chars_dupe_10grams
int64
qsc_code_frac_chars_replacement_symbols
int64
qsc_code_frac_chars_digital
int64
qsc_code_frac_chars_whitespace
int64
qsc_code_size_file_byte
int64
qsc_code_num_lines
int64
qsc_code_num_chars_line_max
int64
qsc_code_num_chars_line_mean
int64
qsc_code_frac_chars_alphabet
int64
qsc_code_frac_chars_comments
int64
qsc_code_cate_xml_start
int64
qsc_code_frac_lines_dupe_lines
int64
qsc_code_cate_autogen
int64
qsc_code_frac_lines_long_string
int64
qsc_code_frac_chars_string_length
int64
qsc_code_frac_chars_long_word_length
int64
qsc_code_frac_lines_string_concat
null
qsc_code_cate_encoded_data
int64
qsc_code_frac_chars_hex_words
int64
qsc_code_frac_lines_prompt_comments
int64
qsc_code_frac_lines_assert
int64
qsc_codepython_cate_ast
int64
qsc_codepython_frac_lines_func_ratio
int64
qsc_codepython_cate_var_zero
int64
qsc_codepython_frac_lines_pass
int64
qsc_codepython_frac_lines_import
int64
qsc_codepython_frac_lines_simplefunc
int64
qsc_codepython_score_lines_no_logic
int64
qsc_codepython_frac_lines_print
int64
effective
string
hits
int64
56f0adb6ffb23ba292badc3740453920de7e1ff6
42,030
py
Python
src/move_arm/src/projik_example.py
citronella3alain/baxterDraw
c050254e8b4b8d4f5087e8743a34289844138e0c
[ "MIT" ]
null
null
null
src/move_arm/src/projik_example.py
citronella3alain/baxterDraw
c050254e8b4b8d4f5087e8743a34289844138e0c
[ "MIT" ]
null
null
null
src/move_arm/src/projik_example.py
citronella3alain/baxterDraw
c050254e8b4b8d4f5087e8743a34289844138e0c
[ "MIT" ]
null
null
null
#!/usr/bin/env python import rospy from moveit_msgs.srv import GetPositionIK, GetPositionIKRequest, GetPositionIKResponse from geometry_msgs.msg import PoseStamped from moveit_commander import MoveGroupCommander import numpy as np from numpy import linalg import sys def make0(robo, arm, xi, upper): # Construct the request request = GetPositionIKRequest() request.ik_request.group_name = arm + "_arm" # If a Sawyer does not have a gripper, replace '_gripper_tip' with '_wrist' instead link = arm + "_gripper" if robo == 'sawyer': link += '_tip' request.ik_request.ik_link_name = link request.ik_request.attempts = 20 request.ik_request.pose_stamped.header.frame_id = "base" # print('Opening...') # right_gripper.open() # rospy.sleep(1.0) # print('Done!') b = 0.048 a = 0.068 k = 0.193 h = 0.765 xi = np.round(xi, 3) if upper == True: #math equatio y = np.round((np.sqrt((b**2)-((b**2/a**2)*((xi-h)**2))))+k, 3) print(xi) print(y) if np.isnan(y): y = 0.193 if xi == 0.765: y = 0.293 #y = np.round(((b/a)*np.sqrt(((a**2)-((xi-h)**2)))+k), 3) else: print("im here") y = np.round((-1*np.sqrt((b**2)-((b**2/a**2)*((xi-h)**2))))+k, 3) print(xi) print(y) if np.isnan(y): y = 0.193 if xi == 0.765: y = 0.093 #y = np.round(((b/a)*-np.sqrt(((a**2)-((xi-h)**2)))+k), 3) # Set the desired orientation for the end effector HERE #switch = not switch request.ik_request.pose_stamped.pose.position.x = xi request.ik_request.pose_stamped.pose.position.y = y request.ik_request.pose_stamped.pose.position.z = -0.1 request.ik_request.pose_stamped.pose.orientation.x = 0.0 request.ik_request.pose_stamped.pose.orientation.y = 1.0 request.ik_request.pose_stamped.pose.orientation.z = 0.0 request.ik_request.pose_stamped.pose.orientation.w = 0.0 return request def make1(robo, arm, xi, xc, yc): # Construct the request request = GetPositionIKRequest() request.ik_request.group_name = arm + "_arm" # If a Sawyer does not have a gripper, replace '_gripper_tip' with '_wrist' instead link = arm + "_gripper" if robo == 
'sawyer': link += '_tip' request.ik_request.ik_link_name = link request.ik_request.attempts = 20 request.ik_request.pose_stamped.header.frame_id = "base" y = yc+0.059 request.ik_request.pose_stamped.pose.position.x = xi request.ik_request.pose_stamped.pose.position.y = y request.ik_request.pose_stamped.pose.position.z = -0.1 request.ik_request.pose_stamped.pose.orientation.x = 0.0 request.ik_request.pose_stamped.pose.orientation.y = 1.0 request.ik_request.pose_stamped.pose.orientation.z = 0.0 request.ik_request.pose_stamped.pose.orientation.w = 0.0 return request def make2(robo, arm, xi, upper, mid, center_x = 0.691, center_y = 0.259): # Construct the request request = GetPositionIKRequest() request.ik_request.group_name = arm + "_arm" # If a Sawyer does not have a gripper, replace '_gripper_tip' with '_wrist' instead link = arm + "_gripper" if robo == 'sawyer': link += '_tip' request.ik_request.ik_link_name = link request.ik_request.attempts = 20 request.ik_request.pose_stamped.header.frame_id = "base" #draw top of two if upper == True and mid == True: y = -np.sqrt((-xi+center_x)/-20) + center_y elif upper == True: # y = -20*((xi-center_x)**2)+center_y y = np.sqrt((-xi+center_x)/-20) + center_y elif mid == True: # y = xi-.741+.209 # y = xi - center_x - 0.05 + center_y -0.05 y = -xi + center_y +.05 + center_x + .05 else: # y = center_y -0.15 y = xi xi = center_x + .15 request.ik_request.pose_stamped.pose.position.x = xi request.ik_request.pose_stamped.pose.position.y = y request.ik_request.pose_stamped.pose.position.z = -0.1 request.ik_request.pose_stamped.pose.orientation.x = 0.0 request.ik_request.pose_stamped.pose.orientation.y = 1.0 request.ik_request.pose_stamped.pose.orientation.z = 0.0 request.ik_request.pose_stamped.pose.orientation.w = 0.0 return request def make3(robo, arm, xi, upper, xc= 0.691,yc= 0.259): request = GetPositionIKRequest() request.ik_request.group_name = arm + "_arm" # If a Sawyer does not have a gripper, replace '_gripper_tip' with 
'_wrist' instead link = arm + "_gripper" if robo == 'sawyer': link += '_tip' request.ik_request.ik_link_name = link request.ik_request.attempts = 20 request.ik_request.pose_stamped.header.frame_id = "base" xi = round(xi,3) if upper == True: y= round(-30*((xi-xc)**2) +yc,3) else: y= round(-30*((xi-(xc*1.1))**2) +yc, 3) request.ik_request.pose_stamped.pose.position.x = xi request.ik_request.pose_stamped.pose.position.y = y request.ik_request.pose_stamped.pose.position.z = -0.1 request.ik_request.pose_stamped.pose.orientation.x = 0.0 request.ik_request.pose_stamped.pose.orientation.y = 1.0 request.ik_request.pose_stamped.pose.orientation.z = 0.0 request.ik_request.pose_stamped.pose.orientation.w = 0.0 return request def make4(robo, arm, xi, xc= 0.691,yc= 0.259): request = GetPositionIKRequest() request.ik_request.group_name = arm + "_arm" # If a Sawyer does not have a gripper, replace '_gripper_tip' with '_wrist' instead link = arm + "_gripper" if robo == 'sawyer': link += '_tip' request.ik_request.ik_link_name = link request.ik_request.attempts = 20 request.ik_request.pose_stamped.header.frame_id = "base" if xi == 0: request.ik_request.pose_stamped.pose.position.x = xc request.ik_request.pose_stamped.pose.position.y = yc request.ik_request.pose_stamped.pose.position.z = -0.1 request.ik_request.pose_stamped.pose.orientation.x = 0.0 request.ik_request.pose_stamped.pose.orientation.y = 1.0 request.ik_request.pose_stamped.pose.orientation.z = 0.0 request.ik_request.pose_stamped.pose.orientation.w = 0.0 if xi == 1: request.ik_request.pose_stamped.pose.position.x = xc+(0.0788/2.0) request.ik_request.pose_stamped.pose.position.y = yc request.ik_request.pose_stamped.pose.position.z = -0.1 request.ik_request.pose_stamped.pose.orientation.x = 0.0 request.ik_request.pose_stamped.pose.orientation.y = 1.0 request.ik_request.pose_stamped.pose.orientation.z = 0.0 request.ik_request.pose_stamped.pose.orientation.w = 0.0 if xi == 2: request.ik_request.pose_stamped.pose.position.x = 
xc+0.0788 request.ik_request.pose_stamped.pose.position.y = yc request.ik_request.pose_stamped.pose.position.z = -0.1 request.ik_request.pose_stamped.pose.orientation.x = 0.0 request.ik_request.pose_stamped.pose.orientation.y = 1.0 request.ik_request.pose_stamped.pose.orientation.z = 0.0 request.ik_request.pose_stamped.pose.orientation.w = 0.0 if xi == 3: request.ik_request.pose_stamped.pose.position.x = xc+0.0788 request.ik_request.pose_stamped.pose.position.y = yc + 0.059 request.ik_request.pose_stamped.pose.position.z = -0.1 request.ik_request.pose_stamped.pose.orientation.x = 0.0 request.ik_request.pose_stamped.pose.orientation.y = 1.0 request.ik_request.pose_stamped.pose.orientation.z = 0.0 request.ik_request.pose_stamped.pose.orientation.w = 0.0 if xi == 4: request.ik_request.pose_stamped.pose.position.x = xc request.ik_request.pose_stamped.pose.position.y = yc + 0.059 request.ik_request.pose_stamped.pose.position.z = -0.1 request.ik_request.pose_stamped.pose.orientation.x = 0.0 request.ik_request.pose_stamped.pose.orientation.y = 1.0 request.ik_request.pose_stamped.pose.orientation.z = 0.0 request.ik_request.pose_stamped.pose.orientation.w = 0.0 if xi == 5: request.ik_request.pose_stamped.pose.position.x = xc+(0.1577/2.0) request.ik_request.pose_stamped.pose.position.y = yc + 0.059 request.ik_request.pose_stamped.pose.position.z = -0.1 request.ik_request.pose_stamped.pose.orientation.x = 0.0 request.ik_request.pose_stamped.pose.orientation.y = 1.0 request.ik_request.pose_stamped.pose.orientation.z = 0.0 request.ik_request.pose_stamped.pose.orientation.w = 0.0 if xi == 6: request.ik_request.pose_stamped.pose.position.x = xc+0.1577 request.ik_request.pose_stamped.pose.position.y = yc + 0.059 request.ik_request.pose_stamped.pose.position.z = -0.1 request.ik_request.pose_stamped.pose.orientation.x = 0.0 request.ik_request.pose_stamped.pose.orientation.y = 1.0 request.ik_request.pose_stamped.pose.orientation.z = 0.0 
request.ik_request.pose_stamped.pose.orientation.w = 0.0 return request def make5(robo, arm, xi, upper, mid, xc=0.6467, yc=0.2): request = GetPositionIKRequest() request.ik_request.group_name = arm + "_arm" # If a Sawyer does not have a gripper, replace '_gripper_tip' with '_wrist' instead link = arm + "_gripper" if robo == 'sawyer': link += '_tip' request.ik_request.ik_link_name = link request.ik_request.attempts = 20 request.ik_request.pose_stamped.header.frame_id = "base" xi = round(xi,3) if upper == True: y = xi xi = xc elif mid == True: y=yc else: y= round(-30*((xi-((xc+0.0443)*1.1))**2) +yc +0.059, 3) request.ik_request.pose_stamped.pose.position.x = xi request.ik_request.pose_stamped.pose.position.y = y request.ik_request.pose_stamped.pose.position.z = -0.1 request.ik_request.pose_stamped.pose.orientation.x = 0.0 request.ik_request.pose_stamped.pose.orientation.y = 1.0 request.ik_request.pose_stamped.pose.orientation.z = 0.0 request.ik_request.pose_stamped.pose.orientation.w = 0.0 return request def make6(robo, arm, xi, upper, center_x = 0.6566, center_y = 0.2235): # Construct the request request = GetPositionIKRequest() request.ik_request.group_name = arm + "_arm" # If a Sawyer does not have a gripper, replace '_gripper_tip' with '_wrist' instead link = arm + "_gripper" if robo == 'sawyer': link += '_tip' request.ik_request.ik_link_name = link request.ik_request.attempts = 20 request.ik_request.pose_stamped.header.frame_id = "base" #draw top of two if upper == 0: y = 30 * (xi-(center_x+0.0344))**2 + center_y - 0.0355 elif upper == 1: y=center_y-0.0355 elif upper == 2: y = 30*(xi - ((center_x+0.0344)*1.1))**2 + center_y - 0.0355 elif upper == 3: y = -30*(xi - ((center_x+0.0344)*1.1))**2 + center_y +0.0355 elif upper==4: y = 30*(xi - ((center_x+0.0344)*1.1))**2 + center_y - 0.0355 request.ik_request.pose_stamped.pose.position.x = xi request.ik_request.pose_stamped.pose.position.y = y request.ik_request.pose_stamped.pose.position.z = -0.1 
request.ik_request.pose_stamped.pose.orientation.x = 0.0 request.ik_request.pose_stamped.pose.orientation.y = 1.0 request.ik_request.pose_stamped.pose.orientation.z = 0.0 request.ik_request.pose_stamped.pose.orientation.w = 0.0 return request def make7(robo, arm, xi, upper, xc= 0.6467,yc= 0.2): request = GetPositionIKRequest() request.ik_request.group_name = arm + "_arm" # If a Sawyer does not have a gripper, replace '_gripper_tip' with '_wrist' instead link = arm + "_gripper" if robo == 'sawyer': link += '_tip' request.ik_request.ik_link_name = link request.ik_request.attempts = 20 request.ik_request.pose_stamped.header.frame_id = "base" xi = round(xi,3) if upper: y=xi xi = xc else: y = yc+0.059 request.ik_request.pose_stamped.pose.position.x = xi request.ik_request.pose_stamped.pose.position.y = y request.ik_request.pose_stamped.pose.position.z = -0.1 request.ik_request.pose_stamped.pose.orientation.x = 0.0 request.ik_request.pose_stamped.pose.orientation.y = 1.0 request.ik_request.pose_stamped.pose.orientation.z = 0.0 request.ik_request.pose_stamped.pose.orientation.w = 0.0 return request def make8(robo, arm, xi, upper, mid, center_x = 0.6566, center_y = 0.2235): # Construct the request request = GetPositionIKRequest() request.ik_request.group_name = arm + "_arm" # If a Sawyer does not have a gripper, replace '_gripper_tip' with '_wrist' instead link = arm + "_gripper" if robo == 'sawyer': link += '_tip' request.ik_request.ik_link_name = link request.ik_request.attempts = 20 request.ik_request.pose_stamped.header.frame_id = "base" #draw top of two if upper == True and mid ==True: y = -30*((xi-(center_x+0.0344))**2)+center_y+0.0355 elif upper == False and mid == False: y = 30*(xi - ((center_x+0.0344)*1.1))**2 + center_y - 0.0355 # y = xi-.741+.209 elif upper == False and mid == True: y = -30*(xi - ((center_x+0.0344)*1.1))**2 + center_y +0.0355 elif upper == True and mid == False: y = 30 * (xi-(center_x+0.0344))**2 + center_y - 0.0355 
request.ik_request.pose_stamped.pose.position.x = xi request.ik_request.pose_stamped.pose.position.y = y request.ik_request.pose_stamped.pose.position.z = -0.1 request.ik_request.pose_stamped.pose.orientation.x = 0.0 request.ik_request.pose_stamped.pose.orientation.y = 1.0 request.ik_request.pose_stamped.pose.orientation.z = 0.0 request.ik_request.pose_stamped.pose.orientation.w = 0.0 return request def make9(robo, arm, xi, upper, mid, center_x = 0.6566, center_y = 0.2235): # Construct the request request = GetPositionIKRequest() request.ik_request.group_name = arm + "_arm" # If a Sawyer does not have a gripper, replace '_gripper_tip' with '_wrist' instead link = arm + "_gripper" if robo == 'sawyer': link += '_tip' request.ik_request.ik_link_name = link request.ik_request.attempts = 20 request.ik_request.pose_stamped.header.frame_id = "base" #draw top of two if upper == True and mid ==True: y = -30*((xi-(center_x+0.0344))**2)+center_y+0.0355 elif upper == False and mid == False: y = 30 * (xi-(center_x+0.0344))**2 + center_y - 0.0355 # y = xi-.741+.209 elif upper == False and mid == True: y = -30*((xi-(center_x+0.0344))**2)+center_y+0.0355 elif upper == True and mid == False: y = center_y + 0.0355 request.ik_request.pose_stamped.pose.position.x = xi request.ik_request.pose_stamped.pose.position.y = y request.ik_request.pose_stamped.pose.position.z = -0.1 request.ik_request.pose_stamped.pose.orientation.x = 0.0 request.ik_request.pose_stamped.pose.orientation.y = 1.0 request.ik_request.pose_stamped.pose.orientation.z = 0.0 request.ik_request.pose_stamped.pose.orientation.w = 0.0 return request def main(robo): # Wait for the IK service to become available rospy.wait_for_service('compute_ik') rospy.init_node('service_query') # Set up the right gripper right_gripper = robot_gripper.Gripper('right') # Calibrate the gripper (other commands won't work unless you do this first) print('Calibrating...') right_gripper.calibrate() rospy.sleep(2.0) arm = 'left' # Create the 
function used to call the service compute_ik = rospy.ServiceProxy('compute_ik', GetPositionIK) if robo == 'sawyer': arm = 'right' switch = True number = 0 #change this to change the number drawn while not rospy.is_shutdown(): raw_input('Press [ Enter ]: ') if number == 9: #Computer vision determines start point. center_x = 0.6566 center_y = 0.2235 # for xi in np.linspace(0.641, 0.741, 3): request = make9(robo, arm, center_x + 0.0344, upper=True, mid=True, center_x = 0.691, center_y = 0.259) try: # Send the request to the service response = compute_ik(request) # Print the response HERE # print(response) group = MoveGroupCommander(arm + "_arm") # Setting position and orientation target group.set_pose_target(request.ik_request.pose_stamped) # Plan IK and execute group.go() rospy.sleep(1.0) except rospy.ServiceException, e: print "Service call failed: %s"%e for xi in np.linspace(center_x + 0.0688, center_x, 3): request = make9(robo, arm, xi, False, False, center_x, center_y) try: # Send the request to the service response = compute_ik(request) # Print the response HERE # print(response) group = MoveGroupCommander(arm + "_arm") # Setting position and orientation target group.set_pose_target(request.ik_request.pose_stamped) # Plan IK and execute group.go() rospy.sleep(1.0) except rospy.ServiceException, e: print "Service call failed: %s"%e for xi in np.linspace(center_x, center_x + 0.0344, 3): request = make9(robo, arm, xi, False, True, center_x, center_y) try: # Send the request to the service response = compute_ik(request) # Print the response HERE # print(response) group = MoveGroupCommander(arm + "_arm") # Setting position and orientation target group.set_pose_target(request.ik_request.pose_stamped) # Plan IK and execute group.go() rospy.sleep(1.0) except rospy.ServiceException, e: print "Service call failed: %s"%e for xi in np.linspace(center_x + 0.0344, center_x + 0.1379, 3): request = make9(robo, arm, xi, True, False, center_x, center_y) try: # Send the request to 
the service response = compute_ik(request) # Print the response HERE # print(response) group = MoveGroupCommander(arm + "_arm") # Setting position and orientation target group.set_pose_target(request.ik_request.pose_stamped) # Plan IK and execute group.go() rospy.sleep(1.0) except rospy.ServiceException, e: print "Service call failed: %s"%e if number == 8: #Computer vision determines start point. center_x = 0.691 center_y = 0.259 # for xi in np.linspace(0.641, 0.741, 3): for xi in np.linspace(center_x, center_x + 0.0688, 3): request = make8(robo, arm, xi, True, True, center_x, center_y) try: # Send the request to the service response = compute_ik(request) # Print the response HERE # print(response) group = MoveGroupCommander(arm + "_arm") # Setting position and orientation target group.set_pose_target(request.ik_request.pose_stamped) # Plan IK and execute group.go() rospy.sleep(1.0) except rospy.ServiceException, e: print "Service call failed: %s"%e for xi in np.linspace(center_x + 0.0688, center_x + 0.1379, 3): request = make8(robo, arm, xi, False, False, center_x, center_y) try: # Send the request to the service response = compute_ik(request) # Print the response HERE # print(response) group = MoveGroupCommander(arm + "_arm") # Setting position and orientation target group.set_pose_target(request.ik_request.pose_stamped) # Plan IK and execute group.go() rospy.sleep(1.0) except rospy.ServiceException, e: print "Service call failed: %s"%e for xi in np.linspace(center_x + 0.1379, center_x + 0.0688, 3): request = make8(robo, arm, xi, False, True, center_x, center_y) try: # Send the request to the service response = compute_ik(request) # Print the response HERE # print(response) group = MoveGroupCommander(arm + "_arm") # Setting position and orientation target group.set_pose_target(request.ik_request.pose_stamped) # Plan IK and execute group.go() rospy.sleep(1.0) except rospy.ServiceException, e: print "Service call failed: %s"%e for xi in np.linspace(center_x + 
0.0688, center_x, 3): request = make8(robo, arm, xi, True, False, center_x, center_y) try: # Send the request to the service response = compute_ik(request) # Print the response HERE # print(response) group = MoveGroupCommander(arm + "_arm") # Setting position and orientation target group.set_pose_target(request.ik_request.pose_stamped) # Plan IK and execute group.go() rospy.sleep(1.0) except rospy.ServiceException, e: print "Service call failed: %s"%e if number == 7: center_x = 0.6467 center_y = 0.2 for xi in np.linspace(center_y, center_y+0.059, 3): request = make7(robo, arm, xi, True, center_x, center_y) try: # Send the request to the service response = compute_ik(request) # Print the response HERE # print(response) group = MoveGroupCommander(arm + "_arm") # Setting position and orientation target group.set_pose_target(request.ik_request.pose_stamped) # Plan IK and execute group.go() rospy.sleep(1.0) except rospy.ServiceException, e: print "Service call failed: %s"%e for xi in np.linspace(center_x, center_x+0.1577, 3): request = make7(robo, arm, xi, False, center_x, center_y) try: # Send the request to the service response = compute_ik(request) # Print the response HERE # print(response) group = MoveGroupCommander(arm + "_arm") # Setting position and orientation target group.set_pose_target(request.ik_request.pose_stamped) # Plan IK and execute group.go() rospy.sleep(1.0) except rospy.ServiceException, e: print "Service call failed: %s"%e if number == 6: #Computer vision determines start point. 
center_x = 0.691 center_y = 0.259 # for xi in np.linspace(0.641, 0.741, 3): for xi in np.linspace(center_x, center_x + 0.0344, 3): request = make6(robo, arm, xi, 0, center_x, center_y) try: # Send the request to the service response = compute_ik(request) # Print the response HERE # print(response) group = MoveGroupCommander(arm + "_arm") # Setting position and orientation target group.set_pose_target(request.ik_request.pose_stamped) # Plan IK and execute group.go() rospy.sleep(1.0) except rospy.ServiceException, e: print "Service call failed: %s"%e for xi in np.linspace(center_x + 0.0344, center_x + 0.1035, 3): request = make6(robo, arm, xi, 1, center_x, center_y) try: # Send the request to the service response = compute_ik(request) # Print the response HERE # print(response) group = MoveGroupCommander(arm + "_arm") # Setting position and orientation target group.set_pose_target(request.ik_request.pose_stamped) # Plan IK and execute group.go() rospy.sleep(1.0) except rospy.ServiceException, e: print "Service call failed: %s"%e for xi in np.linspace(center_x + 0.1035, center_x + 0.1379, 3): request = make6(robo, arm, xi, 2, center_x, center_y) try: # Send the request to the service response = compute_ik(request) # Print the response HERE # print(response) group = MoveGroupCommander(arm + "_arm") # Setting position and orientation target group.set_pose_target(request.ik_request.pose_stamped) # Plan IK and execute group.go() rospy.sleep(1.0) except rospy.ServiceException, e: print "Service call failed: %s"%e for xi in np.linspace(center_x + 0.1379, center_x + 0.0688, 3): request = make6(robo, arm, xi, 3, center_x, center_y) try: # Send the request to the service response = compute_ik(request) # Print the response HERE # print(response) group = MoveGroupCommander(arm + "_arm") # Setting position and orientation target group.set_pose_target(request.ik_request.pose_stamped) # Plan IK and execute group.go() rospy.sleep(1.0) except rospy.ServiceException, e: print "Service 
call failed: %s"%e for xi in np.linspace(center_x + 0.0688, center_x + 0.1035, 3): request = make6(robo, arm, xi, 4, center_x, center_y) try: # Send the request to the service response = compute_ik(request) # Print the response HERE # print(response) group = MoveGroupCommander(arm + "_arm") # Setting position and orientation target group.set_pose_target(request.ik_request.pose_stamped) # Plan IK and execute group.go() rospy.sleep(1.0) except rospy.ServiceException, e: print "Service call failed: %s"%e if number == 5: center_x = 0.6467 center_y = 0.2 for xi in np.linspace(center_y+0.059, center_y, 3): request = make5(robo, arm, xi, True, False, center_x, center_y) try: # Send the request to the service response = compute_ik(request) # Print the response HERE # print(response) group = MoveGroupCommander(arm + "_arm") # Setting position and orientation target group.set_pose_target(request.ik_request.pose_stamped) # Plan IK and execute group.go() rospy.sleep(1.0) except rospy.ServiceException, e: print "Service call failed: %s"%e for xi in np.linspace(center_x, center_x+0.0691, 3): request = make5(robo, arm, xi, False, True, center_x, center_y) try: # Send the request to the service response = compute_ik(request) # Print the response HERE # print(response) group = MoveGroupCommander(arm + "_arm") # Setting position and orientation target group.set_pose_target(request.ik_request.pose_stamped) # Plan IK and execute group.go() rospy.sleep(1.0) except rospy.ServiceException, e: print "Service call failed: %s"%e for xi in np.linspace(center_x+0.0691, center_x+0.1577, 5): request = make5(robo, arm, xi, False, False, center_x, center_y) try: # Send the request to the service response = compute_ik(request) # Print the response HERE # print(response) group = MoveGroupCommander(arm + "_arm") # Setting position and orientation target group.set_pose_target(request.ik_request.pose_stamped) # Plan IK and execute group.go() rospy.sleep(1.0) except rospy.ServiceException, e: print 
"Service call failed: %s"%e if number == 4: center_x = 0.6467 center_y = 0.2 for xi in range(7): request = make4(robo, arm, xi, center_x, center_y) try: # Send the request to the service response = compute_ik(request) # Print the response HERE # print(response) group = MoveGroupCommander(arm + "_arm") # Setting position and orientation target group.set_pose_target(request.ik_request.pose_stamped) # Plan IK and execute group.go() rospy.sleep(1.0) except rospy.ServiceException, e: print "Service call failed: %s"%e if number == 3: center_x = 0.691 center_y = 0.259 for xi in np.linspace(center_x-0.0443, center_x+0.0345, 3): request = make3(robo, arm, xi, True, center_x, center_y) try: # Send the request to the service response = compute_ik(request) # Print the response HERE # print(response) group = MoveGroupCommander(arm + "_arm") # Setting position and orientation target group.set_pose_target(request.ik_request.pose_stamped) # Plan IK and execute group.go() rospy.sleep(1.0) except rospy.ServiceException, e: print "Service call failed: %s"%e for xi in np.linspace(center_x+0.0345, center_x+0.0691, 3): request = make3(robo, arm, xi, False, center_x, center_y) try: # Send the request to the service response = compute_ik(request) # Print the response HERE # print(response) group = MoveGroupCommander(arm + "_arm") # Setting position and orientation target group.set_pose_target(request.ik_request.pose_stamped) # Plan IK and execute group.go() rospy.sleep(1.0) except rospy.ServiceException, e: print "Service call failed: %s"%e for xi in np.linspace(center_x+0.0691, center_x+0.1134, 3): request = make3(robo, arm, xi, False, center_x, center_y) try: # Send the request to the service response = compute_ik(request) # Print the response HERE # print(response) group = MoveGroupCommander(arm + "_arm") # Setting position and orientation target group.set_pose_target(request.ik_request.pose_stamped) # Plan IK and execute group.go() rospy.sleep(1.0) except rospy.ServiceException, e: 
print "Service call failed: %s"%e if number == 2: #Computer vision determines start point. center_x = 0.691 center_y = 0.259 # for xi in np.linspace(0.641, 0.741, 3): for xi in np.linspace(center_x + 0.05, center_x, 3): request = make2(robo, arm, xi, True, True, center_x, center_y) try: # Send the request to the service response = compute_ik(request) # Print the response HERE # print(response) group = MoveGroupCommander(arm + "_arm") # Setting position and orientation target group.set_pose_target(request.ik_request.pose_stamped) # Plan IK and execute group.go() rospy.sleep(1.0) except rospy.ServiceException, e: print "Service call failed: %s"%e for xi in np.linspace(center_x, center_x + 0.05, 3): request = make2(robo, arm, xi, True, False, center_x, center_y) try: # Send the request to the service response = compute_ik(request) # Print the response HERE # print(response) group = MoveGroupCommander(arm + "_arm") # Setting position and orientation target group.set_pose_target(request.ik_request.pose_stamped) # Plan IK and execute group.go() rospy.sleep(1.0) except rospy.ServiceException, e: print "Service call failed: %s"%e for xi in np.linspace(center_x + 0.05, center_x + 0.15, 3): request = make2(robo, arm, xi, False, True, center_x, center_y) try: # Send the request to the service response = compute_ik(request) # Print the response HERE # print(response) group = MoveGroupCommander(arm + "_arm") # Setting position and orientation target group.set_pose_target(request.ik_request.pose_stamped) # Plan IK and execute group.go() rospy.sleep(1.0) except rospy.ServiceException, e: print "Service call failed: %s"%e for xi in np.linspace(center_y - 0.05, center_y + 0.05, 3): request = make2(robo, arm, xi, False, False, center_x, center_y) try: # Send the request to the service response = compute_ik(request) # Print the response HERE # print(response) group = MoveGroupCommander(arm + "_arm") # Setting position and orientation target 
group.set_pose_target(request.ik_request.pose_stamped) # Plan IK and execute group.go() rospy.sleep(1.0) except rospy.ServiceException, e: print "Service call failed: %s"%e # Set the desired orientation for the end effector HERE if number==1: center_x = 0.6467 center_y = 0.2 for xi in np.linspace(center_x, center_x+0.1577, 4): request = make1(robo, arm, xi, center_x, center_y) try: # Send the request to the service response = compute_ik(request) # Print the response HERE # print(response) group = MoveGroupCommander(arm + "_arm") # Setting position and orientation target group.set_pose_target(request.ik_request.pose_stamped) # Plan IK and execute group.go() rospy.sleep(1.0) except rospy.ServiceException, e: print "Service call failed: %s"%e if number == 0: for xi in np.linspace(0.692, 0.765, 3): request = make0(robo, arm, xi, True) try: # Send the request to the service response = compute_ik(request) # Print the response HERE # print(response) group = MoveGroupCommander(arm + "_arm") # Setting position and orientation target group.set_pose_target(request.ik_request.pose_stamped) # TRY THIS # Setting just the position without specifying the orientation # group.set_position_target([0.5, 0.5, 0.0]) # Plan IK and execute group.go() rospy.sleep(1.0) except rospy.ServiceException, e: print "Service call failed: %s"%e for xi in np.linspace(0.765, 0.838, 3): request = make0(robo, arm, xi, True) try: # Send the request to the service response = compute_ik(request) # Print the response HERE # print(response) group = MoveGroupCommander(arm + "_arm") # Setting position and orientation target group.set_pose_target(request.ik_request.pose_stamped) # TRY THIS # Setting just the position without specifying the orientation # group.set_position_target([0.5, 0.5, 0.0]) # Plan IK and execute group.go() rospy.sleep(1.0) except rospy.ServiceException, e: print "Service call failed: %s"%e for xi in np.linspace(0.838, 0.765, 3): print("new") request = make0(robo, arm, xi, False) try: # 
Send the request to the service response = compute_ik(request) # Print the response HERE # print(response) group = MoveGroupCommander(arm + "_arm") # Setting position and orientation target group.set_pose_target(request.ik_request.pose_stamped) # TRY THIS # Setting just the position without specifying the orientation # group.set_position_target([0.5, 0.5, 0.0]) # Plan IK and execute group.go() rospy.sleep(1.0) except rospy.ServiceException, e: print "Service call failed: %s"%e for xi in np.linspace(0.765, 0.692, 3): request = make0(robo, arm, xi, False) try: # Send the request to the service response = compute_ik(request) # Print the response HERE # print(response) group = MoveGroupCommander(arm + "_arm") # Setting position and orientation target group.set_pose_target(request.ik_request.pose_stamped) # TRY THIS # Setting just the position without specifying the orientation # group.set_position_target([0.5, 0.5, 0.0]) # Plan IK and execute group.go() rospy.sleep(1.0) except rospy.ServiceException, e: print "Service call failed: %s"%e # Python's syntax for a main() method if __name__ == '__main__': if sys.argv[1] == 'sawyer': from intera_interface import gripper as robot_gripper else: from baxter_interface import gripper as robot_gripper main(sys.argv[1])
39.688385
115
0.550036
5,155
42,030
4.336566
0.042483
0.086155
0.130977
0.136882
0.939119
0.936658
0.919257
0.916797
0.90955
0.90548
0
0.040975
0.356626
42,030
1,058
116
39.725898
0.785733
0.157697
0
0.796073
0
0
0.03425
0
0
0
0
0
0
0
null
null
0
0.013595
null
null
0.057402
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
0
0
8
56f2e5d5d632d949b005c992def0743a49363138
269
py
Python
day4/grid.py
Teckat/Python-Webinar
bc72f85487603fcde44ccd367ab362ad0cfb712b
[ "MIT" ]
null
null
null
day4/grid.py
Teckat/Python-Webinar
bc72f85487603fcde44ccd367ab362ad0cfb712b
[ "MIT" ]
null
null
null
day4/grid.py
Teckat/Python-Webinar
bc72f85487603fcde44ccd367ab362ad0cfb712b
[ "MIT" ]
null
null
null
import tkinter as tk # Create a window root = tk.Tk() name1 = tk.Label(root, text="Aman") name2 = tk.Label(root, text="Rahul") name3 = tk.Label(root, text="Manoj") name1.grid(row=0, column=0) name2.grid(row=1, column=1) name3.grid(row=3, column=3) root.mainloop()
16.8125
36
0.684015
48
269
3.833333
0.479167
0.11413
0.179348
0.244565
0
0
0
0
0
0
0
0.051282
0.130112
269
15
37
17.933333
0.735043
0.055762
0
0
0
0
0.055556
0
0
0
0
0
0
1
0
false
0
0.111111
0
0.111111
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
56f43352bfe59575a440aa446f6337c18f283182
1,747
py
Python
03LinkedList/143ReorderList.py
zhaoxinlu/leetcode-algorithms
f5e1c94c99628e7fb04ba158f686a55a8093e933
[ "MIT" ]
null
null
null
03LinkedList/143ReorderList.py
zhaoxinlu/leetcode-algorithms
f5e1c94c99628e7fb04ba158f686a55a8093e933
[ "MIT" ]
null
null
null
03LinkedList/143ReorderList.py
zhaoxinlu/leetcode-algorithms
f5e1c94c99628e7fb04ba158f686a55a8093e933
[ "MIT" ]
null
null
null
# -*- coding: utf-8 -*- """ Editor: Zhao Xinlu School: BUPT Date: 2018-04-11 算法思想:链表重排序 """ # Definition for singly-linked list. class ListNode(object): def __init__(self, x): self.val = x self.next = None class Solution(object): def reorderList(self, head): """ :type head: ListNode :rtype: void Do not return anything, modify head in-place instead. """ if not head or not head.next: return midNode = self.midOfList(head) behindHead = self.reverseList(midNode.next) midNode.next = None head = self.mergeList(head, behindHead) def midOfList(self, head): if not head: return head slow, fast = head, head while fast.next and fast.next.next: slow = slow.next fast = fast.next.next return slow def reverseList(self, head): if not head or not head.next: return head pre = None cur = head nhead = None while cur: nextNode = cur.next if cur.next == None: nhead = cur cur.next = pre pre = cur cur = nextNode return nhead def mergeList(self, head1, head2): if not head2: return head1 if not head1: return head2 dummy = ListNode(0) l3 = dummy while head1 and head2: l3.next = head1 head1 = head1.next l3 = l3.next l3.next = head2 head2 = head2.next l3 = l3.next if head1: l3.next = head1 if head2: l3.next = head2 return dummy.next
21.048193
74
0.507728
201
1,747
4.393035
0.313433
0.04077
0.030578
0.024915
0.091733
0.06342
0.06342
0.06342
0
0
0
0.035957
0.41099
1,747
83
75
21.048193
0.822157
0.117344
0
0.188679
0
0
0
0
0
0
0
0
0
1
0.09434
false
0
0
0
0.283019
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
56f4fe4a463dd38569b76ab12f231c84b957ff91
2,409
py
Python
libraries/colors/colors_example1.py
est/nodebox-gl
f1909a7a4ebc55c8ba254f92e25acb26e8cf1f1d
[ "BSD-3-Clause" ]
1
2015-09-29T14:22:49.000Z
2015-09-29T14:22:49.000Z
libraries/colors/colors_example1.py
est/nodebox-gl
f1909a7a4ebc55c8ba254f92e25acb26e8cf1f1d
[ "BSD-3-Clause" ]
1
2015-09-29T14:23:35.000Z
2015-09-30T02:33:13.000Z
libraries/colors/colors_example1.py
est/nodebox-gl
f1909a7a4ebc55c8ba254f92e25acb26e8cf1f1d
[ "BSD-3-Clause" ]
null
null
null
# ANALOG COLORS # Import the library try: # This is the statement you normally use. colors = ximport("colors") except ImportError: # But since these examples are "inside" the library # we may need to try something different when # the library is not located in /Application Support colors = ximport("__init__") reload(colors) size(600, 600) nofill() stroke(0.4, 0.5, 0) strokewidth(0.1) autoclosepath(False) clr = colors.color(0.6, 0.4, 0) # Get a very dark variation of the color for the background. background(colors.dark(clr).darken(0.1)) clr.alpha = 0.5 # Each curve has a shadow and there are a lot of them, # so we have to use a very subtle shadow: # very transparent and thin (little blur). colors.shadow(alpha=0.05, blur=0.2) for i in range(50): # Each strand of curves has an analogous color # (i.e. hues that are next to each other on the color wheel). # This yields a very natural effect. stroke(clr.analog(angle=10, d=0.3)) # Start drawing strands of curves from the center. x0 = WIDTH/2 y0 = HEIGHT/2 # Each strand of curves bends in a certain way. vx0 = random(-200, 200) vy0 = random(-200, 200) vx1 = random(-200, 200) vy1 = random(-200, 200) # A strand ends up either left or right outside the screen. # Each curve in a strand ends up at the same place # (identical x1 and y1). x1 = choice((-10, WIDTH)) y1 = random(HEIGHT) # This code gives interesting effects as well: #from math import radians, sin, cos #angle = random(360) #x1 = x0 + cos(radians(angle)) * 100 #y1 = y0 + sin(radians(angle)) * 100 for j in range(100): beginpath(x0, y0) curveto( # The bend of each curve in a strand differs slightly # at the start, so the strand looks thicker at the start # and then all the curves come together at x1 and y1. x0+vx0+random(80), y0+vy0+random(80), x1+vx1, y1+vy1, x1, y1 ) endpath() """ # Some type, with a heart symbol! 
heart = u"\u2665" s1 = "strands of analogous curves "+heart s2 = "gratuitous type always looks cool on these things" fill(1, 1, 1, 0.85) fontsize(18) text(s1, 65, HEIGHT/2) fontsize(9) text(s2.upper(), 65, HEIGHT/2+12) stroke(1) strokewidth(1) line(0, HEIGHT/2, 60, HEIGHT/2) """
28.341176
68
0.628892
380
2,409
3.976316
0.473684
0.023163
0.031767
0.023825
0.023825
0
0
0
0
0
0
0.077445
0.26567
2,409
85
69
28.341176
0.77671
0.457866
0
0
0
0
0.014583
0
0
0
0
0
0
1
0
false
0
0.085714
0
0.085714
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
56f59a848cb0e6dafae0001284fd9453da3b89e8
27
py
Python
code/cyclegan/__init__.py
karl-joan/text2art-gan
86370667f9a62bab95968abe1990dcaa4760b333
[ "MIT" ]
5
2021-10-30T13:40:41.000Z
2022-03-20T04:48:45.000Z
code/cyclegan/__init__.py
karl-joan/text2art-gan
86370667f9a62bab95968abe1990dcaa4760b333
[ "MIT" ]
null
null
null
code/cyclegan/__init__.py
karl-joan/text2art-gan
86370667f9a62bab95968abe1990dcaa4760b333
[ "MIT" ]
2
2021-09-06T03:45:04.000Z
2022-03-13T03:23:49.000Z
from .main import cyclegan
13.5
26
0.814815
4
27
5.5
1
0
0
0
0
0
0
0
0
0
0
0
0.148148
27
1
27
27
0.956522
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
56f6dc86c3ae14b0448108e0f2a5fb1570174b18
1,875
py
Python
base_site/mainapp/telegram_bot/handlers/custom_command_handler.py
ricardochaves/financeiro-bot
2c48be4355e3c8630c36aa846c16042f22b88271
[ "MIT" ]
4
2020-01-21T00:21:44.000Z
2021-06-15T19:38:36.000Z
base_site/mainapp/telegram_bot/handlers/custom_command_handler.py
ricardochaves/financeiro-bot
2c48be4355e3c8630c36aa846c16042f22b88271
[ "MIT" ]
173
2019-11-18T08:19:44.000Z
2021-09-08T01:37:19.000Z
base_site/mainapp/telegram_bot/handlers/custom_command_handler.py
ricardochaves/financeiro-bot
2c48be4355e3c8630c36aa846c16042f22b88271
[ "MIT" ]
3
2020-01-28T19:19:35.000Z
2021-05-01T02:33:36.000Z
from typing import Callable from typing import List from telegram import MessageEntity from telegram import Update from telegram.ext import Handler class CustomCommandHandle(Handler): def __init__( self, commands_callback: Callable[[], List[str]], callback: Callable, pass_update_queue=False, pass_job_queue=False, pass_user_data=False, pass_chat_data=False, ): super(CustomCommandHandle, self).__init__( callback, pass_update_queue=pass_update_queue, pass_job_queue=pass_job_queue, pass_user_data=pass_user_data, pass_chat_data=pass_chat_data, ) self.commands_callback = commands_callback def check_update(self, update): """Determines whether an update should be passed to this handlers :attr:`callback`. Args: update (:class:`telegram.Update`): Incoming telegram update. Returns: :obj:`list`: The list of args for the handler """ if isinstance(update, Update) and update.effective_message: message = update.effective_message if ( message.entities and message.entities[0].type == MessageEntity.BOT_COMMAND and message.entities[0].offset == 0 ): command = message.text[1 : message.entities[0].length] args = message.text.split()[1:] command = command.split("@") command.append(message.bot.username) if not ( command[0].lower() in self.list_commands() and command[1].lower() == message.bot.username.lower() ): return None return args def list_commands(self) -> List[str]: return self.commands_callback()
32.894737
117
0.5936
200
1,875
5.355
0.33
0.059757
0.056022
0.035481
0.030812
0
0
0
0
0
0
0.006279
0.320533
1,875
56
118
33.482143
0.83438
0.112
0
0.071429
0
0
0.000618
0
0
0
0
0
0
1
0.071429
false
0.190476
0.119048
0.02381
0.285714
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
0
0
0
0
0
1
56f77dfab2b19510099200dcfd2b7bf839aee11a
175
py
Python
rainbowconnection/sources/__init__.py
zkbt/rainbow-connection
53828fd0b63a552a22a6aa38393cefda27c61b9a
[ "MIT" ]
6
2019-09-04T20:22:02.000Z
2020-12-30T05:00:10.000Z
rainbowconnection/sources/__init__.py
zkbt/rainbow-connection
53828fd0b63a552a22a6aa38393cefda27c61b9a
[ "MIT" ]
8
2019-05-23T18:06:51.000Z
2020-02-13T22:15:07.000Z
rainbowconnection/sources/__init__.py
zkbt/rainbow-connection
53828fd0b63a552a22a6aa38393cefda27c61b9a
[ "MIT" ]
null
null
null
from .spectrum import Spectrum from .blank import Blank from .thermal import Thermal from .sun import Sun from .lightbulbs import * # , LED, CFL # from .PHOENIX import Star
21.875
39
0.76
25
175
5.32
0.44
0
0
0
0
0
0
0
0
0
0
0
0.177143
175
7
40
25
0.923611
0.205714
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
56f7e6b34768a05254458c5974b6b68155a3ea9f
4,704
py
Python
tests/db/ops/test_import_convert_str.py
simonsobs/acondbs
6ca11c2889d827ecdb2b54d0cf3b94b8cdd281e6
[ "MIT" ]
null
null
null
tests/db/ops/test_import_convert_str.py
simonsobs/acondbs
6ca11c2889d827ecdb2b54d0cf3b94b8cdd281e6
[ "MIT" ]
24
2020-04-02T19:29:07.000Z
2022-03-08T03:05:43.000Z
tests/db/ops/test_import_convert_str.py
simonsobs/acondbs
6ca11c2889d827ecdb2b54d0cf3b94b8cdd281e6
[ "MIT" ]
1
2020-04-08T15:48:28.000Z
2020-04-08T15:48:28.000Z
import csv from io import StringIO import datetime from sqlalchemy import MetaData from flask_sqlalchemy import SQLAlchemy from sqlalchemy_utils import EncryptedType import pytest from acondbs import create_app from acondbs.db.ops import convert_data_type_for_insert ##__________________________________________________________________|| sa = SQLAlchemy() class SampleTable(sa.Model): __tablename__ = "sample_table" id_ = sa.Column(sa.Integer(), primary_key=True) # https://docs.sqlalchemy.org/en/14/core/type_basics.html#generic-types text = sa.Column(sa.Text()) unicode_text = sa.Column(sa.UnicodeText()) boolean = sa.Column(sa.Boolean()) integer = sa.Column(sa.Integer()) float = sa.Column(sa.Float()) date = sa.Column(sa.Date()) date_time = sa.Column(sa.DateTime()) time = sa.Column(sa.Time()) encrypted = sa.Column(EncryptedType(sa.Text(), "8b5d3d25b3e5")) ##__________________________________________________________________|| @pytest.fixture def app_with_empty_db(): database_uri = "sqlite:///:memory:" app = create_app(SQLALCHEMY_DATABASE_URI=database_uri) yield app @pytest.fixture def app_with_empty_tables(app_with_empty_db): app = app_with_empty_db # define tables with app.app_context(): engine = sa.engine metadata = MetaData() metadata.reflect(bind=engine) metadata.drop_all(bind=engine) sa.Model.metadata.create_all(engine) yield app ##__________________________________________________________________|| params = [ pytest.param( dict( text="abcde", unicode_text="絵文字😀 😃 😄 😁 😆", boolean=False, integer=512, float=2.34556234, date=datetime.date(2021, 10, 7), date_time=datetime.datetime(2021, 10, 7, 15, 4, 20), time=datetime.time(15, 4, 20), encrypted="secret string", ), id="one", ), pytest.param( dict( boolean=True, ), id="bool-true", ), pytest.param( dict( text="", unicode_text="", boolean=None, integer=None, float=None, date=None, date_time=None, time=None, encrypted=None, ), id="none", ), ] @pytest.mark.parametrize("data", params) def test_convert(app_with_empty_tables, 
data): """test convert_data_type_for_insert()""" app = app_with_empty_tables tbl_name = "sample_table" expected = list(data.items()) # e.g., [('text', 'abcde'), ...] fields = list(data.keys()) # .e.,g ['text', 'unicode_text', ...] # delete all rows from the table # The table is not empty! Not clear why! with app.app_context(): SampleTable.query.delete() sa.session.commit() # enter data with app.app_context(): row = SampleTable(**data) sa.session.add(row) sa.session.commit() # assert the data are committed as they entered with app.app_context(): row = SampleTable.query.one() actual = [(f, getattr(row, f)) for f in fields] assert actual == expected # export to csv as string with app.app_context(): csv_str = _export_tbl_to_csv(tbl_name) # empty the table SampleTable.query.delete() sa.session.commit() # import from the csv with app.app_context(): # confirm the table is empty assert SampleTable.query.count() == 0 _import_tbl_from_csv(tbl_name, csv_str) # assert with app.app_context(): row = SampleTable.query.one() actual = [(f, getattr(row, f)) for f in fields] assert actual == expected def _export_tbl_to_csv(tbl_name): result_proxy = sa.session.execute(f"select * from {tbl_name}") b = StringIO() csv_writer = csv.writer(b, lineterminator="\n") csv_writer.writerow(result_proxy.keys()) csv_writer.writerows(result_proxy) ret = b.getvalue() b.close() return ret def _import_tbl_from_csv(tbl_name, csv_str): engine = sa.engine metadata = MetaData() metadata.reflect(bind=engine) tbl = metadata.tables[tbl_name] rows = list(csv.reader(StringIO(csv_str))) fields = rows[0] rows = rows[1:] field_types = [tbl.columns[f].type for f in fields] data = [ { f: convert_data_type_for_insert(e, t) for f, t, e in zip(fields, field_types, r) } for r in rows ] ins = tbl.insert() sa.session.execute(ins, data) ##__________________________________________________________________||
25.427027
75
0.625638
568
4,704
4.551056
0.27993
0.030948
0.034816
0.046035
0.240232
0.212379
0.133849
0.133849
0.111412
0.068859
0
0.013726
0.25659
4,704
184
76
25.565217
0.724049
0.144345
0
0.3125
0
0
0.032516
0
0
0
0
0
0.023438
1
0.039063
false
0
0.085938
0
0.226563
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
56f8f1bd78977f320adb7ac5e330627101781a73
3,041
py
Python
pythonFiles/PCA.py
SANDEEPREDDY56712/OELP_6thSem
0904c5b47eb57b8399844ca5f3cd9dded6361c5a
[ "MIT" ]
null
null
null
pythonFiles/PCA.py
SANDEEPREDDY56712/OELP_6thSem
0904c5b47eb57b8399844ca5f3cd9dded6361c5a
[ "MIT" ]
null
null
null
pythonFiles/PCA.py
SANDEEPREDDY56712/OELP_6thSem
0904c5b47eb57b8399844ca5f3cd9dded6361c5a
[ "MIT" ]
1
2021-07-30T17:24:10.000Z
2021-07-30T17:24:10.000Z
import pandas as pd
from sklearn.decomposition import PCA
import DataPreprocessing as dp
import sys
import numpy as np
import matplotlib.pyplot as plt
from sklearn.preprocessing import StandardScaler
from sklearn.cluster import KMeans
from scipy.stats import pearsonr

# Names of the original (pre-PCA) variables. Shared by the loading matrix and
# the correlation report so the two listings cannot drift apart.
VARIABLE_NAMES = ['V60', 'Vmn', 'Vsd', 'Asd', 'A+mn', 'A+sd', 'Br_mn', 'Br_sd', 'W']


def implementClustering(principal_df):
    """Cluster the PCA-projected rows with k-means (k=3) and plot the result.

    principal_df -- array-like of shape (n_samples, 2): the PC scores.
    Prints the cluster labels and centroids, then shows a scatter plot
    colored by cluster assignment.
    """
    X_df = pd.DataFrame(principal_df)
    # Standardize before clustering so both components weigh equally.
    scaled = StandardScaler().fit_transform(X_df)
    kmeans = KMeans(n_clusters=3, init='k-means++')
    labels = kmeans.fit_predict(scaled)
    print(labels)
    print(kmeans.cluster_centers_)

    plt.figure(figsize=(10, 7))
    X = np.array(scaled)
    plt.scatter(X[:, 0], X[:, 1], c=labels, cmap='rainbow')
    plt.title('K_means_clustering')
    plt.xlabel('PC1')
    plt.ylabel('PC2')
    plt.show()


def loading_matrix(pca_model):
    """Print the loading matrix of a fitted PCA model (variables as rows)."""
    mat = pd.DataFrame(pca_model.components_, columns=VARIABLE_NAMES)
    print(np.transpose(mat))


def plot_principalComponents(pca_train):
    """Scatter-plot the first two principal components of *pca_train*."""
    plt.figure(figsize=(8, 6))
    plt.title("PCA for Drivability")
    plt.scatter(pca_train[:, 0], pca_train[:, 1], cmap='rainbow')
    plt.xlabel('PC1')
    plt.ylabel('PC2')
    plt.show()


def correlation(X, Y):
    """Return the Pearson correlation coefficient between X and Y."""
    return pearsonr(X, Y)[0]


if __name__ == '__main__':
    dataset = pd.DataFrame(dp.X_norm)

    pca_obd = PCA(n_components=2)
    # fit_transform() both fits the model and projects the data; the original
    # called fit() and then fit_transform(), fitting the same data twice.
    principal_comp = pca_obd.fit_transform(dp.X_norm)

    print(type(principal_comp))
    principal_df = pd.DataFrame(data=principal_comp, columns=['PC1', 'PC2'])
    print(principal_df)

    X = dp.X

    ###################################################################
    # Calculating the correlation matrix between each original variable
    # and each principal component.
    ###################################################################
    corr_matrix = np.array([
        [correlation(X[:, i], principal_comp[:, j])
         for j in range(principal_comp.shape[1])]
        for i in range(X.shape[1])
    ])
    print(pd.DataFrame(corr_matrix, index=VARIABLE_NAMES, columns=['PC1', 'PC2']))

    ###################################################################
    # Calculating the variance retained by the two components.
    ###################################################################
    print("Amount of data held after Dimensionality Reduction")
    print(sum(pca_obd.explained_variance_ratio_) * 100)

    # plot_principalComponents(principal_comp)
    # loading_matrix(pca_obd)  # original passed an undefined name 'pca_model'
    implementClustering(principal_df)
32.698925
122
0.545215
340
3,041
4.670588
0.361765
0.055416
0.013224
0.018892
0.104534
0.076826
0.076826
0.076826
0.076826
0.034005
0
0.011523
0.086814
3,041
92
123
33.054348
0.560317
0.059849
0
0.105263
0
0
0.098266
0
0
0
0
0
0
1
0.070175
false
0
0.157895
0.017544
0.245614
0.140351
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
56fa36196b16c7d3153b31b44fd82e66ce67cc2c
86
py
Python
tap_rockgympro/consts.py
cinchio/tap-rockgympro
aaf55111e40a74708639363cea6198f5e70eeeaf
[ "Apache-2.0" ]
1
2021-09-07T09:29:42.000Z
2021-09-07T09:29:42.000Z
tap_rockgympro/consts.py
cinchio/tap-rockgympro
aaf55111e40a74708639363cea6198f5e70eeeaf
[ "Apache-2.0" ]
null
null
null
tap_rockgympro/consts.py
cinchio/tap-rockgympro
aaf55111e40a74708639363cea6198f5e70eeeaf
[ "Apache-2.0" ]
null
null
null
# Stream names in the fixed order the tap processes them. NOTE(review): the
# ordering presumably encodes sync dependencies (e.g. facilities before the
# streams that reference them) — confirm against the tap's sync loop.
ORDERED_STREAM_NAMES = ['facilities', 'customers', 'bookings', 'checkins', 'invoices']
86
86
0.732558
8
86
7.625
1
0
0
0
0
0
0
0
0
0
0
0
0.069767
86
1
86
86
0.7625
0
0
0
0
0
0.494253
0
0
0
0
0
0
1
0
false
0
0
0
0
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
56fb6798738db165da7424a3d5f66f0c90904f19
98
py
Python
pygame_rl/scenario/soccer/teams.py
sc420/pygame-rl
f81da559385876616d99c74b43e4345f53d086d2
[ "MIT" ]
6
2019-02-18T09:34:34.000Z
2021-11-09T06:58:02.000Z
pygame_rl/scenario/soccer/teams.py
ebola777/pygame-soccer
f81da559385876616d99c74b43e4345f53d086d2
[ "MIT" ]
3
2019-03-14T03:10:48.000Z
2019-07-15T08:11:25.000Z
pygame_rl/scenario/soccer/teams.py
ebola777/pygame-soccer
f81da559385876616d99c74b43e4345f53d086d2
[ "MIT" ]
4
2019-02-13T13:41:17.000Z
2019-07-22T13:33:15.000Z
# Native modules
from enum import IntEnum


class Teams(IntEnum):
    """Integer identifiers for the two sides in a soccer scenario."""

    PLAYER = 0
    COMPUTER = 1
12.25
24
0.683673
13
98
5.153846
0.923077
0
0
0
0
0
0
0
0
0
0
0.027397
0.255102
98
7
25
14
0.890411
0.142857
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0
0.25
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
3
56fcbdb798ac3caf6427669eb040b57eb4eb3d30
331
py
Python
graph_pruning/methods/zhenv5/remove_self_loops.py
shan18/taxi
286e2c9a97c1e0b52d63bbb3508045001f449714
[ "Apache-2.0" ]
49
2017-06-26T01:10:48.000Z
2022-03-15T12:15:26.000Z
graph_pruning/methods/zhenv5/remove_self_loops.py
uhh-lt/taxi
0abc016ff854cf3ebeff61be76acf10b7d6a67a7
[ "Apache-2.0" ]
7
2018-06-20T12:33:49.000Z
2018-08-27T09:30:34.000Z
graph_pruning/methods/zhenv5/remove_self_loops.py
shan18/taxi
286e2c9a97c1e0b52d63bbb3508045001f449714
[ "Apache-2.0" ]
20
2017-06-26T01:27:56.000Z
2021-12-24T10:38:09.000Z
import networkx as nx


def remove_self_loops_from_graph(g):
    """Remove all self-loop edges from *g* in place.

    g -- a graph object exposing ``selfloop_edges`` and ``remove_edges_from``.
    Returns the list of removed self-loop edges.
    """
    # Materialize the edge view before mutating: removing edges while
    # iterating the live view would misbehave.
    self_loops = list(g.selfloop_edges())
    g.remove_edges_from(self_loops)
    return self_loops


def remove_self_loops_from_edges_file(graph_file):
    """Load a directed graph from an edge-list file and strip its self-loops.

    Bug fix: the original read ``args.original_graph`` — an undefined global —
    instead of the ``graph_file`` parameter, raising NameError on every call.

    NOTE(review): ``Graph.selfloop_edges()`` was removed in networkx >= 2.4;
    newer versions require ``nx.selfloop_edges(g)`` — confirm the pinned
    networkx version before upgrading.
    """
    g = nx.read_edgelist(graph_file, nodetype=int, create_using=nx.DiGraph())
    return remove_self_loops_from_graph(g)
30.090909
87
0.81571
55
331
4.490909
0.454545
0.218623
0.182186
0.230769
0.303644
0.202429
0
0
0
0
0
0
0.096677
331
10
88
33.1
0.826087
0
0
0
0
0
0
0
0
0
0
0
0
1
0.25
false
0
0.125
0
0.625
0
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
1
0
0
5
56fd5170b7684adf06467bcddc3847c7bea4ee50
3,438
py
Python
DailyProgrammer/DP20150422B.py
DayGitH/Python-Challenges
bc32f1332a92fcc2dfa6f5ea4d95f8a8d64c3edf
[ "MIT" ]
2
2020-12-23T18:59:22.000Z
2021-04-14T13:16:09.000Z
DailyProgrammer/DP20150422B.py
DayGitH/Python-Challenges
bc32f1332a92fcc2dfa6f5ea4d95f8a8d64c3edf
[ "MIT" ]
null
null
null
DailyProgrammer/DP20150422B.py
DayGitH/Python-Challenges
bc32f1332a92fcc2dfa6f5ea4d95f8a8d64c3edf
[ "MIT" ]
null
null
null
""" [2015-04-22] Challenge #211 [Intermediate] Ogre Maze https://www.reddit.com/r/dailyprogrammer/comments/33hwwf/20150422_challenge_211_intermediate_ogre_maze/ #Description: Today we are going to solve a maze. What? Again? Come on, Simpsons did it. Yah okay so we always pick a hero to walk a maze. This time our hero is an Ogre. An ogre is large. Your run of the mill hero "@" takes up a 1x1 spot. Easy. But our beloved hero today is an ogre. @@ @@ Ogres take up a 2x2 space instead of a 1x1. This makes navigating a maze tougher as you have to handle the bigger ogre. So I will give you a layout of a swamp. (Ogres navigate swamps while puny heroes navigate caves. That's the unwritten rules of maze challenges) You will find the path (if possible) for the ogre to walk to his gold. #Input: You will read in a swamp. The swamp is laid out in 10x10 spaces. Each space can be the following: * . - empty spot * @ - 1/4th of the 2x2 ogre * $ - the ogre's gold * O - sink hole - the ogre cannot touch these. All 2x2 of the Ogre manages to fall down one of these (even if it is a 1x1 spot too. Don't be bothered by this - think of it as a "wall" but in a swamp we call them sink holes) #Output: You will navigate the swamp. If you find a path you will display the solution of all the spaces the ogre will occupy to get to his gold. Use a "&" symbol to show the muddy path created by the ogre to reach his gold. If there is no path at all then you will output "No Path" #Example Input 1: @@........ @@O....... .....O.O.. .......... ..O.O..... ..O....O.O .O........ .......... .....OO... .........$ #Example Output 1: &&.&&&&&&& &&O&&&&&&& &&&&&O.O&& &&&&&&&&&& ..O.O&&&&& ..O..&&O.O .O...&&&&. .....&&&&. .....OO&&& .......&&& #Example Input 2: @@........ @@O....... .....O.O.. .......... ..O.O..... ..O....O.O .O........ .......... .....OO.O. .........$ #Example Output 2: No Path #FAQ (Will update with answers here) * Q: Does path have to be shortest Path. * A: No. 
### - * Q: There could be a few different paths. Which one do I output? * A: The first one that works. Answers will vary based on how people solve it. ### - * Q: My output should show all the spots the Ogre moves too or just the optimal path? * A: The ogre will hit dead ends. But only show the optimal path and not all his dead ends. Think of this as a GPS Tom-Tom guide for the Ogre so he uses the program to find his gold. TIL Ogres subscribe to /r/dailyprogrammer. (And use the internet....) #Challenge Input 1: $.O...O... ...O...... .......... O..O..O... .......... O..O..O... .......... ......OO.. O..O....@@ ........@@ #Challenge Input 2: .@@.....O. .@@....... ..O..O.... .......O.. ...O...... .......... .......O.O ...O.O.... .......O.. .........$ #Bonus: For those seeking more challenge. Instead of using input swamps you will generate a swamp. Place the Ogre randomly. Place his gold randomly. Generate sinkholes based on the size of the swamp. For example you are given N for a NxN swamp to generate. Generate a random swamp and apply your solution to it. The exact design/algorithm for random generation I leave it for you to tinker with. I suggest start with like 15% of the swamp spots are sinkholes and go up or down based on your results. (So you get paths and not always No Path) """ def main(): pass if __name__ == "__main__": main()
31.254545
119
0.617219
577
3,438
3.655113
0.381283
0.039829
0.051209
0.058796
0.075391
0.045045
0.045045
0.045045
0.045045
0.044571
0
0.018275
0.204188
3,438
109
120
31.541284
0.752558
1.058464
0
0
0
0
0.125
0
0
0
0
0
0
1
0.25
true
0.25
0
0
0.25
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
null
0
0
0
0
0
1
1
1
0
0
0
0
0
4
56fe5a7bf702f7326f225c4e486c0a157601c27e
39,967
py
Python
test/intelliflow/core/signal_processing/routing_runtime_constructs/test_route.py
amzn/rheoceros
5e8f79d97f8b21d693d3c869b0df70de3d5fd068
[ "Apache-2.0", "MIT-0" ]
4
2022-03-24T04:39:02.000Z
2022-03-31T16:41:50.000Z
test/intelliflow/core/signal_processing/routing_runtime_constructs/test_route.py
amzn/rheoceros
5e8f79d97f8b21d693d3c869b0df70de3d5fd068
[ "Apache-2.0", "MIT-0" ]
null
null
null
test/intelliflow/core/signal_processing/routing_runtime_constructs/test_route.py
amzn/rheoceros
5e8f79d97f8b21d693d3c869b0df70de3d5fd068
[ "Apache-2.0", "MIT-0" ]
null
null
null
# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. # SPDX-License-Identifier: Apache-2.0 import copy import time from test.intelliflow.core.signal_processing.dimension_constructs.test_dimension_spec import TestDimensionSpec from test.intelliflow.core.signal_processing.routing_runtime_constructs import create_incoming_signal from test.intelliflow.core.signal_processing.signal.test_signal_link_node import signal_dimension_tuple import pytest from intelliflow.core.platform.constructs import RoutingHookInterface from intelliflow.core.serialization import dumps, loads from intelliflow.core.signal_processing.definitions.dimension_defs import Type from intelliflow.core.signal_processing.routing_runtime_constructs import * from intelliflow.core.signal_processing.signal import * from intelliflow.core.signal_processing.signal_source import InternalDatasetSignalSourceAccessSpec from intelliflow.core.signal_processing.slot import SlotType def _create_hook(code: str = "pass") -> Slot: return Slot(SlotType.SYNC_INLINED, dumps(code), None, None, None, None) class TestRoute: @classmethod def _route_1_basic(cls): from test.intelliflow.core.signal_processing.test_slot import TestSlot from test.intelliflow.core.signal_processing.signal.test_signal import TestSignal from test.intelliflow.core.signal_processing.signal.test_signal_link_node import TestSignalLinkNode signal_link_node = copy.deepcopy(TestSignalLinkNode.signal_link_node_1) output_spec = DimensionSpec.load_from_pretty({"output_dim": {type: Type.LONG}}) output_dim_link_matrix = [ SignalDimensionLink( signal_dimension_tuple(None, "output_dim"), lambda x: x, signal_dimension_tuple(TestSignal.signal_internal_1, "dim_1_1") ) ] output_filter = signal_link_node.get_output_filter( output_spec, # Logical equivalent -> output_dim = (signal_internal_1('dim_1_1') output_dim_link_matrix, ) output_signal = Signal( TestSignal.signal_internal_1.type, InternalDatasetSignalSourceAccessSpec("sample_data", output_spec, **{}), 
SignalDomainSpec(output_spec, output_filter, TestSignal.signal_internal_1.domain_spec.integrity_check_protocol), "sample_data", ) return Route( f"InternalDataNode-{output_signal.alias}", signal_link_node, output_signal, output_dim_link_matrix, [TestSlot.slot_batch_compute_basic], False, ) @classmethod def _route_2_two_inputs_linked(cls): from test.intelliflow.core.signal_processing.test_slot import TestSlot from test.intelliflow.core.signal_processing.signal.test_signal import TestSignal from test.intelliflow.core.signal_processing.signal.test_signal_link_node import TestSignalLinkNode signal_link_node = copy.deepcopy(TestSignalLinkNode.signal_link_node_2) output_spec = DimensionSpec.load_from_pretty({"output_dim": {type: Type.LONG}}) output_dim_link_matrix = [ SignalDimensionLink( signal_dimension_tuple(None, "output_dim"), lambda x: x, signal_dimension_tuple(TestSignal.signal_internal_1, "dim_1_1") ) ] output_filter = signal_link_node.get_output_filter(output_spec, output_dim_link_matrix) output_signal = Signal( TestSignal.signal_internal_1.type, InternalDatasetSignalSourceAccessSpec("sample_data_2", output_spec, **{}), SignalDomainSpec(output_spec, output_filter, TestSignal.signal_internal_1.domain_spec.integrity_check_protocol), "sample_data_2", ) return Route( f"InternalDataNode-{output_signal.alias}", signal_link_node, output_signal, output_dim_link_matrix, [TestSlot.slot_batch_compute_basic], False, ) @classmethod def _route_3_three_inputs_unlinked(cls): from test.intelliflow.core.signal_processing.test_slot import TestSlot from test.intelliflow.core.signal_processing.signal.test_signal import TestSignal from test.intelliflow.core.signal_processing.signal.test_signal_link_node import TestSignalLinkNode signal_link_node = copy.deepcopy(TestSignalLinkNode.signal_link_node_3_complex) # create sample expected output output_spec = DimensionSpec.load_from_pretty( {"output_dim_1": {type: Type.LONG, "output_dim_2": {type: Type.LONG, "output_dim_3": {type: 
Type.LONG}}}} ) output_dim_link_matrix = [ SignalDimensionLink( signal_dimension_tuple(None, "output_dim_1"), lambda x: x, signal_dimension_tuple(TestSignal.signal_internal_complex_1, "dim_1_1"), ), SignalDimensionLink( signal_dimension_tuple(None, "output_dim_2"), # input's sub dimension is of type String, convert it. # because output spec expects it to be of type Long. lambda x: ord(x), signal_dimension_tuple(TestSignal.signal_internal_complex_1, "dim_1_2"), ), SignalDimensionLink( signal_dimension_tuple(None, "output_dim_3"), # and this one is from the 3rd input (which has only one dim 'dim_1_1') lambda x: x, signal_dimension_tuple(TestSignal.signal_s3_1, "dim_1_1"), ), ] output_filter = signal_link_node.get_output_filter(output_spec, output_dim_link_matrix) output_signal = Signal( SignalType.INTERNAL_PARTITION_CREATION, InternalDatasetSignalSourceAccessSpec("sample_data_3", output_spec, **{}), SignalDomainSpec(output_spec, output_filter, TestSignal.signal_internal_complex_1.domain_spec.integrity_check_protocol), "sample_data_3", ) return Route( f"InternalDataNode-{output_signal.alias}", signal_link_node, output_signal, output_dim_link_matrix, [TestSlot.slot_batch_compute_basic], False, ) @classmethod def _route_3_three_inputs_linked(cls): from test.intelliflow.core.signal_processing.test_slot import TestSlot from test.intelliflow.core.signal_processing.signal.test_signal import TestSignal from test.intelliflow.core.signal_processing.signal.test_signal_link_node import TestSignalLinkNode signal_link_node = copy.deepcopy(TestSignalLinkNode.signal_link_node_3_complex) # add links (since the dimension names on same, use the auto-linking of dimensions, # so that; # signal_internal_complex_1['dim_1_1'] == signal_s3_1['dim_1_1'], etc signal_link_node.compensate_missing_links() # create sample expected output output_spec = DimensionSpec.load_from_pretty( { "output_dim_1": { type: Type.LONG, "output_dim_2": { type: Type.LONG, }, } } ) output_dim_link_matrix = [ 
SignalDimensionLink( signal_dimension_tuple(None, "output_dim_1"), # from the second dimension of the first/second inputs (convert to Long) lambda x: ord(x), signal_dimension_tuple(TestSignal.signal_internal_complex_1, "dim_1_2"), ), SignalDimensionLink( signal_dimension_tuple(None, "output_dim_2"), # and this one is from the 3rd input (which has only one dim 'dim_1_1') lambda x: x, signal_dimension_tuple(TestSignal.signal_s3_1, "dim_1_1"), ), ] output_filter = signal_link_node.get_output_filter(output_spec, output_dim_link_matrix) output_signal = Signal( SignalType.INTERNAL_PARTITION_CREATION, InternalDatasetSignalSourceAccessSpec("sample_data_4", output_spec, **{}), SignalDomainSpec(output_spec, output_filter, TestSignal.signal_internal_complex_1.domain_spec.integrity_check_protocol), "sample_data_4", ) return Route( f"InternalDataNode-{output_signal.alias}", signal_link_node, output_signal, output_dim_link_matrix, [TestSlot.slot_batch_compute_basic], False, ) def test_route_init(self): assert self._route_1_basic() def test_route_init_with_hooks(self): route = self._route_1_basic() Route( route.route_id, route.link_node, route.output, route._output_dim_matrix, route.slots, False, RouteExecutionHook( on_exec_begin=_create_hook(), on_exec_skipped=_create_hook(), on_compute_success=_create_hook(), on_compute_failure=_create_hook(), on_success=_create_hook(), on_failure=_create_hook(), checkpoints=[RouteCheckpoint(5, _create_hook())], ), 30 * 24 * 60 * 60, RoutePendingNodeHook(on_pending_node_created=_create_hook(), on_expiration=_create_hook(), checkpoints=None), ) # check another instantiation case + checkpoint sorting assert ( Route( route.route_id, route.link_node, route.output, route._output_dim_matrix, route.slots, False, RouteExecutionHook( on_exec_begin=_create_hook(), on_exec_skipped=_create_hook(), on_compute_success=_create_hook(), on_compute_failure=_create_hook(), on_success=_create_hook(), on_failure=_create_hook(), checkpoints=[], ), None, 
RoutePendingNodeHook( on_pending_node_created=_create_hook(), on_expiration=None, checkpoints=[RouteCheckpoint(2, _create_hook()), RouteCheckpoint(1, _create_hook())], ), ) .pending_node_hook.checkpoints[0] .checkpoint_in_secs == 1 ) def test_route_init_with_hook_chain(self): route = self._route_1_basic() callback1_var = None callback1_var_expected = 1 def _callback1(*args, **kwargs): nonlocal callback1_var callback1_var = callback1_var_expected callback2_var = None callback2_var_expected = 2 def _callback2(*args, **kwargs): nonlocal callback2_var callback2_var = callback2_var_expected hook1 = RouteExecutionHook( on_exec_begin=_create_hook(), on_exec_skipped=_callback1, on_compute_success=_create_hook(), on_compute_failure=_create_hook(), on_success=_create_hook(), on_failure=_create_hook(), checkpoints=[RouteCheckpoint(5, _create_hook())], ) hook2 = RouteExecutionHook( on_exec_begin=_create_hook(), on_exec_skipped=_callback2, on_compute_success=_create_hook(), on_compute_failure=_create_hook(), on_success=_create_hook(), on_failure=_create_hook(), checkpoints=[RouteCheckpoint(10, _create_hook())], ) exec_hook_chain = hook1.chain(hook2) pending_hook1 = RoutePendingNodeHook( on_pending_node_created=_create_hook(), on_expiration=_create_hook(), checkpoints=[RouteCheckpoint(5, _create_hook())] ) pending_hook2 = RoutePendingNodeHook( on_pending_node_created=_create_hook(), on_expiration=_create_hook(), checkpoints=[RouteCheckpoint(10, _create_hook())] ) pending_hook3 = RoutePendingNodeHook( on_pending_node_created=_create_hook(), on_expiration=_create_hook(), checkpoints=[RouteCheckpoint(13, _create_hook())] ) pending_hook_chain = pending_hook1.chain(pending_hook2, pending_hook3) pending_hook_chain_2 = pending_hook1.chain(pending_hook2).chain(pending_hook3) Route( route.route_id, route.link_node, route.output, route._output_dim_matrix, route.slots, False, exec_hook_chain, 24 * 60 * 60, pending_hook_chain, ) assert len(exec_hook_chain.checkpoints) == 2 assert 
len(pending_hook_chain.checkpoints) == 3 assert len(pending_hook_chain_2.checkpoints) == 3 exec_hook_chain.on_exec_begin() pending_hook_chain.on_pending_node_created() pending_hook_chain_2.on_expiration() exec_hook_chain.on_exec_skipped() assert callback1_var == callback1_var_expected assert callback2_var == callback2_var_expected def test_route_equality(self): assert self._route_1_basic() == self._route_1_basic() assert Route("test", None, None, [], [], False) == Route("test", None, None, [], [], False) assert Route("test", None, None, [], [], False) != Route("test2", None, None, [], [], False) assert self._route_1_basic() == self._route_1_basic().clone() def test_route_check_integrity(self): route = self._route_1_basic() assert route.check_integrity(self._route_1_basic()) route2 = self._route_2_two_inputs_linked() # Route is very sensitive about an integrity check against a different Route. This is very critical # for whole Routing module. It should not occur! A safe-guard against a high-level (e.g RoutingTable) bug. 
with pytest.raises(ValueError): assert route.check_integrity(route2) # make id equal so that check move on to other fields route2._id = route.route_id assert not route.check_integrity(route2) assert route.check_integrity(Route(route.route_id, route.link_node, route.output, route._output_dim_matrix, route.slots, False)) assert not route.check_integrity( Route(route.route_id, route2.link_node, route.output, route._output_dim_matrix, route.slots, False) ) assert not route.check_integrity( Route(route.route_id, route.link_node, route2.output, route._output_dim_matrix, route.slots, False) ) assert not route.check_integrity(Route(route.route_id, route.link_node, route.output, [], route.slots, False)) assert not route.check_integrity(Route(route.route_id, route.link_node, route.output, route._output_dim_matrix, [], False)) def test_route_check_integrity_noops(self): """show that some type of changes in route should not invalidate the integrity""" route = self._route_3_three_inputs_linked() # dim matrix ordering should not alter the semantics of route new_route = copy.deepcopy(route) new_route.link_node.link_matrix.reverse() new_route.output_dim_matrix.reverse() # TODO evaluate slots order? 
currently impacting integrity but not as critical as dim matrice assert route.check_integrity(new_route) @pytest.mark.parametrize( "execution_hook_1, pending_node_ttl_1, pending_hook_1, execution_hook_2, pending_node_ttl_2, pending_hook_2, result", [ (None, 30 * 24 * 60 * 60, None, None, 24 * 60 * 60, None, False), ( RouteExecutionHook( on_exec_begin=_create_hook(), on_exec_skipped=_create_hook(), on_compute_success=_create_hook(), on_compute_failure=_create_hook(), on_success=_create_hook(), on_failure=_create_hook(), checkpoints=[RouteCheckpoint(checkpoint_in_secs=5, slot=_create_hook())], ), 30 * 24 * 60 * 60, RoutePendingNodeHook( on_pending_node_created=_create_hook(), on_expiration=_create_hook(), checkpoints=[RouteCheckpoint(checkpoint_in_secs=1, slot=_create_hook()), RouteCheckpoint(2, _create_hook())], ), RouteExecutionHook( on_exec_begin=_create_hook(), on_exec_skipped=_create_hook(), on_compute_success=_create_hook(), on_compute_failure=_create_hook(), on_success=_create_hook(), on_failure=_create_hook(), checkpoints=[RouteCheckpoint(5, _create_hook())], ), 30 * 24 * 60 * 60, RoutePendingNodeHook( on_pending_node_created=_create_hook(), on_expiration=_create_hook(), # also test that checkpoint other should not matter as long as values are same checkpoints=[RouteCheckpoint(2, _create_hook()), RouteCheckpoint(1, _create_hook())], ), True, ), ( RouteExecutionHook(on_exec_begin=_create_hook()), 30 * 24 * 60 * 60, RoutePendingNodeHook(), RouteExecutionHook(on_exec_begin=_create_hook()), 30 * 24 * 60 * 60, RoutePendingNodeHook(), True, ), ( RouteExecutionHook(on_exec_begin=_create_hook("print('diff')")), 30 * 24 * 60 * 60, RoutePendingNodeHook(), RouteExecutionHook(on_exec_begin=_create_hook()), 30 * 24 * 60 * 60, RoutePendingNodeHook(), False, ), (None, None, None, None, None, None, True), ( RouteExecutionHook(on_exec_begin=None, on_exec_skipped=None), None, None, RouteExecutionHook(on_exec_begin=None, on_exec_skipped=_create_hook()), None, None, False, ), ( 
RouteExecutionHook(on_exec_begin=None, on_exec_skipped=_create_hook()), None, None, RouteExecutionHook(on_exec_begin=None, on_exec_skipped=None), None, None, False, ), ( RouteExecutionHook( on_exec_begin=None, on_exec_skipped=None, on_compute_success=None, on_compute_failure=None, on_success=None, on_failure=None, checkpoints=[RouteCheckpoint(1, _create_hook())], ), None, RoutePendingNodeHook(), RouteExecutionHook( on_exec_begin=None, on_exec_skipped=None, on_compute_success=None, on_compute_failure=None, on_success=None, on_failure=None, # change the value of first checkpoint checkpoints=[RouteCheckpoint(5, _create_hook())], ), None, RoutePendingNodeHook(), False, ), ( RouteExecutionHook(), None, RoutePendingNodeHook( on_pending_node_created=_create_hook(), on_expiration=None, checkpoints=[RouteCheckpoint(2, _create_hook())] ), RouteExecutionHook(), None, RoutePendingNodeHook( on_pending_node_created=_create_hook(), on_expiration=None, # also test that checkpoint other should not matter as long as values are same checkpoints=[RouteCheckpoint(1, _create_hook())], ), False, ), ( None, None, RoutePendingNodeHook(on_pending_node_created=None, on_expiration=None, checkpoints=[RouteCheckpoint(1, _create_hook())]), None, None, RoutePendingNodeHook( on_pending_node_created=None, on_expiration=None, # also test that checkpoint other should not matter as long as values are same checkpoints=[RouteCheckpoint(1, _create_hook("print('diff 2')"))], ), False, ), ], ) def test_route_check_auxiliary_integrity( self, execution_hook_1, pending_node_ttl_1, pending_hook_1, execution_hook_2, pending_node_ttl_2, pending_hook_2, result ): route = self._route_1_basic() assert ( Route( route.route_id, route.link_node, route.output, route._output_dim_matrix, route.slots, False, execution_hook_1, pending_node_ttl_1, pending_hook_1, ).check_auxiliary_data_integrity( Route( route.route_id, route.link_node, route.output, route._output_dim_matrix, route.slots, False, execution_hook_2, 
pending_node_ttl_2, pending_hook_2, ) ) == result ) def test_route_serialization(self): route = self._route_1_basic() assert route == loads(dumps(route)) def test_route_receive_basic(self): from test.intelliflow.core.signal_processing.signal.test_signal import TestSignal route = self._route_1_basic() # route will reject incompatible signal assert not route.receive(create_incoming_signal(TestSignal.signal_s3_1, [1])) assert not route._pending_nodes # successful trigger # 1 response: Optional[Route.Response] = route.receive(create_incoming_signal(TestSignal.signal_internal_1, [1])) assert response assert len(response.new_execution_contexts) == 1 assert response.new_execution_contexts[0].slots assert DimensionFilter.check_equivalence( response.new_execution_contexts[0].output.domain_spec.dimension_filter_spec, DimensionFilter.load_raw({1: {}}) ) # since the node completed immediately (since it has only one input), # also removed from the internal pending nodes. assert not route._pending_nodes # successful trigger # 2 response: Optional[Route.Response] = route.receive(create_incoming_signal(TestSignal.signal_internal_1, [2])) assert DimensionFilter.check_equivalence( response.new_execution_contexts[0].output.domain_spec.dimension_filter_spec, DimensionFilter.load_raw({2: {}}) ) # since the node completed immediately (since it has only one input), # also removed from the internal pending nodes. 
assert not route._pending_nodes def test_route_receive_two_inputs_linked(self): from test.intelliflow.core.signal_processing.signal.test_signal import TestSignal route = self._route_2_two_inputs_linked() # will consume the event, create a new pending node but return no 'new_execution_contexts' response = route.receive(create_incoming_signal(TestSignal.signal_internal_1, [1])) assert not response.new_execution_contexts assert len(response.new_pending_nodes) == 1 assert len(route._pending_nodes) == 1 # will consume the event, create a new pending node but return no 'new_execution_contexts' response = route.receive(create_incoming_signal(TestSignal.signal_internal_1, [2])) assert not response.new_execution_contexts assert len(response.new_pending_nodes) == 1 assert len(route._pending_nodes) == 2 # please note that it is 2 now! # will consume again with no internal effect response = route.receive(create_incoming_signal(TestSignal.signal_internal_1, [2])) assert not response.new_execution_contexts assert not response.new_pending_nodes response = route.receive(create_incoming_signal(TestSignal.signal_internal_1, [1])) assert not response.new_execution_contexts assert not response.new_pending_nodes assert len(route._pending_nodes) == 2 # please note that it is 2 still # send in a Signal that belongs to the second input but with different dim value # will create another pending node since it is neither '1' nor '2' (linking is active). response = route.receive(create_incoming_signal(TestSignal.signal_s3_1, [3])) assert not response.new_execution_contexts assert len(response.new_pending_nodes) == 1 assert len(route._pending_nodes) == 3 # please note that it is 3 now! 
# Completions # unleash the third pending node (which is pending on its first input with dim value 3) response = route.receive(create_incoming_signal(TestSignal.signal_internal_1, [3])) assert len(response.new_execution_contexts) == 1 assert not response.new_pending_nodes assert DimensionFilter.check_equivalence( response.new_execution_contexts[0].output.domain_spec.dimension_filter_spec, DimensionFilter.load_raw({3: {}}) ) assert len(route._pending_nodes) == 2 # please note that it got back to 2! # unleash the fist node response = route.receive(create_incoming_signal(TestSignal.signal_s3_1, [1])) assert len(response.new_execution_contexts) == 1 assert DimensionFilter.check_equivalence( response.new_execution_contexts[0].output.domain_spec.dimension_filter_spec, DimensionFilter.load_raw({1: {}}) ) assert len(route._pending_nodes) == 1 # and finally the second node response = route.receive(create_incoming_signal(TestSignal.signal_s3_1, [2])) assert len(response.new_execution_contexts) == 1 assert DimensionFilter.check_equivalence( response.new_execution_contexts[0].output.domain_spec.dimension_filter_spec, DimensionFilter.load_raw({2: {}}) ) assert not route._pending_nodes def test_route_receive_three_inputs_unlinked(self): from test.intelliflow.core.signal_processing.signal.test_signal import TestSignal route = self._route_3_three_inputs_unlinked() # will consume the event, create a new pending node but return no 'new_execution_contexts' response = route.receive(create_incoming_signal(TestSignal.signal_internal_complex_1, [1, "y"])) assert not response.new_execution_contexts assert len(route._pending_nodes) == 1 # will consume the event, create a new pending node but return no 'new_execution_contexts' response = route.receive(create_incoming_signal(TestSignal.signal_internal_complex_1, [2, "y"])) assert not response.new_execution_contexts assert len(route._pending_nodes) == 2 # please note that it is 2 now! 
# will consume again with no internal effect response = route.receive(create_incoming_signal(TestSignal.signal_internal_complex_1, [2, "y"])) assert not response.new_execution_contexts response = route.receive(create_incoming_signal(TestSignal.signal_internal_complex_1, [1, "y"])) assert not response.new_execution_contexts assert len(route._pending_nodes) == 2 # please note that it is 2 still # EFFECT of missing linking (N-N logic) # incoming signal will satisfy all of the pending nodes response = route.receive(create_incoming_signal(TestSignal.signal_s3_1, [3])) assert len(response.new_execution_contexts) == 2 assert not route._pending_nodes # please note that it got back to 0 now! # we have to compare this way since the order is not guarateed if DimensionFilter.check_equivalence( response.new_execution_contexts[0].output.domain_spec.dimension_filter_spec, DimensionFilter.load_raw( { 2: { # from the 1st dim of the 1st input signal 121: {3: {}} # ord('y') from the second dim of the 1st input signal # from the 3rd input } } ), ): assert DimensionFilter.check_equivalence( response.new_execution_contexts[1].output.domain_spec.dimension_filter_spec, DimensionFilter.load_raw({1: {121: {3: {}}}}) ) else: assert DimensionFilter.check_equivalence( response.new_execution_contexts[1].output.domain_spec.dimension_filter_spec, DimensionFilter.load_raw({2: {121: {3: {}}}}) ) def test_route_receive_three_inputs_linked(self): from test.intelliflow.core.signal_processing.signal.test_signal import TestSignal route = self._route_3_three_inputs_linked() # will consume the event, create a new pending node but return no 'new_execution_contexts' response = route.receive(create_incoming_signal(TestSignal.signal_internal_complex_1, [1, "y"])) assert not response.new_execution_contexts assert len(route._pending_nodes) == 1 # will consume the event, create a new pending node but return no 'new_execution_contexts' response = 
route.receive(create_incoming_signal(TestSignal.signal_internal_complex_1, [2, "y"])) assert not response.new_execution_contexts assert len(route._pending_nodes) == 2 # EFFECT of linking # incoming signal will not satisfy dimensional linking and will just create another node. response = route.receive(create_incoming_signal(TestSignal.signal_s3_1, [3])) assert not response.new_execution_contexts assert len(route._pending_nodes) == 3 # please note that it is 3 now! # unleash the most recent node response = route.receive(create_incoming_signal(TestSignal.signal_internal_complex_1, [3, "y"])) assert len(response.new_execution_contexts) == 1 assert len(route._pending_nodes) == 2 assert DimensionFilter.check_equivalence( response.new_execution_contexts[0].output.domain_spec.dimension_filter_spec, DimensionFilter.load_raw({121: {3: {}}}) ) # unleash the node that created first response = route.receive(create_incoming_signal(TestSignal.signal_s3_1, [1])) assert len(response.new_execution_contexts) == 1 assert len(route._pending_nodes) == 1 assert DimensionFilter.check_equivalence( response.new_execution_contexts[0].output.domain_spec.dimension_filter_spec, DimensionFilter.load_raw({121: {1: {}}}) ) # unleash the node that created second response = route.receive(create_incoming_signal(TestSignal.signal_s3_1, [2])) assert len(response.new_execution_contexts) == 1 assert not route._pending_nodes # no remaining pending nodes! 
assert DimensionFilter.check_equivalence( response.new_execution_contexts[0].output.domain_spec.dimension_filter_spec, DimensionFilter.load_raw({121: {2: {}}}) ) def test_route_check_expired_nodes(self): from test.intelliflow.core.signal_processing.signal.test_signal import TestSignal route = self._route_2_two_inputs_linked() route = Route( route.route_id, route.link_node, route.output, route._output_dim_matrix, route.slots, False, RouteExecutionHook(), 5, # seconds RoutePendingNodeHook(), ) route.receive(create_incoming_signal(TestSignal.signal_internal_1, [1])) route.receive(create_incoming_signal(TestSignal.signal_internal_1, [2])) assert len(route._pending_nodes) == 2 # send in a Signal that belongs to the second input but with different dim value # will create another pending node since it is neither '1' nor '2' (linking is active). response = route.receive(create_incoming_signal(TestSignal.signal_s3_1, [3])) assert not response.new_execution_contexts assert len(response.new_pending_nodes) == 1 assert len(route._pending_nodes) == 3 # please note that it is 3 now! # Completions # unleash the third pending node (which is pending on its first input with dim value 3) route.receive(create_incoming_signal(TestSignal.signal_internal_1, [3])) assert len(route._pending_nodes) == 2 # please note that it got back to 2! # just make sure that it has been at least 5 seconds after the creation of those pending nodes. time.sleep(5) expired_nodes = route.check_expired_nodes() assert len(expired_nodes) == 2 assert len(route._pending_nodes) == 0 def test_route_zombie_node_on_other_input_already_materialized(self): from test.intelliflow.core.signal_processing.signal.test_signal import TestSignal route = self._route_2_two_inputs_linked() # create new route to make sure that the second input is already materialized on value 3 [for dim_1_1]! 
new_signal_link_node = SignalLinkNode( [TestSignal.signal_internal_1, create_incoming_signal(TestSignal.signal_s3_1.clone("test_signal_from_S3"), [3])] ) new_signal_link_node.compensate_missing_links() route = Route( route.route_id, new_signal_link_node, route.output, route._output_dim_matrix, route.slots, False, route.execution_hook, route.pending_node_ttl_in_secs, # seconds route.pending_node_hook, ) # since second input is locked on 3, this event would yield a zombie node # 1 != 3 response = route.receive(create_incoming_signal(TestSignal.signal_internal_1, [1])) assert not response.new_execution_contexts assert len(response.new_pending_nodes) == 1 assert len(route._pending_nodes) == 1 assert next(iter(response.new_pending_nodes)).is_zombie # same again 2 != 3 response = route.receive(create_incoming_signal(TestSignal.signal_internal_1, [2])) # since second input is locked on 3, this event would yield a zombie node assert not response.new_execution_contexts assert len(response.new_pending_nodes) == 1 assert len(route._pending_nodes) == 2 assert next(iter(response.new_pending_nodes)).is_zombie # new pending node! 3 == 3 response = route.receive(create_incoming_signal(TestSignal.signal_internal_1, [3])) assert not response.new_execution_contexts assert len(response.new_pending_nodes) == 1 assert len(route._pending_nodes) == 3 # new node should NOT be a zombie, waiting for TestSignal.signal_s3_1[3] to come in assert not next(iter(response.new_pending_nodes)).is_zombie def test_route_zombie_node_not_possible_when_inputs_unlinked(self): from test.intelliflow.core.signal_processing.signal.test_signal import TestSignal route = self._route_2_two_inputs_linked() # create new route to make sure that the second input is already materialized on value 3 [for dim_1_1]! new_signal_link_node = SignalLinkNode( [TestSignal.signal_internal_1, create_incoming_signal(TestSignal.signal_s3_1.clone("test_signal_from_S3"), [3])] ) # UNLINKED ! 
# new_signal_link_node.compensate_missing_links() route = Route( route.route_id, new_signal_link_node, route.output, route._output_dim_matrix, route.slots, False, route.execution_hook, route.pending_node_ttl_in_secs, # seconds route.pending_node_hook, ) # since second input is locked on 3, this event can NOT yield a zombie node since they are unlinked # 1 != 3 response = route.receive(create_incoming_signal(TestSignal.signal_internal_1, [1])) assert not response.new_execution_contexts assert len(response.new_pending_nodes) == 1 assert len(route._pending_nodes) == 1 assert not next(iter(response.new_pending_nodes)).is_zombie def test_route_zombie_node_not_possible_when_other_is_a_materialized_reference_even_if_inputs_linked(self): """Actually yields execution immediately since the second input is a materialized reference""" from test.intelliflow.core.signal_processing.signal.test_signal import TestSignal route = self._route_2_two_inputs_linked() # create new route to make sure that the second input is already materialized on value 3 [for dim_1_1]! new_signal_link_node = SignalLinkNode( [ TestSignal.signal_internal_1, # materialized reference input create_incoming_signal(TestSignal.signal_s3_1.clone("test_signal_from_S3").as_reference(), [3]), ] ) # LINK ! new_signal_link_node.compensate_missing_links() route = Route( route.route_id, new_signal_link_node, route.output, route._output_dim_matrix, route.slots, False, route.execution_hook, route.pending_node_ttl_in_secs, route.pending_node_hook, ) # although second input is locked on 3, this event can NOT yield a zombie node since it is a material reference. # 1 != 3 response = route.receive(create_incoming_signal(TestSignal.signal_internal_1, [1])) # yields execution ! assert response.new_execution_contexts assert len(response.new_pending_nodes) == 0 assert len(route._pending_nodes) == 0 # DONE # We are actually done but let's show that even if they are unlinked, the result would not change. 
new_signal_link_node = SignalLinkNode( [ TestSignal.signal_internal_1, # materialized reference input create_incoming_signal(TestSignal.signal_s3_1.clone("test_signal_from_S3").as_reference(), [3]), ] ) # UNLINK ! # new_signal_link_node.compensate_missing_links() route = Route( route.route_id, new_signal_link_node, route.output, route._output_dim_matrix, route.slots, False, route.execution_hook, route.pending_node_ttl_in_secs, route.pending_node_hook, ) response = route.receive(create_incoming_signal(TestSignal.signal_internal_1, [1])) # yields execution again! assert response.new_execution_contexts assert len(response.new_pending_nodes) == 0 assert len(route._pending_nodes) == 0
44.358491
138
0.636525
4,486
39,967
5.326349
0.076906
0.031807
0.037666
0.045702
0.861137
0.8282
0.813342
0.788315
0.774881
0.756508
0
0.017156
0.286837
39,967
900
139
44.407778
0.821142
0.129582
0
0.669944
0
0
0.020388
0.004383
0
0
0
0.001111
0.154494
1
0.032303
false
0.001404
0.046348
0.001404
0.087079
0.002809
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
6
56ff69de5c0b77597019e7ce269a5c5386a35249
1,519
py
Python
uocsecrets/forum/urls.py
jeff-zqiu/uocweb
bb6e99a7ab01c9634f8b8446127c4bd1c0701388
[ "MIT" ]
1
2018-09-24T13:32:06.000Z
2018-09-24T13:32:06.000Z
uocsecrets/forum/urls.py
jeff-zqiu/uocweb
bb6e99a7ab01c9634f8b8446127c4bd1c0701388
[ "MIT" ]
null
null
null
uocsecrets/forum/urls.py
jeff-zqiu/uocweb
bb6e99a7ab01c9634f8b8446127c4bd1c0701388
[ "MIT" ]
null
null
null
from django.urls import path, include from . import views from django.views.generic import TemplateView app_name = 'forum' urlpatterns = [ # /forum/ path('about/', TemplateView.as_view(template_name='forum/about.html'),name='about'), path('', views.IndexView.as_view(), name = 'index'), path('top/', views.IndexView.as_view(), name = 'top'), path('new/', views.IndexView.as_view(), name = 'new'), path('<str:mode>/<int:page>/', views.PageView.as_view(), name = 'page'), # /forum/edit/ path('edit/', views.EditView.as_view(), name = 'new_post'), path('<int:post_id>/edit/', views.EditView.as_view(), name='edit'), path('<int:post_id>/edit/delete/', views.delete, name='delete'), # /forum/<post_id>/ path('<int:post_id>/', views.ContentView.as_view() , name='content'), path('<int:post_id>/clickup/', views.ClickUpView.as_view(), name='clickup'), # /forum/<post_id>/comment/ path('<int:post_id>/comment/', views.CommentView.as_view(), name='new_comment'), path('<int:post_id>/comment/<int:comment_id>/', views.CommentView.as_view(), name='comment'), path('sign_up/', views.SignUpView.as_view(), name='sign_up'), path('login/', views.LoginView.as_view(template_name='forum/login.html', extra_context = {'next': '/forum/'}), name='login'), path('logout/', views.LogoutView.as_view(), name = 'logout'), # /forum/user/ path('user/<str:username>/', views.UserView.as_view(), name='user'), ]
41.054054
97
0.631995
199
1,519
4.663317
0.256281
0.096983
0.140086
0.084052
0.331897
0.116379
0
0
0
0
0
0
0.150757
1,519
37
98
41.054054
0.71938
0.050691
0
0
0
0
0.253306
0.091162
0
0
0
0
0
1
0
false
0
0.130435
0
0.130435
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
71008fe29a8062c4d781fffdb3dbd9227f9e7c32
12,085
py
Python
leer/core/primitives/block.py
WTRMQDev/leer
c77c6c1d31e6d99996f471bf4c45b8af70f44fa7
[ "MIT" ]
5
2018-11-10T03:33:37.000Z
2019-08-23T07:02:32.000Z
leer/core/primitives/block.py
WTRMQDev/leer
c77c6c1d31e6d99996f471bf4c45b8af70f44fa7
[ "MIT" ]
2
2018-11-22T11:10:49.000Z
2018-12-15T14:44:03.000Z
leer/core/primitives/block.py
WTRMQDev/leer
c77c6c1d31e6d99996f471bf4c45b8af70f44fa7
[ "MIT" ]
2
2018-10-30T13:43:54.000Z
2018-11-13T06:30:56.000Z
from leer.core.primitives.header import Header, PoPoW, VoteData from leer.core.storage.txos_storage import TXOsStorage from leer.core.chains.headers_manager import HeadersManager from leer.core.storage.excesses_storage import ExcessesStorage from leer.core.storage.headers_storage import HeadersStorage from leer.core.primitives.transaction_skeleton import TransactionSkeleton from leer.core.lubbadubdub.transaction import Transaction from leer.core.lubbadubdub.ioput import IOput from leer.core.lubbadubdub.offset_utils import sum_offset from time import time from leer.core.parameters.dynamic import next_reward, next_target from leer.core.parameters.constants import initial_target import functools class Block(): def __init__(self, storage_space, header=None, transaction_skeleton=None): self._header = header if header else Header() self.transaction_skeleton = transaction_skeleton if transaction_skeleton else TransactionSkeleton() self.tx=None self.storage_space = storage_space @property def header(self): try: return self._header except: self._header = Header() return self._header @property def hash(self): return self.header.hash @property def partial_hash(self): return self.header.partial_hash def serialize(self, rtx, rich_block_format=False, max_size =40000): serialized=b"" serialized += self.header.serialize() serialized += self.transaction_skeleton.serialize(rich_format=rich_block_format, max_size=max_size, full_tx = build_tx_from_skeleton(self.transaction_skeleton,\ self.storage_space.txos_storage,\ self.storage_space.excesses_storage,\ self.header.height, self.header.version, rtx=rtx,\ historical = True) if rich_block_format else None) return serialized @classmethod @functools.lru_cache(maxsize=40) def from_serialized(cls, serialized_block, storage_space): b = cls(storage_space=storage_space) b.deserialize(serialized_block) return b def deserialize(self, serialized): self.deserialize_raw(serialized) def deserialize_raw(self, serialized): serialized = 
self.header.deserialize_raw(serialized) serialized = self.transaction_skeleton.deserialize_raw(serialized, storage_space=self.storage_space) return serialized def non_context_verify(self, rtx): ''' While this check is called 'non_context', it actually uses context since it needs: a) fully validated headers chain up to this block b) downloaded outputs c) blocks which create inputs spent in checked(self) block should be applied Currently if those conditions are not satisfied block is marked as not_downloaded and thus can not be validated. To verify block we need to 0) check that header is known and valid 1) verify transaction 2) check that transaction can be applied 3) check reward size (actually in can be checked on headers level) ''' # stage 1 assert self.storage_space.headers_storage.has(self.header.hash, rtx=rtx), "Block's header is unknown" #self.storage_space.headers_storage.context_validation(self.header.hash) assert not self.storage_space.headers_storage.get(self.header.hash, rtx=rtx).invalid, "Block's header is invalid. Reason: `%s`"%self.storage_space.headers_storage.get(self.header.hash, rtx=rtx).reason #currently during building we automatically check that tx can ba applied and tx is valid self.tx = build_tx_from_skeleton(self.transaction_skeleton, txos_storage=self.storage_space.txos_storage, excesses_storage=self.storage_space.excesses_storage, block_height=self.header.height, block_version = self.header.version, rtx=rtx, non_context = True) # stage 3 => should be moved to blockchain #commitment_root, txos_root = self.storage_space.txos_storage.apply_block_tx_get_merkles_and_rollback(tx) #excesses_root = self.storage_space.excesses_storage.apply_block_tx_get_merkles_and_rollback(tx) #assert [commitment_root, txos_root, excesses_root]==self.header.merkles # This is context validation too??? 
TODO miner_subsidy, dev_reward = next_reward(self.header.prev, self.storage_space.headers_storage, rtx=rtx) assert self.tx.coinbase.value == (miner_subsidy+self.transaction_skeleton.relay_fee), "Wrong miner subsidy" if dev_reward: assert self.tx.dev_reward.value == dev_reward, "Wrong miner subsidy" return True def __str__(self): return "Block< hash: %s..., height: %d, inputs: %d, outputs %d>"%(self.header.hash[:6], self.header.height , len(self.transaction_skeleton.input_indexes),len(self.transaction_skeleton.output_indexes) ) def build_tx_from_skeleton(tx_skeleton, txos_storage, excesses_storage, block_height, block_version, rtx, historical=False, non_context = False): ''' By given tx_skeleton and txos_storage return transaction. If transaction is invalid or any input/output isn't available exception will be raised. Optionally, if `historical` is True we will check output_indexes both in mempool and spent outputs. ''' tx=Transaction(txos_storage=txos_storage, excesses_storage=excesses_storage) for _i in tx_skeleton.input_indexes: if historical or non_context: tx.inputs.append(txos_storage.confirmed.find(_i, rtx=rtx)) else: tx.inputs.append(txos_storage.confirmed.get(_i, rtx=rtx)) for _o in tx_skeleton.output_indexes: if historical or non_context: # About non_context: if we are on one branch and build block from another one # and this block contain output which is already commited on our branch (tx is # confirmed on both branches) we should get txo from confirmed storage try: tx.outputs.append(txos_storage.confirmed.find(_o, rtx=rtx)) except: tx.outputs.append(txos_storage.mempool[_o]) else: tx.outputs.append(txos_storage.mempool[_o]) tx.additional_excesses = tx_skeleton.additional_excesses.copy() tx.updated_excesses = tx_skeleton.updated_excesses.copy() tx.mixer_offset = tx_skeleton.mixer_offset if historical or non_context: assert tx.non_context_verify(block_height=block_height) else: assert tx.verify(block_height=block_height, block_version = block_version, 
rtx=rtx) return tx #To setup utils def generate_genesis(tx, storage_space, wtx): ''' 1. spend inputs and add outputs and excesses from tx to storage 2. calc new mercles 3. generate header 4. rollback outputs ''' storage = storage_space.txos_storage excesses = storage_space.excesses_storage exc_merkle = excesses.apply_block_tx_get_merkles_and_rollback(tx, wtx=wtx) # it should be calced first, since we nned to calc address_excess_num_index merkles = storage.apply_block_tx_get_merkles_and_rollback(tx, wtx=wtx) + [exc_merkle] popow = PoPoW([]) votedata = VoteData() target = initial_target full_offset = tx.mixer_offset header=Header(height = 0, supply=tx.coinbase.value, full_offset=full_offset, merkles=merkles, popow=popow, votedata=votedata, timestamp=int(time()), target=target, version=int(1), nonce=b"\x00"*16) tx_skeleton = TransactionSkeleton(tx=tx) new_block = Block(storage_space, header, tx_skeleton) return new_block def generate_block_template(tx, storage_space, wtx, get_tx_from_mempool = True, timestamp = None, dev_reward_vote = b"\x00"): ''' Generate block template: block is correct but nonce (by default) is equal to zero. Thus difficulty target (almost always) isn't met. arguments: tx [mandatory]: transaction which contains coinbase output. It also may contain other inputs and outputs. storage_space [mandatory] : - get_tx_from_mempool [optional, default True]: if get_tx_from_mempool, transaction from mempool will be merged to block_transaction. If this merge will produce invalid tx (for instance tx from mempool spends the same inputs as tx with coinbase), tx from mempool will be discarded. Inner logic: 1. apply block_tx to txos_storage and excesses_storage 2. calc new merkles 3. generate header with new merkles 4. generate block by appending tx_skeleton and new header 5. 
rollback block_tx ''' storage = storage_space.txos_storage excesses = storage_space.excesses_storage current_block = storage_space.blocks_storage.get(storage_space.blockchain.current_tip(rtx=wtx), rtx=wtx) if get_tx_from_mempool: try: tx = tx.merge(storage_space.mempool_tx.give_tx(), rtx=wtx) except: pass exc_merkle = excesses.apply_block_tx_get_merkles_and_rollback(tx, wtx=wtx) # it should be calced first, since we nned to calc address_excess_num_index merkles = storage.apply_block_tx_get_merkles_and_rollback(tx, wtx=wtx) + [exc_merkle] popow = current_block.header.next_popow() supply = current_block.header.supply + tx.minted_value - tx.calc_new_outputs_fee() height = current_block.header.height+1 votedata = VoteData() target = next_target(current_block.hash, storage_space.headers_storage, rtx=wtx) full_offset = sum_offset(current_block.header.full_offset,tx.mixer_offset) if not timestamp: timestamp = max(int(time()), storage_space.headers_storage.get(storage_space.blockchain.current_tip(rtx=wtx), rtx=wtx).timestamp+1) header=Header(height = height, supply=supply, full_offset=full_offset, merkles=merkles, popow=popow, votedata=votedata, timestamp=timestamp, target=target, version=int(1), nonce=b"\x00"*16) tx_skeleton = TransactionSkeleton(tx=tx) new_block = Block(storage_space, header, tx_skeleton) return new_block class ContextBlock(Block): # TODO consider removing ContextBlock. For now we store all information about validity in ContextHeader # (it allows headers_manager to provide less useless paths). ''' Wrapper of Block for inner storage. It contains contextual info about block: for instance is it valid in chain or not. 
''' def __init__(self, storage_space = None, block=None): if block: Block.__init__(self, storage_space= block.storage_space, header=block.header, transaction_skeleton=block.transaction_skeleton) if block.tx: self.tx=block.tx else: if not storage_space: raise TypeError("ContextBlock initialized without context") Block.__init__(self, storage_space) self.invalid = False self.reason = None def serialize_with_context(self): ser = super(ContextBlock, self).serialize(rtx=None) # We can pass None as rtx, since rtx is required for rich block serialization ser += int(self.invalid).to_bytes(1,'big') reason = self.reason if self.reason else "" ser += int(len(reason)).to_bytes(2,'big') ser += reason.encode('utf-8') return ser @classmethod @functools.lru_cache(maxsize=10) def from_serialized(cls, serialized_block, storage_space): b = cls(storage_space=storage_space) b.deserialize(serialized_block) return b def deserialize(self, serialized): self.deserialize_raw(serialized) def deserialize_raw(self, serialized): ser = super(ContextBlock, self).deserialize_raw(serialized) self.invalid, ser = bool(ser[0]), ser[1:] reason_len, ser = int.from_bytes(ser[:2], 'big'), ser[2:] self.reason, ser = ser[:reason_len].decode('utf-8'), ser[reason_len:] return ser def __str__(self): return "ContextBlock< hash: %s..., height: %d, inputs: %d, outputs %d, valid: %s, reason %s>"%(self.header.hash[:6], self.header.height , len(self.transaction_skeleton.input_indexes),len(self.transaction_skeleton.output_indexes), ("-" if self.invalid else '+'), self.reason )
46.841085
289
0.725528
1,645
12,085
5.122796
0.179331
0.059808
0.032277
0.021597
0.349116
0.265456
0.237332
0.229263
0.212412
0.202445
0
0.004999
0.188912
12,085
257
290
47.023346
0.854724
0.244435
0
0.353659
0
0.012195
0.035452
0
0
0
0
0.007782
0.036585
1
0.115854
false
0.006098
0.079268
0.02439
0.304878
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
71017d33ceca5fbf3aa398b934100c4307a77f61
1,982
py
Python
GpioService.py
slater1/gpio
ef4767456b746706ee5cb3304ed6ef29e0fe6d54
[ "MIT" ]
null
null
null
GpioService.py
slater1/gpio
ef4767456b746706ee5cb3304ed6ef29e0fe6d54
[ "MIT" ]
null
null
null
GpioService.py
slater1/gpio
ef4767456b746706ee5cb3304ed6ef29e0fe6d54
[ "MIT" ]
null
null
null
from datetime import datetime from gpiozero import LED from threading import Thread from time import sleep class GpioService: def __init__(self): self.pins = {} self.heartbeat_last = datetime.now() self.heartbeat_enabled = False self.have_heartbeat = False; self.heartbeat_timeout_seconds = 2 self.heartbeat_thread = Thread(target=self.service_heartbeat) self.heartbeat_thread.start(); self.loss_action = self.default_heartbeat_loss_action; def get_pin(self, id): self.maybe_add(id) return self.pins[id].value def get_all_pins(self): return [(id, led.value) for id, led in self.pins.items()] def pin_on(self, id): self.maybe_add(id) return self.pins[id].on() def pin_off(self, id): self.maybe_add(id) return self.pins[id].off() def heartbeat(self): self.heartbeat_last = datetime.now() self.have_heartbeat = True; def heartbeat_enable(self): self.heartbeat_enabled = True def heartbeat_disable(self): self.heartbeat_enabled = False def maybe_add(self, id): if id not in self.pins: self.pins[id] = LED(id) def heartbeat_ok(self): return self.heartbeat_enabled == False \ or (datetime.now() - self.heartbeat_last).total_seconds() \ < self.heartbeat_timeout_seconds def service_heartbeat(self): self.run = True; while (self.run): if self.have_heartbeat and not self.heartbeat_ok(): self.have_heartbeat = False; print("Heartbeat lost") self.loss_action(); sleep(1) def default_heartbeat_loss_action(self): for id, led in self.pins.items(): led.off() def close(self): self.run = False; self.heartbeat_thread.join();
27.527778
70
0.592836
240
1,982
4.704167
0.245833
0.14969
0.070859
0.06643
0.193091
0.193091
0.136404
0.09566
0.09566
0.09566
0
0.001465
0.311302
1,982
71
71
27.915493
0.825641
0
0
0.169811
0
0
0.007326
0
0
0
0
0
0
1
0.245283
false
0
0.075472
0.037736
0.433962
0.018868
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
0
0
0
1
7101adf147765864029e975e6ef3b5f8d4d932f9
1,465
py
Python
Loan-Approval-Analysis/code.py
acharya221b/ga-learner-dsmp-repo
9b493aff25cf861fa8b757d7f2e926e1dcbe6061
[ "MIT" ]
null
null
null
Loan-Approval-Analysis/code.py
acharya221b/ga-learner-dsmp-repo
9b493aff25cf861fa8b757d7f2e926e1dcbe6061
[ "MIT" ]
null
null
null
Loan-Approval-Analysis/code.py
acharya221b/ga-learner-dsmp-repo
9b493aff25cf861fa8b757d7f2e926e1dcbe6061
[ "MIT" ]
null
null
null
# -------------- # Import packages import numpy as np import pandas as pd from scipy.stats import mode bank=pd.read_csv(path) categorical_var=bank.select_dtypes(include='object') print(categorical_var) numerical_var=bank.select_dtypes(include='number') print(numerical_var) # code starts here # code ends here # -------------- # code starts here banks=bank.drop('Loan_ID',axis=1) print(banks.isnull().sum()) bank_mode=banks.mode().iloc[0] print(type(bank_mode)) print(bank_mode) banks.fillna(bank_mode, inplace=True) print(banks.isnull().sum()) #code ends here # -------------- # Code starts here avg_loan_amount=banks.pivot_table(index=['Gender','Married','Self_Employed'],values='LoanAmount',aggfunc='mean') print(avg_loan_amount) # code ends here # -------------- # code starts here loan_approved_se=len(banks[(banks['Self_Employed']=='Yes') & (banks['Loan_Status']=='Y')]) loan_approved_nse=len(banks[(banks['Self_Employed']=='No') & (banks['Loan_Status']=='Y')]) percentage_se=loan_approved_se*100/614 percentage_nse=loan_approved_nse*100/614 # code ends here # -------------- # code starts here loan_term=banks['Loan_Amount_Term'].apply(lambda x:x/12) big_loan_term=len(banks[loan_term>=25]) print(big_loan_term) print(banks[loan_term>=25]) # code ends here # -------------- # code starts here loan_groupby=banks.groupby('Loan_Status')[['ApplicantIncome', 'Credit_History']] mean_values=loan_groupby.mean() # code ends here
18.08642
112
0.703072
211
1,465
4.663507
0.364929
0.060976
0.085366
0.081301
0.247967
0.144309
0.091463
0
0
0
0
0.015094
0.095563
1,465
80
113
18.3125
0.727547
0.202048
0
0.074074
0
0
0.148213
0
0
0
0
0
0
1
0
false
0
0.111111
0
0.111111
0.333333
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
7103030bd95a829786ccbdd3fa84915b9d8496a9
1,568
py
Python
test/test_mega.py
adacker10/showdown
8ceb1ff46d5c33ec3055928d6ad293224446f63c
[ "MIT" ]
8
2019-02-02T01:15:57.000Z
2021-12-23T04:43:46.000Z
test/test_mega.py
adacker10/showdown
8ceb1ff46d5c33ec3055928d6ad293224446f63c
[ "MIT" ]
null
null
null
test/test_mega.py
adacker10/showdown
8ceb1ff46d5c33ec3055928d6ad293224446f63c
[ "MIT" ]
6
2020-09-11T13:15:05.000Z
2022-03-18T15:46:35.000Z
import unittest from sim.battle import Battle from data import dex class TestMega(unittest.TestCase): def test_pidgeot(self): battle = Battle(debug=False, rng=False) battle.join(0, [{'species': 'pidgeot', 'item': 'pidgeotite', 'moves': ['tackle', 'protect']}]) battle.join(1, [{'species': 'mew', 'moves': ['tackle']}]) battle.choose(0, dex.Decision('move', 0, mega=True)) battle.choose(1, dex.Decision('move', 0, mega=True)) battle.do_turn() pidgeot = battle.sides[0].pokemon[0] self.assertEqual(pidgeot.species, 'pidgeotmega') self.assertEqual(pidgeot.hp, pidgeot.maxhp-23) def test_mewtwo_x(self): battle = Battle(debug=False, rng=False) battle.join(0, [{'species': 'mewtwo', 'item': 'mewtwonitex', 'moves': ['tackle', 'protect'] }]) battle.join(1, [{'species': 'charizard', 'item': 'charizarditex', 'moves': ['tackle'] }]) battle.choose(0, dex.Decision('move', 0, mega=True)) battle.choose(1, dex.Decision('move', 0, mega=False)) battle.do_turn() mewtwo = battle.sides[0].pokemon[0] charizard = battle.sides[1].pokemon[0] self.assertEqual(mewtwo.species, 'mewtwomegax') self.assertEqual(mewtwo.hp, mewtwo.maxhp-17) def runTest(self): self.test_pidgeot() self.test_mewtwo_x
34.844444
65
0.53699
166
1,568
5.024096
0.295181
0.047962
0.071942
0.076739
0.465228
0.417266
0.417266
0.318945
0.318945
0.318945
0
0.020314
0.309311
1,568
44
66
35.636364
0.749769
0
0
0.222222
0
0
0.124362
0
0
0
0
0
0.111111
1
0.083333
false
0
0.083333
0
0.194444
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
7105dbc3e130b4596cac492082bc99f3266720ff
3,872
py
Python
tests/test_pybrain.py
carlosf/rep
365917a1d582c7d784e26f80808eeed18f655cb3
[ "Apache-2.0" ]
null
null
null
tests/test_pybrain.py
carlosf/rep
365917a1d582c7d784e26f80808eeed18f655cb3
[ "Apache-2.0" ]
null
null
null
tests/test_pybrain.py
carlosf/rep
365917a1d582c7d784e26f80808eeed18f655cb3
[ "Apache-2.0" ]
null
null
null
# Copyright 2014-2015 Yandex LLC and contributors <https://yandex.com/> # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # <http://www.apache.org/licenses/LICENSE-2.0> # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from __future__ import division, print_function, absolute_import from rep.test.test_estimators import check_classifier, check_regression, check_params, \ generate_classification_data, check_classification_reproducibility from rep.estimators.pybrain import PyBrainClassifier, PyBrainRegressor from sklearn.ensemble import BaggingClassifier from rep.estimators import SklearnClassifier __author__ = 'Artem Zhirokhov' classifier_params = { 'has_staged_pp': False, 'has_importances': False, 'supports_weight': False } regressor_params = { 'has_staged_predictions': False, 'has_importances': False, 'supports_weight': False } def test_pybrain_params(): check_params(PyBrainClassifier, layers=[1, 2], epochs=5, use_rprop=True, hiddenclass=['LinearLayer']) check_params(PyBrainRegressor, layers=[1, 2], epochs=5, etaplus=1.3, hiddenclass=['LinearLayer'], learningrate=0.1) def test_pybrain_classification(): clf = PyBrainClassifier(epochs=2) check_classifier(clf, **classifier_params) check_classifier(PyBrainClassifier(epochs=-1, continue_epochs=1, layers=[]), **classifier_params) check_classifier(PyBrainClassifier(epochs=2, layers=[5, 2]), **classifier_params) def test_pybrain_reproducibility(): try: import numpy X, y, _ = generate_classification_data() clf1 = PyBrainClassifier(layers=[4], epochs=2).fit(X, y) clf2 = PyBrainClassifier(layers=[4], epochs=2).fit(X, y) 
print(clf1.predict_proba(X)-clf2.predict_proba(X)) assert numpy.allclose(clf1.predict_proba(X), clf2.predict_proba(X)), 'different predicitons' check_classification_reproducibility(clf1, X, y) except: # This test fails. Because PyBrain can't reproduce training. pass def test_pybrain_Linear_MDLSTM(): check_classifier(PyBrainClassifier(epochs=2, layers=[10, 2], hiddenclass=['LinearLayer', 'MDLSTMLayer']), **classifier_params) check_regression(PyBrainRegressor(epochs=3, layers=[10, 2], hiddenclass=['LinearLayer', 'MDLSTMLayer']), **regressor_params) def test_pybrain_SoftMax_Tanh(): check_classifier(PyBrainClassifier(epochs=2, layers=[10, 5, 2], hiddenclass=['SoftmaxLayer', 'SoftmaxLayer', 'TanhLayer'], use_rprop=True), **classifier_params) check_regression(PyBrainRegressor(epochs=2, layers=[10, 5, 2], hiddenclass=['SoftmaxLayer', 'TanhLayer', 'TanhLayer']), **regressor_params) def pybrain_test_partial_fit(): clf = PyBrainClassifier(layers=[4], epochs=2) X, y, _ = generate_classification_data() clf.partial_fit(X, y) clf.partial_fit(X[:2], y[:2]) def test_pybrain_multi_classification(): check_classifier(PyBrainClassifier(), n_classes=4, **classifier_params) def test_pybrain_regression(): check_regression(PyBrainRegressor(), **regressor_params) def test_pybrain_multi_regression(): check_regression(PyBrainRegressor(), n_targets=4, **regressor_params) def test_simple_stacking_pybrain(): base_pybrain = PyBrainClassifier() base_bagging = BaggingClassifier(base_estimator=base_pybrain, n_estimators=3) check_classifier(SklearnClassifier(clf=base_bagging), **classifier_params)
37.592233
143
0.737345
460
3,872
5.982609
0.341304
0.022892
0.040698
0.055233
0.331032
0.24564
0.139898
0.079942
0
0
0
0.019512
0.152893
3,872
102
144
37.960784
0.819512
0.167355
0
0.163934
0
0
0.081073
0.00686
0
0
0
0
0.016393
1
0.163934
false
0.016393
0.131148
0
0.295082
0.032787
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
710806234af5d094e32935a5e432c9bd6ad09b51
9,749
py
Python
apps/consecutive_create_and_update_operations/consecutive_create_and_update_operations.py
semi-technologies/weaviate-chaos-engineering
57bc0cd919130749ead1ca2f397a3a46aa77c5fd
[ "BSD-3-Clause" ]
null
null
null
apps/consecutive_create_and_update_operations/consecutive_create_and_update_operations.py
semi-technologies/weaviate-chaos-engineering
57bc0cd919130749ead1ca2f397a3a46aa77c5fd
[ "BSD-3-Clause" ]
1
2022-03-08T12:03:20.000Z
2022-03-14T10:28:45.000Z
apps/consecutive_create_and_update_operations/consecutive_create_and_update_operations.py
semi-technologies/weaviate-chaos-engineering
57bc0cd919130749ead1ca2f397a3a46aa77c5fd
[ "BSD-3-Clause" ]
null
null
null
from weaviate import Client from uuid import uuid1 class TestConsecutiveCreateAndUpdate: client: Client img = "iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAIAAACQd1PeAAABhGlDQ1BJQ0MgcHJvZmlsZQAAKJF9kT1Iw0AcxV/TSou0ONhBxCFD62RBVMRRq1CECqFWaNXB5NIvaNKSpLg4Cq4FBz8Wqw4uzro6uAqC4AeIm5uToouU+L+k0CLGg+N+vLv3uHsHCK0q08zAOKDplpFJJcVcflUMviKACEKIwy8zsz4nSWl4jq97+Ph6l+BZ3uf+HBG1YDLAJxLPsrphEW8QT29adc77xFFWllXic+Ixgy5I/Mh1xeU3ziWHBZ4ZNbKZeeIosVjqYaWHWdnQiKeIY6qmU76Qc1nlvMVZqzZY5578heGCvrLMdZojSGERS5AgQkEDFVRhIUGrToqJDO0nPfzDjl8il0KuChg5FlCDBtnxg//B727N4uSEmxROAn0vtv0RB4K7QLtp29/Htt0+AfzPwJXe9ddawMwn6c2uFjsCBraBi+uupuwBlzvA0FNdNmRH8tMUikXg/Yy+KQ8M3gL9a25vnX2cPgBZ6ip9AxwcAqMlyl73eHeot7d/z3T6+wEPO3J/B8olWgAAAAlwSFlzAAAuIwAALiMBeKU/dgAAAAd0SU1FB+UEDQgmFS2naPsAAAAZdEVYdENvbW1lbnQAQ3JlYXRlZCB3aXRoIEdJTVBXgQ4XAAAADElEQVQI12NgYGAAAAAEAAEnNCcKAAAAAElFTkSuQmCC" img2 = "/9j/4AAQSkZJRgABAQEASABIAAD/4QpKRXhpZgAASUkqAAgAAAAGABoBBQABAAAAVgAAABsBBQABAAAAXgAAACgBAwABAAAAAgAAADEBAgANAAAAZgAAADIBAgAUAAAAdAAAAGmHBAABAAAAiAAAAJoAAABIAAAAAQAAAEgAAAABAAAAR0lNUCAyLjEwLjE0AAAyMDIxOjAzOjI1IDE2OjI5OjQ3AAEAAaADAAEAAAABAAAAAAAAAAgAAAEEAAEAAAAAAQAAAQEEAAEAAADXAAAAAgEDAAMAAAAAAQAAAwEDAAEAAAAGAAAABgEDAAEAAAAGAAAAFQEDAAEAAAADAAAAAQIEAAEAAAAGAQAAAgIEAAEAAAA7CQAAAAAAAAgACAAIAP/Y/+AAEEpGSUYAAQEAAAEAAQAA/9sAQwAIBgYHBgUIBwcHCQkICgwUDQwLCwwZEhMPFB0aHx4dGhwcICQuJyAiLCMcHCg3KSwwMTQ0NB8nOT04MjwuMzQy/9sAQwEJCQkMCwwYDQ0YMiEcITIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIy/8AAEQgA1wEAAwEiAAIRAQMRAf/EAB8AAAEFAQEBAQEBAAAAAAAAAAABAgMEBQYHCAkKC//EALUQAAIBAwMCBAMFBQQEAAABfQECAwAEEQUSITFBBhNRYQcicRQygZGhCCNCscEVUtHwJDNicoIJChYXGBkaJSYnKCkqNDU2Nzg5OkNERUZHSElKU1RVVldYWVpjZGVmZ2hpanN0dXZ3eHl6g4SFhoeIiYqSk5SVlpeYmZqio6Slpqeoqaqys7S1tre4ubrCw8TFxsfIycrS09TV1tfY2drh4uPk5ebn6Onq8fLz9PX29/j5+v/EAB8BAAMBAQEBAQEBAQEAAAAAAAABAgMEBQYHCAkKC//EALURAAIBAgQEAwQHBQQEAAECdwABAgMRBAUhMQYSQVEHYXETIjKBCBRCkaGxwQkjM1LwFWJy0QoWJDThJfEXGBkaJicoKSo1Njc4OTpDREVGR0hJSlNUVVZXWFlaY2RlZmdoaWpzdHV2d3h5eoKDhIWGh4iJipKTlJWWl5iZmqKjpKWmp6ipqrKztLW2t7i
5usLDxMXGx8jJytLT1NXW19jZ2uLj5OXm5+jp6vLz9PX29/j5+v/aAAwDAQACEQMRAD8A9/ooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAK+YP+GjvGH/QN0P8A78Tf/Ha+n6+AKAPYP+GjvGH/AEDdD/78Tf8Ax2j/AIaO8Yf9A3Q/+/E3/wAdrx+igD2D/ho7xh/0DdD/AO/E3/x2j/ho7xh/0DdD/wC/E3/x2vH6KAPYP+GjvGH/AEDdD/78Tf8Ax2j/AIaO8Yf9A3Q/+/E3/wAdrx+igD2D/ho7xh/0DdD/AO/E3/x2j/ho7xh/0DdD/wC/E3/x2vH6KAPYP+GjvGH/AEDdD/78Tf8Ax2j/AIaO8Yf9A3Q/+/E3/wAdrx+igD2D/ho7xh/0DdD/AO/E3/x2j/ho7xh/0DdD/wC/E3/x2vH6KAPYP+GjvGH/AEDdD/78Tf8Ax2j/AIaO8Yf9A3Q/+/E3/wAdrx+igD2D/ho7xh/0DdD/AO/E3/x2j/ho7xh/0DdD/wC/E3/x2vH6KAPYP+GjvGH/AEDdD/78Tf8Ax2j/AIaO8Yf9A3Q/+/E3/wAdrx+igD2D/ho7xh/0DdD/AO/E3/x2j/ho7xh/0DdD/wC/E3/x2vH6KAPYP+GjvGH/AEDdD/78Tf8Ax2j/AIaO8Yf9A3Q/+/E3/wAdrx+igD7/AKKKKACiiigAooooAKKKKACvgCvv+vgCgAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKAPv8AooooAKKKKACiiigAooooAK+AK+/6+AKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooA+/wCiiigAooooAKKKKACiiigAr4Ar7/r4AoAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigD7/AKKKKACiiigAooooAKKKKACvgCvv+vgCgAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKAPv8AooooAKKKKACiiigAooooAK+AK+/6+AKACiiigAooooAKKKKACiiigAr0jwt8FPEni7w5aa5YXulR2t1v2JPLIHG12Q5AjI6qe9eb19f/AAS/5JDoX/bx/wClElAHkH/DOPjD/oJaH/3/AJv/AI1R/wAM4+MP+glof/f+b/41X0/RQB8wf8M4+MP+glof/f8Am/8AjVH/AAzj4w/6CWh/9/5v/jVfT9FAHzB/wzj4w/6CWh/9/wCb/wCNUf8ADOPjD/oJaH/3/m/+NV9P0UAfMH/DOPjD/oJaH/3/AJv/AI1R/wAM4+MP+glof/f+b/41X0/RQB8wf8M4+MP+glof/f8Am/8AjVH/AAzj4w/6CWh/9/5v/jVfT9FAHzB/wzj4w/6CWh/9/wCb/wCNUf8ADOPjD/oJaH/3/m/+NV9P0UAFFFFABRRRQAUUUUAFFFFABXwBX3/XwBQAUUUUAFFFFABRRRQAUUUUAFfX/wAEv+SQ6F/28f8ApRJXyBX1/wDBL/kkOhf9vH/pRJQB6BRRRQAUUUUAFFFFABRRRQAUUUUAFFFFABRRRQAUUUUAFFFFABRRRQAV8AV9/wBfAFABRRRQAUUUUAFFFFABRRRQAV9f/BL/AJJDoX/bx/6USV8gV9f/AAS/5JDoX/bx/wClElAHoFFFFABRRRQAUUUUAFFFFAB
RRRQAUUUUAFFFFABRRRQAUUUUAFFFFABXwBX3/XwBQAUUUUAFFFFABRRRQAUUUUAFfX/wS/5JDoX/AG8f+lElfIFfX/wS/wCSQ6F/28f+lElAHoFFFFABRRRQAUUUUAFFFFABRRRQAUUUUAFFFFABRRRQAUUUUAFFFFABXwBX3/XwBQAUUUUAFFFFABRRRQAUUUUAFfX/AMEv+SQ6F/28f+lElfIFfX/wS/5JDoX/AG8f+lElAHoFFFFABRRRQAUUUUAFFFFABRRRQAUUUUAFFFFABRRRQAUUUUAFFFFABXwBRRQAUUUUAFFFFABRRRQAUUUUAFfX/wAEv+SQ6F/28f8ApRJRRQB6BRRRQAUUUUAFFFFABRRRQAUUUUAFFFFAH//ZAP/iArBJQ0NfUFJPRklMRQABAQAAAqBsY21zBDAAAG1udHJSR0IgWFlaIAflAAMAGQAPABwAMmFjc3BBUFBMAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD21gABAAAAANMtbGNtcwAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADWRlc2MAAAEgAAAAQGNwcnQAAAFgAAAANnd0cHQAAAGYAAAAFGNoYWQAAAGsAAAALHJYWVoAAAHYAAAAFGJYWVoAAAHsAAAAFGdYWVoAAAIAAAAAFHJUUkMAAAIUAAAAIGdUUkMAAAIUAAAAIGJUUkMAAAIUAAAAIGNocm0AAAI0AAAAJGRtbmQAAAJYAAAAJGRtZGQAAAJ8AAAAJG1sdWMAAAAAAAAAAQAAAAxlblVTAAAAJAAAABwARwBJAE0AUAAgAGIAdQBpAGwAdAAtAGkAbgAgAHMAUgBHAEJtbHVjAAAAAAAAAAEAAAAMZW5VUwAAABoAAAAcAFAAdQBiAGwAaQBjACAARABvAG0AYQBpAG4AAFhZWiAAAAAAAAD21gABAAAAANMtc2YzMgAAAAAAAQxCAAAF3v//8yUAAAeTAAD9kP//+6H///2iAAAD3AAAwG5YWVogAAAAAAAAb6AAADj1AAADkFhZWiAAAAAAAAAknwAAD4QAALbEWFlaIAAAAAAAAGKXAAC3hwAAGNlwYXJhAAAAAAADAAAAAmZmAADypwAADVkAABPQAAAKW2Nocm0AAAAAAAMAAAAAo9cAAFR8AABMzQAAmZoAACZnAAAPXG1sdWMAAAAAAAAAAQAAAAxlblVTAAAACAAAABwARwBJAE0AUG1sdWMAAAAAAAAAAQAAAAxlblVTAAAACAAAABwAcwBSAEcAQv/bAEMAAwICAwICAwMDAwQDAwQFCAUFBAQFCgcHBggMCgwMCwoLCw0OEhANDhEOCwsQFhARExQVFRUMDxcYFhQYEhQVFP/bAEMBAwQEBQQFCQUFCRQNCw0UFBQUFBQUFBQUFBQUFBQUFBQUFBQUFBQUFBQUFBQUFBQUFBQUFBQUFBQUFBQUFBQUFP/CABEIABUAGQMBEQACEQEDEQH/xAAXAAADAQAAAAAAAAAAAAAAAAAABwgJ/8QAFAEBAAAAAAAAAAAAAAAAAAAAAP/aAAwDAQACEAMQAAABpkSAANUzlHgBVRABpSB//8QAGxAAAQUBAQAAAAAAAAAAAAAABQAEBhc2AhD/2gAIAQEAAQUCIPeBrC6giuoIrqCKWZYey7JP6VNqlTalmWiep8//xAAUEQEAAAAAAAAAAAAAAAAAAAAw/9oACAEDAQE/AU//xAAUEQEAAAAAAAAAAAAAAAAAAAAw/9oACAECAQE/AU//xAAhEAAABQQDAQEAAAAAAAAAAAAAAQIDBQQ0k9IRdLIxEP/aAAgBAQAGPwKpq3CUbbDanVEn7wRci1kMaNxayGNG4tZDGjcTPTe8GKakbNJOPuJaSavnJnwLqPyL0F1H5F6CZ6b3gxDdxn2X7//EABcQAQEBAQAAAAAAAAAAAAAAAAERICH/2gAIAQEAAT8hLs8otSC
p2GcWLErbs8qEQWDyu8WJWr//2gAMAwEAAgADAAAAEIABAAJP/8QAFBEBAAAAAAAAAAAAAAAAAAAAMP/aAAgBAwEBPxBP/8QAFBEBAAAAAAAAAAAAAAAAAAAAMP/aAAgBAgEBPxBP/8QAFxABAQEBAAAAAAAAAAAAAAAAAREgMf/aAAgBAQABPxAT69h7QKUBQsqdz06dNin17D2oAKoLLB5vp02bP//Z" def __init__(self, client): self.client = client def batch_callback_result(self, results: dict) -> int: """ Check batch results for errors and return the number of occurred errors. Parameters ---------- results : dict The Weaviate batch creation return value. """ if results is not None: for result in results: if 'result' in result and 'errors' in result['result']: if 'error' in result['result']['errors']: print(f"error: {result['result']['errors']}") raise Exception("Some batch items failed!") def deleteTestClass(self, schemas, cls_name): if self.client.schema.contains(schemas): self.client.schema.delete_class(cls_name) def checkIfObjectsExist(self, uuids): for _id in uuids: # assert self.client.data_object.exists(_id) resp = self.client.data_object.get_by_id(_id, with_vector=True) if resp is None: print(f"ERROR!!! 
Object with ID: {_id} doesn't exist!!!") raise def consecutive_create_and_update_operations(self): print("Test started") cls_name = 'Test123' schemas = { 'classes': [ { 'class': cls_name, "vectorizer": "none", 'vectorIndexConfig': {'skip': False}, 'properties': [ { 'dataType': ['blob'], 'name': 'a', 'indexInverted': False, } ], }, ] } self.deleteTestClass(schemas, cls_name) uuids = [str(uuid1()) for _ in range(28000)] assert len(list(set(uuids))) == len(uuids), 'uuids contain duplicates' # extend print(f"Create objects in batch of 50 items...") with self.client.batch(batch_size=50, callback=self.batch_callback_result) as batch: for _id in uuids: batch.add_data_object(data_object={'a': self.img}, class_name=cls_name, uuid=_id) self.client.batch.flush() print(f"Update objects with vector started...") x = 1 # embed for _id in uuids: self.client.batch.add_data_object(data_object={'a': self.img2}, class_name=cls_name, uuid=_id, vector=[3,2,1]) if x % 1000 == 0: print(f"updated {x} objects...") x += 1 print("Check if objects exist...") # check self.checkIfObjectsExist(uuids) print(f"Update objects with new vector in batch of 50 items...") x = 1 # update vectors with self.client.batch(batch_size=50, callback=self.batch_callback_result) as batch: for _id in uuids: batch.add_data_object(data_object={'a': self.img}, class_name=cls_name, uuid=_id, vector=[1,2,3]) if x % 1000 == 0: print(f"updated {x} objects...") x += 1 self.client.batch.flush() print("Check if objects exist...") # check self.checkIfObjectsExist(uuids) self.deleteTestClass(schemas, cls_name) print("Test done") c = Client('http://localhost:8080') test = TestConsecutiveCreateAndUpdate(c) test.consecutive_create_and_update_operations()
89.440367
5,389
0.795569
683
9,749
11.270864
0.374817
0.014289
0.014809
0.023383
0.186932
0.153676
0.153676
0.14926
0.144973
0.130164
0
0.060113
0.129757
9,749
109
5,390
89.440367
0.847242
0.024618
0
0.289474
0
0.026316
0.701767
0.648958
0
1
0
0
0.013158
1
0.065789
false
0
0.026316
0
0.144737
0.144737
0
0
1
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
1
1
null
1
0
0
0
0
0
0
0
0
0
0
0
0
5
710a8eb80318d483b7ae6c16ae6882b79bd7ec77
301
py
Python
backend/app.py
JohnnyDevNull/python-nuxt-starter
e6158818b7536212dafec2dfe3bc70385110440c
[ "MIT" ]
null
null
null
backend/app.py
JohnnyDevNull/python-nuxt-starter
e6158818b7536212dafec2dfe3bc70385110440c
[ "MIT" ]
1
2022-01-22T12:45:49.000Z
2022-01-22T12:45:49.000Z
backend/app.py
JohnnyDevNull/python-nuxt-starter
e6158818b7536212dafec2dfe3bc70385110440c
[ "MIT" ]
null
null
null
from flask import Flask from rest_server import rest_api from socket_server import socket_api app = Flask(__name__) app.config['SECRET_KEY'] = 'secret!' rest_api.init_app(app) socket_api.init_app(app) if __name__ == '__main__': socket_api.run(app, host="127.0.0.1", port="5000", debug='true')
21.5
68
0.744186
50
301
4.04
0.48
0.133663
0.09901
0.128713
0
0
0
0
0
0
0
0.037736
0.119601
301
13
69
23.153846
0.724528
0
0
0
0
0
0.139535
0
0
0
0
0
0
1
0
false
0
0.333333
0
0.333333
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
1
710eaa9bc41c92a8622f37f457e57ace8494ee57
25,385
py
Python
eran/ELINA/python_interface/elina_coeff.py
pauls658/ReluDiff-ICSE2020-Artifact
212854fe04f482183c239e5dfec70106a9a83df8
[ "Apache-2.0" ]
7
2020-01-27T21:25:49.000Z
2022-01-07T04:37:37.000Z
eran/ELINA/python_interface/elina_coeff.py
yqtianust/ReluDiff-ICSE2020-Artifact
149f6efe4799602db749faa576980c36921a07c7
[ "Apache-2.0" ]
1
2022-01-25T17:41:54.000Z
2022-01-26T02:27:51.000Z
eran/ELINA/python_interface/elina_coeff.py
yqtianust/ReluDiff-ICSE2020-Artifact
149f6efe4799602db749faa576980c36921a07c7
[ "Apache-2.0" ]
3
2020-03-14T17:12:17.000Z
2022-03-16T09:50:46.000Z
# # # This source file is part of ELINA (ETH LIbrary for Numerical Analysis). # ELINA is Copyright © 2019 Department of Computer Science, ETH Zurich # This software is distributed under GNU Lesser General Public License Version 3.0. # For more information, see the ELINA project website at: # http://elina.ethz.ch # # THE SOFTWARE IS PROVIDED "AS-IS" WITHOUT ANY WARRANTY OF ANY KIND, EITHER # EXPRESS, IMPLIED OR STATUTORY, INCLUDING BUT NOT LIMITED TO ANY WARRANTY # THAT THE SOFTWARE WILL CONFORM TO SPECIFICATIONS OR BE ERROR-FREE AND ANY # IMPLIED WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, # TITLE, OR NON-INFRINGEMENT. IN NO EVENT SHALL ETH ZURICH BE LIABLE FOR ANY # DAMAGES, INCLUDING BUT NOT LIMITED TO DIRECT, INDIRECT, # SPECIAL OR CONSEQUENTIAL DAMAGES, ARISING OUT OF, RESULTING FROM, OR IN # ANY WAY CONNECTED WITH THIS SOFTWARE (WHETHER OR NOT BASED UPON WARRANTY, # CONTRACT, TORT OR OTHERWISE). # # from elina_coeff_h import * # ====================================================================== # # Basics # ====================================================================== # def elina_coeff_alloc(discr): """ Allocate a new ElinaCoeff, using a specific type as the core. Parameters ---------- discr : c_int Discriminant specifying the type of the core of the ElinaCoeff. Returns ------- coeff : ElinaCoeffPtr Pointer to the newly allocated ElinaCoeff. """ coeff = None try: elina_coeff_alloc_c = elina_auxiliary_api.elina_coeff_alloc elina_coeff_alloc_c.restype = ElinaCoeffPtr elina_coeff_alloc_c.argtypes = [c_uint] coeff = elina_coeff_alloc_c(discr) except: print('Problem with loading/calling "elina_coeff_alloc" from "libelinaux.so"') print('Make sure you are passing c_uint to the function') return coeff def elina_coeff_reinit(coeff, coeff_discr, scalar_discr): """ Reinitialise a given ElinaCoeff, according to the provided types. Parameters ---------- coeff : ElinaCoeffPtr Pointer to the ElinaCoeff that needs to be reinitiliased. 
coeff_discr : c_uint Enum of type ElinaCoeffDiscr that defines the core of the ElinaCoeff. scalar_discr : c_uint Enum of type ElinaScalarDiscr that defines the core of the ElinaScalar (0 = double, 1 = mpq, 2 = mpfr). Returns ------- None """ try: elina_coeff_reinit_c = elina_auxiliary_api.elina_coeff_reinit elina_coeff_reinit_c.restype = None elina_coeff_reinit_c.argtypes = [ElinaCoeffPtr, c_uint, c_uint] elina_coeff_reinit_c(coeff, coeff_discr, scalar_discr) except: print('Problem with loading/calling "elina_coeff_reinit" from "libelinaux.so"') print('Make sure you are passing ElinaCoeffPtr, c_uint, c_uint to the function') def elina_coeff_free(coeff): """ Free an ElinaCoeff. Parameters ---------- coeff : ElinaCoeffPtr Pointer to the ElinaCoeff that needs to be freed. Returns ------- """ try: elina_coeff_free_c = elina_auxiliary_api.elina_coeff_free elina_coeff_free_c.restype = None elina_coeff_free_c.argtypes = [ElinaCoeffPtr] elina_coeff_free_c(coeff) except: print('Problem with loading/calling "elina_coeff_free" from "libelinaux.so"') print('Make sure you are passing ElinaCoeffPtr to the function') def elina_coeff_fprint(stream, coeff): """ Print an ElinaCoeff onto a given stream. Parameters ---------- stream : c_void_p Stream on which to print. coeff : ElinaCoeffPtr Pointer to the ElinaCoeff that needs to be printed. Returns ------- None """ try: elina_coeff_fprint_c = elina_auxiliary_api.elina_coeff_fprint elina_coeff_fprint_c.restype = None elina_coeff_fprint_c.argtypes = [c_void_p, ElinaCoeffPtr] elina_coeff_fprint_c(stream, coeff) except: print('Problem with loading/calling "elina_coeff_fprint" from "libelinaux.so"') print('Make sure you are passing c_void_p, ElinaCoeffPtr to the function') def elina_coeff_reduce(coeff): """ Reduce an ElinaCoeff of core type ElinaInterval [a, a], to an ElinaScalar. Parameters ---------- coeff : ElinaCoeffPtr Pointer to the ElinaCoeff that needs to be reduced. 
Returns ------- None """ try: elina_coeff_reduce_c = elina_auxiliary_api.elina_coeff_reduce elina_coeff_reduce_c.restype = None elina_coeff_reduce_c.argtypes = [ElinaCoeffPtr] elina_coeff_reduce_c(coeff) except: print('Problem with loading/calling "elina_coeff_reduce" from "libelinaux.so"') print('Make sure you are passing ElinaCoeffPtr to the function') # ====================================================================== # # Assignments # ====================================================================== # def elina_coeff_set(coeff1, coeff2): """ Set the value of one ElinaCoeff to the value of another ElinaCoeff. Parameters ---------- coeff1 : ElinaCoeffPtr Destination. coeff2 : ElinaCoeffPtr Source Returns ------- None """ try: elina_coeff_set_c = elina_auxiliary_api.elina_coeff_set elina_coeff_set_c.restype = None elina_coeff_set_c.argtypes = [ElinaCoeffPtr, ElinaCoeffPtr] elina_coeff_set_c(coeff1, coeff2) except: print('Problem with loading/calling "elina_coeff_set" from "libelinaux.so"') print('Make sure you are passing ElinaCoeffPtr, ElinaCoeffPtr to the function') def elina_coeff_set_scalar(coeff, scalar): """ Set the value of an ElinaCoeff with core ElinaScalar by using an ElinaScalar. Parameters ---------- coeff : ElinaCoeffPtr Destination. scalar : ElinaScalarPtr Source. Returns ------- """ try: elina_coeff_set_scalar_c = elina_auxiliary_api.elina_coeff_set_scalar elina_coeff_set_scalar_c.restype = None elina_coeff_set_scalar_c.argtypes = [ElinaCoeffPtr, ElinaScalarPtr] elina_coeff_set_scalar_c(coeff, scalar) except: print('Problem with loading/calling "elina_coeff_set_scalar" from "libelinaux.so"') print('Make sure you are passing ElinaCoeffPtr, ElinaScalarPtr to the function') def elina_coeff_set_scalar_mpq(coeff, mpq_t): """ Set the value of an ElinaCoeff with core ElinaScalar by using a Mpq_t. Parameters ---------- coeff : ElinaCoeffPtr Destination. mpq_t : Mpq_t Source. 
Returns ------- None """ try: elina_coeff_set_scalar_mpq_c = elina_auxiliary_api.elina_coeff_set_scalar_mpq elina_coeff_set_scalar_mpq_c.restype = None elina_coeff_set_scalar_mpq_c.argypes = [ElinaCoeffPtr, Mpq_t] elina_coeff_set_scalar_mpq_c(coeff, mpq_t) except: print('Problem with loading/calling "elina_coeff_set_scalar_mpq_c" from "libelinaux.so"') print('Make sure you are passing ElinaCoeffPtr, Mpq_t to the function') def elina_coeff_set_scalar_int(coeff, num): """ Set the value of an ElinaCoeff with core ElinaScalar by using a long integer. Parameters ---------- coeff : ElinaCoefPtr Destination. num : c_long Source. Returns ------- None """ try: elina_coeff_set_scalar_int_c = elina_auxiliary_api.elina_coeff_set_scalar_int elina_coeff_set_scalar_int_c.restype = None elina_coeff_set_scalar_int_c.argypes = [ElinaCoeffPtr, c_long] elina_coeff_set_scalar_int_c(coeff, num) except: print('Problem with loading/calling "elina_coeff_set_scalar_int" from "libelinaux.so"') print('Make sure you are passing ElinaCoeffPtr, c_long to the function') def elina_coeff_set_scalar_frac(coeff, num, den): """ Set the value of an ElinaCoeff with core ElinaScalar by using fraction of two long integers. Parameters ---------- coeff : ElinaCoeffPtr Destination. num : c_long Source. den : c_ulong Source. Returns ------- None """ try: elina_coeff_set_scalar_frac_c = elina_auxiliary_api.elina_coeff_set_scalar_frac elina_coeff_set_scalar_frac_c.restype = None elina_coeff_set_scalar_frac_c.argypes = [ElinaCoeffPtr, c_long, c_ulong] elina_coeff_set_scalar_frac_c(coeff, num, den) except: print('Problem with loading/calling "elina_coeff_set_scalar_frac" from "libelinaux.so"') print('Make sure you are passing ElinaCoeffPtr, c_long, c_ulong to the function') def elina_coeff_set_scalar_double(coeff, num): """ Set the value of an ElinaCoeff with core ElinaScalar by using a double. Parameters ---------- coeff : ElinaCoeffPtr Destination. num : c_double Source. 
Returns ------- None """ try: elina_coeff_set_scalar_double_c = elina_auxiliary_api.elina_coeff_set_scalar_double elina_coeff_set_scalar_double_c.restype = None elina_coeff_set_scalar_double_c.argypes = [ElinaCoeffPtr, c_double] elina_coeff_set_scalar_double_c(coeff, num) except: print('Problem with loading/calling "elina_coeff_set_scalar_double" from "libelinaux.so"') print('Make sure you are passing ElinaCoeffPtr, c_double to the function') def elina_coeff_set_scalar_mpfr(coeff, mpfr_t): """ Set the value of an ElinaCoeff with core ElinaScalar by using a Mpfr_t. Parameters ---------- coeff : ElinaCoeffPtr Destination. mpfr_t : Mpfr_t Source. Returns ------- None """ try: elina_coeff_set_scalar_mpfr_c = elina_auxiliary_api.elina_coeff_set_scalar_mpfr elina_coeff_set_scalar_mpfr_c.restype = None elina_coeff_set_scalar_mpfr_c.argtypes = [ElinaCoeffPtr, Mpfr_t] elina_coeff_set_scalar_mpfr_c(coeff, mpfr_t) except: print('Problem with loading/calling "elina_coeff_set_scalar_mpfr" from "libelinaux.so"') print('Make sure you are passing ElinaCoeffPtr, Mpfr_t to the function') def elina_coeff_set_interval(coeff, interval): """ Set the value of an ElinaCoeff with core ElinaInterval by using an ElinaInterval Parameters ---------- coeff : ElinaCoeffPtr Destination. interval : ElinaIntervalPtr Source. Returns ------- None """ try: elina_coeff_set_interval_c = elina_auxiliary_api.elina_coeff_set_interval elina_coeff_set_interval_c.restype = None elina_coeff_set_interval_c.argtypes = [ElinaCoeffPtr, ElinaIntervalPtr] elina_coeff_set_interval_c(coeff, interval) except: print('Problem with loading/calling "elina_coeff_set_interval" from "libelinaux.so"') print('Make sure you are passing ElinaCoeffPtr, ElinaIntervalPtr to the function') def elina_coeff_set_interval_scalar(coeff, inf, sup): """ Set the value of an ElinaCoeff with core ElinaInterval by using two ElinaScalar-s. Parameters ---------- coeff : ElinaCoeffPtr Destination. inf : ElinaScalarPtr Source. 
sup : ElinaScalarPtr Source. Returns ------- None """ try: elina_coeff_set_interval_scalar_c = elina_auxiliary_api.elina_coeff_set_interval_scalar elina_coeff_set_interval_scalar_c.restype = None elina_coeff_set_interval_scalar_c.argtypes = [ElinaCoeffPtr, ElinaScalarPtr, ElinaScalarPtr] elina_coeff_set_interval_scalar_c(coeff, inf, sup) except: print('Problem with loading/calling "elina_coeff_set_interval_scalar" from "libelinaux.so"') print('Make sure you are passing ElinaCoeffPtr, ElinaScalarPtr, ElinaScalarPtr to the function') def elina_coeff_set_interval_mpq(coeff, inf, sup): """ Set the value of an ElinaCoeff with core ElinaInterval by using two Mpq_t-s. Parameters ---------- coeff : ElinaCoeffPtr Destination. inf : Mpq_t Source. sup : Mpq_t Source. Returns ------- None """ try: elina_coeff_set_interval_mpq_c = elina_auxiliary_api.elina_coeff_set_interval_mpq elina_coeff_set_interval_mpq_c.restype = None elina_coeff_set_interval_mpq_c.argypes = [ElinaCoeffPtr, Mpq_t, Mpq_t] elina_coeff_set_interval_mpq_c(coeff, inf, sup) except: print('Problem with loading/calling "elina_coeff_set_interval_mpq" from "libelinaux.so"') print('Make sure you are passing ElinaCoeffPtr, Mpq_t, Mpq_t to the function') def elina_coeff_set_interval_int(coeff, inf, sup): """ Set the value of an ElinaCoeff with core ElinaInterval by using two long integers. Parameters ---------- coeff : ElinaCoeffPtr Destination. inf : c_long Source. sup : c_long Source. 
Returns ------- None """ try: elina_coeff_set_interval_int_c = elina_auxiliary_api.elina_coeff_set_interval_int elina_coeff_set_interval_int_c.restype = None elina_coeff_set_interval_int_c.argtypes = [ElinaCoeffPtr, c_long, c_long] elina_coeff_set_interval_int_c(coeff, inf, sup) except: print('Problem with loading/calling "elina_coeff_set_interval_int" from "libelinaux.so"') print('Make sure you are passing ElinaCoeffPtr, c_long, c_long to the function') def elina_coeff_set_interval_frac(coeff, numinf, deninf, numsup, densup): """ Set the value of an ElinaCoeff with core ElinaInterval by using two pairs of long integers as fractions. Parameters ---------- coeff : ElinaCoeffPtr Destination. numinf : c_long Source. deninf : c_ulong Source. numsup : c_long Source. densup : c_ulong Source. Returns ------- None """ try: elina_coeff_set_interval_frac_c = elina_auxiliary_api.elina_coeff_set_interval_frac elina_coeff_set_interval_frac_c.restype = None elina_coeff_set_interval_frac_c.argtypes = [ElinaCoeffPtr, c_long, c_ulong, c_long, c_ulong] elina_coeff_set_interval_frac_c(coeff, numinf, deninf, numsup, densup) except: print('Problem with loading/calling "elina_coeff_set_interval_frac" from "libelinaux.so"') print('Make sure you are passing ElinaCoeffPtr, c_long, c_ulong, c_long, c_ulong to the function') def elina_coeff_set_interval_double(coeff, inf, sup): """ Set the value of an ElinaCoeff with core ElinaInterval by using two double-s. Parameters ---------- coeff : ElinaCoeffPtr Destination. inf : c_double Source. sup : c_double Source. 
Returns ------- None """ try: elina_coeff_set_interval_double_c = elina_auxiliary_api.elina_coeff_set_interval_double elina_coeff_set_interval_double_c.restype = None elina_coeff_set_interval_double_c.argtypes = [ElinaCoeffPtr, c_double, c_double] elina_coeff_set_interval_double_c(coeff, inf, sup) except: print('Problem with loading/calling "elina_coeff_set_interval_double" from "libelinaux.so"') print('Make sure you are passing ElinaCoeffPtr, c_double, c_double to the function') def elina_coeff_set_interval_top(coeff): """ Set the value of an ElinaCoeff with core ElinaInterval by using the universe interval [-oo, +oo]. Parameters ---------- coeff : ElinaCoeffPtr Destination. Returns ------- None """ try: elina_coeff_set_interval_top_c = elina_auxiliary_api.elina_coeff_set_interval_top elina_coeff_set_interval_top_c.restype = None elina_coeff_set_interval_top_c.argtypes = [ElinaCoeffPtr] elina_coeff_set_interval_top_c(coeff) except: print('Problem with loading/calling "elina_coeff_set_interval_top" from "libelinaux.so"') print('Make sure you are passing ElinaCoeffPtr to the function') def elina_coeff_set_interval_mpfr(coeff, inf, sup): """ Set the value of an ElinaCoeff with core ElinaInterval by using two Mpfr_t-s. Parameters ---------- coeff : ElinaCoeffPtr Destination. inf : Mpfr_t Source. sup : Mpfr_t Source. 
Returns ------- None """ try: elina_coeff_set_interval_mpfr_c = elina_auxiliary_api.elina_coeff_set_interval_mpfr elina_coeff_set_interval_mpfr_c.restype = None elina_coeff_set_interval_mpfr_c.argtypes = [ElinaCoeffPtr, Mpfr_t, Mpfr_t] elina_coeff_set_interval_mpfr_c(coeff, inf, sup) except: print('Problem with loading/calling "elina_coeff_set_interval_mpfr" from "libelinaux.so"') print('Make sure you are passing ElinaCoeffPtr, Mpfr_t, Mpfr_t to the function') # ====================================================================== # # Combined allocation and assignment # ====================================================================== # def elina_coeff_alloc_set(coeff2): """ Allocate a new ElinaCoeff and initialise it with another ElinaCoeff. Parameters ---------- coeff2 : ElinaCoeffPtr Pointer to the ElinaCoeff used for initialisation. Returns ------- coeff1: ElinaCoeffPtr Pointer to the newly allocated and initialised ElinaCoeff. """ coeff1 = None try: elina_coeff_alloc_set_c = elina_auxiliary_api.elina_coeff_alloc_set elina_coeff_alloc_set_c.restype = ElinaCoeffPtr elina_coeff_alloc_set_c.argtypes = [ElinaCoeffPtr] coeff1 = elina_coeff_alloc_set_c(coeff2) except: print('Problem with loading/calling "elina_coeff_alloc_set" from "libelinaux.so"') print('Make sure you are passing ElinaCoeffPtr to the function') return coeff1 def elina_coeff_alloc_set_scalar(scalar): """ Allocate a new ElinaCoeff and initialise it with an ElinaScalar. Parameters ---------- scalar : ElinaScalarPtr Pointer to the ElinaScalar used for initialisation. Returns ------- coeff : ElinaCoeffPtr Pointer to the newly allocated and initialised ElinaCoeff. 
""" coeff = None try: elina_coeff_alloc_set_scalar_c = elina_auxiliary_api.elina_coeff_alloc_set_scalar elina_coeff_alloc_set_scalar_c.restype = None elina_coeff_alloc_set_scalar_c.argtypes = [ElinaScalarPtr] coeff = elina_coeff_alloc_set_scalar_c(scalar) except: print('Problem with loading/calling "elina_coeff_alloc_set_scalar" from "libelinaux.so"') print('Make sure you are passing ElinaScalarPtr to the function') return coeff def elina_coeff_alloc_set_interval(interval): """ Allocate a new ElinaCoeff and initialise it with an ElinaInterval. Parameters ---------- interval : ElinaIntervalPtr Pointer to the ElinaInterval used for initialisation. Returns ------- coeff : ElinaCoeffPtr Pointer to the newly allocated and initialised ElinaCoeff. """ coeff = None try: elina_coeff_alloc_set_interval_c = elina_auxiliary_api.elina_coeff_alloc_set_interval elina_coeff_alloc_set_interval_c.restype = ElinaCoeffPtr elina_coeff_alloc_set_interval_c.argtypes = [ElinaIntervalPtr] elina_coeff_alloc_set_interval_c(interval) except: print('Problem with loading/calling "elina_coeff_alloc_set_interval" from "libelinaux.so"') print('Make sure you are passing ElinaIntervalPtr to the function') return coeff # ====================================================================== # # Tests # ====================================================================== # def elina_coeff_cmp(coeff1, coeff2): """ Compare an ElinaCoeff with another ElinaCoeff. Parameters ---------- coeff1 : ElinaCoeffPtr Pointer to the ElinaCoeff that needs to be compared. coeff2 : ElinaCoeffPtr Pointer to the ElinaCoeff that needs to be compared. Returns ------- result : c_int The result of the comparison. 
Return corresponding to elina_scalar_cmp if the two ElinaCoeff-s have ElinaScalar core corresponding to elina_interval_cmp if the two ElinaCoeff-s have ElinaInterval core -3 if the first ElinaCoeff has an ElinaScalar core +3 if the second ElinaCoeff has an ElinaScalar core """ result = None try: elina_coeff_cmp_c = elina_auxiliary_api.elina_coeff_cmp elina_coeff_cmp_c.restype = c_int elina_coeff_cmp_c.argtypes = [ElinaCoeffPtr, ElinaCoeffPtr] result = elina_coeff_cmp_c(coeff1, coeff2) except: print('Problem with loading/calling "elina_coeff_cmp" from "libelinaux.so"') print('Make sure you are passing ElinaCoeffPtr, ElinaCoeffPtr to the function') return result def elina_coeff_equal(coeff1, coeff2): """ Test if an ElinaCoeff is equal to another ElinaCoeff. Parameters ---------- coeff1 : ElinaCoeffPtr Pointer to the ElinaCoeff that needs to be tested for equality. coeff2 : ElinaCoefPtr Pointer to the ElinaCoeff that needs to be tested for equality. Returns ------- result : c_bool Result of the equality test. """ result = None try: elina_coeff_equal_c = elina_auxiliary_api.elina_coeff_equal elina_coeff_equal_c.restype = c_bool elina_coeff_equal_c.argtypes = [ElinaCoeffPtr, ElinaCoeffPtr] result = elina_coeff_equal_c(coeff1, coeff2) except: print('Problem with loading/calling "elina_coeff_equal" from "libelinaux.so"') print('Make sure you are passing ElinaCoeffPtr, ElinaCoeffPtr to the function') return result def elina_coeff_zero(coeff): """ Test if an ElinaCoeff is a zero ElinaScalar or an ElinaInterval with zero bounds. Parameters ---------- coeff : ElinaCoefPtr Pointer to the ElinaCoeff that needs to be tested. Returns ------- result : c_bool Result of the zero test. 
""" result = None try: elina_coeff_zero_c = elina_auxiliary_api.elina_coeff_zero elina_coeff_zero_c.restype = c_int elina_coeff_zero_c.argtypes = [ElinaCoeffPtr] result = elina_coeff_zero_c(coeff) except: print('Problem with loading/calling "elina_coeff_zero" from "libelinaux.so"') print('Make sure you are passing ElinaCoeffPtr to the function') return result def elina_coeff_equal_int(coeff, i): """ Test if an ElinaCoeff is equal to an integer. Parameters ---------- coeff : ElinaCoeffPtr Pointer to the ElinaCoeff that needs to be tested for equality. i : c_int Integer that needs to be tested for equality. Returns ------- result : c_bool Result of the equality test. """ result = None try: elina_coeff_equal_int_c = elina_auxiliary_api.elina_coeff_equal_int elina_coeff_equal_int_c.restype = c_bool elina_coeff_equal_int_c.argtypes = [ElinaCoeffPtr, c_int] result = elina_coeff_equal_int_c(coeff, i) except: print('Problem with loading/calling "elina_coeff_equal_int" from "libelinaux.so"') print('Make sure you are passing ElinaCoeffPtr, c_int to the function') return result # ====================================================================== # # Other operations # ====================================================================== # def elina_coeff_neg(coeff1, coeff2): """ Set the value of an ElinaCoeff to the negative of another ElinaCoeff. Parameters ---------- coeff1 : ElinaCoeffPtr Destination. coeff2 : ElinaCoeffPtr Source. Returns ------- None """ try: elina_coeff_neg_c = elina_auxiliary_api.elina_coeff_neg elina_coeff_neg_c.restype = None elina_coeff_neg_c.argtypes = [ElinaCoeffPtr, ElinaCoeffPtr] elina_coeff_neg_c(coeff1, coeff2) except: print('Problem with loading/calling "elina_coeff_neg" from "libelinaux.so"') print('Make sure you are passing ElinaCoeffPtr, ElinaCoeffPtr to the function') def elina_coeff_hash(coeff): """ Calculate the hash code of an ElinaCoeff. 
Parameters ---------- coeff : ElinaCoeffPtr Pointer to the ElinaCoeff that needs to be hashed. Returns ------- result : c_long The resulting hash. """ result = None try: elina_coeff_hash_c = elina_auxiliary_api.elina_coeff_hash elina_coeff_hash_c.restype = c_long elina_coeff_hash_c.argtypes = [ElinaCoeffPtr] result = elina_coeff_hash_c(coeff) except: print('Problem with loading/calling "elina_coeff_hash" from "libelinaux.so"') print('Make sure you are passing c_long to the function') return result
29.144661
111
0.658105
3,108
25,385
5.08269
0.078507
0.129138
0.086409
0.074445
0.796164
0.708616
0.604545
0.509401
0.433943
0.365702
0
0.002174
0.238842
25,385
870
112
29.178161
0.81534
0.343274
0
0.296429
0
0
0.272679
0.03504
0
0
0
0
0
1
0.103571
false
0.103571
0.003571
0
0.139286
0.225
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
0
0
0
0
0
2
711002166da834a891f4c7b037ed34b457c09408
936
py
Python
utils/gatekeeper.py
whiletrace/dwellinglybackend
e766b3d612b4c92fd337b82498ab8ef68bd95e1f
[ "MIT" ]
15
2020-07-09T20:51:09.000Z
2021-11-28T21:59:02.000Z
utils/gatekeeper.py
codeforpdx/dwellinglybackend
92fee6d19a68ae00750927b8700eaa7195b57668
[ "MIT" ]
148
2020-03-28T22:10:30.000Z
2021-12-19T09:22:59.000Z
utils/gatekeeper.py
whiletrace/dwellinglybackend
e766b3d612b4c92fd337b82498ab8ef68bd95e1f
[ "MIT" ]
30
2020-03-12T02:31:27.000Z
2021-07-29T02:40:36.000Z
from flask import current_app, request from functools import wraps from typing import Set def allowed_params(params: Set[str]): def decorator(endpoint): @wraps(endpoint) def verify_fields(*args, **kwargs): if not (request.json.keys() <= params): return invalid_field_error(params) return endpoint(*args, **kwargs) return verify_fields return decorator def invalid_field_error(field_set): if current_app.env == "production": return {"message": "Invalid request field"}, 400 else: raise GatekeeperError(field_set) class GatekeeperError(Exception): def __init__(self, field_set, message="Invalid request field"): self.field_set = field_set self.message = message super().__init__(self.message) def __str__(self): return f"{self.message}: request fields must be one or more of {self.field_set}"
27.529412
88
0.66453
114
936
5.219298
0.412281
0.080672
0.060504
0.087395
0
0
0
0
0
0
0
0.004213
0.239316
936
33
89
28.363636
0.831461
0
0
0
0
0
0.137821
0
0
0
0
0
0
1
0.25
false
0
0.125
0.041667
0.666667
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
1
0
0
2
7113e4f6820f3436a0a547fe628d3433163dbab4
2,756
py
Python
single_preprocessing.py
OpenVessel/RedTinSaintBernard-for-BraTS2021-challenge
dafe6f33ff6269869377d01a014ab1528b0f1c1d
[ "MIT" ]
null
null
null
single_preprocessing.py
OpenVessel/RedTinSaintBernard-for-BraTS2021-challenge
dafe6f33ff6269869377d01a014ab1528b0f1c1d
[ "MIT" ]
null
null
null
single_preprocessing.py
OpenVessel/RedTinSaintBernard-for-BraTS2021-challenge
dafe6f33ff6269869377d01a014ab1528b0f1c1d
[ "MIT" ]
null
null
null
import os
import pandas as pd
from brats_toolkit.preprocessor import Preprocessor

# Instantiate the toolkit preprocessor (its single_preprocess call runs docker).
prep = Preprocessor()

# Mapping / survival metadata shipped alongside the BraTS 2020 training data.
name_mapping = r"E:\Datasets\BraTS challenge\MICCAI_BraTS2020_TrainingData\name_mapping.csv"
survival_info = r"E:\Datasets\BraTS challenge\MICCAI_BraTS2020_TrainingData\survival_info.csv"
df_name_mapping = pd.read_csv(name_mapping)
df_survival_info = pd.read_csv(survival_info)

root_path_train = r"E:\Datasets\BraTS challenge\MICCAI_BraTS2020_TrainingData"
outputDir = r"E:\Datasets\BraTS challenge\Output\Output_training"

for name_of_file in os.listdir(root_path_train):
    # The metadata csv files sit next to the exam folders; skip them.
    if name_of_file.endswith('.csv'):
        continue

    exam_dir = os.path.join(root_path_train, name_of_file)
    # Absolute paths of the exam's volumes, in deterministic (sorted) name
    # order: index 0 flair, 1 seg, 2 t1, 3 t1ce, 4 t2.
    list_sort = sorted(os.path.join(exam_dir, entry)
                       for entry in os.listdir(exam_dir))

    # NOTE(review): `outpath` is computed but never used below — the call
    # passes `outputDir` instead. Confirm which destination is intended.
    outpath = os.path.join(outputDir, name_of_file)

    examName = name_of_file
    flaFile = list_sort[0]   # *_flair.nii.gz
    t1File = list_sort[2]    # *_t1.nii.gz (index 1 is the segmentation volume)
    t1cFile = list_sort[3]   # *_t1ce.nii.gz
    t2File = list_sort[4]    # *_t2.nii.gz

    # dcm2niix conversion etc.; this call drives a docker container.
    prep.single_preprocess(t1File=t1File, t1cFile=t1cFile, t2File=t2File,
                           flaFile=flaFile, outputFolder=outputDir,
                           mode="cpu", confirm=True, skipUpdate=False,
                           gpuid='0')

# Expected outputs per exam:
#   hdbet_brats-space, hdbet_native-space, mask_hdbet_brats-space,
#   masks_hdbet-space, niftis_brats-space, png_slices, registrations
38.816901
171
0.759071
382
2,756
5.159686
0.324607
0.076104
0.091324
0.105023
0.307966
0.295789
0.295789
0.295789
0.218163
0.218163
0
0.047557
0.16074
2,756
70
172
39.371429
0.804583
0.445573
0
0
0
0
0.176353
0.122912
0
0
0
0
0
1
0
false
0
0.083333
0
0.083333
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
71165b6a3988566ca2a96415e483f35d734b5e5c
2,081
py
Python
src/gbkfit/params/params.py
bek0s/gbkf
2ef56e8554fdf7ee8136141bc16d59bd34efa6c8
[ "BSD-3-Clause" ]
6
2016-06-26T16:52:25.000Z
2022-03-22T06:37:13.000Z
src/gbkfit/params/params.py
bek0s/gbkf
2ef56e8554fdf7ee8136141bc16d59bd34efa6c8
[ "BSD-3-Clause" ]
4
2016-07-30T08:11:20.000Z
2021-02-12T08:33:42.000Z
src/gbkfit/params/params.py
bek0s/gbkf
2ef56e8554fdf7ee8136141bc16d59bd34efa6c8
[ "BSD-3-Clause" ]
2
2015-05-18T03:21:56.000Z
2015-09-21T06:16:18.000Z
import copy import numbers from gbkfit.params.interpreter import ( Interpreter, load_exprs_file, dump_exprs_file) from gbkfit.params.utils import parse_param_values_strict from gbkfit.utils import parseutils def load_params_info_common(cls, info): desc = parseutils.make_basic_desc(cls, 'params') opts = parseutils.parse_options_for_callable( info, desc, cls.__init__, fun_ignore_args=['descs']) if 'expressions' in opts: opts['expressions'] = load_exprs_file(opts['expressions']) return opts def dump_params_info_common(params, exprs_file): info = dict(parameters=params.parameters()) exprs_func_gen = params.interpreter().exprs_func_gen() exprs_func_obj = params.interpreter().exprs_func_obj() exprs_func_src = params.interpreter().exprs_func_src() if not exprs_func_gen and exprs_func_src: info['expressions'] = dump_exprs_file( exprs_file, exprs_func_obj, exprs_func_src) return info class EvalParams(parseutils.BasicParserSupport): @classmethod def load(cls, info, descs): opts = load_params_info_common(cls, info) return cls(descs, **opts) def dump(self, exprs_file): return dump_params_info_common(self, exprs_file) def __init__(self, descs, parameters, expressions=None): super().__init__() value_type = (type(None), numbers.Real) values, exprs = parse_param_values_strict(descs, parameters, value_type) self._descs = copy.deepcopy(descs) self._infos = values self._parameters = copy.deepcopy(parameters) self._interpreter = Interpreter(descs, values | exprs, expressions) def descs(self): return self._descs def infos(self): return self._infos def parameters(self): return self._parameters def interpreter(self): return self._interpreter def load_eval_params(info, descs): return EvalParams.load(info, descs) def dump_eval_params(params, exprs_file='gbkfit_config_expressions.py'): return params.dump(exprs_file)
29.309859
80
0.710716
262
2,081
5.312977
0.229008
0.064655
0.045977
0.056034
0.073276
0.073276
0
0
0
0
0
0
0.19606
2,081
70
81
29.728571
0.832038
0
0
0
0
0
0.039904
0.013462
0
0
0
0
0
1
0.215686
false
0
0.098039
0.137255
0.529412
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
0
0
0
2
7116b439a184e893f7256cd540dd3d4a730960fe
4,263
py
Python
infer/lib/capture/make.py
vaginessa/infer
553d39eb7d7663fb8762d368feb3b824416f37a1
[ "BSD-3-Clause" ]
null
null
null
infer/lib/capture/make.py
vaginessa/infer
553d39eb7d7663fb8762d368feb3b824416f37a1
[ "BSD-3-Clause" ]
null
null
null
infer/lib/capture/make.py
vaginessa/infer
553d39eb7d7663fb8762d368feb3b824416f37a1
[ "BSD-3-Clause" ]
null
null
null
import argparse
import os
import subprocess
import traceback

MODULE_NAME = 'make/cc/clang/gcc'
MODULE_DESCRIPTION = '''Run analysis of code built with commands like:

make [target]
clang [compiler_options] <filename>
gcc [compiler_options] <filename>
cc [compiler_options] <filename>

Analysis examples:
infer -- make all
infer -- clang -c srcfile.m
infer -- gcc -c srcfile.c'''


def gen_instance(*args):
    """Factory entry point used by the infer top-level module."""
    return MakeCapture(*args)


def mkdir_if_not_exists(path):
    """Create `path` as a directory unless it already exists."""
    if not os.path.exists(path):
        os.mkdir(path)


def create_argparser(group_name=MODULE_NAME):
    """This defines the set of arguments that get added by this module to the
    set of global args defined in the infer top-level module
    Do not use this function directly, it should be invoked by the infer
    top-level module"""
    arg_parser = argparse.ArgumentParser(add_help=False)
    module_group = arg_parser.add_argument_group(
        "{grp} module".format(grp=MODULE_NAME),
        description=MODULE_DESCRIPTION,
    )
    module_group.add_argument(
        '-hd', '--headers',
        action='store_true',
        help='Analyze code in header files',
    )
    module_group.add_argument(
        '--models_mode',
        dest='models_mode',
        action='store_true',
        help='Mode for computing the models',
    )
    module_group.add_argument(
        '--no_failures_allowed',
        dest='no_failures_allowed',
        action='store_true',
        help='Fail if at least one of the translations fails',
    )
    module_group.add_argument(
        '-tm', '--testing_mode',
        dest='testing_mode',
        action='store_true',
        help='Testing mode for the translation: Do not translate libraries'
             ' (including enums)')
    module_group.add_argument(
        '-fs', '--frontend-stats',
        dest='frontend_stats',
        action='store_true',
        help='Output statistics about the capture phase to *.o.astlog')
    module_group.add_argument(
        '-fd', '--frontend-debug',
        dest='frontend_debug',
        action='store_true',
        help='Output debugging information to *.o.astlog during capture')
    return arg_parser


class MakeCapture:
    """Capture-phase driver: re-runs the build command with infer's compiler
    wrappers placed first on PATH so cc/clang/gcc calls are intercepted."""

    def __init__(self, args, cmd):
        self.args = args
        # Keep only the basename of the build tool plus its original arguments.
        self.cmd = [os.path.basename(cmd[0])] + cmd[1:]

    def create_results_dir(self):
        """Ensure the infer output directory and its subdirectories exist."""
        root = self.args.infer_out
        mkdir_if_not_exists(root)
        for subdir in ('specs', 'captured', 'sources'):
            mkdir_if_not_exists(os.path.join(root, subdir))

    def get_envvars(self):
        """Build the child-process environment with the wrappers prepended to PATH."""
        env = dict(os.environ)
        env['INFER_RESULTS_DIR'] = self.args.infer_out
        wrappers_path = os.path.join(
            os.path.dirname(os.path.realpath(__file__)), '..', 'wrappers')
        env['INFER_OLD_PATH'] = env['PATH']
        env['PATH'] = '{wrappers}{sep}{path}'.format(
            wrappers=wrappers_path,
            sep=os.pathsep,
            path=env['PATH'],
        )
        return env

    def capture(self):
        """Run the build under the wrappers; return the exit code."""
        self.create_results_dir()
        env = self.get_envvars()

        fe_args = []
        if self.args.headers:
            fe_args.append('-headers')
        if self.args.models_mode:
            fe_args.append('-models_mode')
        if self.args.project_root:
            fe_args += ['-project_root', self.args.project_root]
        if self.args.testing_mode:
            fe_args.append('-testing_mode')
        if self.args.frontend_debug:
            fe_args.append('-debug')
            env['FCP_DEBUG_MODE'] = '1'
        if self.args.frontend_stats:
            fe_args.append('-stats')
            env['FCP_DEBUG_MODE'] = '1'
        if self.args.no_failures_allowed:
            env['FCP_REPORT_FRONTEND_FAILURE'] = '1'

        # export an env variable with all the arguments to pass to InferClang
        env['FCP_INFER_FRONTEND_ARGS'] = ' '.join(fe_args)

        try:
            subprocess.check_call(self.cmd, env=env)
            return os.EX_OK
        except subprocess.CalledProcessError as exc:
            if self.args.debug:
                traceback.print_exc()
            return exc.returncode
32.295455
77
0.630776
536
4,263
4.787313
0.311567
0.04053
0.031177
0.031177
0.122369
0.085737
0.065472
0.065472
0.065472
0
0
0.001575
0.255454
4,263
131
78
32.541985
0.806868
0.066151
0
0.12844
0
0
0.262932
0.023215
0
0
0
0
0
1
0.06422
false
0
0.036697
0.009174
0.155963
0.009174
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
7116ca2e4f0dcb2bd507fa78836458daf8085478
2,315
py
Python
projects/Doodle/Alexander/code/train/utils.py
liaopeiyuan/ml-arsenal-public
f8938ce3cb58b35fc7cc20d096c39a85ec9780b2
[ "Apache-2.0" ]
280
2018-10-21T01:07:18.000Z
2021-12-30T11:29:48.000Z
projects/Doodle/YourVenn_code/code/train/utils.py
liaopeiyuan/ml-arsenal-public
f8938ce3cb58b35fc7cc20d096c39a85ec9780b2
[ "Apache-2.0" ]
3
2018-11-13T08:04:48.000Z
2020-04-17T09:20:03.000Z
projects/Doodle/YourVenn_code/code/train/utils.py
liaopeiyuan/ml-arsenal-public
f8938ce3cb58b35fc7cc20d096c39a85ec9780b2
[ "Apache-2.0" ]
59
2018-10-21T04:38:23.000Z
2021-03-29T07:58:47.000Z
from common import * from torch.autograd import Variable def to_var(x, volatile=False): if torch.cuda.is_available(): x = x.cuda() return Variable(x, volatile=volatile) def softmax_cross_entropy_criterion(logit, truth, is_average=True): loss = F.cross_entropy(logit, truth, reduce=is_average) return loss def metric(logit, truth, is_average=True): # with torch.no_grad(): prob = F.softmax(logit, 1) value, top = prob.topk(3, dim=1, largest=True, sorted=True) correct = top.eq(truth.view(-1, 1).expand_as(top)) if is_average==True: # top-3 accuracy correct = correct.float().sum(0, keepdim=False) correct = correct/len(truth) top = [correct[0], correct[0]+correct[1], correct[0]+correct[1]+correct[2]] precision = correct[0]/1 + correct[1]/2 + correct[2]/3 return precision, top else: return correct def do_valid( net, valid_loader, criterion ): valid_num = 0 probs = [] truths = [] losses = [] corrects = [] for input, truth, _ in valid_loader: input = input.cuda() truth = truth.cuda() input = to_var(input) truth = to_var(truth) logit = net(input) prob = F.softmax(logit,1) loss = criterion(logit, truth, False) correct = metric(logit, truth, False) valid_num += len(input) probs.append(prob.data.cpu().numpy()) losses.append(loss.data.cpu().numpy()) corrects.append(correct.data.cpu().numpy()) truths.append(truth.data.cpu().numpy()) assert(valid_num == len(valid_loader.sampler)) #------------------------------------------------------ prob = np.concatenate(probs) correct = np.concatenate(corrects) truth = np.concatenate(truths).astype(np.int32).reshape(-1,1) loss = np.concatenate(losses) #--- #top = np.argsort(-predict,1)[:,:3] loss = loss.mean() correct = correct.mean(0) top = [correct[0], correct[0]+correct[1], correct[0]+correct[1]+correct[2]] precision = correct[0]/1 + correct[1]/2 + correct[2]/3 #---- valid_loss = np.array([ loss, top[0], top[2], precision ]) return valid_loss
30.866667
84
0.570626
290
2,315
4.472414
0.282759
0.049345
0.069391
0.049345
0.197379
0.134156
0.134156
0.134156
0.134156
0.134156
0
0.024662
0.264363
2,315
75
85
30.866667
0.736935
0.056587
0
0.111111
0
0
0
0
0
0
0
0
0.018519
1
0.074074
false
0
0.037037
0
0.203704
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
71184f8e2f9e3b802d08210e84b8fd4a03eb2e43
1,281
py
Python
day14/a.py
Cefqrn/advent-of-code-2021
1979f3cff981cfe1a5d59d39ec02f104b0e27abd
[ "MIT" ]
null
null
null
day14/a.py
Cefqrn/advent-of-code-2021
1979f3cff981cfe1a5d59d39ec02f104b0e27abd
[ "MIT" ]
null
null
null
day14/a.py
Cefqrn/advent-of-code-2021
1979f3cff981cfe1a5d59d39ec02f104b0e27abd
[ "MIT" ]
null
null
null
import os from collections import defaultdict with open(os.path.join(os.path.dirname(__file__), "input")) as f: data = f.read().split('\n\n') template, rules = data rules = [x.split(' -> ') for x in rules.splitlines()] rules = dict(rules) pair_counts = defaultdict(int) for i, pair in enumerate(zip(template, template[1:])): pair_counts[''.join(pair)] += 1 rules2 = {} for pair, inserted_char in rules.items(): rules2[pair] = (pair[0] + inserted_char, inserted_char + pair[1]) for x in range(10): for pair, count in tuple(pair_counts.items()): if pair in rules2 and count: for pair2 in rules2[pair]: pair_counts[pair2] += count pair_counts[pair] -= count c = defaultdict(int) for pair, count in pair_counts.items(): c[pair[1]] += count print(c[max(c, key=lambda x: c[x])] - c[min(c, key=lambda x: c[x])]) for x in range(30): for pair, count in tuple(pair_counts.items()): if pair in rules2 and count: for pair2 in rules2[pair]: pair_counts[pair2] += count pair_counts[pair] -= count c = defaultdict(int) for pair, count in pair_counts.items(): c[pair[1]] += count print(c[max(c, key=lambda x: c[x])] - c[min(c, key=lambda x: c[x])])
30.5
69
0.613583
200
1,281
3.845
0.255
0.130039
0.062419
0.072822
0.543563
0.543563
0.543563
0.543563
0.543563
0.543563
0
0.020305
0.231069
1,281
42
70
30.5
0.760406
0
0
0.545455
0
0
0.01014
0
0
0
0
0
0
1
0
false
0
0.060606
0
0.060606
0.060606
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
71187322e743030c13b6dd0566757709045bdff7
3,793
py
Python
app/arguments.py
calio/taski
c06346d7e3600f41b1347c6d9f73616f17b226e4
[ "MIT" ]
null
null
null
app/arguments.py
calio/taski
c06346d7e3600f41b1347c6d9f73616f17b226e4
[ "MIT" ]
1
2021-06-01T22:24:59.000Z
2021-06-01T22:24:59.000Z
app/arguments.py
calio/taski
c06346d7e3600f41b1347c6d9f73616f17b226e4
[ "MIT" ]
null
null
null
# -*- coding: utf-8 -*-

import os
import sys
import six
import argparse

import app
import app.taski as taski


def check_positive_int(val):
    """Make sure input argument is an positive integer.

    Raises argparse.ArgumentTypeError when `val` does not parse to an
    integer strictly greater than zero (argparse `type=` contract).
    """
    ival = int(val)
    if ival <= 0:
        raise argparse.ArgumentTypeError("%s is not a positive integer" % val)
    return ival


def str2unicode(val):
    """
    Python2 will set val to type `bytes` while Python3 will set val to unicode.
    So we need to convert bytes to unicode in Python2.
    https://stackoverflow.com/questions/22947181/dont-argparse-read-unicode-from-commandline
    """
    if six.PY2:
        return val.decode(sys.getfilesystemencoding())
    return val


def parse(cmd=None):
    """Build the taski CLI parser and parse `cmd` (or sys.argv when None).

    Each subcommand attaches its handler through the `func` (or
    `quick_func`) parser default; the caller dispatches on it.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('-c', '--config', help="config file path")
    parser.set_defaults(config=os.path.expanduser("~") + "/.taski.yaml")
    parser.add_argument('-d', '--dryrun', help="dryrun", action='store_true')
    parser.add_argument('-v', '--verbose', help="enable debugging",
                        action='store_true')
    subparsers = parser.add_subparsers(help='available commands')

    plan_parser = subparsers.add_parser('plan', help='plan tasks')
    plan_parser.add_argument('-v', '--verbose', help="enable debugging",
                             action='store_true')
    plan_parser.add_argument('-l', '--limit',
                             help='limit number of tasks to plan',
                             type=check_positive_int, default=30)
    plan_parser.add_argument('-n', '--daily-goal',
                             help='number of tasks scheduled per day',
                             type=check_positive_int, default=10)
    plan_parser.set_defaults(func=taski.plan)

    rank_parser = subparsers.add_parser('rank', help='rank tasks')
    rank_parser.add_argument('-v', '--verbose', help="enable debugging",
                             action='store_true')
    rank_parser.add_argument('-p', '--project', help='project name',
                             type=str2unicode)
    rank_parser.add_argument('-t', '--tui',
                             help='Use terminal UI for ranking',
                             default=False, action='store_true')
    rank_parser.set_defaults(func=taski.rank)

    show_parser = subparsers.add_parser('show', help='show things')
    show_parser.add_argument('show_cmd', help='show things',
                             choices=["api_token", "stats", "config",
                                      "old_tasks", "completed_tasks"])
    show_parser.add_argument(
        '--since',
        help='show completed task since this date. Format "2007-4-29T10:13"')
    show_parser.add_argument(
        '--until',
        help='show completed task until this date. Format "2007-4-29T10:13"')
    show_parser.set_defaults(since=None)
    show_parser.set_defaults(until=None)
    show_parser.set_defaults(func=taski.show)

    dump_parser = subparsers.add_parser(
        'dump', help='dump tasks to csv file: todoist.csv')
    dump_parser.add_argument('-f', '--file', help="output file name",
                             default="taski.csv")
    dump_parser.add_argument('-c', '--completed',
                             help="include completed tasks",
                             action='store_true', default=False)
    dump_parser.add_argument('-v', '--verbose', help="enable debugging",
                             action='store_true')
    dump_parser.set_defaults(func=taski.dump)

    version_parser = subparsers.add_parser(
        'version', help='print version number')
    version_parser.set_defaults(
        quick_func=lambda args: sys.stdout.write(app.VERSION + "\n"))

    # "\\_" was previously the invalid escape sequence "\_" (SyntaxWarning on
    # modern CPython); the rendered help text is unchanged.
    test_parser = subparsers.add_parser('test', help="¯\\_(ツ)_/¯")
    test_parser.set_defaults(func=taski.test)

    if cmd:
        args = parser.parse_args(cmd)
    else:
        args = parser.parse_args()
    return args
36.471154
102
0.631163
469
3,793
4.940299
0.315565
0.062149
0.110056
0.064739
0.259819
0.132931
0.132931
0.132931
0.132931
0.101856
0
0.014458
0.234115
3,793
103
103
36.825243
0.782444
0.075402
0
0.085714
0
0
0.233822
0
0
0
0
0
0
1
0.042857
false
0
0.085714
0
0.185714
0.014286
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
711ba77d02afa51aa92c2a5601172430744cd942
467
py
Python
tardis/apps/equipment/urls.py
keithschulze/mytardis
8ed3562574ce990d42bfe96133185a82c31c27d4
[ "Apache-2.0" ]
null
null
null
tardis/apps/equipment/urls.py
keithschulze/mytardis
8ed3562574ce990d42bfe96133185a82c31c27d4
[ "Apache-2.0" ]
null
null
null
tardis/apps/equipment/urls.py
keithschulze/mytardis
8ed3562574ce990d42bfe96133185a82c31c27d4
[ "Apache-2.0" ]
null
null
null
from django.conf.urls import patterns urlpatterns = patterns('', (r'^$', 'tardis.apps.equipment.views.index'), (r'^search/$', 'tardis.apps.equipment.views.search'), (r'^(?P<object_id>\d+)/$', 'tardis.apps.equipment.views.view_id'), (r'^(?P<object_key>\w+)/$', 'tardis.apps.equipment.views.view_key'), )
42.454545
76
0.441113
43
467
4.697674
0.488372
0.19802
0.376238
0.475248
0.277228
0
0
0
0
0
0
0
0.385439
467
10
77
46.7
0.703833
0
0
0
0
0
0.411135
0.38758
0
0
0
0
0
1
0
false
0
0.111111
0
0.111111
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
711ba79d48c5acb9d3accd7a478bb0384f339f42
156
py
Python
anvil/interfaces/__init__.py
AndresMWeber/Anvil
9cd202183ac998983c2bf6e55cc46bbc0ca1a78e
[ "Apache-2.0" ]
3
2019-11-22T04:38:06.000Z
2022-01-19T08:27:18.000Z
anvil/interfaces/__init__.py
AndresMWeber/Anvil
9cd202183ac998983c2bf6e55cc46bbc0ca1a78e
[ "Apache-2.0" ]
28
2018-02-01T20:39:42.000Z
2018-04-26T17:25:23.000Z
anvil/interfaces/__init__.py
AndresMWeber/Anvil
9cd202183ac998983c2bf6e55cc46bbc0ca1a78e
[ "Apache-2.0" ]
1
2018-03-11T06:47:26.000Z
2018-03-11T06:47:26.000Z
"""The set of functions that enable Anvil to work in the given DCC.""" import api_proxy import dcc_plugin __all__ = ['api_proxy', 'dcc_plugin']
22.285714
70
0.692308
24
156
4.166667
0.708333
0.16
0
0
0
0
0
0
0
0
0
0
0.211538
156
6
71
26
0.813008
0.410256
0
0
0
0
0.22093
0
0
0
0
0
0
1
0
false
0
0.5
0
0.5
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
3
711c0839688b9f5dedcef052e9032977bfdb8fbd
13,158
py
Python
nlp_uncertainty_ssl/models/emotion_classifier.py
apmoore1/nlp-uncertainty-ssl
4531ffce61557b4d4a71b97698479a30f65efaec
[ "Apache-2.0" ]
null
null
null
nlp_uncertainty_ssl/models/emotion_classifier.py
apmoore1/nlp-uncertainty-ssl
4531ffce61557b4d4a71b97698479a30f65efaec
[ "Apache-2.0" ]
null
null
null
nlp_uncertainty_ssl/models/emotion_classifier.py
apmoore1/nlp-uncertainty-ssl
4531ffce61557b4d4a71b97698479a30f65efaec
[ "Apache-2.0" ]
null
null
null
from typing import Dict, Optional, List, Any from allennlp.common.checks import check_dimensions_match, ConfigurationError from allennlp.data import Vocabulary from allennlp.modules import Seq2VecEncoder, TimeDistributed, TextFieldEmbedder, Seq2SeqEncoder from allennlp.modules import FeedForward from allennlp.modules.input_variational_dropout import InputVariationalDropout from allennlp.modules.attention import DotProductAttention from allennlp.models.model import Model from allennlp.modules.token_embedders import Embedding from allennlp.nn import InitializerApplicator, RegularizerApplicator import allennlp.nn.util as util import numpy from overrides import overrides import torch from torch.nn.modules.linear import Linear import torch.nn.functional as F from torch.nn.parameter import Parameter from nlp_uncertainty_ssl.metrics.jaccard_index import JaccardIndex @Model.register("emotion_classifier") class EmotionClassifier(Model): """ The ``emotion_classifier`` is a multi label classifier (predict 0-N labels per sample). Parameters ---------- vocab : ``Vocabulary``, required A Vocabulary, required in order to compute sizes for input/output projections. text_field_embedder : ``TextFieldEmbedder``, required Used to embed the tokens ``TextField`` we get as input to the model. encoder : ``Seq2SeqEncoder``, optional (default=None) The encoder that we will use in between embedding tokens and predicting output tags. label_namespace : ``str``, optional (default=``labels``) This is needed to compute the SpanBasedF1Measure metric. Unless you did something unusual, the default value should be what you want. feedforward : ``FeedForward``, optional, (default = None). An optional feedforward layer to apply after the encoder. label_encoding : ``str``, optional (default=``None``) Label encoding to use when calculating span f1. Valid options are "BIO", "BIOUL", "IOB1", "BMES". Required if ``calculate_span_f1`` is true. 
calculate_span_f1 : ``bool``, optional (default=``None``) Calculate span-level F1 metrics during training. If this is ``True``, then ``label_encoding`` is required. If ``None`` and label_encoding is specified, this is set to ``True``. If ``None`` and label_encoding is not specified, it defaults to ``False``. dropout: ``float``, optional (default=``None``). Use `Variational Dropout <https://arxiv.org/abs/1512.05287>`_ for sequence and normal dropout for non sequences. verbose_metrics : ``bool``, optional (default = False) If true, metrics will be returned per label class in addition to the overall statistics. initializer : ``InitializerApplicator``, optional (default=``InitializerApplicator()``) Used to initialize the model parameters. regularizer : ``RegularizerApplicator``, optional (default=``None``) If provided, will be used to calculate the regularization penalty during training. """ def __init__(self, vocab: Vocabulary, text_field_embedder: TextFieldEmbedder, label_namespace: str = "labels", encoder: Optional[Seq2VecEncoder] = None, seq_encoder: Optional[Seq2SeqEncoder] = None, feedforward: Optional[FeedForward] = None, dropout: Optional[float] = None, incl_neutral: Optional[bool] = False, initializer: InitializerApplicator = InitializerApplicator(), regularizer: Optional[RegularizerApplicator] = None) -> None: super().__init__(vocab, regularizer) self.label_namespace = label_namespace self.text_field_embedder = text_field_embedder self.num_labels = self.vocab.get_vocab_size(label_namespace) self.encoder = encoder self.seq_encoder = seq_encoder if self.seq_encoder is not None: self.attention_vector = Parameter(torch.Tensor(self.seq_encoder.get_output_dim())) self.attention_layer = DotProductAttention(normalize=True) embedding_output_dim = self.text_field_embedder.get_output_dim() if dropout is not None: self.dropout = torch.nn.Dropout(dropout) self.variational_dropout = InputVariationalDropout(dropout) else: self.dropout = None self._feedforward = feedforward 
if feedforward is not None: output_dim = feedforward.get_output_dim() elif encoder is not None: output_dim = self.encoder.get_output_dim() elif seq_encoder is not None: output_dim = self.seq_encoder.get_output_dim() else: output_dim = embedding_output_dim # Have to create a tag projection layer for each label in the # multi label classifier self._tag_projection_layers: Any = [] for k in range(self.num_labels): tag_projection_layer = Linear(output_dim, 1) self.add_module(f'tag_projection_layer_{k}', tag_projection_layer) self._tag_projection_layers.append(tag_projection_layer) self.output_activation = torch.nn.Sigmoid() self.loss_criterion = torch.nn.BCEWithLogitsLoss(reduction='mean') self.incl_neutral = incl_neutral self.metrics = {"jaccard_index": JaccardIndex(self.incl_neutral)} if encoder is not None: check_dimensions_match(embedding_output_dim, encoder.get_input_dim(), "text field embedding dim", "encoder input dim") if feedforward is not None and encoder is not None: check_dimensions_match(encoder.get_output_dim(), feedforward.get_input_dim(), "encoder output dim", "feedforward input dim") elif feedforward is not None and encoder is None: check_dimensions_match(embedding_output_dim, feedforward.get_input_dim(), "text field output dim", "feedforward input dim") if self.seq_encoder is not None: self.reset_parameters() initializer(self) def reset_parameters(self): ''' Intitalises the attnention vector ''' torch.nn.init.uniform_(self.attention_vector, -0.01, 0.01) @overrides def forward(self, # type: ignore tokens: Dict[str, torch.LongTensor], labels: torch.LongTensor = None, metadata: List[Dict[str, Any]] = None ) -> Dict[str, torch.Tensor]: # pylint: disable=arguments-differ """ Parameters ---------- tokens : ``Dict[str, torch.LongTensor]``, required The output of ``TextField.as_array()``, which should typically be passed directly to a ``TextFieldEmbedder``. This output is a dictionary mapping keys to ``TokenIndexer`` tensors. 
At its most basic, using a ``SingleIdTokenIndexer`` this is: ``{"tokens": Tensor(batch_size, num_tokens)}``. This dictionary will have the same keys as were used for the ``TokenIndexers`` when you created the ``TextField`` representing your sequence. The dictionary is designed to be passed directly to a ``TextFieldEmbedder``, which knows how to combine different word representations into a single vector per token in your input. labels : ``torch.LongTensor``, optional (default = ``None``) A torch tensor representing the multiple labels that the sample can be as a one hot vector where each True label is 1 and the rest 0. ``(batch_size, num_labels)``. metadata : ``List[Dict[str, Any]]``, optional, (default = None) metadata containg: 1. ``text`` - Original sentence 2. ``words`` - Tokenised words from the sentence 3. ``ID`` - Optionally the ID of the sample Returns ------- An output dictionary consisting of: logits : ``torch.FloatTensor`` The logits that are the output of the ``N`` tag projection layers where each projection layer represents a different tag. probs: ``torch.FloatTensor`` A tensor of shape ``(batch_size, num_labels)`` The probability that the sample is one of those labels. > 0.5 suggests that a label is associated to that sample. labels : ``List[List[int]]`` The predicted labels where the inner list represents the multi label classification. loss : ``torch.FloatTensor``, optional A scalar loss to be optimised. Only computed if gold label ``labels`` are provided. words : ``List[List[str]]`` The tokens that were given as input text: ``List[str]`` The text that was given to the tokeniser. ID: ``List[str]`` The ID that is associated to the training example. Only returned if the ``ID`` are provided. 
""" embedded_text_input = self.text_field_embedder(tokens) mask = util.get_text_field_mask(tokens) encoded_text = embedded_text_input batch_size = embedded_text_input.shape[0] if self.dropout is not None: encoded_text = self.variational_dropout(encoded_text) if self.seq_encoder is not None: encoded_text = self.seq_encoder(encoded_text, mask) encoded_text = self.variational_dropout(encoded_text) attention_vector = self.attention_vector.unsqueeze(0).expand(batch_size, -1) attention_weights = self.attention_layer(attention_vector, encoded_text, mask) attention_weights = attention_weights.unsqueeze(-1) weighted_encoded_text_seq = encoded_text * attention_weights weighted_encoded_text_vec = weighted_encoded_text_seq.sum(1) encoded_text = self.dropout(weighted_encoded_text_vec) if self.encoder is not None: encoded_text = self.encoder(encoded_text, mask) if self.dropout is not None: encoded_text = self.dropout(encoded_text) # Dropout is applied after each layer for feed forward if specified # in the config. 
if self._feedforward is not None: encoded_text = self._feedforward(encoded_text) all_label_logits = torch.empty(batch_size, self.num_labels) for i in range(len(self._tag_projection_layers)): tag_projection = getattr(self, f'tag_projection_layer_{i}') i_tag_predictions = tag_projection(encoded_text).reshape(-1) all_label_logits[:, i] = i_tag_predictions probs = self.output_activation(all_label_logits) predicted_labels = probs > 0.5 output = {'probs': probs, 'logits': all_label_logits, 'labels': predicted_labels} if labels is not None: labels = labels.type(torch.FloatTensor) loss = self.loss_criterion(all_label_logits, labels) output["loss"] = loss for metric in self.metrics.values(): metric(predicted_labels, labels) if metadata is not None: words, texts, ids = [], [], [] for sample in metadata: words.append(sample['words']) texts.append(sample['text']) if 'ID' in sample: ids.append(sample['ID']) output["words"] = words output["text"] = texts if ids: output['ID'] = ids return output @overrides def decode(self, output_dict: Dict[str, torch.Tensor]) -> Dict[str, torch.Tensor]: """ Converts the labels to the actual labels. ``output_dict["readable_labels"]`` is a list of lists which will contain zero or more readable labels. The type associated to the value of ``output_dict["readable_labels"]`` is List[List[str]]. """ readable_labels: List[List[str]] = [] for sample in output_dict['labels']: sample_labels: List[str] = [] sample: List[int] # This should be a list of 0's and 1's for index, multi_label in enumerate(sample): if multi_label: word_label = self.vocab.get_token_from_index(index, namespace=self.label_namespace) sample_labels.append(word_label) readable_labels.append(sample_labels) output_dict['readable_labels'] = readable_labels return output_dict @overrides def get_metrics(self, reset: bool = False) -> Dict[str, float]: metrics_to_return = {metric_name: metric.get_metric(reset) for metric_name, metric in self.metrics.items()} return metrics_to_return
48.197802
104
0.636039
1,521
13,158
5.340565
0.21236
0.028438
0.018835
0.015758
0.13234
0.0943
0.064139
0.01625
0.00911
0
0
0.004749
0.27983
13,158
273
105
48.197802
0.852469
0.351649
0
0.07947
0
0
0.034865
0.006042
0
0
0
0
0
1
0.033113
false
0
0.119205
0
0.178808
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
711d1b4f75a4256d5a0cf38d84457010bf2940ef
3,225
py
Python
medcople.py
tks1998/statistical-function-and-algorithm-ML-
2b287524690e05087da400d879c2f901e148a5e3
[ "MIT" ]
null
null
null
medcople.py
tks1998/statistical-function-and-algorithm-ML-
2b287524690e05087da400d879c2f901e148a5e3
[ "MIT" ]
1
2020-12-07T19:29:21.000Z
2020-12-28T02:29:19.000Z
medcople.py
tks1998/statistical-function-and-algorithm-ML-
2b287524690e05087da400d879c2f901e148a5e3
[ "MIT" ]
null
null
null
import numpy as np import math from statistics import median from scipy.stats import skew import weightedstats as ws from statsmodels.stats.stattools import medcouple class Med_couple: def __init__(self,data): self.data = np.sort(data,axis = None)[::-1] # sorted decreasing self.med = np.median(self.data) self.scale = 2*np.amax(np.absolute(self.data)) self.Zplus = [(x-self.med)/self.scale for x in self.data if x>=self.med] self.Zminus = [(x-self.med)/self.scale for x in self.data if x<=self.med] self.p = len(self.Zplus) self.q = len(self.Zminus) def H(self,i,j): a = self.Zplus[i] b = self.Zminus[j] if a==b: return np.sign(self.p - 1 - i - j) else: return (a+b)/(a-b) def greater_h(self,u): P = [0]*self.p j = 0 for i in range(self.p-1,-1,-1): while j < self.q and self.H(i,j)>u: j+=1 P[i]=j-1 return P def less_h(self,u): Q = [0]*self.p j = self.q - 1 for i in range(self.p): while j>=0 and self.H(i,j) < u: j=j-1 Q[i]=j+1 return Q #Kth pair algorithm (Johnson & Mizoguchi) def kth_pair_algorithm(self): L = [0]*self.p R = [self.q-1]*self.p Ltotal = 0 Rtotal = self.p*self.q medcouple_index = math.floor(Rtotal / 2) while Rtotal - Ltotal > self.p: middle_idx = [i for i in range(self.p) if L[i]<=R[i]] row_medians = [self.H(i,math.floor((L[i]+R[i])/2)) for i in middle_idx] weight = [R[i]-L[i] + 1 for i in middle_idx] WM = ws.weighted_median(row_medians,weights = weight) P = self.greater_h(WM) Q = self.less_h(WM) Ptotal = np.sum(P)+len(P) Qtotal = np.sum(Q) if medcouple_index <= Ptotal-1: R = P.copy() Rtotal = Ptotal else: if medcouple_index > Qtotal - 1: L = Q.copy() Ltotal = Qtotal else: return WM remaining = np.array([]) for i in range(self.p): for j in range(L[i],R[i]+1): remaining = np.append(remaining,self.H(i,j)) find_index = medcouple_index-Ltotal k_minimum_element = remaining[np.argpartition(remaining,find_index)] # print(find_index,'tim trong mang ',sorted(remaining)) return k_minimum_element[find_index] def naive_algorithm_testing(self): result = [self.H(i,j) for i in 
range(self.p) for j in range(self.q)] return np.median(result) if __name__ == '__main__': sum=0 for i in range(1000): data = np.random.randint(low = 0, high = 200000, size = 1000) A = Med_couple(data) sum+=abs(medcouple(data)-A.kth_pair_algorithm()) # print(skew(data)) # print("kth",A.kth_pair_algorithm()) # print("naive my code",A.naive_algorithm_testing()) # print("naive",medcouple(data)) print(sum)
27.801724
83
0.52124
471
3,225
3.471338
0.227176
0.039755
0.029358
0.040367
0.185321
0.13211
0.102752
0.088073
0.088073
0.088073
0
0.019066
0.349457
3,225
116
84
27.801724
0.760248
0.077209
0
0.063291
0
0
0.002695
0
0
0
0
0
0
1
0.075949
false
0
0.075949
0
0.253165
0.012658
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
711dc29809114966ffd1a3a12882c04b3dd3d23a
49
py
Python
feedback/views/__init__.py
darkismus/kompassi
35dea2c7af2857a69cae5c5982b48f01ba56da1f
[ "CC-BY-3.0" ]
13
2015-11-29T12:19:12.000Z
2021-02-21T15:42:11.000Z
feedback/views/__init__.py
darkismus/kompassi
35dea2c7af2857a69cae5c5982b48f01ba56da1f
[ "CC-BY-3.0" ]
23
2015-04-29T19:43:34.000Z
2021-02-10T05:50:17.000Z
feedback/views/__init__.py
darkismus/kompassi
35dea2c7af2857a69cae5c5982b48f01ba56da1f
[ "CC-BY-3.0" ]
11
2015-09-20T18:59:00.000Z
2020-02-07T08:47:34.000Z
from .feedback_view import feedback_view # noqa
24.5
48
0.816327
7
49
5.428571
0.714286
0.631579
0
0
0
0
0
0
0
0
0
0
0.142857
49
1
49
49
0.904762
0.081633
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
6
711f7d7edce90878e1c7f4456d59b5282f3d8837
3,644
py
Python
shitty_tools/evil.py
njatkinson/shitty_tools
78c56eba331728d610d12c17fa5b34120fe31f03
[ "WTFPL" ]
null
null
null
shitty_tools/evil.py
njatkinson/shitty_tools
78c56eba331728d610d12c17fa5b34120fe31f03
[ "WTFPL" ]
null
null
null
shitty_tools/evil.py
njatkinson/shitty_tools
78c56eba331728d610d12c17fa5b34120fe31f03
[ "WTFPL" ]
null
null
null
from sqlalchemy.orm import relationship from sqlalchemy import and_ def create_attribute_associator(entity_id_col, eav_cls, eav_entity_id_col, eav_attr_col, eav_value_col): ''' Returns a class method that allows one to associate attributes in an Entity-Attribute-Value table with a sqlalchemy class and then access those attributes as properties of the entity class. Example usage: >>> from sqlalchemy import Column, ForeignKey, Index, Integer, String >>> from sqlalchemy.orm import relationship >>> from sqlalchemy.ext.declarative import declarative_base >>> Base = declarative_base() >>> metadata = Base.metadata >>> >>> class Eav(Base): ... __tablename__ = 'eav' ... __table_args__ = ( ... Index('e_a_uq', 'entity_id', 'attribute', unique=True), ... ) ... id = Column(Integer, primary_key=True) ... entity_id = Column(ForeignKey('entity.id', ondelete='CASCADE', onupdate='CASCADE'), nullable=False) ... attribute = Column(String(255), nullable=False) ... value = Column(String(255)) ... >>> >>> class Entity(Base): ... __tablename__ = 'entity' ... id = Column(Integer, primary_key=True) ... name = Column(String(255), nullable=False) ... _add_attribute = create_attribute_associator(id, Eav, Eav.entity_id, Eav.attribute, Eav.value) ... 
>>> Entity._add_attribute('foo') >>> Entity._add_attribute('bar') >>> >>> dir(Entity) ['__class__', '__delattr__', '__dict__', '__doc__', '__format__', '__getattribute__', '__hash__', '__init__', '__mapper__', '__module__', '__new__', '__reduce__', '__reduce_ex__', '__repr__', '__setattr__', '__sizeof__', '__str__', '__subclasshook__', '__table__', '__tablename__', '__weakref__', '_add_attribute', '_bar_get', '_bar_obj', '_bar_set', '_decl_class_registry', '_foo_get', '_foo_obj', '_foo_set', '_sa_class_manager', 'bar', 'foo', 'id', 'metadata', 'name'] :param entity_id_col: The id column of your entity :param eav_cls: The sqlalchemy class of the entity attribute value (EAV) table :param eav_entity_id_col: The foreign key column from the EAV table to the entity table :param eav_attr_col: The EAV table column that stores the attribute name :param eav_value_col: The EAV table column that stores the attribute value :return: class method to with signature like add_attribute(cls, attr_name, lazy='joined') ''' attr_col_name = eav_attr_col.key value_col_name = eav_value_col.key @classmethod def add_attribute(cls, attr_name, lazy='joined'): obj_name = '_%s_obj' % attr_name getter_name = '_%s_get' % attr_name setter_name = '_%s_set' % attr_name rel = relationship(eav_cls, primaryjoin=and_(entity_id_col == eav_entity_id_col, eav_attr_col == attr_name), uselist=False, lazy=lazy) def getter(self): obj = getattr(self, obj_name) return getattr(obj, value_col_name) def setter(self, value): obj = getattr(self, obj_name) if obj is None: obj = eav_cls(**{attr_col_name: attr_name, value_col_name: value}) setattr(self, obj_name, obj) else: setattr(obj, value_col_name, value) prop = property(getter, setter) setattr(cls, obj_name, rel) setattr(cls, getter_name, getter) setattr(cls, setter_name, setter) setattr(cls, attr_name, prop) return add_attribute
46.126582
114
0.63831
440
3,644
4.809091
0.265909
0.041588
0.031191
0.026465
0.213611
0.167297
0.139887
0.039698
0.039698
0
0
0.003257
0.241767
3,644
79
115
46.126582
0.762577
0.585071
0
0.066667
0
0
0.020194
0
0
0
0
0
0
1
0.133333
false
0
0.066667
0
0.266667
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
712033ec7a6e7fd4c8901d3c8d26af890c676809
2,168
py
Python
backend/models/roboschool_fc.py
AroMorin/DNNOP
271e65811fe7cadcffc8155049e256fa78c0c5c6
[ "MIT" ]
6
2020-01-14T00:01:34.000Z
2021-12-28T14:31:05.000Z
backend/models/roboschool_fc.py
AroMorin/DNNOP
271e65811fe7cadcffc8155049e256fa78c0c5c6
[ "MIT" ]
null
null
null
backend/models/roboschool_fc.py
AroMorin/DNNOP
271e65811fe7cadcffc8155049e256fa78c0c5c6
[ "MIT" ]
1
2020-09-06T10:44:29.000Z
2020-09-06T10:44:29.000Z
"""A script that defines a simple FC model for function solving""" import torch.nn as nn import numpy as np import torch class Net(nn.Module): def __init__(self, model_params): super(Net, self).__init__() model_params = self.ingest_params_lvl1(model_params) ins = model_params['in features'] outs = model_params['number of outputs'] self.out_size = outs self.fc1 = nn.Linear(ins, 512) self.fc2 = nn.Linear(512, 16) self.fc3 = nn.Linear(64, 32) self.fc4 = nn.Linear(16, outs) self.drop = nn.Dropout(0.1) self.act = nn.ReLU() #self.act = nn.Tanh() self.reps = 20 self.rep = 0 self.step = 0 self.val = torch.zeros(outs).half().cuda() def ingest_params_lvl1(self, model_params): assert type(model_params) is dict default_params = { "in features": 128, "number of outputs": 18 } default_params.update(model_params) # Update with user selections return default_params def generate_noise(self, x): n = torch.empty_like(x) n.normal_(mean=0., std=0.3) return n.cuda() # Called with either one element to determine next action, or a batch # during optimization. Returns tensor([[left0exp,right0exp]...]). def forward(self, x): x = self.fc1(x) x = self.act(x) #x = self.drop(x) x = self.fc2(x) x = self.act(x) #x = self.drop(x) #x = self.fc3(x) #x = self.act(x) #x = self.drop(x) x = self.fc4(x).squeeze().clamp_(-1., 1.) #self.repeat(x) return x.cpu().detach().numpy() def repeat(self, x): if self.rep > self.reps: self.reset(x) self.rep=0 else: self.rep +=1 print(self.val, self.rep) def reset(self, x): default = torch.zeros(self.out_size).cuda() choice = np.random.choice([0, 1], p=[0.5, 0.5]) if choice == 0: self.val = default else: self.val = x.clone()
30.971429
74
0.535055
297
2,168
3.808081
0.377104
0.04863
0.05305
0.023873
0.066313
0.066313
0.066313
0.066313
0.066313
0.066313
0
0.035392
0.335332
2,168
69
75
31.42029
0.74948
0.153598
0
0.113208
0
0
0.030769
0
0
0
0
0
0.018868
1
0.113208
false
0
0.056604
0
0.245283
0.018868
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
71203d69e972ebd9a73901db8b42d1c645ea09f0
1,346
py
Python
src/face_check/social/backends/goodgame.py
tarvitz/face-check
e468025c7a5615f4ec8ee24c06252ceeb8a2e129
[ "BSD-4-Clause" ]
null
null
null
src/face_check/social/backends/goodgame.py
tarvitz/face-check
e468025c7a5615f4ec8ee24c06252ceeb8a2e129
[ "BSD-4-Clause" ]
7
2019-01-01T17:13:06.000Z
2021-06-10T17:42:54.000Z
src/face_check/social/backends/goodgame.py
tarvitz/face-check
e468025c7a5615f4ec8ee24c06252ceeb8a2e129
[ "BSD-4-Clause" ]
null
null
null
""" Twitch OAuth2 backend, docs at: https://python-social-auth.readthedocs.io/en/latest/backends/goodgame.html """ from social_core.backends import oauth class GoodGameOAuth2(oauth.BaseOAuth2): """GoodGame OAuth authentication backend""" name = 'goodgame' ID_KEY = 'user_id' AUTHORIZATION_URL = 'https://api2.goodgame.ru/oauth/authorize' ACCESS_TOKEN_URL = 'https://api2.goodgame.ru/oauth' ACCESS_TOKEN_METHOD = 'POST' #: TODO await when GG provide email user retrieve through scope #: https://goodgame.ru/topic/67865#comment427 DEFAULT_SCOPE = ['channel.subscribers'] REDIRECT_STATE = False def get_user_id(self, details, response): return response["user"].get(self.ID_KEY) def get_user_details(self, response): return { 'username': response['user'].get('username'), #: currently there's no email 'email': response['user'].get('email'), 'first_name': '', 'last_name': '' } def user_data(self, access_token, *args, **kwargs): #: treat this as hacky as far as simple info does not return #: email address, but we can retrieve it from another endpoint return self.get_json( 'https://api2.goodgame.ru/info', params={'access_token': access_token} )
34.512821
78
0.642645
163
1,346
5.171779
0.546012
0.065243
0.060498
0.067616
0.064057
0.064057
0
0
0
0
0
0.013553
0.232541
1,346
38
79
35.421053
0.802517
0.298663
0
0
0
0
0.222462
0
0
0
0
0.026316
0
1
0.130435
false
0
0.043478
0.130435
0.652174
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
1
0
0
0
0
0
0
1
1
0
0
3
7120c63dc1de2d2819806215bfba1cf552bbc4da
666
py
Python
recipes/Python/576838_Recursivemethod/recipe-576838.py
tdiprima/code
61a74f5f93da087d27c70b2efe779ac6bd2a3b4f
[ "MIT" ]
2,023
2017-07-29T09:34:46.000Z
2022-03-24T08:00:45.000Z
recipes/Python/576838_Recursivemethod/recipe-576838.py
unhacker/code
73b09edc1b9850c557a79296655f140ce5e853db
[ "MIT" ]
32
2017-09-02T17:20:08.000Z
2022-02-11T17:49:37.000Z
recipes/Python/576838_Recursivemethod/recipe-576838.py
unhacker/code
73b09edc1b9850c557a79296655f140ce5e853db
[ "MIT" ]
780
2017-07-28T19:23:28.000Z
2022-03-25T20:39:41.000Z
def recursive(func): func.func_globals[func.__name__] = func return func class Test: def method(self, x = False): if x: print(x) else: self.method("I'm method") @staticmethod def smethod(x = False): if x: print(x) else: method("I'm static method") @staticmethod @recursive def rmethod(x = False): if x: print(x) else: rmethod("I'm recursive method") test = Test() test.method() # I'm method test.rmethod() # I'm recursive method test.smethod() # raises NameError: global name 'method' is not defined
20.8125
70
0.540541
82
666
4.329268
0.329268
0.028169
0.067606
0.076056
0.31831
0.31831
0.160563
0
0
0
0
0
0.34985
666
31
71
21.483871
0.819861
0.127628
0
0.423077
0
0
0.081456
0
0
0
0
0
0
1
0.153846
false
0
0
0
0.230769
0.115385
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
7122a8e74eb4212d1e6aaea222109cbc1f0c061a
18,717
py
Python
tests/src/python/test_qgssearchwidgetwrapper.py
dyna-mis/Hilabeling
cb7d5d4be29624a20c8a367162dbc6fd779b2b52
[ "MIT" ]
null
null
null
tests/src/python/test_qgssearchwidgetwrapper.py
dyna-mis/Hilabeling
cb7d5d4be29624a20c8a367162dbc6fd779b2b52
[ "MIT" ]
null
null
null
tests/src/python/test_qgssearchwidgetwrapper.py
dyna-mis/Hilabeling
cb7d5d4be29624a20c8a367162dbc6fd779b2b52
[ "MIT" ]
1
2021-12-25T08:40:30.000Z
2021-12-25T08:40:30.000Z
# -*- coding: utf-8 -*- """QGIS Unit tests for QgsSearchWidgetWrapper. .. note:: This program is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 2 of the License, or (at your option) any later version. """ __author__ = 'Nyall Dawson' __date__ = '2016-05' __copyright__ = 'Copyright 2016, The QGIS Project' # This will get replaced with a git SHA1 when you do a git archive __revision__ = '176c06ceefb5f555205e72b20c962740cc0ec183' import qgis # NOQA from qgis.gui import (QgsSearchWidgetWrapper, QgsDefaultSearchWidgetWrapper, QgsValueMapSearchWidgetWrapper, QgsValueRelationSearchWidgetWrapper, QgsCheckboxSearchWidgetWrapper, QgsDateTimeSearchWidgetWrapper) from qgis.core import (QgsVectorLayer, QgsFeature, QgsProject, ) from qgis.PyQt.QtCore import QDateTime, QDate, QTime from qgis.PyQt.QtWidgets import QWidget from qgis.testing import start_app, unittest start_app() class PyQgsSearchWidgetWrapper(unittest.TestCase): def testFlagToString(self): # test converting QgsSearchWidgetWrapper.FilterFlag to string tests = [QgsSearchWidgetWrapper.EqualTo, QgsSearchWidgetWrapper.NotEqualTo, QgsSearchWidgetWrapper.GreaterThan, QgsSearchWidgetWrapper.LessThan, QgsSearchWidgetWrapper.GreaterThanOrEqualTo, QgsSearchWidgetWrapper.LessThanOrEqualTo, QgsSearchWidgetWrapper.Between, QgsSearchWidgetWrapper.CaseInsensitive, QgsSearchWidgetWrapper.Contains, QgsSearchWidgetWrapper.DoesNotContain, QgsSearchWidgetWrapper.IsNull, QgsSearchWidgetWrapper.IsNotNull, QgsSearchWidgetWrapper.IsNotBetween ] for t in tests: self.assertTrue(len(QgsSearchWidgetWrapper.toString(t)) > 0) def testExclusiveFlags(self): # test flag exclusive/non exclusive exclusive = QgsSearchWidgetWrapper.exclusiveFilterFlags() non_exclusive = QgsSearchWidgetWrapper.nonExclusiveFilterFlags() for e in exclusive: self.assertFalse(e in non_exclusive) class PyQgsDefaultSearchWidgetWrapper(unittest.TestCase): def 
testCreateExpression(self): """ Test creating an expression using the widget""" layer = QgsVectorLayer("Point?field=fldtxt:string&field=fldint:integer&field=flddate:datetime", "test", "memory") parent = QWidget() w = QgsDefaultSearchWidgetWrapper(layer, 0) w.initWidget(parent) line_edit = w.lineEdit() line_edit.setText('test') case_sensitive = w.caseSensitiveCheckBox() case_sensitive.setChecked(False) self.assertEqual(w.createExpression(QgsSearchWidgetWrapper.IsNull), '"fldtxt" IS NULL') self.assertEqual(w.createExpression(QgsSearchWidgetWrapper.IsNotNull), '"fldtxt" IS NOT NULL') self.assertEqual(w.createExpression(QgsSearchWidgetWrapper.EqualTo), 'lower("fldtxt")=lower(\'test\')') self.assertEqual(w.createExpression(QgsSearchWidgetWrapper.NotEqualTo), 'lower("fldtxt")<>lower(\'test\')') case_sensitive.setChecked(True) self.assertEqual(w.createExpression(QgsSearchWidgetWrapper.EqualTo), '"fldtxt"=\'test\'') self.assertEqual(w.createExpression(QgsSearchWidgetWrapper.NotEqualTo), '"fldtxt"<>\'test\'') case_sensitive.setChecked(False) self.assertEqual(w.createExpression(QgsSearchWidgetWrapper.Contains), '"fldtxt" ILIKE \'%test%\'') self.assertEqual(w.createExpression(QgsSearchWidgetWrapper.DoesNotContain), 'NOT ("fldtxt" ILIKE \'%test%\')') self.assertEqual(w.createExpression(QgsSearchWidgetWrapper.StartsWith), '"fldtxt" ILIKE \'test%\'') self.assertEqual(w.createExpression(QgsSearchWidgetWrapper.EndsWith), '"fldtxt" ILIKE \'%test\'') case_sensitive.setChecked(True) self.assertEqual(w.createExpression(QgsSearchWidgetWrapper.Contains), '"fldtxt" LIKE \'%test%\'') self.assertEqual(w.createExpression(QgsSearchWidgetWrapper.DoesNotContain), 'NOT ("fldtxt" LIKE \'%test%\')') self.assertEqual(w.createExpression(QgsSearchWidgetWrapper.StartsWith), '"fldtxt" LIKE \'test%\'') self.assertEqual(w.createExpression(QgsSearchWidgetWrapper.EndsWith), '"fldtxt" LIKE \'%test\'') case_sensitive.setChecked(False) # numeric field parent = QWidget() w = 
QgsDefaultSearchWidgetWrapper(layer, 1) w.initWidget(parent) # may need updating if widget layout changes: line_edit = w.lineEdit() line_edit.setText('5.5') self.assertEqual(w.createExpression(QgsSearchWidgetWrapper.EqualTo), '"fldint"=5.5') self.assertEqual(w.createExpression(QgsSearchWidgetWrapper.NotEqualTo), '"fldint"<>5.5') self.assertEqual(w.createExpression(QgsSearchWidgetWrapper.GreaterThan), '"fldint">5.5') self.assertEqual(w.createExpression(QgsSearchWidgetWrapper.LessThan), '"fldint"<5.5') self.assertEqual(w.createExpression(QgsSearchWidgetWrapper.GreaterThanOrEqualTo), '"fldint">=5.5') self.assertEqual(w.createExpression(QgsSearchWidgetWrapper.LessThanOrEqualTo), '"fldint"<=5.5') # date/time/datetime parent = QWidget() w = QgsDefaultSearchWidgetWrapper(layer, 2) w.initWidget(parent) # may need updating if widget layout changes: line_edit = w.lineEdit() line_edit.setText('2015-06-03') self.assertEqual(w.createExpression(QgsSearchWidgetWrapper.EqualTo), '"flddate"=\'2015-06-03\'') self.assertEqual(w.createExpression(QgsSearchWidgetWrapper.NotEqualTo), '"flddate"<>\'2015-06-03\'') self.assertEqual(w.createExpression(QgsSearchWidgetWrapper.GreaterThan), '"flddate">\'2015-06-03\'') self.assertEqual(w.createExpression(QgsSearchWidgetWrapper.LessThan), '"flddate"<\'2015-06-03\'') self.assertEqual(w.createExpression(QgsSearchWidgetWrapper.GreaterThanOrEqualTo), '"flddate">=\'2015-06-03\'') self.assertEqual(w.createExpression(QgsSearchWidgetWrapper.LessThanOrEqualTo), '"flddate"<=\'2015-06-03\'') class PyQgsValueMapSearchWidgetWrapper(unittest.TestCase): def testCreateExpression(self): """ Test creating an expression using the widget""" layer = QgsVectorLayer("Point?field=fldtxt:string&field=fldint:integer", "test", "memory") w = QgsValueMapSearchWidgetWrapper(layer, 0) config = {"map": [{"val1": 1}, {"val2": 200}]} w.setConfig(config) c = w.widget() # first, set it to the "select value" item c.setCurrentIndex(0) 
self.assertEqual(w.createExpression(QgsSearchWidgetWrapper.IsNull), '"fldtxt" IS NULL') self.assertEqual(w.createExpression(QgsSearchWidgetWrapper.IsNotNull), '"fldtxt" IS NOT NULL') self.assertEqual(w.createExpression(QgsSearchWidgetWrapper.EqualTo), '') self.assertEqual(w.createExpression(QgsSearchWidgetWrapper.NotEqualTo), '') c.setCurrentIndex(1) self.assertEqual(w.createExpression(QgsSearchWidgetWrapper.IsNull), '"fldtxt" IS NULL') self.assertEqual(w.createExpression(QgsSearchWidgetWrapper.IsNotNull), '"fldtxt" IS NOT NULL') self.assertEqual(w.createExpression(QgsSearchWidgetWrapper.EqualTo), '"fldtxt"=\'1\'') self.assertEqual(w.createExpression(QgsSearchWidgetWrapper.NotEqualTo), '"fldtxt"<>\'1\'') c.setCurrentIndex(2) self.assertEqual(w.createExpression(QgsSearchWidgetWrapper.IsNull), '"fldtxt" IS NULL') self.assertEqual(w.createExpression(QgsSearchWidgetWrapper.IsNotNull), '"fldtxt" IS NOT NULL') self.assertEqual(w.createExpression(QgsSearchWidgetWrapper.EqualTo), '"fldtxt"=\'200\'') self.assertEqual(w.createExpression(QgsSearchWidgetWrapper.NotEqualTo), '"fldtxt"<>\'200\'') # try with numeric field w = QgsValueMapSearchWidgetWrapper(layer, 1) w.setConfig(config) c = w.widget() c.setCurrentIndex(1) self.assertEqual(w.createExpression(QgsSearchWidgetWrapper.IsNull), '"fldint" IS NULL') self.assertEqual(w.createExpression(QgsSearchWidgetWrapper.IsNotNull), '"fldint" IS NOT NULL') self.assertEqual(w.createExpression(QgsSearchWidgetWrapper.EqualTo), '"fldint"=1') self.assertEqual(w.createExpression(QgsSearchWidgetWrapper.NotEqualTo), '"fldint"<>1') class PyQgsValueRelationSearchWidgetWrapper(unittest.TestCase): def testCreateExpression(self): """ Test creating an expression using the widget""" layer = QgsVectorLayer("Point?field=fldtxt:string&field=fldint:integer", "test", "memory") # setup value relation parent_layer = QgsVectorLayer("Point?field=stringkey:string&field=intkey:integer&field=display:string", "parent", "memory") f1 = 
QgsFeature(parent_layer.fields(), 1) f1.setAttributes(['a', 1, 'value a']) f2 = QgsFeature(parent_layer.fields(), 2) f2.setAttributes(['b', 2, 'value b']) f3 = QgsFeature(parent_layer.fields(), 3) f3.setAttributes(['c', 3, 'value c']) parent_layer.dataProvider().addFeatures([f1, f2, f3]) QgsProject.instance().addMapLayers([layer, parent_layer]) config = {"Layer": parent_layer.id(), "Key": 'stringkey', "Value": 'display'} w = QgsValueRelationSearchWidgetWrapper(layer, 0) w.setConfig(config) c = w.widget() # first, set it to the "select value" item c.setCurrentIndex(0) self.assertEqual(w.createExpression(QgsSearchWidgetWrapper.IsNull), '"fldtxt" IS NULL') self.assertEqual(w.createExpression(QgsSearchWidgetWrapper.IsNotNull), '"fldtxt" IS NOT NULL') self.assertEqual(w.createExpression(QgsSearchWidgetWrapper.EqualTo), '') self.assertEqual(w.createExpression(QgsSearchWidgetWrapper.NotEqualTo), '') c.setCurrentIndex(1) self.assertEqual(w.createExpression(QgsSearchWidgetWrapper.IsNull), '"fldtxt" IS NULL') self.assertEqual(w.createExpression(QgsSearchWidgetWrapper.IsNotNull), '"fldtxt" IS NOT NULL') self.assertEqual(w.createExpression(QgsSearchWidgetWrapper.EqualTo), '"fldtxt"=\'a\'') self.assertEqual(w.createExpression(QgsSearchWidgetWrapper.NotEqualTo), '"fldtxt"<>\'a\'') c.setCurrentIndex(2) self.assertEqual(w.createExpression(QgsSearchWidgetWrapper.IsNull), '"fldtxt" IS NULL') self.assertEqual(w.createExpression(QgsSearchWidgetWrapper.IsNotNull), '"fldtxt" IS NOT NULL') self.assertEqual(w.createExpression(QgsSearchWidgetWrapper.EqualTo), '"fldtxt"=\'b\'') self.assertEqual(w.createExpression(QgsSearchWidgetWrapper.NotEqualTo), '"fldtxt"<>\'b\'') # try with numeric field w = QgsValueRelationSearchWidgetWrapper(layer, 1) config['Key'] = 'intkey' w.setConfig(config) c = w.widget() c.setCurrentIndex(c.findText('value c')) self.assertEqual(c.currentIndex(), 3) self.assertEqual(w.createExpression(QgsSearchWidgetWrapper.IsNull), '"fldint" IS NULL') 
self.assertEqual(w.createExpression(QgsSearchWidgetWrapper.IsNotNull), '"fldint" IS NOT NULL') self.assertEqual(w.createExpression(QgsSearchWidgetWrapper.EqualTo), '"fldint"=3') self.assertEqual(w.createExpression(QgsSearchWidgetWrapper.NotEqualTo), '"fldint"<>3') # try with allow null set w = QgsValueRelationSearchWidgetWrapper(layer, 1) config['AllowNull'] = True w.setConfig(config) c = w.widget() c.setCurrentIndex(c.findText('value c')) self.assertEqual(w.createExpression(QgsSearchWidgetWrapper.IsNull), '"fldint" IS NULL') self.assertEqual(w.createExpression(QgsSearchWidgetWrapper.IsNotNull), '"fldint" IS NOT NULL') self.assertEqual(w.createExpression(QgsSearchWidgetWrapper.EqualTo), '"fldint"=3') self.assertEqual(w.createExpression(QgsSearchWidgetWrapper.NotEqualTo), '"fldint"<>3') # try with line edit w = QgsValueRelationSearchWidgetWrapper(layer, 1) config['UseCompleter'] = True w.setConfig(config) l = w.widget() l.setText('value b') self.assertEqual(w.createExpression(QgsSearchWidgetWrapper.IsNull), '"fldint" IS NULL') self.assertEqual(w.createExpression(QgsSearchWidgetWrapper.IsNotNull), '"fldint" IS NOT NULL') self.assertEqual(w.createExpression(QgsSearchWidgetWrapper.EqualTo), '"fldint"=2') self.assertEqual(w.createExpression(QgsSearchWidgetWrapper.NotEqualTo), '"fldint"<>2') class PyQgsCheckboxSearchWidgetWrapper(unittest.TestCase): def testCreateExpression(self): """ Test creating an expression using the widget""" layer = QgsVectorLayer("Point?field=fldtxt:string&field=fldint:integer", "test", "memory") w = QgsCheckboxSearchWidgetWrapper(layer, 0) config = {"CheckedState": 5, "UncheckedState": 9} w.setConfig(config) c = w.widget() # first check with string field type c.setChecked(True) self.assertEqual(w.createExpression(QgsSearchWidgetWrapper.IsNull), '"fldtxt" IS NULL') self.assertEqual(w.createExpression(QgsSearchWidgetWrapper.IsNotNull), '"fldtxt" IS NOT NULL') self.assertEqual(w.createExpression(QgsSearchWidgetWrapper.EqualTo), '"fldtxt"=\'5\'') 
c.setChecked(False) self.assertEqual(w.createExpression(QgsSearchWidgetWrapper.IsNull), '"fldtxt" IS NULL') self.assertEqual(w.createExpression(QgsSearchWidgetWrapper.IsNotNull), '"fldtxt" IS NOT NULL') self.assertEqual(w.createExpression(QgsSearchWidgetWrapper.EqualTo), '"fldtxt"=\'9\'') # try with numeric field w = QgsCheckboxSearchWidgetWrapper(layer, 1) w.setConfig(config) c = w.widget() c.setChecked(True) self.assertEqual(w.createExpression(QgsSearchWidgetWrapper.IsNull), '"fldint" IS NULL') self.assertEqual(w.createExpression(QgsSearchWidgetWrapper.IsNotNull), '"fldint" IS NOT NULL') self.assertEqual(w.createExpression(QgsSearchWidgetWrapper.EqualTo), '"fldint"=5') c.setChecked(False) self.assertEqual(w.createExpression(QgsSearchWidgetWrapper.IsNull), '"fldint" IS NULL') self.assertEqual(w.createExpression(QgsSearchWidgetWrapper.IsNotNull), '"fldint" IS NOT NULL') self.assertEqual(w.createExpression(QgsSearchWidgetWrapper.EqualTo), '"fldint"=9') class PyQgsDateTimeSearchWidgetWrapper(unittest.TestCase): def testCreateExpression(self): """ Test creating an expression using the widget""" layer = QgsVectorLayer("Point?field=date:date&field=time:time&field=datetime:datetime", "test", "memory") w = QgsDateTimeSearchWidgetWrapper(layer, 0) config = {"field_format": 'yyyy-MM-dd', "display_format": 'yyyy-MM-dd'} w.setConfig(config) c = w.widget() # first check with date field type c.setDateTime(QDateTime(QDate(2013, 4, 5), QTime())) self.assertEqual(w.createExpression(QgsSearchWidgetWrapper.IsNull), '"date" IS NULL') self.assertEqual(w.createExpression(QgsSearchWidgetWrapper.IsNotNull), '"date" IS NOT NULL') self.assertEqual(w.createExpression(QgsSearchWidgetWrapper.EqualTo), '"date"=\'2013-04-05\'') self.assertEqual(w.createExpression(QgsSearchWidgetWrapper.NotEqualTo), '"date"<>\'2013-04-05\'') self.assertEqual(w.createExpression(QgsSearchWidgetWrapper.GreaterThan), '"date">\'2013-04-05\'') self.assertEqual(w.createExpression(QgsSearchWidgetWrapper.LessThan), 
'"date"<\'2013-04-05\'') self.assertEqual(w.createExpression(QgsSearchWidgetWrapper.GreaterThanOrEqualTo), '"date">=\'2013-04-05\'') self.assertEqual(w.createExpression(QgsSearchWidgetWrapper.LessThanOrEqualTo), '"date"<=\'2013-04-05\'') # time field type w = QgsDateTimeSearchWidgetWrapper(layer, 1) config = {"field_format": 'HH:mm:ss', "display_format": 'HH:mm:ss'} w.setConfig(config) c = w.widget() c.setDateTime(QDateTime(QDate(2013, 4, 5), QTime(13, 14, 15))) self.assertEqual(w.createExpression(QgsSearchWidgetWrapper.IsNull), '"time" IS NULL') self.assertEqual(w.createExpression(QgsSearchWidgetWrapper.IsNotNull), '"time" IS NOT NULL') self.assertEqual(w.createExpression(QgsSearchWidgetWrapper.EqualTo), '"time"=\'13:14:15\'') self.assertEqual(w.createExpression(QgsSearchWidgetWrapper.NotEqualTo), '"time"<>\'13:14:15\'') self.assertEqual(w.createExpression(QgsSearchWidgetWrapper.GreaterThan), '"time">\'13:14:15\'') self.assertEqual(w.createExpression(QgsSearchWidgetWrapper.LessThan), '"time"<\'13:14:15\'') self.assertEqual(w.createExpression(QgsSearchWidgetWrapper.GreaterThanOrEqualTo), '"time">=\'13:14:15\'') self.assertEqual(w.createExpression(QgsSearchWidgetWrapper.LessThanOrEqualTo), '"time"<=\'13:14:15\'') # datetime field type w = QgsDateTimeSearchWidgetWrapper(layer, 2) config = {"field_format": 'yyyy-MM-dd HH:mm:ss', "display_format": 'yyyy-MM-dd HH:mm:ss'} w.setConfig(config) c = w.widget() c.setDateTime(QDateTime(QDate(2013, 4, 5), QTime(13, 14, 15))) self.assertEqual(w.createExpression(QgsSearchWidgetWrapper.IsNull), '"datetime" IS NULL') self.assertEqual(w.createExpression(QgsSearchWidgetWrapper.IsNotNull), '"datetime" IS NOT NULL') self.assertEqual(w.createExpression(QgsSearchWidgetWrapper.EqualTo), '"datetime"=\'2013-04-05 13:14:15\'') self.assertEqual(w.createExpression(QgsSearchWidgetWrapper.NotEqualTo), '"datetime"<>\'2013-04-05 13:14:15\'') self.assertEqual(w.createExpression(QgsSearchWidgetWrapper.GreaterThan), '"datetime">\'2013-04-05 
13:14:15\'') self.assertEqual(w.createExpression(QgsSearchWidgetWrapper.LessThan), '"datetime"<\'2013-04-05 13:14:15\'') self.assertEqual(w.createExpression(QgsSearchWidgetWrapper.GreaterThanOrEqualTo), '"datetime">=\'2013-04-05 13:14:15\'') self.assertEqual(w.createExpression(QgsSearchWidgetWrapper.LessThanOrEqualTo), '"datetime"<=\'2013-04-05 13:14:15\'') if __name__ == '__main__': unittest.main()
54.095376
131
0.689534
1,763
18,717
7.290414
0.133863
0.120205
0.126974
0.253948
0.769315
0.728857
0.705905
0.651988
0.573563
0.435307
0
0.024355
0.18176
18,717
345
132
54.252174
0.814887
0.061228
0
0.382239
0
0
0.124929
0.024266
0
0
0
0
0.405405
1
0.027027
false
0
0.023166
0
0.073359
0
0
0
0
null
0
0
1
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
1
0
0
0
0
0
0
0
0
0
4
7122f7239260373acc14b5705783c292ba4716bb
220
py
Python
conftest.py
Ouss4/pytest-embedded
b8811f97f6e3a3688da3628b9f3cb4645723c552
[ "MIT" ]
12
2021-06-07T11:39:30.000Z
2022-03-07T18:42:04.000Z
conftest.py
Ouss4/pytest-embedded
b8811f97f6e3a3688da3628b9f3cb4645723c552
[ "MIT" ]
30
2021-08-10T10:27:41.000Z
2022-03-25T08:08:29.000Z
conftest.py
Ouss4/pytest-embedded
b8811f97f6e3a3688da3628b9f3cb4645723c552
[ "MIT" ]
6
2021-11-16T13:10:45.000Z
2022-03-31T10:51:29.000Z
import os import pytest pytest_plugins = [ 'pytester', ] @pytest.fixture(autouse=True) def copy_fixtures(testdir): testdir.copy_example(os.path.join(os.path.dirname(__file__), 'tests', 'fixtures')) yield
15.714286
86
0.713636
28
220
5.357143
0.678571
0.08
0
0
0
0
0
0
0
0
0
0
0.145455
220
13
87
16.923077
0.797872
0
0
0
0
0
0.095455
0
0
0
0
0
0
1
0.111111
false
0
0.222222
0
0.333333
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
7125865039e4808ac309b57d84350350e5e69e6d
4,858
py
Python
tests/gitlab_test_utils.py
jarda-wien/gitlabber
e3e53b183233be6b08c47a8ce1264415dc7af6e4
[ "MIT" ]
344
2020-04-28T16:59:02.000Z
2022-03-30T08:50:58.000Z
tests/gitlab_test_utils.py
jarda-wien/gitlabber
e3e53b183233be6b08c47a8ce1264415dc7af6e4
[ "MIT" ]
86
2020-04-28T13:21:37.000Z
2022-03-31T12:51:29.000Z
tests/gitlab_test_utils.py
jarda-wien/gitlabber
e3e53b183233be6b08c47a8ce1264415dc7af6e4
[ "MIT" ]
64
2020-04-29T11:53:14.000Z
2022-03-23T09:41:05.000Z
import pytest import json from unittest import mock from gitlabber import gitlab_tree URL = "http://gitlab.my.com/" TOKEN = "MOCK_TOKEN" GROUP_URL = "http://gitlab.my.com/group" GROUP_NAME = "group" SUBGROUP_URL = "http://gitlab.my.com/group/subgroup" SUBGROUP_NAME = "subgroup" PROJECT_URL = "http://gitlab.my.com/group/subgroup/project/project.git" PROJECT_NAME = "project" YAML_TEST_INPUT_FILE = "tests/test-input.yaml" YAML_TEST_OUTPUT_FILE = "tests/test-output.yaml" JSON_TEST_OUTPUT_FILE = "tests/test-output.json" TREE_TEST_OUTPUT_FILE = "tests/test-output.tree" class MockNode: def __init__(self, id, name, url, subgroups=mock.MagicMock(), projects=mock.MagicMock(), parent_id=None): self.id = id self.name = name self.path = name self.url = url self.web_url = url self.ssh_url_to_repo = url self.http_url_to_repo = url self.subgroups = subgroups self.projects = projects self.parent_id = parent_id class Listable: def __init__(self, list_result, get_result=None, archive_result=None): self.list_result = list_result self.get_result = get_result self.archive_result = archive_result def list(self, as_list=False, archived=None): if archived is None: return [self.list_result, self.archive_result] if self.archive_result is not None else [self.list_result] elif archived is True: return [self.archive_result] else: return [self.list_result] def get(self, id): if self.get_result is not None: return self.get_result else: return self.list_result def validate_root(root): assert root.is_leaf is False assert root.name == "" assert root.url == "http://gitlab.my.com/" assert len(root.children) == 1 assert root.height == 3 def validate_group(group): assert group.name == GROUP_NAME assert group.url == GROUP_URL assert group.is_leaf is False assert len(group.children) == 1 assert group.height == 2 def validate_subgroup(subgroup): assert subgroup.name == SUBGROUP_NAME assert subgroup.url == SUBGROUP_URL assert subgroup.is_leaf is False assert len(subgroup.children) == 1 assert 
subgroup.height == 1 def validate_project(project): assert project.name == PROJECT_NAME assert project.url == PROJECT_URL assert project.is_leaf is True assert len(project.children) == 0 def validate_tree(root): validate_root(root) validate_group(root.children[0]) validate_subgroup(root.children[0].children[0]) validate_project(root.children[0].children[0].children[0]) def create_test_gitlab(monkeypatch, includes=None, excludes=None, in_file=None): gl = gitlab_tree.GitlabTree( URL, TOKEN, "ssh", "name", includes=includes, excludes=excludes, in_file=in_file) projects = Listable(MockNode(2, PROJECT_NAME, PROJECT_URL)) subgroup_node = MockNode(2, SUBGROUP_NAME, SUBGROUP_URL, projects=projects) subgroups = Listable(subgroup_node) groups = Listable(MockNode(2, GROUP_NAME, GROUP_URL, subgroups=subgroups), subgroup_node) monkeypatch.setattr(gl.gitlab, "groups", groups) return gl def create_test_gitlab_with_toplevel_subgroups(monkeypatch): gl = gitlab_tree.GitlabTree(URL, TOKEN, "ssh", "path") groups = Listable([MockNode(2, GROUP_NAME, GROUP_URL), MockNode(2, GROUP_NAME, GROUP_URL, parent_id=1)]) monkeypatch.setattr(gl.gitlab, "groups", groups) return gl def create_test_gitlab_with_archived(monkeypatch, includes=None, excludes=None, in_file=None, archived=None): gl = gitlab_tree.GitlabTree( URL, TOKEN, "ssh", "name", includes=includes, excludes=excludes, in_file=in_file, archived=archived) project_node = MockNode(1, PROJECT_NAME, PROJECT_URL) archived_project_node = MockNode( 2, "_archived_" + PROJECT_NAME, "_archived_" + PROJECT_URL) projects = Listable(project_node, archive_result=archived_project_node) subgroup_node = MockNode(2, SUBGROUP_NAME, SUBGROUP_URL, projects=projects) archived_subgroup_node = MockNode( 2, "_archived_" + SUBGROUP_NAME, "_archived_" + SUBGROUP_URL, projects=projects) subgroups = Listable(subgroup_node, archive_result=archived_subgroup_node) archived_subgroups = Listable(archived_subgroup_node, archive_result=archived_subgroup_node) group_node = 
MockNode(2, GROUP_NAME, GROUP_URL, subgroups=archived_subgroups) archived_group_node = MockNode(2, "_archived_" + GROUP_NAME, "_archived_" + GROUP_URL, subgroups=archived_subgroups) groups = Listable(group_node, get_result=subgroup_node, archive_result=archived_group_node) monkeypatch.setattr(gl.gitlab, "groups", groups) # gl.print_tree() return gl
36.253731
120
0.711404
640
4,858
5.140625
0.125
0.039514
0.025532
0.022796
0.426444
0.361094
0.306383
0.206383
0.130091
0.130091
0
0.006332
0.18732
4,858
133
121
36.526316
0.827001
0.003088
0
0.113208
0
0
0.077257
0.017971
0
0
0
0
0.179245
1
0.113208
false
0
0.037736
0
0.245283
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
7126a909c8eb6e0615ba8dbc55706b97b9c85813
33,512
py
Python
mindquantum/simulator/simulator.py
Takishima/mindquantum
e90dfe474b759023d7ae18281b9a87cb8d223d04
[ "Apache-2.0" ]
null
null
null
mindquantum/simulator/simulator.py
Takishima/mindquantum
e90dfe474b759023d7ae18281b9a87cb8d223d04
[ "Apache-2.0" ]
null
null
null
mindquantum/simulator/simulator.py
Takishima/mindquantum
e90dfe474b759023d7ae18281b9a87cb8d223d04
[ "Apache-2.0" ]
null
null
null
# -*- coding: utf-8 -*- # Copyright 2021 Huawei Technologies Co., Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================ """Simulator.""" import numpy as np from mindquantum import mqbackend as mb from mindquantum.core.circuit import Circuit from mindquantum.core.gates import BarrierGate, Measure, MeasureResult from mindquantum.core.gates.basic import BasicGate from mindquantum.core.operators import Hamiltonian from mindquantum.core.operators.hamiltonian import MODE from mindquantum.core.parameterresolver import ParameterResolver from mindquantum.utils import ket_string from mindquantum.utils.type_value_check import ( _check_and_generate_pr_type, _check_input_type, _check_int_type, _check_seed, _check_value_should_not_less, ) SUPPORTED_SIMULATOR = ['projectq'] def get_supported_simulator(): """ Get simulator name that supported by MindQuantum. Returns: list, The supported simulator list. """ return SUPPORTED_SIMULATOR class Simulator: """ Quantum simulator that simulate quantum circuit. Args: backend (str): which backend you want. The supported backend can be found in SUPPORTED_SIMULATOR n_qubits (int): number of quantum simulator. seed (int): the random seed for this simulator, if None, seed will generate by `numpy.random.randint`. Default: None. Raises: TypeError: if `backend` is not str. TypeError: if `n_qubits` is not int. TypeError: if `seed` is not int. ValueError: if `backend` is not supported. 
ValueError: if `n_qubits` is negative. ValueError: if `seed` is less than 0 or great than 2**23 - 1. Examples: >>> from mindquantum import Simulator >>> from mindquantum import qft >>> sim = Simulator('projectq', 2) >>> sim.apply_circuit(qft(range(2))) >>> sim.get_qs() array([0.5+0.j, 0.5+0.j, 0.5+0.j, 0.5+0.j]) """ def __init__(self, backend, n_qubits, seed=None): """Initialize a Simulator object.""" _check_input_type('backend', str, backend) _check_int_type('n_qubits', n_qubits) _check_value_should_not_less('n_qubits', 0, n_qubits) if seed is None: seed = np.random.randint(1, 2**23) _check_seed(seed) if backend not in SUPPORTED_SIMULATOR: raise ValueError(f"backend {backend} not supported, now we support {SUPPORTED_SIMULATOR}!") self.backend = backend self.seed = seed self.n_qubits = n_qubits if backend == 'projectq': self.sim = mb.projectq(seed, n_qubits) def copy(self): """ Copy this simulator. Returns: Simulator, a copy version of this simulator. Examples: >>> from mindquantum import RX, Simulator >>> sim = Simulator('projectq', 1) >>> sim.apply_gate(RX(1).on(0)) >>> sim.flush() >>> sim2 = sim.copy() >>> sim2.apply_gate(RX(-1).on(0)) >>> sim2 projectq simulator with 1 qubit (little endian). Current quantum state: 1¦0⟩ """ sim = Simulator(self.backend, self.n_qubits, self.seed) sim.sim = self.sim.copy() return sim def __str__(self): """Return a string representation of the object.""" state = self.get_qs() s = f"{self.backend} simulator with {self.n_qubits} qubit{'s' if self.n_qubits > 1 else ''} (little endian)." s += "\nCurrent quantum state:\n" if self.n_qubits < 4: s += '\n'.join(ket_string(state)) else: s += state.__str__() return s def __repr__(self): """Return a string representation of the object.""" return self.__str__() def reset(self): """ Reset simulator to zero state. 
Examples: >>> from mindquantum import Simulator >>> from mindquantum import qft >>> sim = Simulator('projectq', 2) >>> sim.apply_circuit(qft(range(2))) >>> sim.reset() >>> sim.get_qs() array([1.+0.j, 0.+0.j, 0.+0.j, 0.+0.j]) """ self.sim.reset() def flush(self): """ Flush gate that works for projectq simulator. The projectq simulator will cache several gate and fushion these gate into a bigger gate, and than act on the quantum state. The flush command will ask the simulator to fushion currently stored gate and act on the quantum state. Examples: >>> from mindquantum import Simulator >>> from mindquantum import H >>> sim = Simulator('projectq', 1) >>> sim.apply_gate(H.on(0)) >>> sim.flush() """ if self.backend == 'projectq': self.sim.run() def apply_gate(self, gate, pr=None, diff=False): """ Apply a gate on this simulator, can be a quantum gate or a measurement operator. Args: gate (BasicGate): The gate you want to apply. pr (Union[numbers.Number, numpy.ndarray, ParameterResolver, list]): The parameter for parameterized gate. Default: None. diff (bool): Whether to apply the derivative gate on this simulator. Default: False. Returns: int or None, if the gate if a measure gate, then return a collapsed state, Otherwise return None. Raises: TypeError: if `gate` is not a BasicGate. ValueError: if any qubit of `gate` is higher than simulator qubits. ValueError: if `gate` is parameterized, but no parameter supplied. TypeError: the `pr` is not a ParameterResolver if `gate` is parameterized. 
Examples: >>> import numpy as np >>> from mindquantum import Simulator >>> from mindquantum import RY, Measure >>> sim = Simulator('projectq', 1) >>> sim.apply_gate(RY('a').on(0), np.pi/2) >>> sim.get_qs() array([0.70710678+0.j, 0.70710678+0.j]) >>> sim.apply_gate(Measure().on(0)) 1 >>> sim.get_qs() array([0.+0.j, 1.+0.j]) """ _check_input_type('gate', BasicGate, gate) if not isinstance(gate, BarrierGate): gate_max = max(max(gate.obj_qubits, gate.ctrl_qubits)) if self.n_qubits < gate_max: raise ValueError(f"qubits of gate {gate} is higher than simulator qubits.") if isinstance(gate, Measure): return self.sim.apply_measure(gate.get_cpp_obj()) if pr is None: if gate.parameterized: raise ValueError("apply a parameterized gate needs a parameter_resolver") self.sim.apply_gate(gate.get_cpp_obj()) else: pr = _check_and_generate_pr_type(pr, gate.coeff.params_name) self.sim.apply_gate(gate.get_cpp_obj(), pr.get_cpp_obj(), diff) return None def apply_circuit(self, circuit, pr=None): """ Apply a circuit on this simulator. Args: circuit (Circuit): The quantum circuit you want to apply on this simulator. pr (Union[ParameterResolver, dict, numpy.ndarray, list, numbers.Number]): The parameter resolver for this circuit. If the circuit is not parameterized, this arg should be None. Default: None. Returns: MeasureResult or None, if the circuit has measure gate, then return a MeasureResult, otherwise return None. Examples: >>> import numpy as np >>> from mindquantum import Circuit, H >>> from mindquantum import Simulator >>> sim = Simulator('projectq', 2) >>> sim.apply_circuit(Circuit().un(H, 2)) >>> sim.apply_circuit(Circuit().ry('a', 0).ry('b', 1), np.array([1.1, 2.2])) >>> sim projectq simulator with 2 qubits (little endian). 
Current quantum state: -0.0721702531972066¦00⟩ -0.30090405886869676¦01⟩ 0.22178317006196263¦10⟩ 0.9246947752567126¦11⟩ >>> sim.apply_circuit(Circuit().measure(0).measure(1)) shots: 1 Keys: q1 q0│0.00 0.2 0.4 0.6 0.8 1.0 ───────────┼───────────┴───────────┴───────────┴───────────┴───────────┴ 11│▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓ │ {'11': 1} """ _check_input_type('circuit', Circuit, circuit) if self.n_qubits < circuit.n_qubits: raise ValueError(f"Circuit has {circuit.n_qubits} qubits, which is more than simulator qubits.") if circuit.has_measure_gate: res = MeasureResult() res.add_measure(circuit.all_measures.keys()) if circuit.params_name: if pr is None: raise ValueError("Applying a parameterized circuit needs a parameter_resolver") pr = _check_and_generate_pr_type(pr, circuit.params_name) else: pr = ParameterResolver() if circuit.has_measure_gate: samples = np.array( self.sim.apply_circuit_with_measure(circuit.get_cpp_obj(), pr.get_cpp_obj(), res.keys_map) ) samples = samples.reshape((1, -1)) res.collect_data(samples) return res if circuit.params_name: self.sim.apply_circuit(circuit.get_cpp_obj(), pr.get_cpp_obj()) else: self.sim.apply_circuit(circuit.get_cpp_obj()) return None def sampling(self, circuit, pr=None, shots=1, seed=None): """ Samping the measure qubit in circuit. Sampling do not change the origin quantum state of this simulator. Args: circuit (Circuit): The circuit that you want to evolution and do sampling. pr (Union[None, dict, ParameterResolver]): The parameter resolver for this circuit, if this circuit is a parameterized circuit. Default: None. shots (int): How many shots you want to sampling this circuit. Default: 1 seed (int): Random seed for random sampling. If None, seed will be a random int number. Default: None. Returns: MeasureResult, the measure result of sampling. 
Examples: >>> from mindquantum import Circuit, Measure >>> from mindquantum import Simulator >>> circ = Circuit().ry('a', 0).ry('b', 1) >>> circ += Measure('q0_0').on(0) >>> circ += Measure('q0_1').on(0) >>> circ += Measure('q1').on(1) >>> sim = Simulator('projectq', circ.n_qubits) >>> res = sim.sampling(circ, {'a': 1.1, 'b': 2.2}, shots=100, seed=42) >>> res shots: 100 Keys: q1 q0_1 q0_0│0.00 0.122 0.245 0.367 0.49 0.612 ──────────────────┼───────────┴───────────┴───────────┴───────────┴───────────┴ 000│▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒ │ 011│▒▒▒▒▒▒▒▒▒ │ 100│▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓ │ 111│▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒ │ {'000': 18, '011': 9, '100': 49, '111': 24} """ if not circuit.all_measures.map: raise ValueError("circuit must have at least one measurement gate.") _check_input_type("circuit", Circuit, circuit) if self.n_qubits < circuit.n_qubits: raise ValueError(f"Circuit has {circuit.n_qubits} qubits, which is more than simulator qubits.") _check_int_type("sampling shots", shots) _check_value_should_not_less("sampling shots", 1, shots) if circuit.parameterized: if pr is None: raise ValueError("Sampling a parameterized circuit need a ParameterResolver") if not isinstance(pr, (dict, ParameterResolver)): raise TypeError("pr requires a dict or a ParameterResolver, but get {}!".format(type(pr))) pr = ParameterResolver(pr) else: pr = ParameterResolver() if seed is None: seed = int(np.random.randint(1, 2 << 20)) else: _check_seed(seed) res = MeasureResult() res.add_measure(circuit.all_measures.keys()) sim = self if circuit.is_measure_end and not circuit.is_noise_circuit: sim = Simulator(self.backend, self.n_qubits, self.seed) sim.set_qs(self.get_qs()) sim.apply_circuit(circuit.remove_measure(), pr) circuit = Circuit(circuit.all_measures.keys()) samples = np.array( sim.sim.sampling(circuit.get_cpp_obj(), pr.get_cpp_obj(), shots, res.keys_map, seed) ).reshape((shots, -1)) res.collect_data(samples) return res def apply_hamiltonian(self, hamiltonian: Hamiltonian): 
""" Apply hamiltonian to a simulator, this hamiltonian can be hermitian or non hermitian. Note: The quantum state may be not a normalized quantum state after apply hamiltonian. Args: hamiltonian (Hamiltonian): the hamiltonian you want to apply. Examples: >>> from mindquantum import Simulator >>> from mindquantum import Circuit, Hamiltonian >>> from mindquantum.core.operators import QubitOperator >>> import scipy.sparse as sp >>> sim = Simulator('projectq', 1) >>> sim.apply_circuit(Circuit().h(0)) >>> sim.get_qs() array([0.70710678+0.j, 0.70710678+0.j]) >>> ham1 = Hamiltonian(QubitOperator('Z0')) >>> sim.apply_hamiltonian(ham1) >>> sim.get_qs() array([ 0.70710678+0.j, -0.70710678+0.j]) >>> sim.reset() >>> ham2 = Hamiltonian(sp.csr_matrix([[1, 2], [3, 4]])) >>> sim.apply_hamiltonian(ham2) >>> sim.get_qs() array([1.+0.j, 3.+0.j]) """ _check_input_type('hamiltonian', Hamiltonian, hamiltonian) _check_hamiltonian_qubits_number(hamiltonian, self.n_qubits) self.sim.apply_hamiltonian(hamiltonian.get_cpp_obj()) def get_expectation(self, hamiltonian): r""" Get expectation of the given hamiltonian. The hamiltonian could be non hermitian. .. math:: E = \left<\psi\right|H\left|\psi\right> Args: hamiltonian (Hamiltonian): The hamiltonian you want to get expectation. Returns: numbers.Number, the expectation value. Examples: >>> from mindquantum.core.operators import QubitOperator >>> from mindquantum import Circuit, Simulator >>> from mindquantum import Hamiltonian >>> sim = Simulator('projectq', 1) >>> sim.apply_circuit(Circuit().ry(1.2, 0)) >>> ham = Hamiltonian(QubitOperator('Z0')) >>> sim.get_expectation(ham) (0.36235775447667357+0j) """ if not isinstance(hamiltonian, Hamiltonian): raise TypeError(f"hamiltonian requires a Hamiltonian, but got {type(hamiltonian)}") _check_hamiltonian_qubits_number(hamiltonian, self.n_qubits) return self.sim.get_expectation(hamiltonian.get_cpp_obj()) def get_qs(self, ket=False): """ Get current quantum state of this simulator. 
Args: ket (bool): Whether to return the quantum state in ket format or not. Default: False. Returns: numpy.ndarray, the current quantum state. Examples: >>> from mindquantum import qft, Simulator >>> sim = Simulator('projectq', 2) >>> sim.apply_circuit(qft(range(2))) >>> sim.get_qs() array([0.5+0.j, 0.5+0.j, 0.5+0.j, 0.5+0.j]) """ if not isinstance(ket, bool): raise TypeError(f"ket requires a bool, but get {type(ket)}") state = np.array(self.sim.get_qs()) if ket: return '\n'.join(ket_string(state)) return state def set_qs(self, quantum_state): """ Set quantum state for this simulation. Args: quantum_state (numpy.ndarray): the quantum state that you want. Examples: >>> from mindquantum import Simulator >>> import numpy as np >>> sim = Simulator('projectq', 1) >>> sim.get_qs() array([1.+0.j, 0.+0.j]) >>> sim.set_qs(np.array([1, 1])) >>> sim.get_qs() array([0.70710678+0.j, 0.70710678+0.j]) """ if not isinstance(quantum_state, np.ndarray): raise TypeError(f"quantum state must be a ndarray, but get {type(quantum_state)}") if len(quantum_state.shape) != 1: raise ValueError(f"vec requires a 1-dimensional array, but get {quantum_state.shape}") n_qubits = np.log2(quantum_state.shape[0]) if n_qubits % 1 != 0: raise ValueError(f"vec size {quantum_state.shape[0]} is not power of 2") n_qubits = int(n_qubits) if self.n_qubits != n_qubits: raise ValueError(f"{n_qubits} qubits vec does not match with simulation qubits ({self.n_qubits})") self.sim.set_qs(quantum_state / np.sqrt(np.sum(np.abs(quantum_state) ** 2))) def get_expectation_with_grad( self, hams, circ_right, circ_left=None, simulator_left=None, encoder_params_name=None, ansatz_params_name=None, parallel_worker=None, ): r""" Get a function that return the forward value and gradient w.r.t circuit parameters. This method is designed to calculate the expectation and its gradient shown as below. .. 
math:: E = \left<\varphi\right|U_l^\dagger H U_r \left|\psi\right> where :math:`U_l` is circ_left, :math:`U_r` is circ_right, :math:`H` is hams and :math:`\left|\psi\right>` is the current quantum state of this simulator, and :math:`\left|\varphi\right>` is the quantum state of `simulator_left`. Args: hams (Hamiltonian): The hamiltonian that need to get expectation. circ_right (Circuit): The :math:`U_r` circuit described above. circ_left (Circuit): The :math:`U_l` circuit described above. By default, this circuit will be none, and in this situation, :math:`U_l` will be equals to :math:`U_r`. Default: None. simulator_left (Simulator): The simulator that contains :math:`\left|\varphi\right>`. If None, then :math:`\left|\varphi\right>` is assumed to be equals to :math:`\left|\psi\right>`. Default: None. encoder_params_name (list[str]): To specific which parameters belongs to encoder, that will encoder the input data into quantum state. The encoder data can be a batch. Default: None. ansatz_params_name (list[str]): To specific which parameters belongs to ansatz, that will be trained during training. Default: None. parallel_worker (int): The parallel worker numbers. The parallel workers can handle batch in parallel threads. Default: None. Returns: GradOpsWrapper, a grad ops wrapper than contains information to generate this grad ops. 
Examples: >>> import numpy as np >>> from mindquantum import Simulator, Hamiltonian >>> from mindquantum import Circuit >>> from mindquantum.core.operators import QubitOperator >>> circ = Circuit().ry('a', 0) >>> ham = Hamiltonian(QubitOperator('Z0')) >>> sim = Simulator('projectq', 1) >>> grad_ops = sim.get_expectation_with_grad(ham, circ) >>> grad_ops(np.array([1.0])) (array([[0.54030231+0.j]]), array([[[-0.84147098+0.j]]])) >>> sim1 = Simulator('projectq', 1) >>> prep_circ = Circuit().h(0) >>> ansatz = Circuit().ry('a', 0).rz('b', 0).ry('c', 0) >>> sim1.apply_circuit(prep_circ) >>> sim2 = Simulator('projectq', 1) >>> ham = Hamiltonian(QubitOperator("")) >>> grad_ops = sim2.get_expectation_with_grad(ham, ansatz, Circuit(), simulator_left=sim1) >>> f, g = grad_ops(np.array([7.902762e-01, 2.139225e-04, 7.795934e-01])) >>> f array([[0.99999989-7.52279618e-05j]]) """ if isinstance(hams, Hamiltonian): hams = [hams] elif not isinstance(hams, list): raise TypeError(f"hams requires a Hamiltonian or a list of Hamiltonian, but get {type(hams)}") for h_tmp in hams: _check_input_type("hams's element", Hamiltonian, h_tmp) _check_hamiltonian_qubits_number(h_tmp, self.n_qubits) _check_input_type("circ_right", Circuit, circ_right) if circ_right.is_noise_circuit: raise ValueError("noise circuit not support yet.") non_hermitian = False if circ_left is not None: _check_input_type("circ_left", Circuit, circ_left) if circ_left.is_noise_circuit: raise ValueError("noise circuit not support yet.") non_hermitian = True if simulator_left is not None: _check_input_type("simulator_left", Simulator, simulator_left) if self.backend != simulator_left.backend: raise ValueError( f"simulator_left should have the same backend as this simulator, \ which is {self.backend}, but get {simulator_left.backend}" ) if self.n_qubits != simulator_left.n_qubits: raise ValueError( f"simulator_left should have the same n_qubits as this simulator, \ which is {self.n_qubits}, but get {simulator_left.n_qubits}" ) 
non_hermitian = True if non_hermitian and simulator_left is None: simulator_left = self if circ_left is None: circ_left = circ_right if circ_left.has_measure_gate or circ_right.has_measure_gate: raise ValueError("circuit for variational algorithm cannot have measure gate") if parallel_worker is not None: _check_int_type("parallel_worker", parallel_worker) if encoder_params_name is None and ansatz_params_name is None: encoder_params_name = [] ansatz_params_name = list(circ_right.params_name) for i in circ_left.params_name: if i not in ansatz_params_name: ansatz_params_name.append(i) if encoder_params_name is None: encoder_params_name = [] if ansatz_params_name is None: ansatz_params_name = [] _check_input_type("encoder_params_name", list, encoder_params_name) _check_input_type("ansatz_params_name", list, ansatz_params_name) for i in encoder_params_name: _check_input_type("Element of encoder_params_name", str, i) for i in ansatz_params_name: _check_input_type("Element of ansatz_params_name", str, i) s1 = set(circ_right.params_name) | set(circ_left.params_name) s2 = set(encoder_params_name) | set(ansatz_params_name) if s1 - s2 or s2 - s1: raise ValueError("encoder_params_name and ansatz_params_name are different with circuit parameters") circ_n_qubits = max(circ_left.n_qubits, circ_right.n_qubits) if self.n_qubits < circ_n_qubits: raise ValueError(f"Simulator has {self.n_qubits} qubits, but circuit has {circ_n_qubits} qubits.") version = "both" if not ansatz_params_name: version = "encoder" if not encoder_params_name: version = "ansatz" def grad_ops(*inputs): if version == "both" and len(inputs) != 2: raise ValueError("Need two inputs!") if version in ("encoder", "ansatz") and len(inputs) != 1: raise ValueError("Need one input!") if version == "both": _check_encoder(inputs[0], len(encoder_params_name)) _check_ansatz(inputs[1], len(ansatz_params_name)) batch_threads, mea_threads = _thread_balance(inputs[0].shape[0], len(hams), parallel_worker) inputs0 = inputs[0] 
inputs1 = inputs[1] if version == "encoder": _check_encoder(inputs[0], len(encoder_params_name)) batch_threads, mea_threads = _thread_balance(inputs[0].shape[0], len(hams), parallel_worker) inputs0 = inputs[0] inputs1 = np.array([]) if version == "ansatz": _check_ansatz(inputs[0], len(ansatz_params_name)) batch_threads, mea_threads = _thread_balance(1, len(hams), parallel_worker) inputs0 = np.array([[]]) inputs1 = inputs[0] if non_hermitian: f_g1_g2 = self.sim.non_hermitian_measure_with_grad( [i.get_cpp_obj() for i in hams], [i.get_cpp_obj(hermitian=True) for i in hams], circ_left.get_cpp_obj(), circ_left.get_cpp_obj(hermitian=True), circ_right.get_cpp_obj(), circ_right.get_cpp_obj(hermitian=True), inputs0, inputs1, encoder_params_name, ansatz_params_name, batch_threads, mea_threads, simulator_left.sim, ) else: f_g1_g2 = self.sim.hermitian_measure_with_grad( [i.get_cpp_obj() for i in hams], circ_right.get_cpp_obj(), circ_right.get_cpp_obj(hermitian=True), inputs0, inputs1, encoder_params_name, ansatz_params_name, batch_threads, mea_threads, ) res = np.array(f_g1_g2) if version == 'both': f = res[:, :, 0] g1 = res[:, :, 1 : 1 + len(encoder_params_name)] # noqa:E203 g2 = res[:, :, 1 + len(encoder_params_name) :] # noqa:E203 return f, g1, g2 f = res[:, :, 0] g = res[:, :, 1:] return f, g grad_wrapper = GradOpsWrapper( grad_ops, hams, circ_right, circ_left, encoder_params_name, ansatz_params_name, parallel_worker ) s = f'{self.n_qubits} qubit' + ('' if self.n_qubits == 1 else 's') s += f' {self.backend} VQA Operator' grad_wrapper.set_str(s) return grad_wrapper def _check_encoder(data, encoder_params_size): if not isinstance(data, np.ndarray): raise ValueError(f"encoder parameters need numpy array, but get {type(data)}") data_shape = data.shape if len(data_shape) != 2: raise ValueError("encoder data requires a two dimension numpy array") if data_shape[1] != encoder_params_size: raise ValueError( f"encoder parameters size do not match with encoder parameters name,\ need 
{encoder_params_size} but get {data_shape[1]}." ) def _check_ansatz(data, ansatz_params_size): """Check ansatz.""" if not isinstance(data, np.ndarray): raise ValueError(f"ansatz parameters need numpy array, but get {type(data)}") data_shape = data.shape if len(data_shape) != 1: raise ValueError("ansatz data requires a one dimension numpy array") if data_shape[0] != ansatz_params_size: raise ValueError( f"ansatz parameters size do not match with ansatz parameters name,\ need {ansatz_params_size} but get {data_shape[0]}" ) def _thread_balance(n_prs, n_meas, parallel_worker): """Thread balance.""" if parallel_worker is None: parallel_worker = n_meas * n_prs if n_meas * n_prs <= parallel_worker: batch_threads = n_prs mea_threads = n_meas else: if n_meas < n_prs: batch_threads = min(n_prs, parallel_worker) mea_threads = min(n_meas, max(1, parallel_worker // batch_threads)) else: mea_threads = min(n_meas, parallel_worker) batch_threads = min(n_prs, max(1, parallel_worker // mea_threads)) return batch_threads, mea_threads def _check_hamiltonian_qubits_number(hamiltonian, sim_qubits): """Check hamiltonian qubits number.""" if hamiltonian.how_to != MODE['origin']: if hamiltonian.n_qubits != sim_qubits: raise ValueError( f"Hamiltonian qubits is {hamiltonian.n_qubits}, not match \ with simulator qubits number {sim_qubits}" ) else: if hamiltonian.n_qubits > sim_qubits: raise ValueError(f"Hamiltonian qubits is {hamiltonian.n_qubits}, which is bigger than simulator qubits.") class GradOpsWrapper: """ Wrapper the gradient operator that with the information that generate this gradient operator. Args: grad_ops (Union[FunctionType, MethodType])): A function or a method that return forward value and gradient w.r.t parameters. hams (Hamiltonian): The hamiltonian that generate this grad ops. circ_right (Circuit): The right circuit that generate this grad ops. circ_left (Circuit): The left circuit that generate this grad ops. encoder_params_name (list[str]): The encoder parameters name. 
ansatz_params_name (list[str]): The ansatz parameters name. parallel_worker (int): The number of parallel worker to run the batch. """ def __init__(self, grad_ops, hams, circ_right, circ_left, encoder_params_name, ansatz_params_name, parallel_worker): """Initialize a GradOpsWrapper object.""" self.grad_ops = grad_ops self.hams = hams self.circ_right = circ_right self.circ_left = circ_left self.encoder_params_name = encoder_params_name self.ansatz_params_name = ansatz_params_name self.parallel_worker = parallel_worker self.str = '' def __call__(self, *args): """Definition of a function call operator.""" return self.grad_ops(*args) def set_str(self, s): """ Set expression for gradient operator. Args: s (str): The string of QNN operator. """ self.str = s def inner_product(bra_simulator: Simulator, ket_simulator: Simulator): """ Calculate the inner product of two state that in the given simulator. Args: bra_simulator (Simulator): The simulator that serve as bra state. ket_simulator (Simulator): The simulator that serve as ket state. Returns: numbers.Number, the inner product of two quantum state. Examples: >>> from mindquantum import RX, RY, Simulator >>> from mindquantum.simulator import inner_product >>> bra_simulator = Simulator('projectq', 1) >>> bra_simulator.apply_gate(RY(1.2).on(0)) >>> ket_simulator = Simulator('projectq', 1) >>> ket_simulator.apply_gate(RX(2.3).on(0)) >>> inner_product(bra_simulator, ket_simulator) """ _check_input_type('bra_simulator', Simulator, bra_simulator) _check_input_type('ket_simulator', Simulator, ket_simulator) if bra_simulator.n_qubits != ket_simulator.n_qubits: raise ValueError( f"Two simulator should have same quantum state, \ but get {bra_simulator.n_qubits} and {ket_simulator.n_qubits}." 
) if bra_simulator.backend != ket_simulator.backend: raise ValueError("The backend of two simulator should be same.") if bra_simulator.backend == 'projectq' and ket_simulator.backend == 'projectq': bra_simulator.flush() ket_simulator.flush() return mb.cpu_projectq_inner_product(bra_simulator.sim, ket_simulator.sim) raise ValueError(f"backend for {bra_simulator.backend} not implement.") __all__ = ['Simulator', 'get_supported_simulator', 'GradOpsWrapper', 'inner_product']
42.153459
120
0.586894
4,088
33,512
4.7182
0.106164
0.020686
0.021153
0.00674
0.366497
0.282922
0.217389
0.177105
0.141228
0.111209
0
0.024353
0.302787
33,512
794
121
42.206549
0.786946
0.397529
0
0.227273
0
0.002674
0.127635
0.0088
0
0
0
0
0
1
0.064171
false
0
0.026738
0
0.144385
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
7126b8e0e2a112a169fa2ccb17434fdbeb6afcc6
10,853
py
Python
seasonedParser/core.py
KevinMidboe/seasonMover
380c4a02f48679c0204ecf1a5807718b93f2ff19
[ "MIT" ]
null
null
null
seasonedParser/core.py
KevinMidboe/seasonMover
380c4a02f48679c0204ecf1a5807718b93f2ff19
[ "MIT" ]
9
2017-09-29T11:35:37.000Z
2020-02-19T09:34:15.000Z
seasonedParser/core.py
KevinMidboe/seasonedParser
380c4a02f48679c0204ecf1a5807718b93f2ff19
[ "MIT" ]
null
null
null
#!/usr/bin/env python3.6 # -*- coding: utf-8 -*- # @Author: KevinMidboe # @Date: 2017-08-25 23:22:27 # @Last Modified by: KevinMidboe # @Last Modified time: 2019-02-02 01:04:25 from guessit import guessit from babelfish import Language, LanguageReverseError import hashlib import os, errno import shutil import re import tvdb_api import click from pprint import pprint from titlecase import titlecase import langdetect from exceptions import InsufficientNameError import logging logger = logging.getLogger('seasonedParser') from video import VIDEO_EXTENSIONS, Episode, Movie, Video from subtitle import SUBTITLE_EXTENSIONS, Subtitle, get_subtitle_path from utils import sanitize, refine def search_external_subtitles(path, directory=None): dirpath, filename = os.path.split(path) dirpath = dirpath or '.' fileroot, fileext = os.path.splitext(filename) subtitles = {} for p in os.listdir(directory or dirpath): if not p.endswith(SUBTITLE_EXTENSIONS): continue language = Language('und') language_code = p[len(fileroot):-len(os.path.splitext(p)[1])].replace(fileext, '').replace('_','-')[1:] if language_code: try: language = Language.fromietf(language_code) except (ValueError, LanguageReverseError): logger.error('Cannot parse language code %r', language_code) f = open(os.path.join(dirpath, p), 'r', encoding='ISO-8859-15') pattern = re.compile('[0-9:\,-<>]+') # head = list(islice(f.read(), 10)) filecontent = pattern.sub('', f.read()) filecontent = filecontent[0:1000] language = langdetect.detect(filecontent) f.close() subtitles[os.path.join(dirpath, p)] = language logger.debug('Found subtitles %r', subtitles) return subtitles def find_file_size(video): return os.path.getsize(video.name) def scan_video(path): """Scan a video from a `path`. :param str path: existing path to the video. :return: the scanned video. 
:rtype: :class:`~subliminal.video.Video` """ # check for non-existing path if not os.path.exists(path): raise ValueError('Path does not exist') # check video extension if not path.endswith(VIDEO_EXTENSIONS): raise ValueError('%r is not a valid video extension' % os.path.splitext(path)[1]) dirpath, filename = os.path.split(path) logger.info('Scanning video %r in %r', filename, dirpath) # guess video = Video.fromguess(path, guessit(filename)) video.subtitles |= set(search_external_subtitles(video.name)) refine(video) # hash of name # if isinstance(video, Movie): # if type(video.title) is str and type(video.year) is int: # home_path = '{} ({})'.format(video.title, video.year) # hash_str = ''.join([video.title, str(video.year) or '']) # elif isinstance(video, Episode): # if type(video.series) is str and type(video.season) is int and type(video.episode) is int: # home_path = '{} ({})'.format(video.title, video.year) # hash_str = ''.join([video.series, str(video.season), str(video.episode)]) # video.hash = hashlib.md5(hash_str.encode()).hexdigest() # except: # print(video) return video def scan_subtitle(path): if not os.path.exists(path): raise ValueError('Path does not exist') dirpath, filename = os.path.split(path) logger.info('Scanning subtitle %r in %r', filename, dirpath) # guess parent_path = path.strip(filename) subtitle = Subtitle.fromguess(parent_path, guessit(path)) return subtitle def subtitle_path(sibling, subtitle): parent_path = os.path.dirname(sibling) return os.path.join(parent_path, os.path.basename(subtitle)) def scan_videos(path): """Scan `path` for videos and their subtitles. See :func:`refine` to find additional information for the video. :param str path: existing directory path to scan. :return: the scanned videos. 
:rtype: list of :class:`~subliminal.video.Video` """ # check for non-existing path if not os.path.exists(path): raise ValueError('Path does not exist') # check for non-directory path if not os.path.isdir(path): raise ValueError('Path is not a directory') # setup progress bar path_children = 0 for _ in os.walk(path): path_children += 1 with click.progressbar(length=path_children, show_pos=True, label='Collecting videos') as bar: # walk the path videos = [] insufficient_name = [] errors_path = [] for dirpath, dirnames, filenames in os.walk(path): logger.debug('Walking directory %r', dirpath) # remove badly encoded and hidden dirnames for dirname in list(dirnames): if dirname.startswith('.'): logger.debug('Skipping hidden dirname %r in %r', dirname, dirpath) dirnames.remove(dirname) # scan for videos for filename in filenames: if not (filename.endswith(VIDEO_EXTENSIONS)): logger.debug('Skipping non-video file %s', filename) continue # skip hidden files if filename.startswith('.'): logger.debug('Skipping hidden filename %r in %r', filename, dirpath) continue # reconstruct the file path filepath = os.path.join(dirpath, filename) if os.path.islink(filepath): logger.debug('Skipping link %r in %r', filename, dirpath) continue # scan if filename.endswith(VIDEO_EXTENSIONS): # video try: video = scan_video(filepath) except InsufficientNameError as e: logger.info(e) insufficient_name.append(filepath) continue except ValueError: # pragma: no cover logger.exception('Error scanning video') errors_path.append(filepath) continue else: # pragma: no cover raise ValueError('Unsupported file %r' % filename) videos.append(video) bar.update(1) return videos, insufficient_name, errors_path def organize_files(path): hashList = {} mediafiles = scan_files(path) # print(mediafiles) for file in mediafiles: hashList.setdefault(file.__hash__(),[]).append(file) # hashList[file.__hash__()] = file return hashList def save_subtitles(files, single=False, directory=None, encoding=None): t = 
tvdb_api.Tvdb() if not isinstance(files, list): files = [files] for file in files: # TODO this should not be done in the loop dirname = "%s S%sE%s" % (file.series, "%02d" % (file.season), "%02d" % (file.episode)) createParentfolder = not dirname in file.parent_path if createParentfolder: dirname = os.path.join(file.parent_path, dirname) print('Created: %s' % dirname) try: os.makedirs(dirname) except OSError as e: if e.errno != errno.EEXIST: raise # TODO Clean this ! try: tvdb_episode = t[file.series][file.season][file.episode] episode_title = tvdb_episode['episodename'] except: episode_title = '' old = os.path.join(file.parent_path, file.name) if file.name.endswith(SUBTITLE_EXTENSIONS): lang = file.getLanguage() sdh = '.sdh' if file.sdh else '' filename = "%s S%sE%s %s%s.%s.%s" % (file.series, "%02d" % (file.season), "%02d" % (file.episode), episode_title, sdh, lang, file.container) else: filename = "%s S%sE%s %s.%s" % (file.series, "%02d" % (file.season), "%02d" % (file.episode), episode_title, file.container) if createParentfolder: newname = os.path.join(dirname, filename) else: newname = os.path.join(file.parent_path, filename) print('Moved: %s ---> %s' % (old, newname)) os.rename(old, newname) def scan_folder(path): videos = [] insufficient_name = [] errored_paths = [] logger.debug('Collecting path %s', path) # non-existing if not os.path.exists(path): errored_paths.append(path) logger.exception("The path '{}' does not exist".format(path)) # file # if path is a file if os.path.isfile(path): logger.info('Path is a file') try: video = scan_video(path) videos.append(video) except InsufficientNameError as e: logger.info(e) insufficient_name.append(path) # directories if os.path.isdir(path): logger.info('Path is a directory') scanned_videos = [] try: videos, insufficient_name, errored_paths = scan_videos(path) except: logger.exception('Unexpected error while collecting directory path %s', path) errored_paths.append(path) click.echo('%s video%s collected / %s file%s 
with insufficient name / %s error%s' % ( click.style(str(len(videos)), bold=True, fg='green' if videos else None), 's' if len(videos) > 1 else '', click.style(str(len(insufficient_name)), bold=True, fg='yellow' if insufficient_name else None), 's' if len(insufficient_name) > 1 else '', click.style(str(len(errored_paths)), bold=True, fg='red' if errored_paths else None), 's' if len(errored_paths) > 1 else '', )) return videos, insufficient_name def pickforgirlscouts(video): if video.sufficientInfo(): video.moveLocation() return True return False def moveHome(video): wantedFilePath = video.wantedFilePath() dir = os.path.dirname(wantedFilePath) if not os.path.exists(dir): logger.info('Creating directory {}'.format(dir)) os.makedirs(dir) logger.info("Moving video file from: '{}' to: '{}'".format(video.name, wantedFilePath)) shutil.move(video.name, wantedFilePath) for sub in video.subtitles: if not os.path.isfile(sub): continue oldpath = sub newpath = subtitle_path(wantedFilePath, sub) logger.info("Moving subtitle file from: '{}' to: '{}'".format(oldpath, newpath)) shutil.move(oldpath, newpath) # Give feedback before delete ? def empthDirectory(paths): pass
32.887879
152
0.602322
1,278
10,853
5.046948
0.21831
0.026047
0.012403
0.011938
0.228682
0.167597
0.11969
0.11969
0.114419
0.093643
0
0.008741
0.28324
10,853
329
153
32.987842
0.820414
0.152861
0
0.206897
0
0.004926
0.095385
0
0
0
0
0.00304
0
1
0.059113
false
0.004926
0.078818
0.004926
0.187192
0.014778
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
7127698ab3d52c9f1add1c5b008972b4228385d7
1,266
py
Python
networking_vsphere/utils/db.py
huadream/networking-vsphere
8669a78d4d2eb4620610fe7e4548cac7fbfa9e6a
[ "Apache-2.0" ]
null
null
null
networking_vsphere/utils/db.py
huadream/networking-vsphere
8669a78d4d2eb4620610fe7e4548cac7fbfa9e6a
[ "Apache-2.0" ]
null
null
null
networking_vsphere/utils/db.py
huadream/networking-vsphere
8669a78d4d2eb4620610fe7e4548cac7fbfa9e6a
[ "Apache-2.0" ]
null
null
null
# Copyright 2016 Mirantis, Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from neutron.db.models import agent as agents_db from neutron_lib.db import api as db_api from networking_vsphere.common import constants def get_agent_by_host(agent_host): """Return a L2 agent on the host.""" session = db_api.get_writer_session() with session.begin(subtransactions=True): query = session.query(agents_db.Agent) agent = query.filter( agents_db.Agent.host == agent_host, agents_db.Agent.agent_type == constants.AGENT_TYPE_DVS, agents_db.Agent.admin_state_up.is_(True)).first() if agent and agent.is_active: return agent return None
37.235294
78
0.71169
185
1,266
4.745946
0.540541
0.068337
0.059226
0.036446
0
0
0
0
0
0
0
0.009045
0.21406
1,266
33
79
38.363636
0.873367
0.49684
0
0
0
0
0
0
0
0
0
0
0
1
0.071429
false
0
0.214286
0
0.428571
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
71290ff28eeef27fa1289d7f582ff081fb7a83c1
2,645
py
Python
Client/Classes/slide.py
crew/dds-client
5d530f053955df07b75410807816241a10b567d3
[ "MIT" ]
null
null
null
Client/Classes/slide.py
crew/dds-client
5d530f053955df07b75410807816241a10b567d3
[ "MIT" ]
null
null
null
Client/Classes/slide.py
crew/dds-client
5d530f053955df07b75410807816241a10b567d3
[ "MIT" ]
null
null
null
import json # TODO: Make symlinks between the Server and Client slide, message, and ConfigParser classes class Slide(): """ Class for Slide Show Slide Objects @ivar url: The Slide's URL @type url: String @ivar duration: The Duration to show the slide (in seconds) @type duration: Integer @ivar id: The Slide's ID @type id: Integer @ivar meta: The Slide's meta content @type meta: Dictionary @copyright: Northeastern University Crew 2014 """ @staticmethod def makeSlide(url, duration, id, meta): """ Slide Constructor based on the given input (instead of a dictionary) @param url: The Slide's URL @type url: String @param duration: The Duration to show the slide (in seconds) @type duration: Integer @param id: The Slide's ID @type id: Integer @param meta: The Slide's meta content @type meta: Dictionary @return: The constructed Slide @rtype: Slide """ return Slide({"permalink": url, "duration": duration, "ID": id, "meta": meta}) def __init__(self, infoDict): """ Slide Constructor @param infoDict: The relevant information for the slide @type infoDict: Dictionary @return: The constructed Slide @rtype: Slide """ self.__type__ = "slide" print "Got meta:", infoDict["meta"] self.url = infoDict["permalink"] if (not (isinstance(infoDict["meta"], str)) and (infoDict["meta"]["dds_external_url"][0] != "")): self.url = infoDict["meta"]["dds_external_url"][0] self.duration = infoDict["duration"] self.id = infoDict["ID"] self.meta = infoDict["meta"] def toJSON(self): """ @return: A JSON Representation of the slide @rtype: String """ text = json.dumps(self.__dict__) return text def sameID(self, id): """ Predicate method which checks if the given id is equal to the slide's @param id: The id to check @type id: Integer @return: Whether the id matches the slide's @rtype: Boolean @todo: Do we I{really} need a method for this? 
""" return self.id == id def __str__(self): return "Slide[url=" + str(self.url) + ", duration=" + str(self.duration) + ", id=" + str( self.id) + ", meta=" + str(self.meta) + "]" def __repr__(self): # Simplified output (Shown when Arrays of Slides are Printed) return "Slide(" + str(self.url) + "," + str(self.duration) + ")"
32.654321
97
0.582987
323
2,645
4.69969
0.303406
0.063241
0.047431
0.01581
0.300395
0.300395
0.300395
0.206851
0.135705
0.080369
0
0.003254
0.302836
2,645
80
98
33.0625
0.819957
0.056711
0
0
0
0
0.136488
0
0
0
0
0.025
0
0
null
null
0
0.04
null
null
0.04
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
1
0
1
0
0
0
0
0
0
0
0
2
712af4c8d551dfa14800db9ae1b75dfc39f56d2e
3,692
py
Python
eval_model.py
yaojin17/adversarial-project
76af16f126ae701fb3a0a83152b37cbec5e7b28f
[ "Apache-2.0" ]
null
null
null
eval_model.py
yaojin17/adversarial-project
76af16f126ae701fb3a0a83152b37cbec5e7b28f
[ "Apache-2.0" ]
null
null
null
eval_model.py
yaojin17/adversarial-project
76af16f126ae701fb3a0a83152b37cbec5e7b28f
[ "Apache-2.0" ]
null
null
null
import torch import torch.nn as nn import torch.nn.functional as F import torchvision from torchvision import datasets, transforms from utils import prepare_cifar from fgsm_attack import FGSMAttack from tqdm import tqdm, trange from pgd20 import pgd20_attack from model import get_model_for_attack def eval_model(model, test_loader, device): correct_adv, correct = [], [] distance = [] num = 0 with trange(10000) as pbar: for x, label in test_loader: x, label = x.to(device), label.to(device) batch, c, h, w = x.shape model.eval() with torch.no_grad(): output = model(x) pred = output.argmax(dim=1) correct.append(pred == label) num += x.shape[0] pbar.set_description(f"Acc: {torch.cat(correct).float().mean():.5f}") pbar.update(x.shape[0]) natural_acc = torch.cat(correct).float().mean() return natural_acc, distance def eval_model_pgd(model, test_loader, device, step_size, epsilon, perturb_steps): correct_adv, correct = [], [] distance = [] num = 0 with trange(10000) as pbar: for x, label in test_loader: x, label = x.to(device), label.to(device) batch, c, h, w = x.shape x_adv = pgd20_attack(model, x.clone(), label.clone(), step_size, epsilon, perturb_steps) x_adv = x_adv.to(device) model.eval() with torch.no_grad(): output = model(x) output_adv = model(x_adv) distance.append(torch.max((x - x_adv).reshape(batch, -1).abs(), dim=1)[0]) pred = output.argmax(dim=1) pred_adv = output_adv.argmax(dim=1) correct.append(pred == label) correct_adv.append(pred_adv == label) num += x.shape[0] pbar.set_description( f"Acc: {torch.cat(correct).float().mean():.5f}, Robust Acc:{torch.cat(correct_adv).float().mean():.5f}") pbar.update(x.shape[0]) natural_acc = torch.cat(correct).float().mean() robust_acc = torch.cat(correct_adv).float().mean() distance = torch.cat(distance).max() return natural_acc, robust_acc, distance def eval_model_with_attack(model, test_loader, attack, epsilon, device): correct_adv, correct = [], [] distance = [] num = 0 nb = 0 with trange(10000) as pbar: for x, label in 
test_loader: x, label = x.to(device), label.to(device) batch, c, h, w = x.shape # x_adv = attack(x.clone(), label.clone()) x_adv = attack(model, x.clone(), label.clone()) # x_adv = attack.perturb(x) # x_adv = torch.min(torch.max(x_adv, x - epsilon), x + epsilon) x_adv = x_adv.clamp(0, 1) x_adv = x_adv.to(device) model.eval() with torch.no_grad(): output = model(x) output_adv = model(x_adv) distance.append(torch.max((x - x_adv).reshape(batch, -1).abs(), dim=1)[0]) pred = output.argmax(dim=1) pred_adv = output_adv.argmax(dim=1) correct.append(pred == label) correct_adv.append(pred_adv == label) num += x.shape[0] nb += 1 pbar.set_description( f"Acc: {torch.cat(correct).float().mean():.5f}, Robust Acc:{torch.cat(correct_adv).float().mean():.5f}") pbar.update(x.shape[0]) natural_acc = torch.cat(correct).float().mean() robust_acc = torch.cat(correct_adv).float().mean() distance = torch.cat(distance).max() return natural_acc, robust_acc, distance
38.458333
120
0.578548
499
3,692
4.136273
0.152305
0.031008
0.053295
0.087209
0.791667
0.743702
0.728198
0.670058
0.670058
0.652616
0
0.018961
0.285753
3,692
95
121
38.863158
0.763747
0.03467
0
0.764706
0
0.023529
0.068539
0.059831
0
0
0
0
0
1
0.035294
false
0
0.117647
0
0.188235
0
0
0
0
null
0
0
0
0
1
1
0
0
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
712c4c6a5b75135845d649016c8d2919cb39542c
6,613
py
Python
api/app/main.py
JexPY/filemanager-fastapi
da830fe6d9a3d515e0d04e6e690ff366225ec251
[ "MIT" ]
24
2020-09-15T11:59:55.000Z
2022-03-13T19:58:02.000Z
api/app/main.py
JexPY/filemanager-fastapi
da830fe6d9a3d515e0d04e6e690ff366225ec251
[ "MIT" ]
null
null
null
api/app/main.py
JexPY/filemanager-fastapi
da830fe6d9a3d515e0d04e6e690ff366225ec251
[ "MIT" ]
5
2020-10-11T08:41:29.000Z
2022-03-10T07:23:55.000Z
from fastapi import FastAPI, File, UploadFile, BackgroundTasks, Depends, HTTPException,status,Query from fastapi.responses import FileResponse from fastapi.middleware.cors import CORSMiddleware from fastapi.security import HTTPBearer,OAuth2AuthorizationCodeBearer,HTTPBasicCredentials from fastapi.staticfiles import StaticFiles from fastapi.middleware.cors import CORSMiddleware from dotenv import load_dotenv from typing import List,Optional import os import sys from services.serveUploadedFiles import handle_upload_image_file, handle_multiple_image_file_uploads, handle_upload_video_file from services.serveQrcode import handle_qr_code from services.security.customBearerCheck import validate_token from services.storage.local import response_image_file from services.serveDataFromUrl import handle_download_data_from_url, handle_multiple_image_file_downloads load_dotenv() app = FastAPI(docs_url=None if os.environ.get('docs_url') == 'None' else '/docs', redoc_url=None if os.environ.get('redoc_url') == 'None' else '/redoc') # If you want to serve files from local server you need to mount your static file directory if os.environ.get('PREFERED_STORAGE') == 'local' and 'pytest' not in sys.modules.keys(): app.mount("/static", StaticFiles(directory="static"), name="static") # If you want cors configuration also possible thanks to fast-api origins = os.environ.get('CORS_ORIGINS').split(',') app.add_middleware( CORSMiddleware, allow_origins=origins, allow_credentials=True, allow_methods=["*"], allow_headers=["*"], ) @app.get("/", tags=["main"]) def root( cpu_load: Optional[str] = Query( False, description='True/False depending your needs, gets average CPU load value', regex='^(True|False)$' ), token: str = Depends(validate_token)): result = { "Hello": f"Token is {token}", } if cpu_load == 'True': result['cpu_average_load'] = os.getloadavg() return result # File size validates NGINX @app.post("/image", tags=["image"]) async def upload_image_file( thumbnail: Optional[str] = Query( 
os.environ.get('IMAGE_THUMBNAIL'), description='True/False depending your needs', regex='^(True|False)$' ), file: UploadFile = File(...), OAuth2AuthorizationCodeBearer = Depends(validate_token)): return handle_upload_image_file(True if thumbnail == 'True' else False, file) @app.post("/images", tags=["image"]) async def upload_image_files( thumbnail: Optional[str] = Query( os.environ.get('IMAGE_THUMBNAIL'), description='True/False depending your needs', regex='^(True|False)$' ), files: List[UploadFile] = File(...), OAuth2AuthorizationCodeBearer = Depends(validate_token) ): fileAmount = len(files) if fileAmount > int(os.environ.get('MULTIPLE_FILE_UPLOAD_LIMIT')): raise HTTPException( status_code=status.HTTP_413_REQUEST_ENTITY_TOO_LARGE, detail='Amount of files must not be more than {}'.format(os.environ.get('MULTIPLE_FILE_UPLOAD_LIMIT')) ) return handle_multiple_image_file_uploads(files, fileAmount, True if thumbnail == 'True' else False) @app.get("/image", tags=["image"]) async def get_image( image: str = Query(..., description='uploaded image name', max_length=50 ), image_type: str = Query( ..., description='Should provide verision of image you want from localStorage original, thumbnail or qrImage', regex='^(original|thumbnail|qrImage)$' ), OAuth2AuthorizationCodeBearer = Depends(validate_token) ): return response_image_file(image, image_type) @app.post("/qrImage", tags=["image"]) async def text_to_generate_qr_image( qr_text: str = Query( ..., description='Provide text to generate qr image', ), with_logo: Optional[str] = Query( os.environ.get('QR_IMAGE_WITH_LOGO'), description='True/False depending your needs default is {}'.format(os.environ.get('QR_IMAGE_WITH_LOGO')), regex='^(True|False)$' ), OAuth2AuthorizationCodeBearer = Depends(validate_token)): return handle_qr_code(qr_text, True if with_logo == 'True' else False) @app.post("/video", tags=["video"]) async def upload_video_file( optimize: Optional[str] = Query( os.environ.get('VIDEO_OPTIMIZE'), 
description='True/False depending your needs default is {}'.format(os.environ.get('VIDEO_OPTIMIZE')), regex='^(True|False)$' ), file: UploadFile = File(..., description='Allows mov, mp4, m4a, 3gp, 3g2, mj2'), OAuth2AuthorizationCodeBearer = Depends(validate_token)): return handle_upload_video_file(True if optimize == 'True' else False, file) @app.get("/imageUrl", tags=["from url"]) async def image_from_url( image_url: str = Query( None, description = "Pass valid image url to upload", min_length = 5 ), thumbnail: Optional[str] = Query( os.environ.get('IMAGE_THUMBNAIL'), description='True/False depending your needs', regex='^(True|False)$' ), OAuth2AuthorizationCodeBearer = Depends(validate_token)): return handle_download_data_from_url(image_url, True if thumbnail == 'True' else False, file_type='image') @app.get("/imageUrls", tags=["from url"]) async def images_from_urls( image_urls: List[str] = Query( None, description = "Pass valid image urls to upload", min_length = 5 ), OAuth2AuthorizationCodeBearer = Depends(validate_token)): fileAmount = len(image_urls) if fileAmount > int(os.environ.get('MULTIPLE_FILE_UPLOAD_LIMIT')): raise HTTPException( status_code=status.HTTP_413_REQUEST_ENTITY_TOO_LARGE, detail='Amount of files must not be more than {}'.format(os.environ.get('MULTIPLE_FILE_UPLOAD_LIMIT')) ) return handle_multiple_image_file_downloads(image_urls, fileAmount) @app.get("/videoUrl", tags=["from url"]) async def video_from_url( video_url: str = Query( None, description = "Pass valid video url to upload", min_length = 5 ), optimize: Optional[str] = Query( os.environ.get('VIDEO_OPTIMIZE'), description='True/False depending your needs default is {}'.format(os.environ.get('VIDEO_OPTIMIZE')), regex='^(True|False)$' ), OAuth2AuthorizationCodeBearer = Depends(validate_token)): return handle_download_data_from_url(video_url, False, True if optimize == 'True' else False, file_type='video')
37.573864
152
0.692122
804
6,613
5.508706
0.210199
0.034545
0.04606
0.088508
0.575977
0.504629
0.415218
0.299616
0.299616
0.282682
0
0.004846
0.188719
6,613
176
153
37.573864
0.82069
0.027068
0
0.425676
0
0
0.200902
0.020837
0
0
0
0
0
1
0.006757
false
0.02027
0.101351
0
0.168919
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
712c549f586b26511dd3c9bf33e7238504d23130
3,570
py
Python
src/modules/Load.py
aaanh/duplicated_accelcamp
7d4b60ace023bede907f8ed367ba492731a1951d
[ "FTL", "CNRI-Python", "RSA-MD" ]
null
null
null
src/modules/Load.py
aaanh/duplicated_accelcamp
7d4b60ace023bede907f8ed367ba492731a1951d
[ "FTL", "CNRI-Python", "RSA-MD" ]
2
2021-05-21T16:31:41.000Z
2021-08-25T16:05:48.000Z
src/modules/Load.py
aaanh/duplicated_accelcamp
7d4b60ace023bede907f8ed367ba492731a1951d
[ "FTL", "CNRI-Python", "RSA-MD" ]
null
null
null
from modules.LoadAccel import * from modules.LoadOmega import * import os from tkinter import * defaultdir = "../data" def LoadDataSet(dirpath=None): if(dirpath==None): root = Tk() root.withdraw() dirpath = filedialog.askdirectory(parent=root,initialdir=defaultdir,title='Please select a dataset') files = os.listdir(dirpath) print("-------Found "+str(len(files))+ " files-------") for i in files: print("Found: "+i) print("----------------------------") i = 1 runs_files = [] while(True): run = list(filter(lambda x: x == "run"+str(i), files)) if(run != []): runs_files += run else: break i+=1 print("Found "+str(len(runs_files))+" runs") runs_data = [] for run in runs_files: print("\n\n-----------------"+run+"-----------------") runs_data.append(LoadRun(dirpath+"/"+run+"/")) return runs_data # load a single AccelData object and RotaryData object # simpler front-end for LoadRun() def LoadSingleRun( dirpath=None): run = LoadRun(dirpath) return { "accel": run["accel"][0], "omega": run["omega"][0]} # deprecated: def LoadRun(dirpath=None): return LoadMultiRun(dirpath) # Load multiple runs as a list of AccelData objects and list of RotaryData objects def LoadMultiRun(dirpath=None): if(dirpath==None): root = Tk() root.withdraw() dirpath = filedialog.askdirectory(parent=root,initialdir=defaultdir,title='Please select a run') found_files = os.listdir(dirpath) print("-------Found "+str(len(found_files))+ " files-------") for i in found_files: print("Found: "+i) print("The Following Files Will be Ignored:") not_file = list(filter(lambda x: ((x.split(".")[type_index]!="accel" and x.split(".")[type_index]!="omega") or x.split(".")[-1].lower()!="csv" or len(x.split(".")) != 4 ), found_files)) for i in not_file: print("- "+i+("(Wrong File Structure)" if len(i.split(".")) != 4 else "(Wrong File Format)" if i.split(".")[-1].lower()!="csv" else "(Unsupported Type)" if i.split(".")[type_index]!="accel" and i.split(".")[type_index]!="omega" else "" )) if(not_file == []): print("--None--") 
print("----------------------------") files = list(filter(lambda x: not_file.count(x) == 0, found_files)) accels_files = list(filter(lambda x: x.split(".")[type_index]=="accel", files)) accels_data = [] for file in accels_files: print("processing "+file+"...") data = LoadAccelFile(dirpath+"/"+file) if(data != "Model is not currently supported"): accels_data.append(data) else: print("Failed to Load: "+file+" (Model not supported)") omega_files = list(filter(lambda x: x.split(".")[type_index]=="omega", files)) omega_data = [] for file in omega_files: print("processing "+file+"...") omega_data.append(Load_Omega(filepath=str(dirpath+"/"+file))) if accels_data == [] and omega_data == []: raise FileNotFoundError('No files were found.') return {"accel": accels_data, "omega": omega_data}
35.346535
123
0.519048
394
3,570
4.614213
0.258883
0.036304
0.046205
0.046755
0.325633
0.235424
0.235424
0.235424
0.194719
0.130913
0
0.003594
0.298599
3,570
100
124
35.7
0.722444
0.04958
0
0.202532
0
0
0.159976
0.023418
0
0
0
0
0
1
0.050633
false
0
0.050633
0.012658
0.151899
0.177215
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
712ec90b75d074ea84403e57993fff8f8e2dc0dc
3,069
py
Python
hundun/systems/_systems.py
llbxg/hundun
a063ba4cf42665a3b7861aaccd1e9e31719eef8d
[ "MIT" ]
4
2021-12-24T09:19:45.000Z
2022-03-17T23:44:44.000Z
hundun/systems/_systems.py
llbxg/hundun
a063ba4cf42665a3b7861aaccd1e9e31719eef8d
[ "MIT" ]
5
2022-02-15T07:17:26.000Z
2022-03-03T04:49:38.000Z
hundun/systems/_systems.py
llbxg/hundun
a063ba4cf42665a3b7861aaccd1e9e31719eef8d
[ "MIT" ]
null
null
null
# 力学系(dynamical systems) from abc import ABC as _ABC, abstractmethod as _abstractmethod import numpy as _np from ._tu import TU as _TU class DynamicalSystems(_ABC): def __init__(self, t=None, u=None, **params): self.dim = 0 self.parameter() if self.dim == 0: class_name = self.__class__.__name__ msg = (f"need to set {class_name}'s dimension " f"({class_name}.dim=? in parameter())") raise NotImplementedError(msg) self.t = t or 0 self.u = u if u is not None else _np.zeros(self.dim) self._t_seq, self._u_seq = [], [] self.parameter(**params) @property def inf(self): return any(_np.isinf(self.u)) @property def internal_state(self): return _TU(self.t, self.u) @property def t_seq(self): return _np.array(self._t_seq) @property def u_seq(self): return _np.array(self._u_seq) @classmethod def on_attractor(cls, t0=None, u0=None, h=0.01, *, T_0=5000, **params): c = cls(t0, u0) c.parameter(**params) c.settle_on_attractor(t0, u0, h=h, T_0=T_0) return c @classmethod def get_u_seq(cls, n, *args, **kwargs): c = cls.on_attractor(*args, **kwargs) c.solve_n_times(n) return c.u_seq @_abstractmethod def equation(self, t, u): """equation""" def j(self, **params): return _np.array(self.jacobian(**params)) def jacobian(self): """jacobian""" return None def make_inital(self): return _np.random.rand(self.dim) def parameter(self): """set parameter for equation""" def reset_u_seq(self): self._u_seq = [] def settle_on_attractor(self, t0=None, u0=None, *, T_0=5000, notsave=True, **params): self.u = self.make_inital() if u0 is None else u0 self.t = t0 or 0 for _ in range(T_0): self.solve(*self.internal_state, **params) if notsave: self._u_seq, self._t_seq = [], [] if t0 is None: self.t = 0 self.t, self.u = self.internal_state return self.internal_state def solve(self, *args, **kwargs): tu = self._solve(*args, **kwargs) if kwargs.get('save', True): self._u_seq.append(tu.u) self._t_seq.append(tu.t) return tu def solve_n_times(self, n): for _ in range(n): self.solve(*self.internal_state) return 
self.t_seq, self.u_seq def __call__(self, t, u): return _np.array(self.equation(t, u)) def __repr__(self): v = vars(self) p = ', '.join(f'{key}={_np.round(v[key], 3)}' for key in v.keys() if ('_' not in key) and (key not in ['t', 'u'])) name = self.__class__.__name__ return f'{name}({p})' @_abstractmethod def _solve(self, t, u): self.t, self.u = t, u return _TU(self.t, self.u)
24.165354
75
0.548713
431
3,069
3.668213
0.211137
0.050601
0.030361
0.0253
0.127135
0.073371
0
0
0
0
0
0.016268
0.318996
3,069
126
76
24.357143
0.740191
0.022157
0
0.117647
0
0
0.040201
0.00804
0
0
0
0
0
1
0.223529
false
0
0.035294
0.082353
0.447059
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
0
0
0
1
712f06edd2c360eccafbcbf093e6b134d0761a1c
3,919
py
Python
povary/apps/statistics/migrations/0001_initial.py
TorinAsakura/cooking
cf0c78f613fa9ce0fcd4ec7a397ab880d9dd631a
[ "BSD-3-Clause" ]
null
null
null
povary/apps/statistics/migrations/0001_initial.py
TorinAsakura/cooking
cf0c78f613fa9ce0fcd4ec7a397ab880d9dd631a
[ "BSD-3-Clause" ]
null
null
null
povary/apps/statistics/migrations/0001_initial.py
TorinAsakura/cooking
cf0c78f613fa9ce0fcd4ec7a397ab880d9dd631a
[ "BSD-3-Clause" ]
null
null
null
# -*- coding: utf-8 -*- import datetime from south.db import db from south.v2 import SchemaMigration from django.db import models class Migration(SchemaMigration): def forwards(self, orm): # Adding model 'TrackingType' db.create_table('statistics_trackingtype', ( ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)), ('title', self.gf('django.db.models.fields.CharField')(unique=True, max_length=255)), ('created', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, blank=True)), ('updated', self.gf('django.db.models.fields.DateTimeField')(auto_now=True, blank=True)), )) db.send_create_signal('statistics', ['TrackingType']) # Adding model 'Tracking' db.create_table('statistics_tracking', ( ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)), ('tracking_type', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['statistics.TrackingType'])), ('message', self.gf('django.db.models.fields.TextField')(null=True, blank=True)), ('content_type', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['contenttypes.ContentType'], null=True, blank=True)), ('object_id', self.gf('django.db.models.fields.PositiveIntegerField')(null=True, blank=True)), ('created', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, blank=True)), ('updated', self.gf('django.db.models.fields.DateTimeField')(auto_now=True, blank=True)), )) db.send_create_signal('statistics', ['Tracking']) def backwards(self, orm): # Deleting model 'TrackingType' db.delete_table('statistics_trackingtype') # Deleting model 'Tracking' db.delete_table('statistics_tracking') models = { 'contenttypes.contenttype': { 'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"}, 'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'model': 
('django.db.models.fields.CharField', [], {'max_length': '100'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}) }, 'statistics.tracking': { 'Meta': {'ordering': "('-created',)", 'object_name': 'Tracking'}, 'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']", 'null': 'True', 'blank': 'True'}), 'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'message': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}), 'object_id': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}), 'tracking_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['statistics.TrackingType']"}), 'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}) }, 'statistics.trackingtype': { 'Meta': {'object_name': 'TrackingType'}, 'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'title': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}), 'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}) } } complete_apps = ['statistics']
59.378788
161
0.602705
418
3,919
5.5311
0.169856
0.093426
0.157439
0.224913
0.693772
0.693772
0.682526
0.679066
0.625865
0.509516
0
0.005334
0.186782
3,919
66
162
59.378788
0.720113
0.032917
0
0.283019
0
0
0.493791
0.309115
0
0
0
0
0
1
0.037736
false
0
0.075472
0
0.169811
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
712f74ba0feddfa2cbc236392371cf7d36c0e8d3
306
py
Python
ex035.py
danilodelucio/Exercicios_Curso_em_Video
d59e1b4efaf27dd0fc828a608201613c69ac333d
[ "MIT" ]
null
null
null
ex035.py
danilodelucio/Exercicios_Curso_em_Video
d59e1b4efaf27dd0fc828a608201613c69ac333d
[ "MIT" ]
null
null
null
ex035.py
danilodelucio/Exercicios_Curso_em_Video
d59e1b4efaf27dd0fc828a608201613c69ac333d
[ "MIT" ]
null
null
null
r1 = float(input('Primeiro seguimento: ')) r2 = float(input('Segundo seguimento: ')) r3 = float(input('Terceiro seguimento: ')) if r1 < r2 + r3 and r2 < r1 + r3 and r3 < r1 + r2: print('Os segmentos acima PODEM FORMAR UM TRIÂNGULO!') else: print('Os segmentos acima NÃO PODEM FORMAR UM TRIÂGULO!')
38.25
61
0.676471
45
306
4.6
0.488889
0.144928
0.154589
0.202899
0
0
0
0
0
0
0
0.048193
0.186275
306
7
62
43.714286
0.783133
0
0
0
0
0
0.506536
0
0
0
0
0
0
1
0
false
0
0
0
0
0.285714
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
712fe840477ae9afc89ee0d30308453c6ea2bba1
429
py
Python
PycharmProjects/PythonExercicios/ex027.py
RodrigoMASRamos/Projects.py
ed15981b320914c9667305dcd5fb5b7906fd9b00
[ "MIT" ]
null
null
null
PycharmProjects/PythonExercicios/ex027.py
RodrigoMASRamos/Projects.py
ed15981b320914c9667305dcd5fb5b7906fd9b00
[ "MIT" ]
null
null
null
PycharmProjects/PythonExercicios/ex027.py
RodrigoMASRamos/Projects.py
ed15981b320914c9667305dcd5fb5b7906fd9b00
[ "MIT" ]
null
null
null
# Faça um programa que leia o nome completo de uma pessoa, mostrando em seguida o primeiro e o último nome separadamente. # # Ex: Ana Maria de Souza #primeiro = Ana #último = Souza '''Tive dificuldades com a ultima linha desse código. Estude mais a função Split e a aula 9!''' nome = input('Digite um nome completo: ').strip() n = nome.split() print(f'O seu primeiro nome é {n[0]}') print(f'Já o seu ultimo nome é {n[len(n)-1]}')
42.9
121
0.710956
78
429
3.910256
0.628205
0.078689
0.039344
0
0
0
0
0
0
0
0
0.008451
0.172494
429
10
122
42.9
0.850704
0.608392
0
0
0
0
0.559748
0
0
0
0
0
0
1
0
false
0
0
0
0
0.5
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
1
0
1
712febd61548dc3d1908db056b52dfe5ee29e08f
705
py
Python
lang/Python/playing-cards-1.py
ethansaxenian/RosettaDecode
8ea1a42a5f792280b50193ad47545d14ee371fb7
[ "MIT" ]
5
2021-01-29T20:08:05.000Z
2022-03-22T06:16:05.000Z
lang/Python/playing-cards-1.py
ethansaxenian/RosettaDecode
8ea1a42a5f792280b50193ad47545d14ee371fb7
[ "MIT" ]
null
null
null
lang/Python/playing-cards-1.py
ethansaxenian/RosettaDecode
8ea1a42a5f792280b50193ad47545d14ee371fb7
[ "MIT" ]
1
2021-04-13T04:19:31.000Z
2021-04-13T04:19:31.000Z
import random class Card(object): suits = ("Clubs","Hearts","Spades","Diamonds") pips = ("2","3","4","5","6","7","8","9","10","Jack","Queen","King","Ace") def __init__(self, pip,suit): self.pip=pip self.suit=suit def __str__(self): return "%s %s"%(self.pip,self.suit) class Deck(object): def __init__(self): self.deck = [Card(pip,suit) for suit in Card.suits for pip in Card.pips] def __str__(self): return "[%s]"%", ".join( (str(card) for card in self.deck)) def shuffle(self): random.shuffle(self.deck) def deal(self): self.shuffle() # Can't tell what is next from self.deck return self.deck.pop(0)
26.111111
80
0.574468
105
705
3.704762
0.447619
0.102828
0.056555
0.082262
0.087404
0
0
0
0
0
0
0.02037
0.234043
705
26
81
27.115385
0.7
0.053901
0
0.105263
0
0
0.093233
0
0
0
0
0
0
1
0.315789
false
0
0.052632
0.105263
0.736842
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
3
712ffc09478a5f0361603a065889b6ec9109be8d
1,300
py
Python
restfulpy/tests/test_jwt_cli.py
mehdishirazi/restfulpy
244a53a8ea4692a37b4db82b6cb5ef83c27f0b53
[ "MIT" ]
null
null
null
restfulpy/tests/test_jwt_cli.py
mehdishirazi/restfulpy
244a53a8ea4692a37b4db82b6cb5ef83c27f0b53
[ "MIT" ]
null
null
null
restfulpy/tests/test_jwt_cli.py
mehdishirazi/restfulpy
244a53a8ea4692a37b4db82b6cb5ef83c27f0b53
[ "MIT" ]
null
null
null
import json import pytest from bddcli import Given, given, when, stdout, stderr, Application from itsdangerous import TimedJSONWebSignatureSerializer from itsdangerous.exc import SignatureExpired from nanohttp import settings from restfulpy import Application as RestfulpyApplication foo = RestfulpyApplication(name='jwt') foo.__configuration__ = '' def foo_main(): return foo.cli_main() app = Application('foo', 'restfulpy.tests.test_jwt_cli:foo_main') def test_jwt(): foo.configure(force=True) pirincipal = TimedJSONWebSignatureSerializer( settings.jwt.secret, algorithm_name=settings.jwt.algorithm ) with Given(app, ['jwt', 'create']): assert stderr == '' token = f'{stdout}'[:-1] assert pirincipal.loads(token) == {} # Create a jwt token with a payload payload = dict(a=1) when(given + f'\'{json.dumps(payload)}\'') assert stderr == '' token = f'{stdout}'[:-1] assert pirincipal.loads(token) == payload # Create a expired token when(given + '-e -1') assert stderr == '' token = f'{stdout}'[:-1] with pytest.raises(SignatureExpired): pirincipal.loads(token) if __name__ == '__main__': foo.cli_main(['jwt', 'create'])
24.528302
66
0.646923
144
1,300
5.701389
0.354167
0.043849
0.062119
0.065773
0.154689
0.154689
0.124239
0.124239
0.124239
0.124239
0
0.004995
0.23
1,300
52
67
25
0.815185
0.043077
0
0.176471
0
0
0.079839
0.029839
0
0
0
0
0.147059
1
0.058824
false
0
0.205882
0.029412
0.294118
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
71313701f0c786d0a59625fef35a307d370ccdba
913
py
Python
Challenges/16/tree_max/tree_max.py
makkahwi/data-structures-and-algorithms
06551786258bb7dabb9b0ab07c0f80ff78abca41
[ "MIT" ]
null
null
null
Challenges/16/tree_max/tree_max.py
makkahwi/data-structures-and-algorithms
06551786258bb7dabb9b0ab07c0f80ff78abca41
[ "MIT" ]
null
null
null
Challenges/16/tree_max/tree_max.py
makkahwi/data-structures-and-algorithms
06551786258bb7dabb9b0ab07c0f80ff78abca41
[ "MIT" ]
null
null
null
class BinaryNode: def __init__(self, value): self.value = value self.left = None self.right = None class BinaryTree: def __init__(self): self.root = None def tree_max(self): """ To find the maximum node value Input: None Output: Return maximum value """ if self.root == None: raise Exception("Empty Tree") elif self.root.left == None and self.root.right == None: return self.root.value max = self.root.value def search(current): nonlocal max if current.value > max: max = current.value if current.left: search(current.left) if current.right: search(current.right) search(self.root) return max if __name__ == "__main__": pass
18.26
64
0.511501
98
913
4.591837
0.336735
0.124444
0.048889
0
0
0
0
0
0
0
0
0
0.405257
913
49
65
18.632653
0.828729
0.079956
0
0
0
0
0.022959
0
0
0
0
0
0
1
0.153846
false
0.038462
0
0
0.307692
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
7131fdbb28ce493b1ce2fdb0b6b3ceece6c2c5e7
234
py
Python
lulu/length-of-last-word.py
joshua-jin/algorithm-campus
8f60cd63542f4f5778a992179c3e767fbc023338
[ "MIT" ]
8
2016-05-10T12:59:36.000Z
2020-09-16T19:47:44.000Z
lulu/length-of-last-word.py
joshua-jin/algorithm-campus
8f60cd63542f4f5778a992179c3e767fbc023338
[ "MIT" ]
null
null
null
lulu/length-of-last-word.py
joshua-jin/algorithm-campus
8f60cd63542f4f5778a992179c3e767fbc023338
[ "MIT" ]
2
2016-12-22T09:28:54.000Z
2020-01-22T17:56:02.000Z
class Solution: # @param {string} s A string # @return {int} the length of last word def lengthOfLastWord(self, s): # Write your code here if s == '': return 0 return len(s.split()[-1])
26
43
0.542735
31
234
4.096774
0.806452
0
0
0
0
0
0
0
0
0
0
0.012987
0.34188
234
8
44
29.25
0.811688
0.363248
0
0
0
0
0
0
0
0
0
0.125
0
1
0.2
false
0
0
0
0.8
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
1
0
0
3
713212e94fcb5070eba40619572546d9533d74ef
406
py
Python
marvelmind_nav/launch/marvel_base_launch.py
aminballoon/ros_marvelmind_package
ca42a11d8c4205f0b15027eb32495b8abeddab52
[ "BSD-2-Clause" ]
null
null
null
marvelmind_nav/launch/marvel_base_launch.py
aminballoon/ros_marvelmind_package
ca42a11d8c4205f0b15027eb32495b8abeddab52
[ "BSD-2-Clause" ]
null
null
null
marvelmind_nav/launch/marvel_base_launch.py
aminballoon/ros_marvelmind_package
ca42a11d8c4205f0b15027eb32495b8abeddab52
[ "BSD-2-Clause" ]
null
null
null
from launch import LaunchDescription from launch_ros.actions import LifecycleNode # from launch_ros.actions import Node import sys def generate_launch_description(): return LaunchDescription([ LifecycleNode(package='marvelmind_nav', node_executable='marvelmind_nav', node_name='lc_marvel2', output='screen'), ]) def main(argv=sys.argv[1:]): print("Running main")
31.230769
81
0.726601
47
406
6.085106
0.574468
0.104895
0.090909
0.13986
0.181818
0
0
0
0
0
0
0.00597
0.174877
406
13
82
31.230769
0.847761
0.086207
0
0
1
0
0.151351
0
0
0
0
0
0
1
0.2
false
0
0.3
0.1
0.6
0.1
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
2
713280b49e1a2690e858ead874212334f33b4458
6,928
py
Python
sahyun_bot/elastic_settings.py
TheGoodlike13/sahyun-bot
8ebc3d4e58a0acf9bde3c9ea8339145abcc53fcb
[ "MIT" ]
1
2022-02-21T18:55:34.000Z
2022-02-21T18:55:34.000Z
sahyun_bot/elastic_settings.py
TheGoodlike13/sahyun-bot
8ebc3d4e58a0acf9bde3c9ea8339145abcc53fcb
[ "MIT" ]
null
null
null
sahyun_bot/elastic_settings.py
TheGoodlike13/sahyun-bot
8ebc3d4e58a0acf9bde3c9ea8339145abcc53fcb
[ "MIT" ]
null
null
null
""" Initializes settings for elastic.py. To make the index dynamic (which also allows to switch it out for tests), the value must be explicitly initialized by some other module. If this does not happen, and somebody attempts to load elastic.py, 'ready_or_die' will get executed which will shut down the application, thus preventing any shenanigans with the wrong parameters being used. At least in normal circumstances :) """ from datetime import timezone, datetime from typing import Optional, List, Union from elasticsearch_dsl import Document, Date, integer_types, ValidationException, Search from elasticsearch_dsl.query import Query from sahyun_bot.the_danger_zone import nuke_from_orbit from sahyun_bot.utils import NON_EXISTENT from sahyun_bot.utils_settings import read_config, parse_bool, parse_list DEFAULT_HOST = 'localhost' DEFAULT_CUSTOMSFORGE_INDEX = 'cdlcs' DEFAULT_USER_INDEX = 'users' DEFAULT_FUZZINESS = 'auto:5,11' DEFAULT_SHINGLE_CEILING = 3 DEFAULT_PLATFORMS = ['pc'] DEFAULT_PARTS = ['lead', 'rhythm'] DEFAULT_OFFICIAL = False TEST_CUSTOMSFORGE_INDEX = DEFAULT_CUSTOMSFORGE_INDEX + '_test' TEST_USER_INDEX = DEFAULT_USER_INDEX + '_test' TEST_ONLY_VALUES = frozenset([ TEST_CUSTOMSFORGE_INDEX, TEST_USER_INDEX, ]) e_host = NON_EXISTENT e_cf_index = NON_EXISTENT e_rank_index = NON_EXISTENT e_fuzzy = NON_EXISTENT e_shingle = NON_EXISTENT e_explain = NON_EXISTENT e_refresh = False e_platforms = NON_EXISTENT e_parts = NON_EXISTENT e_allow_official = NON_EXISTENT def important_values() -> List: return [e_cf_index, e_rank_index] def ready_or_die(): """ Immediately shuts down the application if the module is not properly configured. Make the call immediately after imports in every module that depends on this configuration to be loaded. 
""" if NON_EXISTENT in important_values(): nuke_from_orbit('programming error - elastic module imported before elastic_settings is ready!') def init(): global e_host global e_cf_index global e_rank_index global e_fuzzy global e_shingle global e_explain global e_platforms global e_parts global e_allow_official e_host = read_config('elastic', 'Host', fallback=DEFAULT_HOST) e_cf_index = read_config('elastic', 'CustomsforgeIndex', fallback=DEFAULT_CUSTOMSFORGE_INDEX) e_rank_index = read_config('elastic', 'RankIndex', fallback=DEFAULT_USER_INDEX) e_fuzzy = read_config('elastic', 'Fuzziness', fallback=DEFAULT_FUZZINESS) e_shingle = read_config('elastic', 'ShingleCeiling', convert=int, fallback=DEFAULT_SHINGLE_CEILING) e_explain = read_config('elastic', 'Explain', convert=parse_bool, fallback=False) # noinspection PyTypeChecker e_platforms = read_config('elastic', 'Platforms', convert=parse_list, fallback=DEFAULT_PLATFORMS) # noinspection PyTypeChecker e_parts = read_config('elastic', 'Parts', convert=parse_list, fallback=DEFAULT_PARTS) e_allow_official = read_config('elastic', 'RandomOfficial', convert=parse_bool, fallback=DEFAULT_OFFICIAL) e_shingle = max(2, e_shingle) for value in important_values(): if value in TEST_ONLY_VALUES: nuke_from_orbit('configuration error - cannot use TEST values for REAL initialization') def init_test(): global e_host global e_cf_index global e_rank_index global e_fuzzy global e_shingle global e_explain global e_refresh global e_platforms global e_parts global e_allow_official e_host = DEFAULT_HOST e_cf_index = TEST_CUSTOMSFORGE_INDEX e_rank_index = TEST_USER_INDEX e_fuzzy = DEFAULT_FUZZINESS e_shingle = DEFAULT_SHINGLE_CEILING e_explain = True e_refresh = True e_platforms = DEFAULT_PLATFORMS e_parts = DEFAULT_PARTS e_allow_official = DEFAULT_OFFICIAL RANDOM_SORT = { '_script': { 'script': 'Math.random()', 'type': 'number', }, } class BaseDoc(Document): @classmethod def index_name(cls) -> Optional[str]: return cls._index._name if cls._index 
else None @classmethod def mapping(cls) -> Optional[dict]: return cls._doc_type.mapping.to_dict() @classmethod def search(cls, **kwargs) -> Search: return super().search(**kwargs).extra(explain=e_explain) @classmethod def as_lucine(cls, query: Union[Query, dict], **kwargs) -> str: """ :returns given query as it will be interpreted by the index of this document in Lucine format """ kwargs['explain'] = True kwargs['rewrite'] = True es = cls._get_connection() body = query if isinstance(query, dict) else {'query': query.to_dict()} result = es.indices.validate_query(body, cls._default_index(), **kwargs) if 'error' in result: raise ValueError(result['error']) return result['explanations'][0]['explanation'] def explain(self, query: Query, **kwargs) -> dict: """ :returns lucine query, whether it matches this document & basic explanation why or why not """ es = self._get_connection() body = {'query': query.to_dict()} response = es.explain(self._get_index(), self.meta.id, body=body, **kwargs) return { 'search': self.as_lucine(body), 'match': response['matched'], 'reason': response['explanation'], } def terms(self, *fields: str, **kwargs) -> dict: """ :returns for every field, the terms that have been analyzed for this particular document """ vectors = self.term_vectors(*fields, **kwargs) return {field_name: list(data['terms'].keys()) for field_name, data in vectors.items()} def term_vectors(self, *fields: str, **kwargs) -> dict: """ :returns for every field, information about the terms that have been analyzed for this particular document """ es = self._get_connection() response = es.termvectors(index=self._get_index(), id=self.meta.id, fields=fields, **kwargs) return response['term_vectors'] def delete(self, **kwargs): kwargs.setdefault('refresh', e_refresh) super().delete(**kwargs) def update(self, **kwargs): kwargs.setdefault('refresh', e_refresh) return super().update(**kwargs) def save(self, **kwargs): kwargs.setdefault('refresh', e_refresh) return super().save(**kwargs) 
class EpochSecond(Date): def __init__(self, *args, **kwargs): kwargs.pop('default_timezone', None) kwargs['format'] = 'epoch_second' super().__init__(default_timezone=timezone.utc, *args, **kwargs) def _deserialize(self, data): if not isinstance(data, integer_types): raise ValidationException(f'Could not parse epoch second from the value <{data}>') return datetime.fromtimestamp(data, tz=timezone.utc)
32.990476
116
0.69948
887
6,928
5.21646
0.270575
0.028744
0.033067
0.009726
0.194943
0.132267
0.132267
0.123406
0.123406
0.082343
0
0.001088
0.203811
6,928
209
117
33.148325
0.837745
0.15026
0
0.192857
0
0
0.103693
0
0
0
0
0
0
1
0.114286
false
0
0.078571
0.028571
0.285714
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
713385c102d9118616dbff568943593032126378
13,816
py
Python
ipec/ga/population.py
wwwbbb8510/ippso
fa20d23cd8edba5908e65a0ab0ab990d7ce3d5d5
[ "MIT" ]
9
2018-05-10T01:04:34.000Z
2019-06-28T07:47:37.000Z
ipec/ga/population.py
wwwbbb8510/ippso
fa20d23cd8edba5908e65a0ab0ab990d7ce3d5d5
[ "MIT" ]
null
null
null
ipec/ga/population.py
wwwbbb8510/ippso
fa20d23cd8edba5908e65a0ab0ab990d7ce3d5d5
[ "MIT" ]
2
2020-10-12T03:54:30.000Z
2021-09-08T14:10:21.000Z
import copy
import logging

import numpy as np

from ipec.cnn.evaluator import Evaluator, CNNEvaluator, initialise_cnn_evaluator
from ipec.cnn.layers import ConvLayer
from ipec.cnn.layers import DisabledLayer
from ipec.cnn.layers import FullyConnectedLayer
from ipec.cnn.layers import PoolingLayer
from ipec.ip.decoder import Decoder

from .chromosome import Chromosome, CNNChromosome

# Default GA hyper-parameters. The small values are debug settings; the
# intended production values are kept in the trailing comments.
POPULATION_DEFAULT_PARAMS = {
    'pop_size': 3,  # 50,
    'chromosome_length': 5,  # 15,
    'max_full': 2,  # 5,
    'elitism_rate': 0.5,
    'mutation_rate': np.asarray([0.1, 0.2]),
    'layers': {
        'conv': ConvLayer(),
        'pooling': PoolingLayer(),
        'full': FullyConnectedLayer(),
        'disabled': DisabledLayer()
    },
    'max_generation': 3,  # 50
}


def initialise_cnn_population(pop_size=None, chromosome_length=None, max_fully_connected_length=None,
                              elitism_rate=None, mutation_rate=None, layers=None, evaluator=None,
                              max_generation=None):
    """
    initialise a cnn population

    Any parameter left as None falls back to POPULATION_DEFAULT_PARAMS.

    :param pop_size: population size
    :type pop_size: int
    :param chromosome_length: the length/dimension of the chromosome
    :type chromosome_length: int
    :param max_fully_connected_length: the max length of fully-connected layers
    :type max_fully_connected_length: int
    :param elitism_rate: elitism rate
    :type elitism_rate: float
    :param mutation_rate: mutation rate.
    [mutation rate for interfaces in a chromosome, mutation rate for bits in an interface]
    :type mutation_rate: numpy.array
    :param layers: a dict of (layer_name, layer) pairs; keys: conv, pooling, full, disabled
    :type layers: dict
    :param evaluator: evaluator to calculate the fitness; defaults to the CNN evaluator
    :type evaluator: Evaluator
    :param max_generation: max GA generation
    :type max_generation: int
    :return: a cnn population
    :rtype: CNNPopulation
    """
    if pop_size is None:
        pop_size = POPULATION_DEFAULT_PARAMS['pop_size']
    if chromosome_length is None:
        chromosome_length = POPULATION_DEFAULT_PARAMS['chromosome_length']
    if max_fully_connected_length is None:
        max_fully_connected_length = POPULATION_DEFAULT_PARAMS['max_full']
    if mutation_rate is None:
        mutation_rate = POPULATION_DEFAULT_PARAMS['mutation_rate']
    if elitism_rate is None:
        elitism_rate = POPULATION_DEFAULT_PARAMS['elitism_rate']
    if max_generation is None:
        max_generation = POPULATION_DEFAULT_PARAMS['max_generation']
    if layers is None:
        layers = POPULATION_DEFAULT_PARAMS['layers']
    # BUG FIX: this is the GA population, not PSO
    logging.info('===initialise the GA population with the following parameters===')
    logging.info('population size: %d, chromosome length: %d, max fully-connected length: %d, max generation: %d',
                 pop_size, chromosome_length, max_fully_connected_length, max_generation)
    return CNNPopulation(pop_size, chromosome_length, max_fully_connected_length, elitism_rate,
                         mutation_rate, layers, evaluator, max_generation).initialise()


class Population:
    """
    Population class

    Holds a generation of chromosomes and drives the GA loop:
    elitism -> selection -> crossover -> mutation -> evaluation.
    """

    def __init__(self, pop_size, chromosome_length, elitism_rate, mutation_rate, layers,
                 evaluator=None, max_generation=None):
        """
        constructor

        :param pop_size: population size
        :type pop_size: int
        :param chromosome_length: the length/dimension of the chromosome
        :type chromosome_length: int
        :param elitism_rate: elitism rate
        :type elitism_rate: float
        :param mutation_rate: mutation rate.
        [mutation rate for interfaces in a chromosome, mutation rate for bits in an interface]
        :type mutation_rate: numpy.array
        :param layers: a dict of (layer_name, layer) pairs; keys: conv, pooling, full, disabled
        :type layers: dict
        :param evaluator: evaluator to calculate the fitness
        :type evaluator: Evaluator
        :param max_generation: max generation
        :type max_generation: int
        """
        self.pop_size = pop_size
        self.pop = np.empty(pop_size, dtype=Chromosome)
        self.chromosome_length = chromosome_length
        self.elitism_rate = elitism_rate
        self.mutation_rate = mutation_rate
        self.layers = layers
        # BUG FIX: guard against None before the numeric comparison — `None > 0`
        # raises TypeError on Python 3
        self.max_generation = max_generation if max_generation is not None and max_generation > 0 \
            else POPULATION_DEFAULT_PARAMS['max_generation']
        self.evaluator = evaluator
        self.decoder = Decoder()
        self.best_chromosome = None
        self.roulette_proportions = None

    def evolve(self):
        """
        evolve the population

        :return: the best chromosome seen across all generations
        :rtype: Chromosome
        """
        for g in range(self.max_generation):
            logging.info('===start updating population at step-%d===', g)
            # evaluate the first generation as the chromosomes are not evaluated during initialisation
            if g == 0:
                for chromosome in self.pop:
                    eval_result = self.evaluator.eval(chromosome)
                    # use minus standard deviation which is the less the better
                    # use minus number of connections which is the less the better
                    chromosome.fitness = (eval_result[0], -eval_result[1], -eval_result[2])
            # generate new pop
            new_pop = np.empty(self.pop_size, dtype=Chromosome)
            new_pop_index = 0
            # add elite chromosomes in the new generation
            elite_chromosomes = self.elitism()
            if elite_chromosomes is not None:
                for chromosome in elite_chromosomes:
                    new_chromosome = copy.deepcopy(chromosome)
                    new_chromosome.id = new_pop_index
                    new_pop[new_pop_index] = new_chromosome
                    new_pop_index = new_pop_index + 1
            # generate children (after doing selection, crossover, mutation) in the population
            while new_pop_index < self.pop_size:
                chromosome_1, chromosome_2 = self.select()
                candidate_chromosome = self.crossover(chromosome_1, chromosome_2)
                candidate_chromosome = self.mutate(candidate_chromosome)
                candidate_chromosome.id = new_pop_index
                # BUG FIX: evaluate the newly generated candidate — the original
                # evaluated the stale loop variable `chromosome` left over from
                # the first-generation loop above
                eval_result = self.evaluator.eval(candidate_chromosome)
                # use minus standard deviation which is the less the better
                # use minus number of connections which is the less the better
                candidate_chromosome.fitness = (eval_result[0], -eval_result[1], -eval_result[2])
                # update best chromosome
                # BUG FIX: compare against the candidate, not self.pop[new_pop_index]
                # (which still holds the previous generation's chromosome)
                if self.best_chromosome is None \
                        or self.best_chromosome.compare_with(candidate_chromosome) < 0:
                    self.best_chromosome = copy.deepcopy(candidate_chromosome)
                logging.info('===fitness of Chromosome-%d at generation-%d: %s===',
                             new_pop_index, g, str(candidate_chromosome.fitness))
                new_pop[new_pop_index] = candidate_chromosome
                new_pop_index = new_pop_index + 1
            # BUG FIX: adopt the new generation (the original built new_pop but never
            # assigned it back, so every generation re-evolved the initial population)
            # and invalidate the cached roulette proportions for the new fitness values
            self.pop = new_pop
            self.roulette_proportions = None
            logging.info('===fitness of best chromosome at generation-%d: %s===',
                         g, str(self.best_chromosome.fitness))
            logging.info('===finish updating population at generation-%d===', g)
        return self.best_chromosome

    def elitism(self):
        """
        GA elitism

        Sorts the population ascending by fitness tuple and returns the top
        `elitism_rate * pop_size` chromosomes (the tail of the sorted array).

        :return: elitism array of chromosome
        :type: numpy.array
        """
        elitism_pop = None
        elitism_amount = int(self.elitism_rate * self.pop_size)
        if elitism_amount > 0:
            # construct a sortable array
            dtype = [('chromosome', Chromosome), ('s_0', float), ('s_1', float), ('s_2', float)]
            sortable_pop = np.empty(self.pop_size, dtype=dtype)
            for i in range(self.pop_size):
                fitness = self.pop[i].fitness
                sortable_pop[i] = (self.pop[i], fitness[0], fitness[1], fitness[2])
            sorted_pop = np.sort(sortable_pop, order=['s_0', 's_1', 's_2'])
            elitism_pop = np.empty(elitism_amount, dtype=Chromosome)
            for i in range(self.pop_size - elitism_amount, self.pop_size):
                elitism_pop[i - (self.pop_size - elitism_amount)] = sorted_pop[i][0]
        return elitism_pop

    def select(self):
        """
        select two chromosomes for crossover and mutation

        :return: two unique chromosomes
        :rtype: tuple
        """
        # roulette-select chromosome_1
        c1_index = self.spin_roulette()
        chromosome_1 = self.pop[c1_index]
        # roulette-select chromosome_2 (re-spin until distinct from chromosome_1)
        c2_index = c1_index
        while c1_index == c2_index:
            c2_index = self.spin_roulette()
        chromosome_2 = self.pop[c2_index]
        return (chromosome_1, chromosome_2)

    def spin_roulette(self):
        """
        spin the roulette wheel once

        :return: index of the selected chromosome
        :rtype: int
        """
        if self.roulette_proportions is None:
            self.roulette_proportions = self.calculate_roulette_proportions()
        prob = np.random.uniform(0, 1)
        # default to the last chromosome when prob lands past every boundary
        roulette_index = self.pop_size - 1
        for i in range(self.roulette_proportions.shape[0]):
            if prob < self.roulette_proportions[i]:
                roulette_index = i
                break
        return roulette_index

    def calculate_roulette_proportions(self):
        """
        calculate roulette proportions for selection

        Builds cumulative fitness boundaries; the last chromosome needs no
        boundary because spin_roulette falls back to it.

        :return: cumulative proportions per chromosome
        :rtype: numpy.array
        """
        # calculate the accumulated fitness
        accumulated_fitness = 0
        for chromosome in self.pop:
            accumulated_fitness += chromosome.fitness[0]
        # calculate the proportion
        previous_roulette_point = 0
        # BUG FIX: size by the actual population instead of the hard-coded 29
        self.roulette_proportions = np.zeros(self.pop_size)
        for i in range(self.pop_size - 1):
            new_roulette_point = previous_roulette_point + self.pop[i].fitness[0] / accumulated_fitness
            self.roulette_proportions[i] = new_roulette_point
            previous_roulette_point = new_roulette_point
        return self.roulette_proportions

    def crossover(self, chromosome_1, chromosome_2):
        """
        crossover

        Copies a random contiguous slice of chromosome_2's interfaces onto a
        deep copy of chromosome_1.

        :param chromosome_1: first parent chromosome
        :type chromosome_1: Chromosome
        :param chromosome_2: second parent chromosome
        :type chromosome_2: Chromosome
        :return: candidate chromosome
        :rtype: Chromosome
        """
        candidate_chromosome = copy.deepcopy(chromosome_1)
        start_point = np.random.randint(0, self.chromosome_length)
        mutation_length = np.random.randint(1, self.chromosome_length - start_point + 1)
        for i in range(start_point, start_point + mutation_length):
            candidate_chromosome.x[i] = chromosome_2.x[i]
        return candidate_chromosome

    def mutate(self, candidate_chromosome):
        """
        mutation

        :param candidate_chromosome: candidate chromosome after crossover
        :type candidate_chromosome: Chromosome
        :return: candidate chromosome
        :rtype: Chromosome
        """
        for i in range(self.chromosome_length):
            interface = candidate_chromosome.x[i]
            rand = np.random.uniform(0, 1)
            # check whether to mutate the interface
            if rand < self.mutation_rate[0]:
                bin_ip_list = list(interface.ip.bin_ip)
                bin_ip_length = len(bin_ip_list)
                field_length = interface.ip_structure.fields_length
                # mutate fields of a specific layer type instead of the entire IP
                for j in range(bin_ip_length - field_length, bin_ip_length):
                    # check whether to mutate the bit
                    rand = np.random.uniform(0, 1)
                    if rand < self.mutation_rate[1]:
                        # flip the bit
                        bin_ip_list[j] = '0' if bin_ip_list[j] == '1' else '1'
                candidate_chromosome.x[i].update_ip_by_binary_string(''.join(bin_ip_list))
                if self.layers is not None:
                    candidate_chromosome.x[i].update_subnet_and_structure(self.layers)
            else:
                continue
        # fix invalid interface after crossover
        candidate_chromosome.fix_invalid_interface()
        return candidate_chromosome


class CNNPopulation(Population):
    """
    CNNPopulation class

    Population specialised for CNN architectures (adds a cap on the number of
    fully-connected layers and uses CNNChromosome/CNNEvaluator).
    """

    def __init__(self, pop_size, chromosome_length, max_fully_connected_length, elitism_rate,
                 mutation_rate, layers, evaluator=None, max_generation=None):
        """
        constructor

        :param pop_size: population size
        :type pop_size: int
        :param chromosome_length: the length/dimension of the chromosome
        :type chromosome_length: int
        :param max_fully_connected_length: the max length of fully-connected layers
        :type max_fully_connected_length: int
        :param elitism_rate: elitism rate
        :type elitism_rate: float
        :param mutation_rate: mutation rate.
        [mutation rate for interfaces in a chromosome, mutation rate for bits in an interface]
        :type mutation_rate: numpy.array
        :param layers: a dict of (layer_name, layer) pairs; keys: conv, pooling, full, disabled
        :type layers: dict
        :param evaluator: evaluator to calculate the fitness
        :type evaluator: CNNEvaluator
        :param max_generation: max generation
        :type max_generation: int
        """
        self.max_fully_connected_length = max_fully_connected_length
        super(CNNPopulation, self).__init__(pop_size, chromosome_length, elitism_rate,
                                            mutation_rate, layers, evaluator, max_generation)

    def initialise(self):
        """
        initialise the population

        :return: this population, with pop filled with initialised chromosomes
        :rtype: CNNPopulation
        """
        # set default evaluator
        if self.evaluator is None:
            self.evaluator = initialise_cnn_evaluator()
        logging.info('===start initialising population')
        for i in range(self.pop_size):
            chromosome = CNNChromosome(i, self.chromosome_length, self.max_fully_connected_length,
                                       self.layers).initialise()
            self.pop[i] = chromosome
        logging.info('===finish initialising population')
        return self
43.040498
191
0.652287
1,653
13,816
5.22686
0.120992
0.024306
0.019097
0.037269
0.434954
0.331713
0.28669
0.270486
0.253819
0.25081
0
0.008796
0.267661
13,816
320
192
43.175
0.845128
0.256659
0
0.094118
0
0.005882
0.067189
0
0
0
0
0
0
1
0.064706
false
0
0.058824
0
0.188235
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
71347fdcbbf1d234dd6e12f39abe3f11e92be5a5
2,030
py
Python
migrations/versions/8a480de4de4c_adjusts_for_seed_development.py
eubr-bigsea/limonero
54851b73bb1e4f5626b3d38ea7eeb50f3ed2e3c5
[ "Apache-2.0" ]
1
2018-01-01T20:35:43.000Z
2018-01-01T20:35:43.000Z
migrations/versions/8a480de4de4c_adjusts_for_seed_development.py
eubr-bigsea/limonero
54851b73bb1e4f5626b3d38ea7eeb50f3ed2e3c5
[ "Apache-2.0" ]
37
2017-02-24T17:07:25.000Z
2021-09-02T14:49:19.000Z
migrations/versions/8a480de4de4c_adjusts_for_seed_development.py
eubr-bigsea/limonero
54851b73bb1e4f5626b3d38ea7eeb50f3ed2e3c5
[ "Apache-2.0" ]
2
2019-11-05T13:45:45.000Z
2020-11-13T22:02:37.000Z
"""Adjusts for Seed development Revision ID: 8a480de4de4c Revises: 7addb7587b1a Create Date: 2021-07-13 17:16:20.807567 """ from alembic import op import sqlalchemy as sa from sqlalchemy.dialects import mysql from limonero.migration_utils import (is_mysql, is_psql, upgrade_actions, downgrade_actions, get_psql_enum_alter_commands, is_sqlite) # revision identifiers, used by Alembic. revision = '8a480de4de4c' down_revision = '7addb7587b1a' branch_labels = None depends_on = None def upgrade(): if is_mysql(): op.execute(""" ALTER TABLE `storage` CHANGE `type` `type` ENUM( 'CASSANDRA','ELASTIC_SEARCH','HDFS','HIVE', 'HIVE_WAREHOUSE', 'JDBC', 'KAFKA', 'LOCAL','MONGODB' ) CHARSET utf8 COLLATE utf8_unicode_ci NOT NULL;""") elif is_psql(): storage_values = ['CASSANDRA','ELASTIC_SEARCH','HDFS', 'HIVE', 'HIVE_WAREHOUSE', 'JDBC', 'KAFKA', 'LOCAL','MONGODB'] all_commands = [ [ get_psql_enum_alter_commands(['storage'], ['type'], 'StorageTypeEnumType', storage_values, 'HDFS'), None ] ] upgrade_actions(all_commands) # ### end Alembic commands ### def downgrade(): if is_mysql(): op.execute(""" ALTER TABLE `storage` CHANGE `type` `type` ENUM( 'CASSANDRA','ELASTIC_SEARCH','HDFS','HIVE', 'HIVE_WAREHOUSE', 'KAFKA', 'JDBC','LOCAL','MONGODB' ) CHARSET utf8 COLLATE utf8_unicode_ci NOT NULL;""") elif is_psql(): storage_values = ['CASSANDRA','ELASTIC_SEARCH','HDFS', 'HIVE', 'HIVE_WAREHOUSE', 'JDBC','LOCAL','MONGODB'] all_commands = [ [ None, get_psql_enum_alter_commands(['storage'], ['type'], 'StorageTypeEnumType', storage_values, 'HDFS'), ] ] downgrade_actions(all_commands)
32.222222
77
0.576355
203
2,030
5.53202
0.384236
0.05699
0.078362
0.092609
0.555655
0.534283
0.534283
0.534283
0.534283
0.534283
0
0.033684
0.29803
2,030
62
78
32.741935
0.754386
0.088177
0
0.468085
0
0
0.415259
0.059946
0
0
0
0
0
1
0.042553
false
0
0.085106
0
0.12766
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
7134af531e2e0820e6645888186d7ca8871d70c0
537
py
Python
api/test/checkalive_api.py
kizunai/flask-server
79f1484e776c74f23ef22836c5a0970ef66ecdfb
[ "Apache-2.0" ]
null
null
null
api/test/checkalive_api.py
kizunai/flask-server
79f1484e776c74f23ef22836c5a0970ef66ecdfb
[ "Apache-2.0" ]
null
null
null
api/test/checkalive_api.py
kizunai/flask-server
79f1484e776c74f23ef22836c5a0970ef66ecdfb
[ "Apache-2.0" ]
null
null
null
import flask
from flask_restful import Resource

from utils import decoraters
from logic.test.checkalive_logic import CheckaliveLogic


class CheckaliveApi(Resource):
    """CheckaliveApi
    /api/checkalive
    """

    def __init__(self):
        self.log_id = 1  # TODO: temporary — use 1 as a placeholder for now

    @decoraters.response_format
    def get(self):
        """Run the health check; return (status_code, payload, log_id)."""
        logic = CheckaliveLogic()
        result, error = logic.checkalive()
        if not error:
            return 0, result, self.log_id
        return 1, error, self.log_id
22.375
55
0.648045
61
537
5.508197
0.491803
0.133929
0.080357
0
0
0
0
0
0
0
0
0.010178
0.268156
537
23
56
23.347826
0.844784
0.09311
0
0
0
0
0
0
0
0
0
0.043478
0
1
0.142857
false
0
0.285714
0
0.642857
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
1
0
0
2
713860ef4def0389233076f29b376d7d4ac7466d
226
py
Python
lesson8_work/Ex7.py
NaychukAnastasiya/goiteens-python3-naychuk
a79d0af238a15f58a822bb5d8e4d48227d4a7bc1
[ "MIT" ]
null
null
null
lesson8_work/Ex7.py
NaychukAnastasiya/goiteens-python3-naychuk
a79d0af238a15f58a822bb5d8e4d48227d4a7bc1
[ "MIT" ]
null
null
null
lesson8_work/Ex7.py
NaychukAnastasiya/goiteens-python3-naychuk
a79d0af238a15f58a822bb5d8e4d48227d4a7bc1
[ "MIT" ]
null
null
null
# Write a function that checks whether the name "Євген" is in a list
def is_in_list(l, e):
    """Return True if element *e* is present in list *l*, else False."""
    # `in` already yields a bool — no need for an if/else returning True/False
    return e in l


print(is_in_list(['Ярослав', 'Богдан', 'Катя', 'Євген'], "Євгенпше"))
28.25
70
0.60177
35
226
3.771429
0.771429
0.060606
0.121212
0
0
0
0
0
0
0
0
0
0.261062
226
7
71
32.285714
0.790419
0.256637
0
0
0
0
0.188679
0
0
0
0
0
0
1
0.166667
false
0
0
0
0.5
0.166667
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
7139484e64db6cee198f70d7bc368fac65431c29
1,313
py
Python
Problems/199.py
kvedula/leetcode
8576b1ef466529b9e0d337af78fc833acb686a3c
[ "MIT" ]
null
null
null
Problems/199.py
kvedula/leetcode
8576b1ef466529b9e0d337af78fc833acb686a3c
[ "MIT" ]
null
null
null
Problems/199.py
kvedula/leetcode
8576b1ef466529b9e0d337af78fc833acb686a3c
[ "MIT" ]
null
null
null
# Kamesh Vedula
# Problem: Binary Tree Right Side View

# Definition for a binary tree node.
# class TreeNode:
#     def __init__(self, val=0, left=None, right=None):
#         self.val = val
#         self.left = left
#         self.right = right

def rightSideView(self, root: "TreeNode") -> "List[int]":
    """Return the node values visible from the right side of the tree.

    BFS level by level, pushing the right child before the left, so the
    first node dequeued at each level is the rightmost one.
    """
    if root is None:
        return []
    # local import: this snippet has no module import section
    from collections import deque
    queue = deque([root])
    right_view = []
    while queue:
        level_size = len(queue)
        for i in range(level_size):
            node = queue.popleft()
            if i == 0:
                # first node of the level == rightmost (right pushed first)
                right_view.append(node.val)
            if node.right:
                queue.append(node.right)
            if node.left:
                queue.append(node.left)
    return right_view
22.254237
55
0.450876
139
1,313
4.230216
0.330935
0.071429
0.07483
0.071429
0.414966
0.414966
0.414966
0.343537
0.343537
0.221088
0
0.00545
0.440975
1,313
58
56
22.637931
0.79564
0.549886
0
0
0
0
0
0
0
0
0
0
0
1
0.058824
false
0
0
0
0.176471
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
713966fce8e27e796ccf934cd246351e192e1902
984
py
Python
scripts/preprocessing/gamma correction.py
dekelmeirom/pathologylab
262b0bd9cb9233bc960671c2d674cf895b228f39
[ "MIT" ]
null
null
null
scripts/preprocessing/gamma correction.py
dekelmeirom/pathologylab
262b0bd9cb9233bc960671c2d674cf895b228f39
[ "MIT" ]
null
null
null
scripts/preprocessing/gamma correction.py
dekelmeirom/pathologylab
262b0bd9cb9233bc960671c2d674cf895b228f39
[ "MIT" ]
null
null
null
#!/usr/bin/env python
# coding: utf-8

"""Apply automatic gamma correction to every image in PATH, saving the
corrected copies (with a 'gamma.png' suffix) to SAVE_PATH."""

from skimage import exposure
from matplotlib import pyplot as plt
from matplotlib import colors as colors
import numpy as np
import matplotlib.image as mpimg
import colorsys
import math
import os

PATH = "C:\\Users\\dekelmeirom\\OneDrive - Technion\\Documents\\university\\pdl_project_res\\"
SAVE_PATH = "C:\\Users\\dekelmeirom\\OneDrive - Technion\\Documents\\university\\pdl_project_res\\"


def rgb2gray(rgb):
    """Convert an RGB(A) image array to grayscale (alpha channel ignored)."""
    return np.dot(rgb[..., :3], [0.2989, 0.5870, 0.1140])


def gamma_correction(img):
    """Return *img* gamma-corrected so its mean gray level maps toward mid.

    Chooses gamma = log(mid) / log(mean) of the grayscale image.
    (The original comment said log(mid*255)/log(mean), which the code never did.)
    """
    gray = rgb2gray(img)
    mid = 0.5
    mean = np.mean(gray)
    gamma = math.log(mid) / math.log(mean)
    img_gamma = exposure.adjust_gamma(img, gamma)
    return img_gamma


def main():
    """Process every file in PATH (was module-level code; now guarded so the
    directory walk no longer runs as a side effect of importing this module)."""
    for filename in os.listdir(PATH):
        with open(PATH + filename, "rb") as img_file:
            img = plt.imread(img_file)
            img_gamma = gamma_correction(img)
            plt.imsave(SAVE_PATH + filename[:-4] + "gamma.png", img_gamma)


if __name__ == "__main__":
    main()
28.114286
99
0.704268
147
984
4.612245
0.462585
0.058997
0.058997
0.061947
0.20354
0.20354
0.20354
0.20354
0.20354
0.20354
0
0.030303
0.161585
984
34
100
28.941176
0.791515
0.074187
0
0
0
0
0.199339
0.180617
0
0
0
0
0
1
0.083333
false
0
0.333333
0.041667
0.5
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
1
713a3074599a837fcc6b69f08f73d38cb5ca45a1
182
py
Python
curso2.py
ralabarta/educationar_python_repo
f89ddb0bb19b039459e42472e0f52f31c69a3853
[ "MIT" ]
null
null
null
curso2.py
ralabarta/educationar_python_repo
f89ddb0bb19b039459e42472e0f52f31c69a3853
[ "MIT" ]
null
null
null
curso2.py
ralabarta/educationar_python_repo
f89ddb0bb19b039459e42472e0f52f31c69a3853
[ "MIT" ]
null
null
null
import statistics

# two sample data sets
datos = [2, 4, 6, 8]
datos2 = [2, 2, 3, 5, 8, 9]

# arithmetic mean of the first set, median of the second
mean_r = statistics.mean(datos)
median_r = statistics.median(datos2)

for resultado in (mean_r, median_r):
    print(resultado)
13
37
0.637363
30
182
3.733333
0.5
0.089286
0
0
0
0
0
0
0
0
0
0.084507
0.21978
182
13
38
14
0.704225
0
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0
0.142857
0
0.142857
0.285714
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
713cd79703169c6cc701b303f0c7df900f7c2689
491
py
Python
tasks/migrations/0005_itemtemplatefield_type.py
heolin123/funcrowd
20167783de208394c09ed0429a5f02ec6dd79c42
[ "MIT" ]
null
null
null
tasks/migrations/0005_itemtemplatefield_type.py
heolin123/funcrowd
20167783de208394c09ed0429a5f02ec6dd79c42
[ "MIT" ]
11
2019-11-12T23:26:45.000Z
2021-06-10T17:37:23.000Z
tasks/migrations/0005_itemtemplatefield_type.py
heolin123/funcrowd
20167783de208394c09ed0429a5f02ec6dd79c42
[ "MIT" ]
null
null
null
# Generated by Django 2.0.8 on 2018-12-01 23:24 from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('tasks', '0004_remove_annotation_is_done'), ] operations = [ migrations.AddField( model_name='itemtemplatefield', name='type', field=models.CharField(choices=[('INT', 'INT'), ('STR', 'STR'), ('BOOL', 'BOOL'), ('LIST', 'LIST')], default='STR', max_length=10), ), ]
25.842105
143
0.586558
54
491
5.222222
0.796296
0
0
0
0
0
0
0
0
0
0
0.056604
0.244399
491
18
144
27.277778
0.703504
0.09165
0
0
1
0
0.195946
0.067568
0
0
0
0
0
1
0
false
0
0.083333
0
0.333333
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
713e566b91f55269e724b2a11eda8f515d37d765
415
py
Python
evaluate/coverage_filter.py
iqbal-lab-org/pandora_paper_roc
bb21c76faefa8021c86c3be9d77b8b5999fe2ef5
[ "MIT" ]
null
null
null
evaluate/coverage_filter.py
iqbal-lab-org/pandora_paper_roc
bb21c76faefa8021c86c3be9d77b8b5999fe2ef5
[ "MIT" ]
null
null
null
evaluate/coverage_filter.py
iqbal-lab-org/pandora_paper_roc
bb21c76faefa8021c86c3be9d77b8b5999fe2ef5
[ "MIT" ]
2
2020-11-04T18:15:43.000Z
2020-11-06T01:38:08.000Z
from evaluate.filter import Filter
from .vcf import VCF


class CoverageFilter(Filter):
    """Filter that flags VCF records whose coverage is below a fixed threshold."""

    def __init__(self, coverage_threshold: float):
        # minimum coverage a record must reach to be kept
        self._coverage_threshold = coverage_threshold

    @property
    def coverage_threshold(self) -> float:
        """The configured minimum-coverage cutoff."""
        return self._coverage_threshold

    def record_should_be_filtered_out(self, record: VCF) -> bool:
        """Return True when *record*'s coverage is strictly below the cutoff."""
        cutoff = self._coverage_threshold
        return record.coverage < cutoff
27.666667
65
0.742169
49
415
5.959184
0.428571
0.349315
0.287671
0
0
0
0
0
0
0
0
0
0.187952
415
14
66
29.642857
0.866469
0
0
0
0
0
0
0
0
0
0
0
0
1
0.3
false
0
0.2
0.2
0.8
0
0
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
5
713f16f6c1f8f19f1fc82172faed9240a38e2015
1,632
py
Python
examples/custom-validator.py
RyanSquared/gigaspoon
c5bf31fbffa1c7ec8e0c91ef7ae79040d553151a
[ "MIT" ]
1
2018-02-06T16:15:44.000Z
2018-02-06T16:15:44.000Z
examples/custom-validator.py
RyanSquared/gigaspoon
c5bf31fbffa1c7ec8e0c91ef7ae79040d553151a
[ "MIT" ]
1
2019-10-15T13:57:09.000Z
2019-10-15T16:08:42.000Z
examples/custom-validator.py
RyanSquared/gigaspoon
c5bf31fbffa1c7ec8e0c91ef7ae79040d553151a
[ "MIT" ]
null
null
null
"""Example Flask app showing how to write a custom gigaspoon validator."""
import os

import flask

import gigaspoon as gs

app = flask.Flask(__name__)
app.secret_key = os.urandom(24)


class CustomSelect(gs.v.Validator):
    # Validator that only accepts values from a fixed set of options.

    def __init__(self, name, options):
        self.name = name
        self._options = set(options)

    def __repr__(self):
        return "%r %r" % (type(self), self._options)

    def populate(self):
        # Values exposed to templates (rendered via g.<name>_validator below).
        return {
            "options": self._options,
            "name": self.name
        }

    def validate(self, form, key, value):
        # Reject any submitted value outside the allowed option set.
        if value not in self._options:
            self.raise_error(key, value)


# Inline Jinja template: flashes messages, emits the CSRF tag, and renders a
# <select> from the CustomSelect options plus one deliberately invalid option
# to demonstrate validation failure.
html = """
<!DOCTYPE HTML>
{% for message in get_flashed_messages() -%}
<pre>{{ message }}</pre>
{%- endfor %}
<form method="POST">
{% autoescape false %}
{{ g.csrf_token_validator.csrf_tag }}
{% endautoescape %}
<select required name="{{ g.user_validator.name }}">
{% for user in g.user_validator.options -%}
<option value="{{ user }}">{{ user }}</option>
{%- endfor %}
<option value="break!">Bad input!</option>
</select>
<input type="submit" value="submit">
</form>
"""


@app.route("/", methods=["GET", "POST"])
@gs.set_methods("POST")
@gs.validator(CustomSelect("user", ["Fred", "George"]))
@gs.validator(gs.v.CSRF())
@gs.base
def index(form):
    if form.is_form_mode():
        # Method is POST and form fields are valid
        flask.flash(repr(form))
        return flask.redirect(flask.url_for('index'))
    return flask.render_template_string(html)


@app.errorhandler(gs.e.FormError)
def handle_form_error(exc):
    # Surface validation failures as a plain 400 response.
    return flask.escape(str(exc)), 400


if __name__ == "__main__":
    app.run()
23.652174
56
0.610907
206
1,632
4.640777
0.427184
0.046025
0.029289
0
0
0
0
0
0
0
0
0.003937
0.221814
1,632
68
57
24
0.748819
0.02451
0
0.038462
0
0
0.358491
0.061635
0
0
0
0
0
1
0.115385
false
0
0.057692
0.057692
0.288462
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
714198dc8f030861acfb87346bb996cc656b4494
2,244
py
Python
gdk/jetson/tracker.py
dbadrian/gdk_dlrc17
7aebed740dc4a09f3549674b0cfeb22bdb392ac6
[ "MIT" ]
1
2019-03-29T12:36:55.000Z
2019-03-29T12:36:55.000Z
gdk/jetson/tracker.py
dbadrian/gdk_dlrc17
7aebed740dc4a09f3549674b0cfeb22bdb392ac6
[ "MIT" ]
null
null
null
gdk/jetson/tracker.py
dbadrian/gdk_dlrc17
7aebed740dc4a09f3549674b0cfeb22bdb392ac6
[ "MIT" ]
null
null
null
import time import sys import logging # Import PS-Drone import cv2 import numpy as np import gdk.config as config logger = logging.getLogger(__name__) class CheckerBoardTracker(): def __init__(self): self.tracking = False def update(self, frame): self.tracking, self.corners = self.__get_corners_from_marker(frame) if self.tracking: self.centroid = self.__get_centroid_from_corners() self.outer_corners = self.__get_main_corners_from_corners() self.height, self.width = frame.shape[:2] return self.tracking def get_centroid_error(self): if self.tracking: errx = (self.centroid[0][0] - config.XY_TRACK_POINT[0])#/(config.XY_TRACK_POINT[0]) erry = (self.centroid[0][1] - config.XY_TRACK_POINT[1])#/(config.XY_TRACK_POINT[1]) return errx, erry def get_distance_error(self): if self.tracking: short_1 = np.linalg.norm(self.outer_corners[0]-self.outer_corners[1]) short_2 = np.linalg.norm(self.outer_corners[3]-self.outer_corners[2]) long_1 = np.linalg.norm(self.outer_corners[1]-self.outer_corners[3]) long_2 = np.linalg.norm(self.outer_corners[2]-self.outer_corners[0]) avg_short = (short_1+short_2)/2.0 avg_long = (long_1+long_2)/2.0 dif_short = ( avg_short - config.BEST_DISTANCE[0])/config.BEST_DISTANCE[0] dif_long = (avg_long - config.BEST_DISTANCE[1])/config.BEST_DISTANCE[1] return (dif_short+dif_long)/2.0 def __get_main_corners_from_corners(self): return np.array([self.corners[0][0], self.corners[3][0], self.corners[16][0], self.corners[19][0]]) def __get_centroid_from_corners(self): return np.sum(self.corners, 0) / float(len(self.corners)) def __get_corners_from_marker(self, frame): corners = None gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY) found, corners = cv2.findChessboardCorners( gray, config.PATTERN_SIZE, corners, cv2.CALIB_CB_ADAPTIVE_THRESH+cv2.CALIB_CB_NORMALIZE_IMAGE+cv2.CALIB_CB_FAST_CHECK) npcorners = np.array(corners) return found, npcorners
34.523077
130
0.657754
312
2,244
4.435897
0.25
0.058526
0.104046
0.052023
0.264451
0.182081
0.083815
0
0
0
0
0.031142
0.227273
2,244
64
131
35.0625
0.767013
0.030749
0
0.066667
0
0
0
0
0
0
0
0
0
1
0.155556
false
0
0.133333
0.044444
0.444444
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
7143d82db45d18175969ef941dd86101829ae9a5
15,402
py
Python
Tryp_T.py
johnheap/VAPPER-Galaxy
4ce903b3b44755198e59368057863a5eb62ff6c6
[ "Apache-2.0" ]
null
null
null
Tryp_T.py
johnheap/VAPPER-Galaxy
4ce903b3b44755198e59368057863a5eb62ff6c6
[ "Apache-2.0" ]
null
null
null
Tryp_T.py
johnheap/VAPPER-Galaxy
4ce903b3b44755198e59368057863a5eb62ff6c6
[ "Apache-2.0" ]
null
null
null
""" * Copyright 2018 University of Liverpool * Author: John Heap, Computational Biology Facility, UoL * Based on original scripts of Sara Silva Pereira, Institute of Infection and Global Health, UoL * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * """ import subprocess import pandas as pd import re import os import sys import matplotlib as mpl mpl.use('Agg') import matplotlib.pyplot as plt pList = ['P1', 'P2', 'P3', 'P4', 'P5', 'P6', 'P7', 'P8', 'P9', 'P10', 'P11', 'P12', 'P13', 'P14', 'P15'] quietString = "" #"">> Vap_log.txt 2>&1" def transcriptMapping(inputname, strain, forwardFN,reverseFN): #where is our Reference data - dir_path = os.path.dirname(os.path.realpath(__file__)) refName = dir_path+"/data/Reference/Tc148" #default if strain == "Tc148": refName = dir_path+"/data/Reference/Tc148" if strain == "IL3000": refName = dir_path+"/data/Reference/IL3000" #argString = "bowtie2 -x Refe4rence/IL3000 -1 data/"+forwardFN+" -2 data/"+reverseFN+" -S "+inputname+".sam" #>log.txt #argString = "bowtie2 -x Reference/Tc148 -1 data/"+forwardFN+" -2 data/"+reverseFN+" -S "+inputname+".sam" #>log.txt argString = "bowtie2 -x "+refName+" -1 "+forwardFN+" -2 "+reverseFN+" -S "+inputname+".sam"+quietString #>log.txt #print(argString) returncode = subprocess.call(argString, shell=True) def processSamFiles(inputname): #debug use a mapping sam file we have already found #dir_path = os.path.dirname(os.path.realpath(__file__)) #bugName = dir_path+"/data/T_Test" #defasult cur_path = os.getcwd() 
samName = cur_path+"/"+inputname #argString = "samtools view -bS "+bugName+" > "+inputname+".bam" argString = "samtools view -bS "+inputname+".sam > "+samName+".bam"+quietString #print(argString) returncode = subprocess.call(argString, shell=True) #argString = "samtools sort "+bugName+" -o "+inputname+".sorted" argString = "samtools sort "+samName+".bam -o "+samName+".sorted"+quietString #print("argstring = "+argString) returncode = subprocess.call(argString, shell=True) #argString = "samtools index "+bugName+".sorted "+inputname+".sorted.bai" argString = "samtools index "+samName+".sorted "+samName+".sorted.bai"+quietString #print("argstring = " + argString) returncode = subprocess.call(argString, shell=True) def transcriptAbundance(inputname, strain): dir_path = os.path.dirname(os.path.realpath(__file__)) refName = dir_path + "/data/Reference/ORFAnnotation.gtf" # defasult if strain == "Tc148": refName = dir_path + "/data/Reference/ORFAnnotation.gtf" if strain == "IL3000": refName = dir_path + "/data/Reference/IL3000.gtf" #argString = "cufflinks -G Reference/IL3000.gtf -o "+inputname+".cuff -u -p 8 "+inputname+".sorted" #argString = "cufflinks -G Reference/ORFAnnotation.gtf -o "+inputname+".cuff -u -p 8 "+inputname+".sorted" argString = "cufflinks -q -G "+refName+" -o "+inputname+".cuff -u -p 8 "+inputname+".sorted"+quietString returncode = subprocess.call(argString, shell = True) def convertToFasta(inputName, strain): #equivalent to Sara's awk scripte dir_path = os.path.dirname(os.path.realpath(__file__)) refName = dir_path + "/data/Reference/ORFAnnotation.gtf" # default if strain == "Tc148": refName = dir_path + "/data/Reference/148_prot.fasta" if strain == "IL3000": refName = dir_path + "/data/Reference/IL3000_prot.fasta" cuff_df = pd.read_csv(inputName+".cuff/genes.fpkm_tracking", sep='\t') cuff_df = cuff_df[(cuff_df['FPKM'] > 0)] cuff_df.to_csv("cuffTest.csv") gene_id_List = cuff_df['gene_id'].tolist() #print(gene_id_List) #print ("Found from 
8880="+str(found)) # need to load in IL3000_prot.fasta # for each line with >TcIL3000_1_1940 # search within cuff_df[gene_id] for match # add it to the outfile. (need to save it as used by hmmer later number = 0 all = 0 with open(inputName+"_6frame.fas", 'w') as outfile: ref = open(refName,'r') #ref = open(r"Reference/IL3000_prot.fasta",'r') n = 0 line = ref.readline() while line: if line[0] == '>': all = all+1 ln = line[1:] #remove > ln = ln.rstrip() #remove /n /r etc #print (ln) if ln in gene_id_List: number = number+1 outfile.write(line) line = ref.readline() if line: while line[0] != '>': outfile.write(line) line=ref.readline() if not line: break; else: line = ref.readline() else: line =ref.readline() ref.close() print(str(len(gene_id_List))+":"+str(number)+" from "+str(all)) return cuff_df def HMMerMotifSearch(name, strain, cuff_df): motifs = ['1', '2a', '2b', '3', '4a', '4b', '4c', '5', '6', '7', '8a', '8b', '9a', '9b', '9c', '10a', '10b', '11a', '11b', '12', '13a', '13b', '13c', '13d', '14', '15a', '15b', '15c'] dir_path = os.path.dirname(os.path.realpath(__file__)) phylopath = dir_path + "/data/Motifs/Phylotype" lineCounts = [] compoundList = [] for m in motifs: argString = "hmmsearch "+phylopath + m + ".hmm " + name + "_6frame.fas > Phy" + m + ".out" print(argString) subprocess.call(argString, shell=True) hmmResult = open("Phy" + m + ".out", 'r') regex = r"Tc148[0-9]{1,8}" if strain == "Tc148": regex = r"Tc148[0-9]{1,8}" if strain == "IL3000": regex = r"TcIL3000_[0-9]{1,4}_[0-9]{1,5}" n = 0 outList = [] for line in hmmResult: m = re.search(regex, line) if m: outList.append(""+m.group()) n += 1 if re.search(r"inclusion", line): print("inclusion threshold reached") break compoundList.append(outList) lineCounts.append(n) hmmResult.close() #print(lineCounts) #print(cuff_df) concatGroups = [1, 2, 1, 3, 1, 1, 1, 2, 3, 2, 2, 1, 4, 1, 3] countList = [] weightList = [] countIndex = 0 totalCount = 0 totalWeigth = 0 for c in concatGroups: a = [] weight = [] for n in 
range(0, c): a = a + compoundList.pop(0) t = set(a) countList.append(len(t)) wa = 0 for w in t: wt = cuff_df.loc[cuff_df['gene_id'] == w, 'FPKM'].iloc[0] #print(w) #print(wt) wa = wa+wt weightList.append(wa) totalWeigth+=wa totalCount += len(t) countList.append(totalCount) weightList.append(totalWeigth) #print(countList) #print("--------") #print(weightList) #print("--------") return countList,weightList def relativeFrequencyTable(countList, name, htmlresource): relFreqList = [] c = float(countList[15]) for i in range(0, 15): relFreqList.append(countList[i] / c) data = {'Phylotype': pList, 'Relative Frequency': relFreqList} relFreq_df = pd.DataFrame(data) j_fname = htmlresource+ "/" + name + "_t_relative_frequency.csv" relFreq_df.to_csv(j_fname) return relFreqList # 0-14 = p1-p15 counts [15] = total counts def weightedFrequencyTable(countList, name, htmlresource): relFreqList = [] c = float(countList[15]) for i in range(0, 15): relFreqList.append(countList[i] / c) data = {'Phylotype': pList, 'Weighted Frequency': relFreqList} relFreq_df = pd.DataFrame(data) j_fname = htmlresource+ "/" + name + "_t_weighted_frequency.csv" relFreq_df.to_csv(j_fname) return relFreqList # 0-14 = p1-p15 counts [15] = total counts def createStackedBar(name,freqList,strain,pdf,html_resource): palette = ["#0000ff", "#6495ed", "#00ffff", "#caff70", "#228b22", "#528b8b", "#00ff00", "#a52a2a", "#ff0000", "#ffff00", "#ffa500", "#ff1493", "#9400d3", "#bebebe", "#000000", "#ff00ff"] VAP_148 = [0.072, 0.032, 0.032, 0.004, 0.007, 0.005, 0.202, 0.004, 0.006, 0.014, 0.130, 0.133, 0.054, 0.039, 0.265] VAP_IL3000 = [0.073, 0.040, 0.049, 0.018, 0.060, 0.055, 0.054, 0.025, 0.012, 0.060, 0.142, 0.100, 0.061, 0.078, 0.172] cmap = plt.cm.get_cmap('tab20') palette = [cmap(i) for i in range(cmap.N)] if strain == "Tc148": VAPtable = VAP_148 VAPname='Tc148\nGenome VAP' if strain == "IL3000": VAPtable = VAP_IL3000 VAPname= 'IL3000\nGenome VAP' width = 0.35 # the width of the bars: can also be len(x) sequence 
plots = [] fpos = 0 vpos = 0 for p in range(0, 15): tp = plt.bar(0, freqList[p], width, color= palette[p], bottom = fpos) fpos +=freqList[p] tp = plt.bar(1, VAPtable[p], width, color= palette[p], bottom = vpos) vpos +=VAPtable[p] plots.append(tp) plt.xticks([0,1],[name,VAPname]) plt.legend(plots[::-1],['p15','p14','p13','p12','p11','p10','p9','p8','p7','p6','p5','p4','p3','p2','p1']) title = "Figure Legend: The transcriptomic Variant Antigen Profile of $\itTrypanosoma$ $\itcongolense$ estimated as phylotype " \ "proportion adjusted for transcript abundance and the reference genomic Variant Antigen Profile. " \ "\nData was produced with the 'Variant Antigen Profiler' (Silva Pereira et al., 2019)." #plt.title(title, wrap="True") #plt.text(-0.2, -0.05, title, va="top", transform=ax.transAxes, wrap="True") plt.text(-0.3, -0.15, title, va="top", wrap="True") plt.tight_layout(pad=1.5) plt.subplots_adjust(bottom = 0.3,top=0.99,left=0.125,right=0.9,hspace=0.2,wspace=0.2) plt.savefig(html_resource + "/stackedbar.png") if pdf == 'PDF_Yes': plt.savefig(html_resource + "/stackedbar.pdf") #plt.show() def createHTML(name,htmlfn,htmlresource,freqList,weightList): #assumes imgs are heatmap.png, dheatmap.png, vapPCA.png and already in htmlresource htmlString = r"<html><title>T.congolense VAP</title><body><div style='text-align:center'><h2><i>Trypanosoma congolense</i> Variant Antigen Profile</h2><h3>" htmlString += name htmlString += r"<br>Transcriptomic Analysis</h3></p>" htmlString += "<p style = 'margin-left:20%; margin-right:20%'>Table Legend: Variant Antigen Profiles of a transcriptome of <i>Trypanosoma congolense</i> estimated as phylotype proportion. " \ "Weighted frequency refers to the phylotype proportion based transcript abundance. 
" \ "Data was produced with the 'Variant Antigen Profiler' (Silva Pereira et al., 2019).</p> " htmlString += r"<style> table, th, tr, td {border: 1px solid black; border-collapse: collapse;}</style>" htmlString += r"<table style='width:50%;margin-left:25%;text-align:center'><tr><th>Phylotype</th><th>Relative Frequency</th><th>Weighted Frequency</th></tr>" tabString = "" # flush out table with correct values for i in range(0, 15): f = format(freqList[i], '.4f') w = format(weightList[i], '.4f') tabString += "<tr><td>phy" + str(i + 1) + "</td><td>" + f + "</td><td>" + w + "</td></tr>" htmlString += tabString + "</table><br><br><br><br><br>" htmlString += r"<p> <h3>Stacked Bar chart of Phylotype Frequency</h3> The 'weighted' relative frequency of each phylotype alongside the VAP of selected strain.</p>" imgString = r"<img src = 'stackedbar.png' alt='Stacked bar chart of phylotype variation' style='max-width:100%'><br><br>" htmlString += imgString # htmlString += r"<p><h3>The Deviation Heat Map and Dendogram</h3>The phylotype variation expressed as the deviation from your sample mean compared to the model dataset</p>" # imgString = r"<img src = 'dheatmap.png' alt='Deviation Heatmap' style='max-width:100%'><br><br>" # htmlString += imgString # htmlString += r"<p><h3>The Variation PCA plot</h3>PCA analysis corresponding to absolute variation. 
Colour coded according to location</p>" # imgString = r"<img src = 'vapPCA.png' alt='PCA Analysis' style='max-width:100%'><br><br>" # htmlString += imgString + r"</div></body></html>" with open(htmlfn, "w") as htmlfile: htmlfile.write(htmlString) #argdict = {'name':2, 'pdfexport': 3, 'strain': 4, 'forward': 5, 'reverse': 6, 'html_file': 7, 'html_resource': 8} def transcriptomicProcess(args,dict): transcriptMapping(args[dict['name']], args[dict['strain']], args[dict['forward']], args[dict['reverse']]) #uses bowtie processSamFiles(args[dict['name']]) #uses samtools transcriptAbundance(args[dict['name']],args[dict['strain']]) #uses cufflinks -> ?.cuff/*.* cuff_df = convertToFasta(args[dict['name']],args[dict['strain']]) countList, weightList = HMMerMotifSearch(args[dict['name']],args[dict['strain']], cuff_df) relFreqList = relativeFrequencyTable(countList,args[dict['name']],args[dict['html_resource']]) relWeightList = weightedFrequencyTable(weightList,args[dict['name']],args[dict['html_resource']]) createStackedBar(args[dict['name']],relWeightList, args[dict['strain']],args[dict['pdfexport']],args[dict['html_resource']]) createHTML(args[dict['name']],args[dict['html_file']],args[dict['html_resource']], relFreqList, relWeightList) if __name__ == "__main__": #print("Commencing Transcript Mapping") #transcriptMapping("T_Test", "Transcripts.1","Transcripts.2") #print("Processimg Sam Files") #processSamFiles("T_Test") #print("Assessing Transcript Abundance") #transcriptAbundance("T_Test") #print ("Converting to Fasta Subset") #cuff_df = convertToFasta("T_Test") #print("Commencing HMMer search") #countList, weightList = HMMerMotifSearch("T_Test",cuff_df) #relativeFrequencyTable(countList,'T_Test') #weightedFrequencyTable(weightList,'T_Test') relFreqList = [0.111842105,0.059210526,0.026315789,0.013157895, 0.006578947,0.013157895,0.032894737,0.019736842, 0.039473684,0.046052632,0.217105263,0.065789474, 0.151315789,0.059210526,0.138157895] relWeightList = 
[0.07532571,0.05900545,0.009601452,0.042357532,0.01236219,0.001675663,0.04109726, 0.097464248,0.057491666,0.05826875,0.279457473,0.070004772,0.065329007,0.085361298,0.045197529] createStackedBar('T_Test',relWeightList, 'Tc148','PDF_Yes','results') createHTML("t_test","results/t_test.html","results",relFreqList,relWeightList)
44.514451
195
0.617907
1,965
15,402
4.775573
0.280407
0.019608
0.012894
0.017263
0.28101
0.253197
0.231671
0.211317
0.192349
0.145247
0
0.072558
0.224192
15,402
345
196
44.643478
0.712779
0.243085
0
0.22807
0
0.026316
0.23681
0.057343
0
0
0
0
0
1
0.04386
false
0
0.030702
0
0.092105
0.013158
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
7144cdbf12d2350acabc972907aa336bd9391ec1
442
py
Python
scale/node/migrations/0003_node_is_paused_errors.py
kaydoh/scale
1b6a3b879ffe83e10d3b9d9074835a4c3bf476ee
[ "Apache-2.0" ]
121
2015-11-18T18:15:33.000Z
2022-03-10T01:55:00.000Z
scale/node/migrations/0003_node_is_paused_errors.py
kaydoh/scale
1b6a3b879ffe83e10d3b9d9074835a4c3bf476ee
[ "Apache-2.0" ]
1,415
2015-12-23T23:36:04.000Z
2022-01-07T14:10:09.000Z
scale/node/migrations/0003_node_is_paused_errors.py
kaydoh/scale
1b6a3b879ffe83e10d3b9d9074835a4c3bf476ee
[ "Apache-2.0" ]
66
2015-12-03T20:38:56.000Z
2020-07-27T15:28:11.000Z
# -*- coding: utf-8 -*- from __future__ import unicode_literals from django.db import models, migrations class Migration(migrations.Migration): dependencies = [ ('node', '0002_node_pause_reason'), ] operations = [ migrations.AddField( model_name='node', name='is_paused_errors', field=models.BooleanField(default=False), preserve_default=True, ), ]
21.047619
53
0.606335
42
442
6.095238
0.761905
0
0
0
0
0
0
0
0
0
0
0.015773
0.282805
442
20
54
22.1
0.791798
0.047511
0
0
0
0
0.109785
0.052506
0
0
0
0
0
1
0
false
0
0.142857
0
0.357143
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
852e97bba32d24a91db45aae8147ee20bfee4935
3,482
py
Python
LineDetect/videoLineDet.py
RonellBr/LaneDetection
349e5c75bee39c3006fcf206565915fe7493e796
[ "MIT" ]
null
null
null
LineDetect/videoLineDet.py
RonellBr/LaneDetection
349e5c75bee39c3006fcf206565915fe7493e796
[ "MIT" ]
null
null
null
LineDetect/videoLineDet.py
RonellBr/LaneDetection
349e5c75bee39c3006fcf206565915fe7493e796
[ "MIT" ]
null
null
null
################################################################ # Author: Ronell Bresler # Module: VideoLineDetect.py # # # References: # https://www.analyticsvidhya.com/blog/2020/05/tutorial-real-time-lane-detection-opencv/ # https://towardsdatascience.com/tutorial-build-a-lane-detector-679fd8953132 # https://medium.com/computer-car/udacity-self-driving-car-nanodegree-project-1-finding-lane-lines-9cd6a846c58c # https://campushippo.com/lessons/detect-highway-lane-lines-with-opencv-and-python-21438a3e2 # https://www.youtube.com/watch?v=G0cHyaP9HaQ # https://opencv-python-tutroals.readthedocs.io/en/latest/py_tutorials/py_gui/py_video_display/py_video_display.html ################################################################ import cv2 import matplotlib.pyplot as plt import numpy as np class Inputfile: def __init__(self, cap, height, width, frame): self.cap = cap self.height = height self.width = width self.frame = frame def main(): inputfile = Inputfile(cv2.VideoCapture('SampleIMG/gmod2.mp4'), 0, 0, 0) while inputfile.cap.isOpened(): ret, frame = inputfile.cap.read() inputfile.frame = frame inputfile.height = inputfile.frame.shape[0] inputfile.width = inputfile.frame.shape[1] frame1 = One_frame(inputfile) cv2.imshow('frame', frame1) if cv2.waitKey(1) & 0xFF == ord('q'): break cap.release() cv2.destroyAllWindows() ################################################################ def One_frame(inputfile): region_of_interest_vertices = Set_region_of_interest_vertices(inputfile.height, inputfile.width) # Canny filter canny_edges = Canny_edge_detector(inputfile.frame) # Crop img with roi cropped_image = Region_of_interest(canny_edges, np.array([region_of_interest_vertices], np.int32), inputfile.height, inputfile.width) lines = cv2.HoughLinesP(cropped_image, rho=6, theta=np.pi/180, threshold=160, lines=np.array([]), minLineLength=40, maxLineGap=25) return Draw_lines(inputfile.frame, lines) ################################################################ def 
Canny_edge_detector(frame): gray = cv2.cvtColor(frame, cv2.COLOR_RGB2GRAY) blur = cv2.GaussianBlur(gray, (5, 5), 0) canny_image = cv2.Canny(gray, 100, 200) return canny_image ################################################################ def Region_of_interest(img, vertices, height, width): mask = np.zeros_like(img) cv2.fillPoly(mask, vertices, 255) masked_image = cv2.bitwise_and(img, mask) return masked_image ################################################################ def Draw_lines(img, lines): color = [0, 255, 0] # green thickness = 10 for line in lines: for x1, y1, x2, y2 in line: cv2.line(img, (x1,y1), (x2,y2), color, thickness) return img ################################################################ def Set_region_of_interest_vertices(height, width): region_of_interest_vertices = [ (0, height), (round(width/1.9), round(height/1.9)), (width, height) ] return region_of_interest_vertices if __name__ == "__main__": main()
31.654545
138
0.553418
371
3,482
5.02965
0.420485
0.034298
0.068596
0.07717
0.028939
0
0
0
0
0
0
0.037937
0.220276
3,482
110
139
31.654545
0.649355
0.178059
0
0
0
0
0.014404
0
0
0
0.001746
0
0
1
0.116667
false
0
0.05
0
0.266667
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
8531b1ab7623355d9bad7b738e3d301375faa339
75
py
Python
Raffle/__init__.py
duanegtr/legendv3-cogs
ffde1452a75ad42b4f6511b612ce486e96fcd6de
[ "MIT" ]
10
2020-05-25T13:32:30.000Z
2022-02-01T12:33:07.000Z
Raffle/__init__.py
darcyle/tl-cogs
6b13c4a6247115571c5a2bb6ea98ed1fe2d44d79
[ "MIT" ]
2
2020-05-23T22:53:07.000Z
2020-08-09T11:28:12.000Z
Raffle/__init__.py
darcyle/tl-cogs
6b13c4a6247115571c5a2bb6ea98ed1fe2d44d79
[ "MIT" ]
7
2020-05-18T17:37:33.000Z
2022-01-13T04:08:05.000Z
from .raffle import Raffle def setup(bot): bot.add_cog(Raffle(bot))
18.75
28
0.693333
12
75
4.25
0.666667
0
0
0
0
0
0
0
0
0
0
0
0.186667
75
4
28
18.75
0.836066
0
0
0
0
0
0
0
0
0
0
0
0
1
0.333333
false
0
0.333333
0
0.666667
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
1
0
0
0
0
4
8531c7eec28be5dec06698cc6a230d91cb72ae17
88
py
Python
tritimap/__init__.py
zwbao/Triti-Map
58d79d773df3862b6e03717bf3563d8c427c7027
[ "MIT" ]
null
null
null
tritimap/__init__.py
zwbao/Triti-Map
58d79d773df3862b6e03717bf3563d8c427c7027
[ "MIT" ]
null
null
null
tritimap/__init__.py
zwbao/Triti-Map
58d79d773df3862b6e03717bf3563d8c427c7027
[ "MIT" ]
null
null
null
import os __version__ = "0.9.2" root_dir = os.path.dirname(os.path.abspath(__file__))
14.666667
53
0.727273
15
88
3.666667
0.8
0.218182
0
0
0
0
0
0
0
0
0
0.038462
0.113636
88
5
54
17.6
0.666667
0
0
0
0
0
0.056818
0
0
0
0
0
0
1
0
false
0
0.333333
0
0.333333
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
4
85320c46304d80fec430c5914f3f698f8524a178
3,407
py
Python
code/app.py
annotation/app-uruk
aee4ed0c3fd574251f8b4eb9169705e8ac26bf95
[ "MIT" ]
null
null
null
code/app.py
annotation/app-uruk
aee4ed0c3fd574251f8b4eb9169705e8ac26bf95
[ "MIT" ]
null
null
null
code/app.py
annotation/app-uruk
aee4ed0c3fd574251f8b4eb9169705e8ac26bf95
[ "MIT" ]
null
null
null
import types from tf.advanced.helpers import dh from tf.advanced.find import loadModule from tf.advanced.app import App def transform_prime(app, n, p): return ("'" * int(p)) if p else "" def transform_ctype(app, n, t): if t == "uncertain": return "?" elif t == "properName": return "=" elif t == "supplied": return "&gt;" else: return "" def transform_atf(app, n, a): return app.atfFromSign(n, flags=True) class TfApp(App): def __init__(app, *args, silent=False, **kwargs): app.transform_ctype = types.MethodType(transform_ctype, app) app.transform_prime = types.MethodType(transform_prime, app) app.transform_atf = types.MethodType(transform_atf, app) atf = loadModule("atf", *args) atf.atfApi(app) app.atf = atf super().__init__(*args, silent=silent, **kwargs) app.image = loadModule("image", *args) app.image.getImagery(app, silent, checkout=kwargs.get("checkout", "")) app.reinit() def reinit(app): customMethods = app.customMethods customMethods.afterChild.clear() customMethods.afterChild.update(quad=app.getOp) customMethods.plainCustom.clear() customMethods.plainCustom.update( sign=app.plainAtfType, quad=app.plainAtfType, cluster=app.plainAtfType, ) customMethods.prettyCustom.clear() customMethods.prettyCustom.update( case=app.caseDir, cluster=app.clusterBoundaries, comments=app.commentsCls ) def cdli(app, n, linkText=None, asString=False): (nType, objectType, identifier) = app.image.imageCls(app, n) if linkText is None: linkText = identifier result = app.image.wrapLink(linkText, objectType, "main", identifier) if asString: return result else: dh(result) # PRETTY HELPERS def getGraphics(app, isPretty, n, nType, outer): api = app.api F = api.F E = api.E result = "" isOuter = outer or (all(F.otype.v(parent) != "quad" for parent in E.sub.t(n))) if isOuter: width = "2em" if nType == "sign" else "4em" height = "4em" if nType == "quad" else "6em" theGraphics = app.image.getImages( app, n, kind="lineart", width=width, height=height, _asString=True, withCaption=False, 
warning=False, ) if theGraphics: result = f"<div>{theGraphics}</div>" if isPretty else f" {theGraphics}" return result def lineart(app, ns, key=None, asLink=False, withCaption=None, **options): return app.image.getImages( app, ns, kind="lineart", key=key, asLink=asLink, withCaption=withCaption, **options, ) def photo(app, ns, key=None, asLink=False, withCaption=None, **options): return app.image.getImages( app, ns, kind="photo", key=key, asLink=asLink, withCaption=withCaption, **options, ) def imagery(app, objectType, kind): return set(app._imagery.get(objectType, {}).get(kind, {}))
28.872881
87
0.561785
361
3,407
5.249307
0.301939
0.029551
0.022164
0.031662
0.134037
0.134037
0.134037
0.134037
0.081266
0.081266
0
0.001729
0.321104
3,407
117
88
29.119658
0.817553
0.004109
0
0.223404
0
0
0.039811
0.007078
0
0
0
0
0
1
0.106383
false
0
0.042553
0.053191
0.276596
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
85322edb1455b94f135f0f46c6eb2897360629a3
12,632
py
Python
shunt/hmap/hmap.py
velezj/project-manager
92e28e5718ca1302f6da0cf8b3d4a3bb5a1a8a72
[ "MIT" ]
null
null
null
shunt/hmap/hmap.py
velezj/project-manager
92e28e5718ca1302f6da0cf8b3d4a3bb5a1a8a72
[ "MIT" ]
null
null
null
shunt/hmap/hmap.py
velezj/project-manager
92e28e5718ca1302f6da0cf8b3d4a3bb5a1a8a72
[ "MIT" ]
null
null
null
import copy
import logging
import os
import tempfile

import jinja2
import yaml

logger = logging.getLogger(__name__)

##
# Interface functions for Hierarchical Maps (hmaps),
# which are just dictionaries-of-dictionaries :)

# Delimiters marking a free template variable slot.
TEMPLATE_HANDLEBAR_START = "{{"
TEMPLATE_HANDLEBAR_END = "}}"

# Key under which generated jinja "set" statements are stored in a node.
JINJA_VARIABLE_KEY = "_"


##============================================================================

def is_structured_key(x, delim='/'):
    """Return True iff *x* is a structured key (a delimited string)."""
    return isinstance(x, str) and delim in x


##============================================================================

def structured_key_to_path(sk, delim='/'):
    """Convert a structured key to a path (list of keys).

    A structured key is a delimited single-string key, much like a file
    system path or url :)  Purely numeric components become ints so they
    can index into lists.
    """
    def _numerate(x):
        try:
            return int(x)
        except ValueError:  # was a bare except; only int() conversion can fail here
            return x
    return list(map(_numerate, sk.split(delim)))


##============================================================================

def ensure_path(sk_or_path, delim='/'):
    """Coerce a structured key into a path; paths pass through unchanged."""
    if isinstance(sk_or_path, str):
        return structured_key_to_path(sk_or_path, delim=delim)
    return sk_or_path


##============================================================================

def hmap_probe(hmap, path):
    """Traverse a hierarchical map (dict of dict) structure with a path.

    Returns (parent_dict, last_key) for the last item in the path, or
    (None, None) if the path is not valid.

    NOTE: this *changes* the given hmap (potentially), since it *creates*
    the hmap structure down the path if it was not previously present.
    """
    path = ensure_path(path)
    if path is None or hmap is None or len(path) < 1:
        return None, None
    if len(path) == 1:
        return hmap, path[0]
    if path[0] not in hmap:
        hmap[path[0]] = {}
    return hmap_probe(hmap[path[0]], path[1:])


##============================================================================

def hmap_get(hmap, path, default):
    """Get the value for *path* from an hmap, or *default* if absent.

    May change the given hmap by probing it (see hmap_probe).
    """
    node, key = hmap_probe(hmap, path)
    if node is None or key not in node:
        return default
    return node[key]


##============================================================================

def hmap_set(hmap, path, value):
    """Set the value of *path* in an hmap, creating layers as needed.

    Returns the previous value at that path (or None).
    Raises ValueError if the hmap itself is None.
    """
    node, key = hmap_probe(hmap, path)
    if node is None:
        raise ValueError(
            "Could not probe hmap, returned None. This usually means that the hmap itself was None!"
        )
    old = node.get(key, None)
    node[key] = value
    return old


##============================================================================

def hmap_has_path(hmap, path):
    """Return True iff *path* has a set value in *hmap*.

    BUGFIX: previously probed `hmap_probe` (the function object) instead of
    the given hmap, so the answer never reflected the actual map.
    """
    node, key = hmap_probe(hmap, path)
    return node is not None and key in node


##============================================================================
##============================================================================

def resolve_structured_keys(hmap, delim='/'):
    """Expand any structured keys in *hmap* into real nested structure.

    Returns a new hmap in which no key is a structured key.  The result
    *may* share structure with the input hmap.

    BUGFIX: intermediate path components now use setdefault, so two
    structured keys with a common prefix (e.g. "a/b" and "a/c") no longer
    clobber each other's subtrees.
    """
    base = {}
    for key, value in hmap.items():
        # recurse into the value regardless of the key, if it is an hmap node
        if isinstance(value, dict):
            value = resolve_structured_keys(value, delim=delim)
        if not is_structured_key(key):
            # nothing to resolve for this key, just use the value
            base[key] = value
        else:
            # resolve the key into nested dictionaries
            path = ensure_path(key)
            node = base
            for part in path[:-1]:
                node = node.setdefault(part, {})
            # last part of the path gets the value
            node[path[-1]] = value
    return base


##============================================================================
##============================================================================

def has_free_variables(x):
    """Return True iff *x* contains a free variable ({{ }} handlebar slot).

    BUGFIX: the container cases previously called any(has_free_variables, x)
    (a TypeError — any takes a single iterable) and negated the result;
    now they correctly recurse and report True when any element has a slot.
    """
    if isinstance(x, (list, tuple)):
        return any(has_free_variables(item) for item in x)
    if isinstance(x, dict):
        return any(has_free_variables(item) for item in x.items())
    s = str(x)
    return TEMPLATE_HANDLEBAR_START in s or TEMPLATE_HANDLEBAR_END in s


##============================================================================
##============================================================================

def resolve_free_variables(parse_state, template_context):
    """Resolve the free variables within the hmap of *parse_state*.

    This does a global resolve on all free variables since the templates
    are treated globally.  Returns a new parse state with the given parse
    state as parent.
    """
    # first, translate any variable blocks into jinja set statements
    hmap_with_jinja_vars = add_jinja_variable_nodes(
        parse_state.hmap, template_context)

    # write out the resulting hmap's YAML to a temporary file
    with tempfile.NamedTemporaryFile(mode='w', prefix='shunt-pre-resolve_') as f:
        f.write(yaml.dump(hmap_with_jinja_vars))
        f.flush()
        logger.info("dumping pre-resolve into '{0}'".format(f.name))

        # load in the jinja template
        template, render_context = template_context.load_intermediate_template(
            f.name)

        # render the template into a sibling ".rendered" file
        template_string = template.render(render_context)
        opened_file = None
        with open(f.name + ".rendered", 'w') as wf:
            opened_file = f.name + ".rendered"
            wf.write(template_string)

        # re-parse the resulting yaml
        # NOTE(review): parse_yaml is not defined in this module — presumably
        # provided by a sibling module at runtime; confirm.
        try:
            new_parse_state = parse_yaml(opened_file, parent=parse_state)
        except Exception as e:
            msg = "Unable to re-load rendered template as YAML. Rendering at '{0}'".format(opened_file)
            raise RuntimeError(msg) from e

        # remove rendered temporary file (the NamedTemporaryFile cleans itself)
        os.remove(opened_file)

        return new_parse_state


##============================================================================
##============================================================================

def add_jinja_variable_nodes(hmap, template_context):
    """Return a new hmap where every 'vars' node gains a '_' sibling key
    holding jinja template code that actually sets those variables."""
    # deal with non-dictionaries
    if not isinstance(hmap, dict):
        # lists and tuples are just recursed over, element by element :)
        if isinstance(hmap, (list, tuple)):
            return type(hmap)(
                add_jinja_variable_nodes(item, template_context)
                for item in hmap)
        # everything else is an atom and cannot have vars
        return hmap

    # shallow copy so we never mutate the caller's structure at this level
    new_hmap = copy.copy(hmap)

    # grab any immediate variables and emit jinja "set" equivalents
    if 'vars' in hmap:
        jinja_sets = []
        for key, value in hmap['vars'].items():
            jinja_sets.append(
                "{{%- set {name} = \"{value}\" -%}}".format(
                    name=discard_handlebars(key),
                    value=discard_handlebars(value)))
        # assign jinja sets to the special key
        new_hmap[JINJA_VARIABLE_KEY] = "\n".join(jinja_sets)

    # recurse to children (skipping the 'vars' node itself)
    for key, value in hmap.items():
        if key == 'vars':
            continue
        new_hmap[key] = add_jinja_variable_nodes(value, template_context)

    return new_hmap


##============================================================================

def discard_handlebars(x):
    """Strip one outer pair of handlebars ({{ ... }}) from a string, if any.

    Non-strings pass through unchanged; only the first '{{' and the last
    '}}' are removed (first order).
    """
    if not isinstance(x, str):
        return x
    res = x
    start = res.find(TEMPLATE_HANDLEBAR_START)
    if start >= 0:
        res = res[:start] + res[start + len(TEMPLATE_HANDLEBAR_START):]
    end = res.rfind(TEMPLATE_HANDLEBAR_END)
    if end >= 0:
        res = res[:end] + res[end + len(TEMPLATE_HANDLEBAR_END):]
    return res


##============================================================================
##============================================================================

class TemplateContext(object):
    """A template context allows us to load "intermediate" templates.

    This also includes the jinja Environment and loaders being used.
    """

    def __init__(self, environment=None, context=None):
        # Default environment searches ./templates then the cwd.
        if environment is None:
            self.environment = jinja2.Environment(
                loader=jinja2.FileSystemLoader([
                    "templates",
                    ".",
                ]))
        else:
            self.environment = environment
        self.context = {} if context is None else context

    def load_intermediate_template(self, template_filename):
        """Load *template_filename* as a jinja template; returns (template, context)."""
        with open(template_filename) as f:
            template = self.environment.from_string(f.read())
        return template, self.context


# Shared default context for callers that do not supply their own.
DEFAULT_TEMPLATE_CONTEXT = TemplateContext()

##============================================================================
36.827988
116
0.443002
1,207
12,632
4.506214
0.22121
0.014709
0.011951
0.012502
0.072256
0.055709
0.031807
0.013238
0.013238
0.013238
0
0.001776
0.197831
12,632
342
117
36.935673
0.534985
0.518445
0
0.06338
0
0
0.046575
0
0
0
0
0
0
1
0.105634
false
0
0.042254
0.007042
0.323944
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
8533f45f79e26e6d7713f555d363262a5ebdca2b
2,496
py
Python
kur/sources/jsonl.py
greedyuser/kur
ba6588ebfa5dec66d1e462c180618cc115fd38ef
[ "Apache-2.0" ]
867
2016-12-05T20:24:23.000Z
2022-02-18T09:07:14.000Z
kur/sources/jsonl.py
greedyuser/kur
ba6588ebfa5dec66d1e462c180618cc115fd38ef
[ "Apache-2.0" ]
90
2017-01-14T22:46:23.000Z
2021-02-09T13:32:27.000Z
kur/sources/jsonl.py
greedyuser/kur
ba6588ebfa5dec66d1e462c180618cc115fd38ef
[ "Apache-2.0" ]
135
2017-01-18T19:21:20.000Z
2022-01-24T16:57:59.000Z
import linecache
import numpy
import json

from ..sources import ChunkSource


###############################################################################
class JSONLSource(ChunkSource):
    """ Data source for tensors stored in JSONL format """

    ###########################################################################
    def __init__(self, source, key, num_entries, *args, **kwargs):
        """ Creates a new JSONL source for file named `source`. """
        super().__init__(*args, **kwargs)
        self.source = source
        self.num_entries = num_entries
        self.key = key
        # Identity permutation over all entries; shuffle() rearranges it.
        self.indices = numpy.arange(len(self))

    ###########################################################################
    def __iter__(self):
        """ Return an iterator to the data.

            Reads the value (tensor) for self.key out of each JSON object
            and yields these values in batches of at most chunk_size.
        """
        lo = 0
        total = self.num_entries
        while lo < total:
            hi = min(total, lo + self.chunk_size)
            chunk = []
            for idx in self.indices[lo:hi]:
                # linecache line numbering starts at 1
                line = linecache.getline(self.source, idx + 1).strip()
                chunk.append(json.loads(line)[self.key])
            yield chunk
            lo = hi

    ###########################################################################
    def __len__(self):
        """ Returns the total number of entries that this source can
            return, if known.
        """
        return self.num_entries

    ###########################################################################
    def shape(self):
        """ Return the shape of the tensor (excluding batch size) returned
            by this data source, inferred from the first JSONL line.
        """
        first = json.loads(linecache.getline(self.source, 1))
        return numpy.array(first[self.key]).shape

    ###########################################################################
    def can_shuffle(self):
        """ This source can be shuffled. """
        return True

    ###########################################################################
    def shuffle(self, indices):
        """ Applies a permutation to the data. """
        k = len(indices)
        if k > len(self):
            raise ValueError('Shuffleable was asked to apply permutation, but '
                'the permutation is longer than the length of the data set.')
        # Permute only the prefix the permutation covers.
        self.indices[:k] = self.indices[:k][indices]
35.15493
93
0.455529
236
2,496
4.716102
0.415254
0.053908
0.050314
0.044924
0.062893
0.062893
0
0
0
0
0
0.002693
0.25601
2,496
70
94
35.657143
0.596661
0.204728
0
0
0
0
0.078287
0
0
0
0
0
0
1
0.1875
false
0
0.125
0
0.4375
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
8535410c8ebbea8fb51fba1d44a3fdf3092fb5af
161
py
Python
tests/web_platform/css_flexbox_1/test_flexbox_stf_table_cell.py
jonboland/colosseum
cbf974be54fd7f6fddbe7285704cfaf7a866c5c5
[ "BSD-3-Clause" ]
71
2015-04-13T09:44:14.000Z
2019-03-24T01:03:02.000Z
tests/web_platform/css_flexbox_1/test_flexbox_stf_table_cell.py
jonboland/colosseum
cbf974be54fd7f6fddbe7285704cfaf7a866c5c5
[ "BSD-3-Clause" ]
35
2019-05-06T15:26:09.000Z
2022-03-28T06:30:33.000Z
tests/web_platform/css_flexbox_1/test_flexbox_stf_table_cell.py
jonboland/colosseum
cbf974be54fd7f6fddbe7285704cfaf7a866c5c5
[ "BSD-3-Clause" ]
139
2015-05-30T18:37:43.000Z
2019-03-27T17:14:05.000Z
from tests.utils import W3CTestCase


class TestFlexbox_StfTableCell(W3CTestCase):
    # Auto-generate one test method per W3C reference file whose name matches
    # the 'flexbox_stf-table-cell' prefix; vars() injects the discovered
    # test callables directly into this class body.
    vars().update(W3CTestCase.find_tests(__file__, 'flexbox_stf-table-cell'))
26.833333
77
0.807453
19
161
6.473684
0.842105
0
0
0
0
0
0
0
0
0
0
0.020408
0.086957
161
5
78
32.2
0.816327
0
0
0
0
0
0.136646
0.136646
0
0
0
0
0
1
0
true
0
0.333333
0
0.666667
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
8539abd165125b9d66931bb7fd6e71c602de1c9e
346
py
Python
pages/middleware.py
dragetd/LambdaCast
a8227d8d19a2fdb1ff1d5e8ad7366d60a1e253f7
[ "BSD-2-Clause" ]
6
2015-04-05T01:28:23.000Z
2022-02-06T17:29:47.000Z
pages/middleware.py
dragetd/LambdaCast
a8227d8d19a2fdb1ff1d5e8ad7366d60a1e253f7
[ "BSD-2-Clause" ]
2
2022-01-05T23:07:10.000Z
2022-03-30T17:52:45.000Z
pages/middleware.py
dragetd/LambdaCast
a8227d8d19a2fdb1ff1d5e8ad7366d60a1e253f7
[ "BSD-2-Clause" ]
2
2022-02-06T17:29:53.000Z
2022-02-26T17:23:09.000Z
'''
Created on Jun 4, 2014

@author: benjamin
'''
from pages.models import Page


class PagesMiddleware(object):
    """Template-response middleware that injects the site's page list."""

    def process_template_response(self, request, response):
        """Add the activated pages (ordered by ``orderid``) to the
        response's template context under the ``page_list`` key."""
        pages = Page.objects.filter(activated=True).order_by('orderid')
        response.context_data['page_list'] = pages
        return response
21.625
100
0.699422
42
346
5.642857
0.809524
0.126582
0
0
0
0
0
0
0
0
0
0.017731
0.184971
346
16
101
21.625
0.822695
0.208092
0
0
0
0
0.061303
0
0
0
0
0
0
1
0.2
false
0
0.2
0
0.8
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
1
8539ad589810749d569e8c96666ae5bd84a052e5
2,571
py
Python
nb2blog.py
rowanc1/nb2blog
1c625a2727124898c4f3d9c9742feb268c554ebd
[ "MIT" ]
null
null
null
nb2blog.py
rowanc1/nb2blog
1c625a2727124898c4f3d9c9742feb268c554ebd
[ "MIT" ]
null
null
null
nb2blog.py
rowanc1/nb2blog
1c625a2727124898c4f3d9c9742feb268c554ebd
[ "MIT" ]
null
null
null
#!/usr/local/bin/python
# NOTE(review): this is Python 2 code — it uses the `file()` builtin, which
# was removed in Python 3.  Confirm the intended interpreter before porting.
import requests, argparse, p3c, os, json, subprocess, keyring

def main():
    """Upload a notebook to a GitHub gist and publish it to the 3point/SimPEG blog.

    Reads the notebook path and an optional description from the command
    line, creates or patches a gist (tracked in a local nb2blog.json
    registry next to the p3c package), converts the notebook to HTML via
    nbconvert, and posts the result as a blog entry.
    """
    parser = argparse.ArgumentParser(description='Upload a notebook to a gist and 3point/SimPEG blog.')
    parser.add_argument('notebook', type=str, help='The file name of the notebook.')
    parser.add_argument('-m', type=str, help='Description of the notebook.')
    args = parser.parse_args()
    # Registry of notebook -> gist URL, stored alongside the p3c package.
    jsonFile = '/'.join(p3c.__file__.split('/')[:-1]+['nb2blog.json'])
    if os.path.exists(jsonFile):
        with file(jsonFile,'r') as f:
            R = json.loads(f.read())
    else:
        # First run: create an empty registry file.
        f = file(jsonFile,'w')
        f.write('{}\n')
        f.close()
        R = {}

    # Get the data ready for uploading to gist.github.com
    ipynb = file(args.notebook,'r')
    data = {
        "description": args.m,
        "public": True,
        "files": {}
    }
    data['files'][args.notebook] = {"content": str(ipynb.read())}
    ipynb.close()

    # The gist token is expected in the system keyring under ('3pt', 'github.gist').
    token = keyring.get_password('3pt','github.gist')
    if token is None:
        raise Exception("""keyring could not find your gist token:

ipython
> import keyring
> keyring.set_password('3pt', 'github.gist', 'YOUR GITHUB TOKEN')

Go to github to create one if you haven't made it yet (make sure you enable gist,repo,user):

https://github.com/settings/applications#personal-access-tokens

""")

    # Check if the ipynb is in the dict, and post to gist.github.com
    # (PATCH updates an existing gist; POST creates a new one and records it).
    if args.notebook in R:
        url = R[args.notebook]['gistURL']
        resp = requests.patch("%s?access_token=%s"%(url,token), data=json.dumps(data))
    else:
        resp = requests.post("https://api.github.com/gists?access_token=%s"%token, data=json.dumps(data))
        url = resp.json()['url']
        R[args.notebook] = {"gistURL": url}
    gitResp = resp.json()
    # Persist the (possibly updated) registry.
    f = file(jsonFile,'w')
    f.write(json.dumps(R))
    f.close()

    # Convert the notebook to html
    # NOTE(review): shell=True with an interpolated filename — spaces are
    # escaped but other shell metacharacters are not; treat paths as trusted.
    subprocess.check_output("ipython nbconvert %s --to html --template basic" % (args.notebook.replace(' ','\\ ')), shell=True)
    f = file(args.notebook.replace('ipynb','html'),'r')
    nbhtml = f.read()
    f.close()
    # Remove the rendered HTML once its contents have been read.
    subprocess.check_output("rm %s" % (args.notebook.replace(' ','\\ ')).replace('ipynb','html'), shell=True)
    # Derive the blog uid/title from the notebook filename (strip ".ipynb").
    uid = args.notebook[:-6].lower().replace(' ','-')
    title = args.notebook[:-6].title()
    b = p3c.Blog.new({'uid':uid,"content":nbhtml, "title":title, "description": args.m, 'setTags':'simpeg'})

if __name__ == "__main__":
    main()
34.743243
127
0.596266
336
2,571
4.5
0.392857
0.079365
0.037698
0.018519
0.085979
0.026455
0
0
0
0
0
0.005053
0.230261
2,571
73
128
35.219178
0.758969
0.064566
0
0.132075
0
0.018868
0.315285
0.011245
0
0
0
0
0
1
0.018868
false
0.037736
0.037736
0
0.056604
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
853a7d0f953c95bc8bdbaefb204194d26a724aed
970
py
Python
tests/test_main.py
JeanmarieAlder/bollards-api
39c82a53575ecd2fbd7f98864512a5494b800836
[ "MIT" ]
1
2021-07-26T06:40:04.000Z
2021-07-26T06:40:04.000Z
tests/test_main.py
JeanmarieAlder/bollards-api
39c82a53575ecd2fbd7f98864512a5494b800836
[ "MIT" ]
null
null
null
tests/test_main.py
JeanmarieAlder/bollards-api
39c82a53575ecd2fbd7f98864512a5494b800836
[ "MIT" ]
null
null
null
from bollards_api.main.forms import ContactForm


def test_home_page(client):
    """Test that home page displays correctly"""
    rv = client.get('/')
    assert b'<h1 class="text-center">Welcome to Bollards API</h1>' in rv.data
    assert b'<p class="card-text">Discover all bollards between Vaud, Switzerland and France.</p>' in rv.data
    assert b'Welcome to the bollards.ch API.' in rv.data
    # /home should be equal to /
    rv_home = client.get('/home')
    assert rv_home.data == rv.data


def test_about_page(client):
    # The about page is expected to mention "42" somewhere in its body.
    rv = client.get('/about')
    assert b'42' in rv.data


# NOTE(review): exact duplicate of test_about_page — likely left over from a
# copy/paste; consider removing or giving it a distinct assertion.
def test_about_page2(client):
    rv = client.get('/about')
    assert b'42' in rv.data


def test_contact_form_works(app):
    """Currently not in use"""
    # NOTE(review): only checks that ContactForm() constructs inside an app
    # context; the trailing `assert True` is vacuous.
    with app.app_context():
        contactForm = ContactForm()
        assert True


def test_404_on_bad_request(client):
    # Unknown routes must render the custom 404 page.
    rv = client.get('/randomlink')
    assert b'<h1>Looks like you ran into 404.</h1>' in rv.data
27.714286
109
0.670103
152
970
4.164474
0.421053
0.066351
0.075829
0.061611
0.229068
0.14534
0.14534
0.14534
0.14534
0.14534
0
0.01938
0.202062
970
35
110
27.714286
0.79845
0.089691
0
0.190476
0
0
0.271789
0.06078
0
0
0
0
0.380952
1
0.238095
false
0
0.047619
0
0.285714
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
0
0
0
1
853bd8589a69bf83feb568fd0e023ea150961b83
11,382
py
Python
src/data_scraping.py
othmanefc/ttfl_fantasy
6b5d4316553a5d01114218fcfbe26588de499ead
[ "CC0-1.0" ]
null
null
null
src/data_scraping.py
othmanefc/ttfl_fantasy
6b5d4316553a5d01114218fcfbe26588de499ead
[ "CC0-1.0" ]
6
2020-01-28T23:09:28.000Z
2022-02-10T00:28:14.000Z
src/data_scraping.py
othmanefc/ttfl_fantasy
6b5d4316553a5d01114218fcfbe26588de499ead
[ "CC0-1.0" ]
null
null
null
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""Scraping of basketball-reference.com box scores and rosters."""
from typing import List, Dict, Any, Union, Optional, Callable, Sequence

from bs4 import BeautifulSoup, Comment, element
import pandas as pd
import re
from urllib.request import urlopen
import os
import datetime
from tqdm import tqdm as tqdm_notebook
import time

from src.constants import DATA_DIR


def get_scores(date: str, metrics: List[str]) -> pd.DataFrame:
    """Scrape per-player box scores for every game played on *date* (YYYYMMDD).

    Results are cached as CSV under DATA_DIR/dates/<date>.csv; a cached file
    is returned as-is.  Players with fewer than 10 minutes played are dropped.
    Returns an empty frame (columns == *metrics*) when no games were played.
    """
    path_check = os.path.join(DATA_DIR, "dates", f"{date}.csv")
    if os.path.exists(path_check):
        return pd.read_csv(path_check)

    url_parent: str = "https://www.basketball-reference.com"
    url: str = (f"https://www.basketball-reference.com/boxscores/?month="
                f"{date[4:6]}&day={date[6:8]}&year={date[0:4]}")
    soup: BeautifulSoup = BeautifulSoup(urlopen(url), "lxml")
    games: Sequence[Optional[element.Tag]] = soup.find_all(
        "div", class_="game_summary expanded nohover")
    if len(games) == 0:
        return pd.DataFrame(columns=metrics)

    df_games: List[Any] = []
    for game in tqdm_notebook(games, desc=f"Date: {date}", total=len(games)):
        # summary maps "winner"/"loser" -> [team_abbrev, final_score]
        summary: Dict[str, List[Any]] = {}
        winner = game.find("tr", class_="winner").find_all("td")
        loser = game.find("tr", class_="loser").find_all("td")
        summary["winner"] = [
            winner[0].find("a")["href"][7:10],
            int(winner[1].get_text()),
        ]
        summary["loser"] = [
            loser[0].find("a")["href"][7:10],
            int(loser[1].get_text()),
        ]

        url_game: str = url_parent + game.find("a", text="Box Score")["href"]
        soup_game: BeautifulSoup = BeautifulSoup(urlopen(url_game), "lxml")
        box_score = game.find("a", text="Box Score")["href"]
        # The box-score URL embeds the game date; re-read it for labeling.
        date = re.findall(r"\d\d\d\d\d\d\d\d", box_score)[0]

        for result, (side, score) in summary.items():
            game_result = soup_game.find(
                "table",
                class_="sortable stats_table",
                id=f"box-{side}-game-basic")
            # Skip the header row and the "Team Totals" footer row.
            player_list = game_result.find_all("tr", class_=None)[1:-1]
            team: List[Dict[str, Optional[Union[float, int, str]]]] = []
            for player in player_list:
                player_name: Optional[str] = player.find("th")["csk"]
                player_dict: Dict[str, Optional[Union[str, int, float]]] = {
                    "name": player_name,
                    "date": date,
                }
                for metric in metrics:
                    # Missing cells (DNP etc.) default to 0.
                    try:
                        res = player.find(
                            "td", {"data-stat": metric}).contents[0]
                    except Exception:
                        res = 0
                    player_dict.update({metric: res})
                if result == "winner":
                    player_dict.update({
                        "result": 1,
                        "score": score,
                        "team": summary["winner"][0],
                        "opp": summary["loser"][0],
                        "opp_score": summary["loser"][1],
                    })
                if result == "loser":
                    # BUGFIX: losing players were previously attributed to
                    # the *winning* team; they belong to the losing one.
                    player_dict.update({
                        "result": 0,
                        "score": score,
                        "team": summary["loser"][0],
                        "opp": summary["winner"][0],
                        "opp_score": summary["winner"][1],
                    })
                # Keep only players with at least 10 minutes played.
                if int(str(player_dict["mp"]).split(":")[0]) >= 10:
                    team.append(player_dict)
            team_df: pd.DataFrame = pd.DataFrame(team)
            team_df["score"] = score
            df_games.append(pd.DataFrame(team_df))

    df_games_df: pd.DataFrame = pd.concat(df_games)
    # BUGFIX: rename() without columns= targets the index; the guard checks
    # columns, so the rename must operate on columns too.
    if ' trb' in df_games_df.columns:
        df_games_df.rename(columns={' trb': 'trb'}, inplace=True)
    Data_scrapper.write_csv(df=df_games_df, name=date, extra_path="dates")
    return df_games_df


class Data_scrapper(object):
    """Scraper for a date range of NBA box scores plus roster/injury lookups."""

    def __init__(self, start: str, end: str) -> None:
        # " trb" carries a deliberate leading space (site quirk); get_scores
        # renames it back to "trb" after scraping.
        self.metrics: List[str] = [
            "mp", "fg", "fga", "fg_pct", "fg3", "fg3a", "fg3_pct", "ft",
            "fta", "ft_pct", "orb", "drb", " trb", "ast", "stl", "blk",
            "tov", "pf", "pts", "plus_minus",
        ]
        self.start: datetime.datetime = datetime.datetime.strptime(
            start, "%Y%m%d")
        self.end: datetime.datetime = datetime.datetime.strptime(end, "%Y%m%d")
        # All YYYYMMDD strings from start to end inclusive.
        self.timeframe: List[str] = self.generate_time_frame()

    @staticmethod
    def write_csv(df: pd.DataFrame, name: str,
                  extra_path: str = None) -> None:
        """Write *df* to DATA_DIR[/extra_path]/<name>.csv, creating the dir."""
        if extra_path is not None:
            path_data: str = os.path.join(DATA_DIR, extra_path)
        else:
            path_data = os.path.join(DATA_DIR)
        if not os.path.exists(path_data):
            os.mkdir(path_data)
        full_path: str = os.path.join(path_data, f"{name}.csv")
        df.to_csv(full_path, index=False)

    def get_timeframe_data(self,
                           sleep: int = 0,
                           name: str = "default",
                           write: bool = True,
                           get_scores: Callable = get_scores) -> pd.DataFrame:
        """Scrape every date in the timeframe, optionally persisting the result.

        *sleep* seconds are waited between dates to throttle requests.
        """
        full_time_list: List[pd.DataFrame] = []
        for date in tqdm_notebook(self.timeframe,
                                  total=len(self.timeframe),
                                  desc="Main Frame"):
            date_df: pd.DataFrame = get_scores(date, self.metrics)
            full_time_list.append(date_df)
            time.sleep(sleep)
        full_time_df: pd.DataFrame = pd.concat(full_time_list, sort=True)
        if write:
            Data_scrapper.write_csv(full_time_df, name=name)
        return full_time_df

    def generate_time_frame(self) -> List[str]:
        """Return each date from self.start to self.end inclusive, as YYYYMMDD."""
        return [
            (self.start + datetime.timedelta(days=x)).strftime("%Y%m%d")
            for x in range(0, (self.end - self.start).days + 1)
        ]

    @staticmethod
    def get_next_games(
            date: str,
            season_year: Union[str, int]) -> List[Dict[str, Optional[str]]]:
        """Return the {home, visitor} match-ups scheduled for *date*."""
        month: str = datetime.datetime.strptime(
            date, "%Y%m%d").strftime("%B").lower()
        url_games: str = (f"https://www.basketball-reference.com/leagues/"
                          f"NBA_{season_year}_games-{month}.html")
        print(url_games)
        soup: BeautifulSoup = BeautifulSoup(urlopen(url_games), "lxml")
        month_games: Sequence[Any] = soup.find_all("tr")
        match_ups: List[Dict[str, Optional[str]]] = []
        for month_game in month_games:
            # Rows without a csk-carrying <th> (headers etc.) are skipped.
            try:
                check_date: bool = month_game.find("th")["csk"].startswith(
                    date)
            except Exception:
                continue
            if check_date:
                visitor: Optional[str] = month_game.find(
                    "td", {"data-stat": "visitor_team_name"}
                ).find("a")["href"][7:10]
                home: Optional[str] = month_game.find(
                    "td", {"data-stat": "home_team_name"}
                ).find("a")["href"][7:10]
                match_ups.append({"home": home, "visitor": visitor})
        return match_ups

    @staticmethod
    def get_all_players(
            team: Optional[str], date: str,
            season_year: Union[str, int]) -> List[Dict[str, Optional[str]]]:
        """Return the full roster of *team* for *season_year*, tagged with *date*."""
        url: str = (f"https://www.basketball-reference.com/"
                    f"teams/{team}/{season_year}.html")
        print(url)
        soup: BeautifulSoup = BeautifulSoup(urlopen(url), "lxml")
        table_players: Optional[element.Tag] = soup.find("tbody")
        players: List[Dict[str, Optional[str]]] = []
        for player in table_players.find_all("tr"):
            name: Optional[str] = player.find("td",
                                              {"data-stat": "player"})["csk"]
            players.append({"name": name, "team": team, "date": date})
        return players

    @staticmethod
    def get_injured_players(team: Optional[str], date: str,
                            season_year: Union[str, int]) -> List:
        """Return the injury-report players of *team*, or [] when unavailable.

        The injury table is embedded in an HTML comment, so it is extracted
        and re-parsed; any failure along the way yields an empty list.
        """
        url: str = (f"https://www.basketball-reference.com/"
                    f"teams/{team}/{season_year}.html")
        soup: BeautifulSoup = BeautifulSoup(urlopen(url), "lxml")
        div_inj: Optional[element.Tag] = soup.find("div", id="all_injury")
        try:
            comments: Sequence[Optional[element.Tag]] = div_inj.find_all(
                string=lambda text: isinstance(text, Comment))
            comms: Optional[str] = re.sub("\n", "", comments[0]).strip()
            soup = BeautifulSoup(comms, "lxml")
            body: Optional[element.Tag] = soup.find("tbody")
            players: List[Dict[str, Optional[str]]] = []
            for player in body.find_all("tr"):
                name: Optional[str] = player.find(
                    "th", {"data-stat": "player"})["csk"]
                players.append({"name": name, "team": team, "date": date})
            return players
        except Exception:
            return list()

    @staticmethod
    def get_next_games_player(date: str,
                              season_year: Union[str, int]) -> pd.DataFrame:
        """Return a frame of all healthy players in the games scheduled for
        *date*, each annotated with their opponent team."""
        match_ups: List[Dict[str, Optional[str]]] = Data_scrapper.get_next_games(
            date, season_year)
        all_players_list: List = []
        for match_up in match_ups:
            for i, team in enumerate(match_up.values()):
                all_players: List[Dict[
                    str, Optional[str]]] = Data_scrapper.get_all_players(
                        team, date, season_year)
                injured_players: List = Data_scrapper.get_injured_players(
                    team, date, season_year)
                injured_players_names: List = ([
                    player["name"] for player in injured_players
                ] if len(injured_players) > 0 else [])
                available_players: List = [
                    player for player in all_players
                    if player["name"] not in injured_players_names
                ]
                # The opponent is the other entry of the match-up pair.
                for player in available_players:
                    ind: int = 1 if i == 0 else 0
                    player["opp"] = list(match_up.values())[ind]
                all_players_list.extend(available_players)
        return pd.DataFrame(all_players_list)
41.540146
79
0.511949
1,283
11,382
4.381138
0.176929
0.029354
0.032023
0.027041
0.349582
0.273617
0.232877
0.178082
0.097136
0.097136
0
0.008689
0.352838
11,382
273
80
41.692308
0.754412
0.025391
0
0.160494
0
0.004115
0.092541
0.014702
0
0
0
0
0
1
0.037037
false
0
0.041152
0
0.123457
0.00823
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
853d84e2e9e82207867461c37d8d12080faf1569
766
py
Python
src/DataGenerator.py
nikhil-garg/CartPoleSimulation
fd778327af5fe764248b68db53a456a77e903656
[ "MIT" ]
null
null
null
src/DataGenerator.py
nikhil-garg/CartPoleSimulation
fd778327af5fe764248b68db53a456a77e903656
[ "MIT" ]
null
null
null
src/DataGenerator.py
nikhil-garg/CartPoleSimulation
fd778327af5fe764248b68db53a456a77e903656
[ "MIT" ]
null
null
null
from src.CartClass import * from src.utilis import * from src.utilis import * from tqdm import tqdm csv = 'data_rnn' number_of_experiments = 10 length_of_experiment = 1e3 dt_main_simulation = dt_main_simulation_globals track_relative_complexity = 0.5 # randomly placed points/s track_complexity = int(dt_main_simulation*length_of_experiment*track_relative_complexity) # Total number of randomly placed points mode = 2 MyCart = Cart() for i in range(number_of_experiments): print(i) sleep(0.1) Generate_Experiment(MyCart, mode=mode, exp_len=length_of_experiment, dt=dt_main_simulation, track_complexity=track_complexity, csv=csv)
30.64
131
0.678851
97
766
5.061856
0.463918
0.04888
0.130346
0.077393
0.09776
0.09776
0
0
0
0
0
0.015901
0.261097
766
25
132
30.64
0.85159
0.082245
0
0.095238
0
0
0.011412
0
0
0
0
0
0
1
0
false
0
0.190476
0
0.190476
0.047619
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
854189ca67276f7eea81393038f259f4f1505403
4,970
py
Python
trojsten/events/migrations/0001_initial.py
MvonK/web
b701a6ea8fb6f0bdfb720e66d0a430db13db8bff
[ "MIT" ]
5
2018-04-22T22:44:02.000Z
2021-04-26T20:44:44.000Z
trojsten/events/migrations/0001_initial.py
MvonK/web
b701a6ea8fb6f0bdfb720e66d0a430db13db8bff
[ "MIT" ]
250
2018-04-24T12:04:11.000Z
2022-03-09T06:56:47.000Z
trojsten/events/migrations/0001_initial.py
MvonK/web
b701a6ea8fb6f0bdfb720e66d0a430db13db8bff
[ "MIT" ]
8
2019-04-28T11:33:03.000Z
2022-02-26T13:30:36.000Z
# -*- coding: utf-8 -*- from django.db import migrations, models class Migration(migrations.Migration): dependencies = [] operations = [ migrations.CreateModel( name="Event", fields=[ ( "id", models.AutoField( verbose_name="ID", serialize=False, auto_created=True, primary_key=True ), ), ("name", models.CharField(max_length=100, verbose_name="n\xe1zov")), ("start_time", models.DateTimeField(verbose_name="\u010das za\u010diatku")), ("end_time", models.DateTimeField(verbose_name="\u010das konca")), ( "registration_deadline", models.DateTimeField( null=True, verbose_name="deadline pre registr\xe1ciu", blank=True ), ), ( "text", models.TextField( default="", help_text='Obsah bude prehnan\xfd <a href="http://en.wikipedia.org/wiki/Markdown">Markdownom</a>.', blank=True, ), ), ], options={ "ordering": ["-end_time", "-start_time"], "verbose_name": "akcia", "verbose_name_plural": "akcie", }, ), migrations.CreateModel( name="EventType", fields=[ ( "id", models.AutoField( verbose_name="ID", serialize=False, auto_created=True, primary_key=True ), ), ("name", models.CharField(max_length=100, verbose_name="n\xe1zov")), ("is_camp", models.BooleanField(verbose_name="s\xfastredko")), ], options={"verbose_name": "typ akcie", "verbose_name_plural": "typy akci\xed"}, ), migrations.CreateModel( name="Invitation", fields=[ ( "id", models.AutoField( verbose_name="ID", serialize=False, auto_created=True, primary_key=True ), ), ( "type", models.SmallIntegerField( default=0, verbose_name="typ pozv\xe1nky", choices=[ (0, "\xfa\u010dastn\xedk"), (1, "n\xe1hradn\xedk"), (2, "ved\xfaci"), ], ), ), ("going", models.NullBooleanField(verbose_name="z\xfa\u010dastn\xed sa")), ], options={"verbose_name": "pozv\xe1nka", "verbose_name_plural": "pozv\xe1nky"}, ), migrations.CreateModel( name="Link", fields=[ ( "id", models.AutoField( verbose_name="ID", serialize=False, auto_created=True, primary_key=True ), ), ("title", models.CharField(max_length=100, verbose_name="titulok")), ("name", 
models.CharField(max_length=300, verbose_name="meno")), ("url", models.URLField(max_length=300)), ], options={"verbose_name": "odkaz", "verbose_name_plural": "odkazy"}, ), migrations.CreateModel( name="Place", fields=[ ( "id", models.AutoField( verbose_name="ID", serialize=False, auto_created=True, primary_key=True ), ), ("name", models.CharField(max_length=100, verbose_name="n\xe1zov")), ], options={"verbose_name": "miesto akcie", "verbose_name_plural": "miesta akci\xed"}, ), migrations.CreateModel( name="Registration", fields=[ ( "id", models.AutoField( verbose_name="ID", serialize=False, auto_created=True, primary_key=True ), ), ("name", models.CharField(max_length=100, verbose_name="n\xe1zov")), ( "text", models.TextField( help_text='Obsah bude prehnan\xfd <a href="http://en.wikipedia.org/wiki/Markdown">Markdownom</a>.' ), ), ], options={ "verbose_name": "Prihl\xe1\u0161ka", "verbose_name_plural": "Prihl\xe1\u0161ky", }, ), ]
37.368421
123
0.423742
370
4,970
5.521622
0.313514
0.161527
0.073421
0.067548
0.503182
0.45815
0.418013
0.399413
0.399413
0.399413
0
0.022148
0.45493
4,970
132
124
37.651515
0.732004
0.004225
0
0.574803
0
0.015748
0.175864
0.004245
0
0
0
0
0
1
0
false
0
0.007874
0
0.031496
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
85423a2d8bf70f44a96088e677ba0a04dd9e70d8
111
py
Python
common/code/snippets/py/arr_of_chunks.py
nevesnunes/env
7a5e3816334337e04a87e1a2e4dc322215901744
[ "MIT" ]
4
2020-04-07T14:45:02.000Z
2021-12-28T22:43:16.000Z
common/code/snippets/py/arr_of_chunks.py
nevesnunes/env
7a5e3816334337e04a87e1a2e4dc322215901744
[ "MIT" ]
null
null
null
common/code/snippets/py/arr_of_chunks.py
nevesnunes/env
7a5e3816334337e04a87e1a2e4dc322215901744
[ "MIT" ]
2
2020-04-08T03:12:06.000Z
2021-03-04T20:33:03.000Z
#!/usr/bin/env python3 n = 2 l = "foo" chunks = [l[i - n : i] for i in range(n, len(l) + n, n)] print(chunks)
15.857143
56
0.54955
24
111
2.541667
0.625
0
0
0
0
0
0
0
0
0
0
0.023256
0.225225
111
6
57
18.5
0.686047
0.189189
0
0
0
0
0.033708
0
0
0
0
0
0
1
0
false
0
0
0
0
0.25
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
854307b441b9f6e3e18268c1d5065eddb624b4e1
545
py
Python
setup.py
pandapan0021/SHTTPServer_plus.py
c7c0fbe1a0a0ecc655d128cf61a49ea0b35bb32b
[ "MIT" ]
null
null
null
setup.py
pandapan0021/SHTTPServer_plus.py
c7c0fbe1a0a0ecc655d128cf61a49ea0b35bb32b
[ "MIT" ]
null
null
null
setup.py
pandapan0021/SHTTPServer_plus.py
c7c0fbe1a0a0ecc655d128cf61a49ea0b35bb32b
[ "MIT" ]
null
null
null
# -*- coding: utf-8 -*- ''' ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ A setuptools based setup module. -------------------------------------------------------- See: http://packaging.python.org/en/latest/distributing.html http://github.com/pypa/sampleproject ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ ''' from setuptools import setup setup( name='SHTTPServer_plus', url='https://github.com/pandapan0021/SHTTPServer_plus.git', classifiers=[ 'Programing Language :: Python :: 3.5', ], )
23.695652
60
0.46055
44
545
5.659091
0.795455
0.072289
0
0
0
0
0
0
0
0
0
0.014583
0.119266
545
22
61
24.772727
0.504167
0.6
0
0
0
0
0.553191
0
0
0
0
0
0
1
0
true
0
0.125
0
0.125
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
1
85440bc2c3a337f01c193e1d5db700e9605da53f
1,128
py
Python
lion - white.py
Abdumajidhu/Image-Enhancement-therough-Image-Processing-Techniques
126690319297a5ed7df99ff47797980cc525ecf3
[ "MIT" ]
1
2019-10-27T13:03:05.000Z
2019-10-27T13:03:05.000Z
lion - white.py
Abdumajidhu/Image-Enhancement-therough-Image-Processing-Techniques
126690319297a5ed7df99ff47797980cc525ecf3
[ "MIT" ]
null
null
null
lion - white.py
Abdumajidhu/Image-Enhancement-therough-Image-Processing-Techniques
126690319297a5ed7df99ff47797980cc525ecf3
[ "MIT" ]
1
2021-12-17T06:01:52.000Z
2021-12-17T06:01:52.000Z
# import opencv import numpy as np import cv2 # Read image src = cv2.imread("exercise_images/lion.jpg",0) # Set threshold and maxValue thresh = 25 thresh3 = 255 thresh4 = 205 thresh5 = 105 thresh2 = 155 maxValue = 255 # Basic threshold example th, dst = cv2.threshold(src, thresh, maxValue, cv2.THRESH_BINARY); th, dsts = cv2.threshold(src, thresh2, maxValue, cv2.THRESH_BINARY); th, dsts1 = cv2.threshold(src, thresh3, maxValue, cv2.THRESH_BINARY); th, dsts2 = cv2.threshold(src, thresh4, maxValue, cv2.THRESH_BINARY); th, dsts3 = cv2.threshold(src, thresh5, maxValue, cv2.THRESH_BINARY); improved = np.hstack((src,dsts)) #stacking images side-by-side improvedmore = np.hstack((src,dsts)) #stacking images side-by-side imp = np.hstack((dst,dsts)) #stacking images side-by-side cv2.imshow('Have You of 165',dst) cv2.imshow('Got You of 155',dsts2) cv2.imshow('Have You of 255',dsts3) cv2.imshow('Got You of 205',dsts1) cv2.imshow('Have You of 100',dsts) cv2.imwrite('doc.jpeg',improved) cv2.imwrite('doc2.jpeg',improvedmore) cv2.imwrite('alike.jpeg',imp) #cv2.imshow('Image',src)
28.923077
70
0.711879
171
1,128
4.660819
0.333333
0.067754
0.094103
0.144291
0.368883
0.132999
0.097867
0.097867
0.097867
0
0
0.071875
0.148936
1,128
38
71
29.684211
0.758333
0.161348
0
0
0
0
0.137931
0.026696
0
0
0
0
0
1
0
false
0
0.08
0
0.08
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
854591a05898d20509fd9ed5f1b49eb9ada02c6f
3,185
py
Python
gallery/tests.py
melissa-koi/PersonalGallery
0fed4a60a754d04f169a976403fb2bde6425ae59
[ "MIT" ]
null
null
null
gallery/tests.py
melissa-koi/PersonalGallery
0fed4a60a754d04f169a976403fb2bde6425ae59
[ "MIT" ]
null
null
null
gallery/tests.py
melissa-koi/PersonalGallery
0fed4a60a754d04f169a976403fb2bde6425ae59
[ "MIT" ]
null
null
null
from django.test import TestCase from .models import Image, Category, Location # Create your tests here. class CategoryTest(TestCase): # set up method def setUp(self): self.new_category = Category(name='newCategory') # tear down method def tearDown(self): Category.objects.all().delete() # testing instance def test_instance(self): self.assertTrue(self.new_category, Category) # testing saving image category def test_save_category(self): self.new_category.save_category() categories = Category.objects.all() self.assertTrue(len(categories) > 0) # testing deleting a category def test_delete_category(self): self.new_category.save_category() categories = Category.objects.all() self.new_category.delete_category() self.assertTrue(len(categories) < 1) class LocationTest(TestCase): # set up method def setUp(self): self.new_location = Location(name='canada') # tear down method def tearDown(self): Location.objects.all().delete() # testing instance def test_instance(self): self.assertTrue(self.new_location, Location) # testing saving image location def test_save_location(self): self.new_location.save_location() locations = Location.objects.all() self.assertTrue(len(locations) > 0) # testing deleting a location def test_delete_location(self): self.new_location.save_location() locations = Location.objects.all() self.new_location.delete_location() self.assertTrue(len(locations) < 1) class ImageTest(TestCase): # set up method def setUp(self): # creating a new image category and saving self.new_category = Category(name='newCategory') self.new_category.save() # creating aa new image location and saving self.new_location = Location(name='Canada') self.new_location.save() # creating a new image self.new_image = Image(image_url='building.png', name='building', description='Image of building taken at sunset', location=self.new_location, category=self.new_category) self.new_image.save() # tear down method def tearDown(self): Category.objects.all().delete() Location.objects.all().delete() 
Image.objects.all().delete() # testing saving an image def test_save_image(self): self.new_image.save_image() images = Image.objects.all() self.assertTrue(len(images) > 0) # testing saving multiple images def test_save_multiple_images(self): self.new_image.save_image() image2 = Image(image_url='building2.png', name='building2', description='Image of building taken at sunrise', location=self.new_location, category=self.new_category) image2.save_image() images = Image.objects.all() self.assertTrue(len(images) > 1) # testing deleting an image def test_delete_image(self): self.new_image.save_image() images = Image.objects.all() self.new_image.delete_image() self.assertTrue(len(images) < 1)
30.333333
178
0.668132
386
3,185
5.373057
0.160622
0.081003
0.047734
0.046287
0.575699
0.551109
0.431533
0.416586
0.372228
0.335583
0
0.004482
0.229513
3,185
104
179
30.625
0.840668
0.141601
0
0.459016
0
0
0.052709
0
0
0
0
0
0.147541
1
0.245902
false
0
0.032787
0
0.327869
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
0
0
0
1
8545d557cf7ae6b6369e7d408bec5095c2c77b1b
2,167
py
Python
examples/mxnet/export.py
mitaki28/onnx-chainer
845aa6c168d912ce044183c6dff6f21ce498d17c
[ "MIT" ]
null
null
null
examples/mxnet/export.py
mitaki28/onnx-chainer
845aa6c168d912ce044183c6dff6f21ce498d17c
[ "MIT" ]
1
2018-09-21T08:11:43.000Z
2018-09-21T08:11:43.000Z
examples/mxnet/export.py
mitaki28/onnx-chainer
845aa6c168d912ce044183c6dff6f21ce498d17c
[ "MIT" ]
null
null
null
#!/usr/bin/env python # -*- coding: utf-8 -*- import collections import numpy as np import chainer import chainer.functions as F import chainercv.links as C import mxnet import onnx_chainer def save_as_onnx_then_import_from_mxnet(model, fn): # Prepare an input tensor x = np.random.rand(1, 3, 224, 224).astype(np.float32) * 255 # Run the model on the data with chainer.using_config('train', False): chainer_out = model(x).array # Export Chainer model into ONNX onnx_chainer.export(model, x, fn) # Load ONNX model into MXNet symbol sym, arg, aux = mxnet.contrib.onnx.import_model(fn) # Find the name of input tensor data_names = [graph_input for graph_input in sym.list_inputs() if graph_input not in arg and graph_input not in aux] data_shapes = [(data_names[0], x.shape)] # Create MXNet model mod = mxnet.mod.Module( symbol=sym, data_names=data_names, context=mxnet.cpu(), label_names=None) mod.bind( for_training=False, data_shapes=data_shapes, label_shapes=None) mod.set_params( arg_params=arg, aux_params=aux, allow_missing=True, allow_extra=True) # Create input data Batch = collections.namedtuple('Batch', ['data']) input_data = Batch([mxnet.nd.array(x)]) # Forward computation using MXNet mod.forward(input_data) # Retrieve the output of forward result mxnet_out = mod.get_outputs()[0].asnumpy() # Check the prediction results are same assert np.argmax(chainer_out) == np.argmax(mxnet_out) # Check both outputs have same values np.testing.assert_almost_equal(chainer_out, mxnet_out, decimal=5) def main(): model = C.VGG16(pretrained_model='imagenet') save_as_onnx_then_import_from_mxnet(model, 'vgg16.onnx') model = C.ResNet50(pretrained_model='imagenet', arch='he') # Change cover_all option to False to match the default behavior of MXNet's pooling model.pool1 = lambda x: F.max_pooling_2d( x, ksize=3, stride=2, cover_all=False) save_as_onnx_then_import_from_mxnet(model, 'resnet50.onnx') if __name__ == '__main__': main()
28.893333
87
0.693124
320
2,167
4.484375
0.43125
0.027875
0.020906
0.029268
0.07108
0.07108
0.07108
0.07108
0
0
0
0.01687
0.206737
2,167
74
88
29.283784
0.817917
0.20766
0
0
0
0
0.036994
0
0
0
0
0
0.05
1
0.05
false
0
0.275
0
0.325
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
8545f8abe40c339f00fc63341382c5d42092fb16
982
py
Python
Otherfiles/notebook_check.py
lewiuberg/pycm
50fe8f55e073d438fadd0e27cc02090cd8361501
[ "MIT" ]
1,266
2018-01-22T20:54:00.000Z
2022-03-31T12:41:53.000Z
Otherfiles/notebook_check.py
lewiuberg/pycm
50fe8f55e073d438fadd0e27cc02090cd8361501
[ "MIT" ]
375
2018-02-19T16:06:24.000Z
2022-03-17T16:27:48.000Z
Otherfiles/notebook_check.py
lewiuberg/pycm
50fe8f55e073d438fadd0e27cc02090cd8361501
[ "MIT" ]
110
2018-01-22T23:38:59.000Z
2022-03-23T10:08:30.000Z
# -*- coding: utf-8 -*- """Notebook-check script.""" import os import nbformat from nbconvert.preprocessors import ExecutePreprocessor from art import tprint NOTEBOOKS_LIST = [ "Document", "Example1", "Example2", "Example3", "Example4", "Example5", "Example6", "Example7", "Example8"] EXTENSION = ".ipynb" if __name__ == "__main__": tprint("PYCM", "bulbhead") tprint("Document", "bulbhead") print("Processing ...") for index, notebook in enumerate(NOTEBOOKS_LIST): ep = ExecutePreprocessor(timeout=6000, kernel_name='python3') path = os.path.join("Document", notebook) with open(path + EXTENSION, "r", encoding="utf-8") as f: nb = nbformat.read(f, as_version=4) ep.preprocess(nb, {'metadata': {'path': 'Document/'}}) with open(path + EXTENSION, 'w', encoding='utf-8') as f: nbformat.write(nb, f) print("{0}.{1} [OK]".format(str(index + 1), notebook))
28.882353
69
0.607943
109
982
5.366972
0.59633
0.020513
0.041026
0.071795
0.051282
0
0
0
0
0
0
0.026212
0.223014
982
33
70
29.757576
0.740498
0.045825
0
0
0
0
0.201933
0
0
0
0
0
0
1
0
false
0
0.142857
0
0.142857
0.178571
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
8546f1c8d609306bdae939b66f98fe07f0ef570a
2,315
py
Python
src/eventsHandler/on_message/moderation/mute/revoke_mute.py
gastbob40/discord_brawl_bot
90ee7ef6492b5e4272a8baa42fd97f8369b07864
[ "MIT" ]
null
null
null
src/eventsHandler/on_message/moderation/mute/revoke_mute.py
gastbob40/discord_brawl_bot
90ee7ef6492b5e4272a8baa42fd97f8369b07864
[ "MIT" ]
null
null
null
src/eventsHandler/on_message/moderation/mute/revoke_mute.py
gastbob40/discord_brawl_bot
90ee7ef6492b5e4272a8baa42fd97f8369b07864
[ "MIT" ]
null
null
null
from typing import List import discord import yaml from src.models.models import Mute, session from src.utils.embeds_manager import EmbedsManager from src.utils.permissions_manager import PermissionsManager async def revoke_mute(client: discord.Client, message: discord.Message, args: List[str]): with open('run/config/config.yml', 'r') as file: config = yaml.safe_load(file) if not PermissionsManager.has_perm(message.author, 'mute'): return await message.channel.send( embed=EmbedsManager.error_embed( "Vous n'avez pas les permissions pour cette commande." ) ) # Help message if args and args[0] == '-h': return await message.channel.send( embed=EmbedsManager.information_embed( "Rappel de la commande : \n" f"`{config['prefix']}rmute <mute_id>`" ) ) if len(args) != 1: return await message.channel.send( embed=EmbedsManager.error_embed( f":x: Erreur dans la commande, merci de spécifier l'index du mute." ) ) if not args[0].startswith("m"): return await message.channel.send( embed=EmbedsManager.error_embed( ":x: Erreur, index invalide." ) ) index = int(args[0][1:]) current_mute: Mute = session.query(Mute).filter_by(id=index).first() if current_mute is None: return await message.channel.send( embed=EmbedsManager.error_embed( ":x: Erreur, index invalide." ) ) if not current_mute.is_active: return await message.channel.send( embed=EmbedsManager.error_embed( ":x: Erreur, ce mute est déjà révoqué." ) ) current_mute.is_active = False session.commit() target: discord.Member = message.guild.get_member(current_mute.target_id) for channel in message.guild.channels: if not target.permissions_in(channel).send_messages: await channel.set_permissions(target, overwrite=None) await message.channel.send( embed=EmbedsManager.complete_embed( f"⚠ Le mute **{args[0]}** a été révoqué." ) )
30.866667
89
0.597408
267
2,315
5.086142
0.400749
0.064801
0.097938
0.118557
0.309278
0.309278
0.279087
0.244477
0.244477
0.16053
0
0.003729
0.304968
2,315
74
90
31.283784
0.839652
0.005184
0
0.216667
0
0
0.145589
0.019557
0
0
0
0
0
1
0
false
0
0.1
0
0.2
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
854748ec64dba84be69bb1448db0b45736a94d36
489
py
Python
modoboa_public_api/migrations/0005_modoboainstance_domain_alias_counter.py
tonioo/modoboa-public-api
6f8f31348488e15fe935cacc6ee4655d56087fa1
[ "MIT" ]
2
2017-08-31T14:59:40.000Z
2019-03-02T21:39:26.000Z
modoboa_public_api/migrations/0005_modoboainstance_domain_alias_counter.py
tonioo/modoboa-public-api
6f8f31348488e15fe935cacc6ee4655d56087fa1
[ "MIT" ]
null
null
null
modoboa_public_api/migrations/0005_modoboainstance_domain_alias_counter.py
tonioo/modoboa-public-api
6f8f31348488e15fe935cacc6ee4655d56087fa1
[ "MIT" ]
2
2017-07-24T22:42:01.000Z
2018-10-26T05:05:50.000Z
# -*- coding: utf-8 -*- # Generated by Django 1.9.7 on 2016-06-15 18:24 from __future__ import unicode_literals from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('modoboa_public_api', '0004_auto_20160614_1717'), ] operations = [ migrations.AddField( model_name='modoboainstance', name='domain_alias_counter', field=models.PositiveIntegerField(default=0), ), ]
23.285714
58
0.646217
53
489
5.716981
0.849057
0
0
0
0
0
0
0
0
0
0
0.089431
0.245399
489
20
59
24.45
0.731707
0.137014
0
0
1
0
0.181384
0.054893
0
0
0
0
0
1
0
false
0
0.153846
0
0.384615
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
854863e9c50296993f6a388b3257fb1d9813ee80
1,398
py
Python
QUANTAXIS_Test/QAAnalysis_Test/QASignal_hull_MA_Test.py
PenghuiCheng/QUANTAXIS
b8d81ed592d7008151dc0bbbd5d1030e8af73067
[ "MIT" ]
1
2020-01-31T05:23:21.000Z
2020-01-31T05:23:21.000Z
QUANTAXIS_Test/QAAnalysis_Test/QASignal_hull_MA_Test.py
PenghuiCheng/QUANTAXIS
b8d81ed592d7008151dc0bbbd5d1030e8af73067
[ "MIT" ]
null
null
null
QUANTAXIS_Test/QAAnalysis_Test/QASignal_hull_MA_Test.py
PenghuiCheng/QUANTAXIS
b8d81ed592d7008151dc0bbbd5d1030e8af73067
[ "MIT" ]
null
null
null
import QUANTAXIS as QA from numpy import * from scipy.signal import savgol_filter import numpy as np import matplotlib.pyplot as plt from QUANTAXIS.QAIndicator.talib_numpy import * import mpl_finance as mpf import matplotlib.dates as mdates def smooth_demo(): data2 = QA.QA_fetch_crypto_asset_day_adv(['huobi'], symbol=['btcusdt'], start='2017-10-01', end='2020-06-30 23:59:59') xn = data2.close.values ma5 = talib.MA(data2.close.values, 10) hma5 = TA_HMA(data2.close.values, 10) kama5 = TA_KAMA(data2.close.values, 10) window_size, poly_order = 5, 1 yy_sg = savgol_filter(xn, window_size, poly_order) plt.figure(figsize = (22,9)) ax1 = plt.subplot(111) mpf.candlestick2_ochl(ax1, data2.data.open.values, data2.data.close.values, data2.data.high.values, data2.data.low.values, width=0.6, colorup='r', colordown='green', alpha=0.5) #ax1.title("The smoothing windows") #plt.plot(xn, lw=1, alpha=0.8) ax1.plot(hma5, lw=2, linestyle="--", color='darkcyan', alpha=0.6) ax1.plot(yy_sg, lw=1, color='darkcyan', alpha=0.8) ax1.plot(ma5, lw=1, color='orange', alpha=0.8) ax1.plot(kama5, lw=1, color='lightskyblue', alpha=0.8) l=['Hull Moving Average', 'savgol_filter', 'talib.MA10', 'KAMA10'] ax1.legend(l) plt.title("Smoothing a MA10 line") plt.show() if __name__=='__main__': smooth_demo()
33.285714
180
0.679542
222
1,398
4.148649
0.486486
0.039088
0.06949
0.058632
0.045603
0
0
0
0
0
0
0.073024
0.167382
1,398
41
181
34.097561
0.718213
0.045064
0
0
0
0
0.12003
0
0
0
0
0
0
1
0.03125
false
0
0.25
0
0.28125
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
8548e030cb94201bb3f56fd501049ba22c2f09df
1,626
py
Python
Src/Server/handlers.py
HamishHamiltonSmith/TKNET-Remote-file-transfer
2fc394281463482c5e6039cead9dc052cc09eb94
[ "Apache-2.0" ]
1
2021-12-04T16:57:19.000Z
2021-12-04T16:57:19.000Z
Src/Server/handlers.py
HamishHamiltonSmith/TKNET-Remote-file-transfer
2fc394281463482c5e6039cead9dc052cc09eb94
[ "Apache-2.0" ]
null
null
null
Src/Server/handlers.py
HamishHamiltonSmith/TKNET-Remote-file-transfer
2fc394281463482c5e6039cead9dc052cc09eb94
[ "Apache-2.0" ]
null
null
null
import time import os from datetime import datetime import breakpoint def log(msg): curr_date = datetime.now() l = open('/usr/share/Tknet/Server/tknet.log','a') l.write(f'\n[{curr_date}]: {msg}') l.close() def file_transfer_handle(c,x,d_name,address): c.send('FILEMODE'.encode()) c.send(f'DIRADD {d_name.split(".")[0]}'.encode()) time.sleep(0.5) log(f'Reached breakpoint of directory transfer for {address}') breakpoint.wait(c) log(f'Breakpoint resolved for {address}') log(f'Sending {x[1]} to {address}') c.send(f'FILEADD {x[1]}'.encode()) time.sleep(1) f = open(f'{x[1]}') c.send(f'FILECONT {x[1]} {f.read()}'.encode()) time.sleep(1) c.send('END'.encode()) def dir_transfer_handle(c,x,d_name,address): log(f"{[x[1]]}-Found directory, sending all files...") c.send('DIRMODE'.encode()) time.sleep(0.5) c.send("The selected option contains multiple files, be warned...".encode()) files = os.listdir(f'{x[1]}') time.sleep(0.5) c.send(f"DIRADD {d_name}".encode()) log(f'Reached breakpoint of directory transfer for {address}') breakpoint.wait(c) log(f'Breakpoint resolved for {address}') for item in files: if os.path.isdir(f'{x[1]}/{item}'): print(f'Dir found {item}') else: log(f'Sending {item} to {address}') c.send(f"FILEADD {item}".encode()) time.sleep(1) f = open(f'{x[1]}/{item}') c.send(f'FILECONT {item} {f.read()}'.encode()) time.sleep(1) #End directory transfer c.send('END'.encode())
31.269231
80
0.590406
246
1,626
3.861789
0.292683
0.057895
0.037895
0.067368
0.492632
0.468421
0.313684
0.254737
0.254737
0.204211
0
0.01489
0.215252
1,626
52
81
31.269231
0.729624
0.01353
0
0.333333
0
0
0.365337
0.034289
0
0
0
0
0
1
0.066667
false
0
0.088889
0
0.155556
0.022222
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
85495a86fbc5eda9a5807aaf00f10f29d51d67f3
5,297
py
Python
SequenceModel/seq_model.py
BhaveshJP25/RSNA
48d85faf82651b1ae4fdcd829ce2d4978a858d3f
[ "MIT" ]
null
null
null
SequenceModel/seq_model.py
BhaveshJP25/RSNA
48d85faf82651b1ae4fdcd829ce2d4978a858d3f
[ "MIT" ]
null
null
null
SequenceModel/seq_model.py
BhaveshJP25/RSNA
48d85faf82651b1ae4fdcd829ce2d4978a858d3f
[ "MIT" ]
null
null
null
import torch import torch.nn as nn import torch.nn.functional as F import torch.optim as optim class SequenceModel(nn.Module): def __init__(self, model_num, feature_dim, feature_num, lstm_layers, hidden, drop_out, Add_position): super(SequenceModel, self).__init__() self.feature_num=feature_num # seq model 1 self.fea_conv = nn.Sequential( nn.Dropout2d(drop_out), nn.Conv2d(feature_dim, 512, kernel_size=(1, 1), stride=(1,1), padding=(0,0), bias=False), nn.BatchNorm2d(512), nn.ReLU(), nn.Dropout2d(drop_out), nn.Conv2d(512, 128, kernel_size=(1, 1), stride=(1, 1), padding=(0, 0), bias=False), nn.BatchNorm2d(128), nn.ReLU(), nn.Dropout2d(drop_out), ) self.fea_first_final = nn.Sequential(nn.Conv2d(128 * feature_num, 6, kernel_size=(1, 1), stride=(1, 1), padding=(0, 0), bias=True)) # # bidirectional GRU self.hidden_fea = hidden self.fea_lstm = nn.GRU(128 * feature_num, self.hidden_fea, num_layers=lstm_layers, batch_first=True, bidirectional=True) self.fea_lstm_final = nn.Sequential(nn.Conv2d(1, 6, kernel_size=(1, self.hidden_fea*2), stride=(1, 1), padding=(0, 0), dilation=1, bias=True)) ratio = 4 if Add_position: model_num += 2 else: model_num += 1 # seq model 2 self.conv_first = nn.Sequential(nn.Conv2d(model_num, 128*ratio, kernel_size=(5, 1), stride=(1,1), padding=(2,0), dilation=1, bias=False), nn.BatchNorm2d(128*ratio), nn.ReLU(), nn.Conv2d(128*ratio, 64*ratio, kernel_size=(3, 1), stride=(1, 1), padding=(2, 0), dilation=2, bias=False), nn.BatchNorm2d(64*ratio), nn.ReLU()) self.conv_res = nn.Sequential(nn.Conv2d(64 * ratio, 64 * ratio, kernel_size=(3, 1), stride=(1, 1), padding=(4, 0), dilation=4, bias=False), nn.BatchNorm2d(64 * ratio), nn.ReLU(), nn.Conv2d(64 * ratio, 64 * ratio, kernel_size=(3, 1), stride=(1, 1), padding=(2, 0), dilation=2, bias=False), nn.BatchNorm2d(64 * ratio), nn.ReLU(),) self.conv_final = nn.Sequential(nn.Conv2d(64*ratio, 1, kernel_size=(3, 1), stride=(1, 1), padding=(1, 0), dilation=1,bias=False)) # bidirectional GRU self.hidden = hidden 
self.lstm = nn.GRU(64*ratio*6, self.hidden, num_layers=lstm_layers, batch_first=True, bidirectional=True) self.final = nn.Sequential(nn.Conv2d(1, 6, kernel_size=(1, self.hidden*2), stride=(1, 1), padding=(0, 0), dilation=1, bias=True)) def forward(self, fea, x): batch_size, _, _, _ = x.shape fea = self.fea_conv(fea) fea = fea.permute(0, 1, 3, 2).contiguous() fea = fea.view(batch_size, 128 * self.feature_num, -1).contiguous() fea = fea.view(batch_size, 128 * self.feature_num, -1, 1).contiguous() fea_first_final = self.fea_first_final(fea) ################################################# out0 = fea_first_final.permute(0, 3, 2, 1) ################################################# # bidirectional GRU fea = fea.view(batch_size, 128 * self.feature_num, -1).contiguous() fea = fea.permute(0, 2, 1).contiguous() fea, _ = self.fea_lstm(fea) fea = fea.view(batch_size, 1, -1, self.hidden_fea * 2) fea_lstm_final = self.fea_lstm_final(fea) fea_lstm_final = fea_lstm_final.permute(0, 3, 2, 1) ################################################# out0 += fea_lstm_final ################################################# out0_sigmoid = torch.sigmoid(out0) x = torch.cat([x, out0_sigmoid], dim = 1) x = self.conv_first(x) x = self.conv_res(x) x_cnn = self.conv_final(x) ################################################# out = x_cnn ################################################# # bidirectional GRU x = x.view(batch_size, 256, -1, 6) x = x.permute(0,2,1,3).contiguous() x = x.view(batch_size, x.size()[1], -1).contiguous() x, _= self.lstm(x) x = x.view(batch_size, 1, -1, self.hidden*2) x = self.final(x) x = x.permute(0,3,2,1) ################################################# out += x ################################################# #res return out, out0 if __name__ == '__main__': model = SequenceModel(model_num=15, feature_dim = 128, feature_num=16, lstm_layers = 2, hidden=128, drop_out=0.5, Add_position = True) print(model)
47.720721
150
0.473853
627
5,297
3.827751
0.122807
0.014167
0.033333
0.0625
0.51875
0.455
0.39125
0.370417
0.33375
0.33375
0
0.061334
0.332075
5,297
111
151
47.720721
0.617015
0.01869
0
0.15
0
0
0.001667
0
0
0
0
0
0
1
0.025
false
0
0.05
0
0.1
0.0125
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
854a55982872822a46d98df1de533deb387f7009
179
py
Python
musicbrainz/world/admin.py
tourdownunder/musicbrainz-django
bd8f8767ca0144dc10fd9ba2b59beb9de9788640
[ "BSD-3-Clause" ]
1
2020-09-19T11:00:31.000Z
2020-09-19T11:00:31.000Z
musicbrainz/world/admin.py
tourdownunder/musicbrainz-django
bd8f8767ca0144dc10fd9ba2b59beb9de9788640
[ "BSD-3-Clause" ]
8
2019-12-30T14:30:13.000Z
2021-09-22T18:03:10.000Z
musicbrainz/world/admin.py
tourdownunder/musicbrainz-django
bd8f8767ca0144dc10fd9ba2b59beb9de9788640
[ "BSD-3-Clause" ]
1
2021-10-03T10:06:29.000Z
2021-10-03T10:06:29.000Z
from django.contrib.gis import admin from .models import WorldBorder # admin.site.register(WorldBorder, admin.GeoModelAdmin) admin.site.register(WorldBorder, admin.OSMGeoAdmin)
25.571429
55
0.826816
22
179
6.727273
0.545455
0.324324
0.22973
0.378378
0.445946
0
0
0
0
0
0
0
0.083799
179
6
56
29.833333
0.902439
0.296089
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.666667
0
0.666667
0
1
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
8
854a9ce756635dcf347dd232a5a00548682056f7
1,597
py
Python
examples/task_hello.py
agarwalrounak/qmt
6fb8ee55fb9d544b72f6dc0c275000914e03af06
[ "MIT" ]
1
2018-09-30T00:45:53.000Z
2018-09-30T00:45:53.000Z
examples/task_hello.py
DalavanCloud/qmt
ea5d234012a1d1d39f34834ac9f2007c2a6d4852
[ "MIT" ]
null
null
null
examples/task_hello.py
DalavanCloud/qmt
ea5d234012a1d1d39f34834ac9f2007c2a6d4852
[ "MIT" ]
null
null
null
#!/usr/bin/env python3 # -*- coding: utf-8 -*- from qmt.tasks import Task, SweepManager sweep = SweepManager.create_empty_sweep() # our dask sweep manager class HelloTask(Task): def __init__(self): super().__init__() # required init @staticmethod def _solve_instance(inputs, options): # required task solver function print('Hello World') hi = HelloTask() # create a new task sweep.run(hi).result() # run through dask and resolve future.result() hi.run_daskless() # can also run locally class HelloOptionTask(Task): def __init__(self, language_options): super().__init__(options=language_options) @staticmethod def _solve_instance(inputs, options): greetings = {'English': 'Hello', 'Spanish': 'Hola'} print(greetings[options['language']] + ' World') hola = HelloOptionTask({'language': 'Spanish'}) sweep.run(hola).result() class NameTask(Task): def __init__(self, name_options): super().__init__(options=name_options) @staticmethod def _solve_instance(inputs, options): return options['name'] class HelloDependentTask(Task): def __init__(self, name_task, language_options): super().__init__(task_list=[name_task], options=language_options) @staticmethod def _solve_instance(inputs, options): name = inputs[0] greetings = {'English': 'Hello', 'Spanish': 'Hola'} print(greetings[options['language']] + ' ' + name) name = NameTask({'name': 'John'}) hola = HelloDependentTask(name, {'language': 'Spanish'}) sweep.run(hola).result()
28.017544
74
0.670632
179
1,597
5.692737
0.346369
0.027478
0.04318
0.058881
0.432777
0.395486
0.290481
0.243376
0.243376
0
0
0.002322
0.190983
1,597
56
75
28.517857
0.786378
0.121478
0
0.324324
0
0
0.087455
0
0
0
0
0
0
1
0.216216
false
0
0.027027
0.027027
0.378378
0.081081
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
0
0
0
1
854b394bb67bd9f05c7137d19f721026d26e8cfb
1,107
py
Python
eventrack/settings/prod.py
FedorSelitsky/eventrack
54869ab0eaba56d266a3d9c0c56c601039905255
[ "MIT" ]
5
2017-12-14T09:04:42.000Z
2018-06-01T20:09:02.000Z
eventrack/settings/prod.py
FedorSelitsky/eventrack
54869ab0eaba56d266a3d9c0c56c601039905255
[ "MIT" ]
11
2017-11-08T10:35:08.000Z
2018-10-11T19:37:36.000Z
eventrack/settings/prod.py
FedorSelitsky/eventrack
54869ab0eaba56d266a3d9c0c56c601039905255
[ "MIT" ]
null
null
null
import dj_database_url

from .base import *  # NOQA

# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = False

# SECURITY WARNING: keep the secret key used in production secret!
# Only override the base setting when the env var is actually present.
_cfg_secret_key = os.environ.get('CFG_SECRET_KEY')
if _cfg_secret_key is not None:
    SECRET_KEY = _cfg_secret_key

_cfg_allowed_hosts = os.environ.get('CFG_ALLOWED_HOSTS')
if _cfg_allowed_hosts is not None:
    ALLOWED_HOSTS = _cfg_allowed_hosts.split(',')

# Database
# https://docs.djangoproject.com/en/stable/ref/settings/#databases
DATABASES = {
    'default': dj_database_url.config(
        default='postgis://postgis:postgis@postgis/postgis',
    ),
}

# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/stable/howto/static-files/
STATIC_URL = '/static/'
STATIC_ROOT = os.getenv('CFG_STATIC_ROOT', os.path.join(BASE_DIR, 'static'))

MEDIA_URL = '/media/'
MEDIA_ROOT = os.getenv('CFG_MEDIA_ROOT', os.path.join(BASE_DIR, 'media'))

# ManifestStaticFilesStorage
# https://docs.djangoproject.com/en/stable/ref/contrib/staticfiles/#manifeststaticfilesstorage
STATICFILES_STORAGE = 'django.contrib.staticfiles.storage.ManifestStaticFilesStorage'
27
94
0.750678
146
1,107
5.520548
0.410959
0.044665
0.081886
0.093052
0.182382
0.182382
0.08933
0
0
0
0
0
0.117435
1,107
40
95
27.675
0.824974
0.383921
0
0
0
0
0.338806
0.152239
0
0
0
0
0
1
0
false
0
0.117647
0
0.117647
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
854bf3a30cf643b88428f0e25a363a7a8a2a0940
6,708
py
Python
pysparnn/cluster_selection.py
kchaliki/pysparnn
426b9e660fdd8c32bb6af4a7f833fb34a3d070ef
[ "BSD-3-Clause" ]
null
null
null
pysparnn/cluster_selection.py
kchaliki/pysparnn
426b9e660fdd8c32bb6af4a7f833fb34a3d070ef
[ "BSD-3-Clause" ]
null
null
null
pysparnn/cluster_selection.py
kchaliki/pysparnn
426b9e660fdd8c32bb6af4a7f833fb34a3d070ef
[ "BSD-3-Clause" ]
null
null
null
"""Cluster-root selection strategies for building pysparnn search trees."""

import random as _random
import collections as _collections
from abc import ABC, abstractmethod

import numpy as _np


def _k_best(tuple_list, k):
    """Return the k tuples with the smallest distance.

    Args:
        tuple_list: List of tuples. (distance, value)
        k: Number of tuples to return.
    """
    return sorted(tuple_list, key=lambda x: x[0], reverse=False)[:k]


def _assign_items_to_clusters(root, features, rng_step, shuffle=False):
    """Allocate every row of ``features`` to its nearest cluster root.

    Walks ``features`` in chunks of ``rng_step`` rows, querying ``root``
    for the nearest root of each row. Shared by both selector classes so
    the allocation loop is written once.

    Args:
        root: distance_type instance holding the cluster roots, labeled
            0..N-1 (label == row index in the roots matrix).
        features: matrix of records to allocate; must support ``shape``
            and slice indexing.
        rng_step: number of rows per nearest-neighbour query batch.
        shuffle: when True, shuffle each candidate list before picking
            the single best, so that ties are broken at random.

    Returns:
        defaultdict mapping cluster label -> list of row indices into
        ``features``.
    """
    item_to_clusters = _collections.defaultdict(list)
    for rng in range(0, features.shape[0], rng_step):
        # don't exceed the array length on the last step
        max_rng = min(rng + rng_step, features.shape[0])
        records_rng = features[rng:max_rng]
        # find the nearest cluster root for each feature in the step
        for i, clstrs in enumerate(root.nearest_search(records_rng)):
            if shuffle:
                _random.shuffle(clstrs)
            for _, cluster in _k_best(clstrs, k=1):
                # add each feature to its nearest cluster; the cluster
                # label is the row index of the root in the selection
                item_to_clusters[cluster].append(i + rng)
    return item_to_clusters


class ClusterSelector(ABC):
    """Interface: pick cluster roots and allocate features to them."""

    @abstractmethod
    def select_clusters(self, features):
        """Return (clusters_selection, item_to_clusters) for ``features``."""
        pass


class DefaultClusterSelector(ClusterSelector):
    """
    Default cluster selector, picks sqrt(num_records) random points (at most 1000)
    and allocates points to their nearest category. This can often end up splitting
    similar points into multiple paths of the tree
    """

    def __init__(self, distance_type):
        # distance_type: class constructed as distance_type(matrix, labels),
        # exposing remove_near_duplicates(), .matrix and nearest_search().
        self._distance_type = distance_type

    def select_clusters(self, features):
        # number of points to cluster
        num_records = features.shape[0]
        matrix_size = max(int(_np.sqrt(num_records)), 1000)
        # set num_clusters = min(max(sqrt(num_records), 1000), num_records))
        clusters_size = min(matrix_size, num_records)

        # make list [0, 1, ..., num_records-1]
        records_index = list(_np.arange(features.shape[0]))

        # randomly choose num_clusters records as the cluster roots
        # this randomizes both selection and order of features in the selection
        clusters_selection = _random.sample(records_index, clusters_size)
        clusters_selection = features[clusters_selection]

        # create a distance_type object containing the cluster roots
        # labeling them as 0 to N-1 in their current (random) order
        root = self._distance_type(clusters_selection,
                                   list(_np.arange(clusters_selection.shape[0])))
        # remove duplicate cluster roots
        root.remove_near_duplicates()
        # initialize distance type object with the remaining cluster roots
        root = self._distance_type(root.matrix,
                                   list(_np.arange(root.matrix.shape[0])))

        # walk features in steps of matrix_size = max(sqrt(num_records), 1000);
        # shuffle=True randomizes tie-breaking between equidistant roots
        item_to_clusters = _assign_items_to_clusters(
            root, features, rng_step=matrix_size, shuffle=True)

        # row index in clusters_selection maps to key in item_to_clusters
        # but the values in item_to_clusters are row indices of the original features matrix
        return clusters_selection, item_to_clusters


class DbscanClusterSelector(ClusterSelector):
    """
    Dbscan based cluster selector, picks sqrt(num_records) random points (at most 1000)
    and then forms groups inside the random selection, before allocating other
    features to the groups
    """

    def __init__(self, distance_type):
        self._distance_type = distance_type
        self._eps = 0.4  # DBSCAN neighbourhood radius for grouping roots

    def select_clusters(self, features):
        # sklearn is only needed by this selector; import lazily so the
        # module stays importable when scikit-learn is not installed.
        from sklearn.cluster import DBSCAN

        # number of points to cluster
        num_records = features.shape[0]
        matrix_size = max(int(_np.sqrt(num_records)), 1000)
        # set num_clusters = min(max(sqrt(num_records), 1000), num_records))
        clusters_size = min(matrix_size, num_records)

        # make list [0, 1, ..., num_records-1]
        records_index = list(_np.arange(features.shape[0]))

        # randomly choose num_clusters records as the cluster roots
        # this randomizes both selection and order of features in the selection
        random_clusters_selection = _random.sample(records_index, clusters_size)
        random_clusters_selection = features[random_clusters_selection]

        # now cluster the cluster roots themselves to avoid
        # randomly separating neighbours, this probably means fewer clusters per level
        # TODO might want to propagate the distance type to the clustering
        db_scan_clustering = DBSCAN(eps=self._eps,
                                    min_samples=2).fit(random_clusters_selection)

        # get all the individual points from the cluster (label -1 = noise)
        unique_indices = _np.where(db_scan_clustering.labels_ == -1)[0]
        # and the first item from each cluster
        _, cluster_start_indices = _np.unique(db_scan_clustering.labels_,
                                              return_index=True)
        # merge and uniquefy, the result is sorted
        all_indices = _np.concatenate((unique_indices, cluster_start_indices))
        all_indices_unique = _np.unique(all_indices)

        # create a matrix where rows are the first item in each dbscan cluster
        # set that as cluster selection and then allocate features to cluster
        clusters_selection = random_clusters_selection[all_indices_unique]

        # create a distance_type object containing the cluster roots
        root = self._distance_type(clusters_selection,
                                   list(_np.arange(clusters_selection.shape[0])))

        # walk features in steps of matrix_size = max(sqrt(num_records), 1000);
        # shuffling ties is slow here, disabled until proven useful
        item_to_clusters = _assign_items_to_clusters(
            root, features, rng_step=matrix_size, shuffle=False)

        # row index in clusters_selection maps to key in item_to_clusters
        # but the values in item_to_clusters are row indices of the original features matrix
        return clusters_selection, item_to_clusters
42.726115
100
0.672481
875
6,708
4.948571
0.232
0.066744
0.032333
0.024942
0.591917
0.573672
0.573672
0.573672
0.549192
0.549192
0
0.012288
0.259988
6,708
156
101
43
0.859992
0.405933
0
0.560606
0
0
0
0
0
0
0
0.00641
0
1
0.090909
false
0.015152
0.075758
0
0.257576
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0