[ "███╗░░██╗██╗██╗░░██╗░█████╗░██████╗░░█████╗░██╗░░██╗███████╗ # ████╗░██║██║██║░░██║██╔══██╗██╔══██╗██╔══██╗██║░██╔╝╚════██║ # ██╔██╗██║██║███████║███████║██████╔╝██║░░██║█████═╝░░░███╔═╝ # ██║╚████║██║██╔══██║██╔══██║██╔══██╗██║░░██║██╔═██╗░██╔══╝░░ # ██║░╚███║██║██║░░██║██║░░██║██║░░██║╚█████╔╝██║░╚██╗███████╗ # ╚═╝░░╚══╝╚═╝╚═╝░░╚═╝╚═╝░░╚═╝╚═╝░░╚═╝░╚════╝░╚═╝░░╚═╝╚══════╝ # #", "httpd = socketserver.TCPServer((\"\", PORT), Handler) print(\"serving at port\", PORT) httpd.serve_forever() #python -m http.server", "= socketserver.TCPServer((\"\", PORT), Handler) print(\"serving at port\", PORT) httpd.serve_forever() #python -m http.server --directory", "BY <NAME> ON 22-03-21. [https://nihars.com] # SOURCE [server.py] LAST MODIFIED ON 27-03-21 import", "<reponame>niharokz/website_archive #!/bin/python # # ███╗░░██╗██╗██╗░░██╗░█████╗░██████╗░░█████╗░██╗░░██╗███████╗ # ████╗░██║██║██║░░██║██╔══██╗██╔══██╗██╔══██╗██║░██╔╝╚════██║ # ██╔██╗██║██║███████║███████║██████╔╝██║░░██║█████═╝░░░███╔═╝ # ██║╚████║██║██╔══██║██╔══██║██╔══██╗██║░░██║██╔═██╗░██╔══╝░░ # ██║░╚███║██║██║░░██║██║░░██║██║░░██║╚█████╔╝██║░╚██╗███████╗", "# ████╗░██║██║██║░░██║██╔══██╗██╔══██╗██╔══██╗██║░██╔╝╚════██║ # ██╔██╗██║██║███████║███████║██████╔╝██║░░██║█████═╝░░░███╔═╝ # ██║╚████║██║██╔══██║██╔══██║██╔══██╗██║░░██║██╔═██╗░██╔══╝░░ # ██║░╚███║██║██║░░██║██║░░██║██║░░██║╚█████╔╝██║░╚██╗███████╗ # ╚═╝░░╚══╝╚═╝╚═╝░░╚═╝╚═╝░░╚═╝╚═╝░░╚═╝░╚════╝░╚═╝░░╚═╝╚══════╝ # # DRAFTED", "██║░╚███║██║██║░░██║██║░░██║██║░░██║╚█████╔╝██║░╚██╗███████╗ # ╚═╝░░╚══╝╚═╝╚═╝░░╚═╝╚═╝░░╚═╝╚═╝░░╚═╝░╚════╝░╚═╝░░╚═╝╚══════╝ # # DRAFTED BY <NAME> ON 22-03-21. [https://nihars.com] # SOURCE", "import os PORT = 8000 web_dir = os.path.join(os.path.dirname(__file__), 'public') os.chdir(web_dir) Handler = http.server.SimpleHTTPRequestHandler", "os.path.join(os.path.dirname(__file__), 'public') os.chdir(web_dir) Handler = http.server.SimpleHTTPRequestHandler httpd = socketserver.TCPServer((\"\", PORT), Handler) print(\"serving at", "web_dir = os.path.join(os.path.dirname(__file__), 'public') os.chdir(web_dir) Handler = http.server.SimpleHTTPRequestHandler httpd = socketserver.TCPServer((\"\", PORT), Handler)", "= http.server.SimpleHTTPRequestHandler httpd = socketserver.TCPServer((\"\", PORT), Handler) print(\"serving at port\", PORT) httpd.serve_forever() #python", "PORT), Handler) print(\"serving at port\", PORT) httpd.serve_forever() #python -m http.server --directory public &1", "# ██║╚████║██║██╔══██║██╔══██║██╔══██╗██║░░██║██╔═██╗░██╔══╝░░ # ██║░╚███║██║██║░░██║██║░░██║██║░░██║╚█████╔╝██║░╚██╗███████╗ # ╚═╝░░╚══╝╚═╝╚═╝░░╚═╝╚═╝░░╚═╝╚═╝░░╚═╝░╚════╝░╚═╝░░╚═╝╚══════╝ # # DRAFTED BY <NAME> ON 22-03-21.", "# ██║░╚███║██║██║░░██║██║░░██║██║░░██║╚█████╔╝██║░╚██╗███████╗ # ╚═╝░░╚══╝╚═╝╚═╝░░╚═╝╚═╝░░╚═╝╚═╝░░╚═╝░╚════╝░╚═╝░░╚═╝╚══════╝ # # DRAFTED BY <NAME> ON 22-03-21. [https://nihars.com] #", "= 8000 web_dir = os.path.join(os.path.dirname(__file__), 'public') os.chdir(web_dir) Handler = http.server.SimpleHTTPRequestHandler httpd = socketserver.TCPServer((\"\",", "# # ███╗░░██╗██╗██╗░░██╗░█████╗░██████╗░░█████╗░██╗░░██╗███████╗ # ████╗░██║██║██║░░██║██╔══██╗██╔══██╗██╔══██╗██║░██╔╝╚════██║ # ██╔██╗██║██║███████║███████║██████╔╝██║░░██║█████═╝░░░███╔═╝ # ██║╚████║██║██╔══██║██╔══██║██╔══██╗██║░░██║██╔═██╗░██╔══╝░░ # ██║░╚███║██║██║░░██║██║░░██║██║░░██║╚█████╔╝██║░╚██╗███████╗ # ╚═╝░░╚══╝╚═╝╚═╝░░╚═╝╚═╝░░╚═╝╚═╝░░╚═╝░╚════╝░╚═╝░░╚═╝╚══════╝", "# ╚═╝░░╚══╝╚═╝╚═╝░░╚═╝╚═╝░░╚═╝╚═╝░░╚═╝░╚════╝░╚═╝░░╚═╝╚══════╝ # # DRAFTED BY <NAME> ON 22-03-21. 
[https://nihars.com] # SOURCE [server.py]", "27-03-21 import http.server import socketserver import os PORT = 8000 web_dir = os.path.join(os.path.dirname(__file__),", "# ███╗░░██╗██╗██╗░░██╗░█████╗░██████╗░░█████╗░██╗░░██╗███████╗ # ████╗░██║██║██║░░██║██╔══██╗██╔══██╗██╔══██╗██║░██╔╝╚════██║ # ██╔██╗██║██║███████║███████║██████╔╝██║░░██║█████═╝░░░███╔═╝ # ██║╚████║██║██╔══██║██╔══██║██╔══██╗██║░░██║██╔═██╗░██╔══╝░░ # ██║░╚███║██║██║░░██║██║░░██║██║░░██║╚█████╔╝██║░╚██╗███████╗ # ╚═╝░░╚══╝╚═╝╚═╝░░╚═╝╚═╝░░╚═╝╚═╝░░╚═╝░╚════╝░╚═╝░░╚═╝╚══════╝ #", "[https://nihars.com] # SOURCE [server.py] LAST MODIFIED ON 27-03-21 import http.server import socketserver import", "import socketserver import os PORT = 8000 web_dir = os.path.join(os.path.dirname(__file__), 'public') os.chdir(web_dir) Handler", "SOURCE [server.py] LAST MODIFIED ON 27-03-21 import http.server import socketserver import os PORT", "╚═╝░░╚══╝╚═╝╚═╝░░╚═╝╚═╝░░╚═╝╚═╝░░╚═╝░╚════╝░╚═╝░░╚═╝╚══════╝ # # DRAFTED BY <NAME> ON 22-03-21. [https://nihars.com] # SOURCE [server.py] LAST", "[server.py] LAST MODIFIED ON 27-03-21 import http.server import socketserver import os PORT =", "8000 web_dir = os.path.join(os.path.dirname(__file__), 'public') os.chdir(web_dir) Handler = http.server.SimpleHTTPRequestHandler httpd = socketserver.TCPServer((\"\", PORT),", "# ██╔██╗██║██║███████║███████║██████╔╝██║░░██║█████═╝░░░███╔═╝ # ██║╚████║██║██╔══██║██╔══██║██╔══██╗██║░░██║██╔═██╗░██╔══╝░░ # ██║░╚███║██║██║░░██║██║░░██║██║░░██║╚█████╔╝██║░╚██╗███████╗ # ╚═╝░░╚══╝╚═╝╚═╝░░╚═╝╚═╝░░╚═╝╚═╝░░╚═╝░╚════╝░╚═╝░░╚═╝╚══════╝ # # DRAFTED BY <NAME>", "'public') os.chdir(web_dir) Handler = http.server.SimpleHTTPRequestHandler httpd = socketserver.TCPServer((\"\", PORT), Handler) print(\"serving at port\",", "Handler = http.server.SimpleHTTPRequestHandler httpd = socketserver.TCPServer((\"\", PORT), Handler) print(\"serving at port\", PORT) httpd.serve_forever()", "socketserver.TCPServer((\"\", PORT), Handler) print(\"serving at port\", PORT) httpd.serve_forever() #python -m http.server --directory public", "PORT = 8000 web_dir = os.path.join(os.path.dirname(__file__), 'public') os.chdir(web_dir) Handler = http.server.SimpleHTTPRequestHandler httpd =", "http.server.SimpleHTTPRequestHandler httpd = socketserver.TCPServer((\"\", PORT), Handler) print(\"serving at port\", PORT) httpd.serve_forever() #python -m", "# SOURCE [server.py] LAST MODIFIED ON 27-03-21 import http.server import socketserver import os", "http.server import socketserver import os PORT = 8000 web_dir = os.path.join(os.path.dirname(__file__), 'public') os.chdir(web_dir)", "os.chdir(web_dir) Handler = http.server.SimpleHTTPRequestHandler httpd = socketserver.TCPServer((\"\", PORT), Handler) print(\"serving at port\", PORT)", "MODIFIED ON 27-03-21 import http.server import socketserver import os PORT = 8000 web_dir", "DRAFTED BY <NAME> ON 22-03-21. [https://nihars.com] # SOURCE [server.py] LAST MODIFIED ON 27-03-21", "import http.server import socketserver import os PORT = 8000 web_dir = os.path.join(os.path.dirname(__file__), 'public')", "ON 27-03-21 import http.server import socketserver import os PORT = 8000 web_dir =", "# # DRAFTED BY <NAME> ON 22-03-21. 
[https://nihars.com] # SOURCE [server.py] LAST MODIFIED", "██╔██╗██║██║███████║███████║██████╔╝██║░░██║█████═╝░░░███╔═╝ # ██║╚████║██║██╔══██║██╔══██║██╔══██╗██║░░██║██╔═██╗░██╔══╝░░ # ██║░╚███║██║██║░░██║██║░░██║██║░░██║╚█████╔╝██║░╚██╗███████╗ # ╚═╝░░╚══╝╚═╝╚═╝░░╚═╝╚═╝░░╚═╝╚═╝░░╚═╝░╚════╝░╚═╝░░╚═╝╚══════╝ # # DRAFTED BY <NAME> ON", "██║╚████║██║██╔══██║██╔══██║██╔══██╗██║░░██║██╔═██╗░██╔══╝░░ # ██║░╚███║██║██║░░██║██║░░██║██║░░██║╚█████╔╝██║░╚██╗███████╗ # ╚═╝░░╚══╝╚═╝╚═╝░░╚═╝╚═╝░░╚═╝╚═╝░░╚═╝░╚════╝░╚═╝░░╚═╝╚══════╝ # # DRAFTED BY <NAME> ON 22-03-21. [https://nihars.com]", "ON 22-03-21. [https://nihars.com] # SOURCE [server.py] LAST MODIFIED ON 27-03-21 import http.server import", "22-03-21. [https://nihars.com] # SOURCE [server.py] LAST MODIFIED ON 27-03-21 import http.server import socketserver", "socketserver import os PORT = 8000 web_dir = os.path.join(os.path.dirname(__file__), 'public') os.chdir(web_dir) Handler =", "<NAME> ON 22-03-21. [https://nihars.com] # SOURCE [server.py] LAST MODIFIED ON 27-03-21 import http.server", "# DRAFTED BY <NAME> ON 22-03-21. [https://nihars.com] # SOURCE [server.py] LAST MODIFIED ON", "LAST MODIFIED ON 27-03-21 import http.server import socketserver import os PORT = 8000", "= os.path.join(os.path.dirname(__file__), 'public') os.chdir(web_dir) Handler = http.server.SimpleHTTPRequestHandler httpd = socketserver.TCPServer((\"\", PORT), Handler) print(\"serving", "████╗░██║██║██║░░██║██╔══██╗██╔══██╗██╔══██╗██║░██╔╝╚════██║ # ██╔██╗██║██║███████║███████║██████╔╝██║░░██║█████═╝░░░███╔═╝ # ██║╚████║██║██╔══██║██╔══██║██╔══██╗██║░░██║██╔═██╗░██╔══╝░░ # ██║░╚███║██║██║░░██║██║░░██║██║░░██║╚█████╔╝██║░╚██╗███████╗ # ╚═╝░░╚══╝╚═╝╚═╝░░╚═╝╚═╝░░╚═╝╚═╝░░╚═╝░╚════╝░╚═╝░░╚═╝╚══════╝ # # DRAFTED BY", "#!/bin/python # # ███╗░░██╗██╗██╗░░██╗░█████╗░██████╗░░█████╗░██╗░░██╗███████╗ # ████╗░██║██║██║░░██║██╔══██╗██╔══██╗██╔══██╗██║░██╔╝╚════██║ # ██╔██╗██║██║███████║███████║██████╔╝██║░░██║█████═╝░░░███╔═╝ # ██║╚████║██║██╔══██║██╔══██║██╔══██╗██║░░██║██╔═██╗░██╔══╝░░ # ██║░╚███║██║██║░░██║██║░░██║██║░░██║╚█████╔╝██║░╚██╗███████╗ #", "os PORT = 8000 web_dir = os.path.join(os.path.dirname(__file__), 'public') os.chdir(web_dir) Handler = http.server.SimpleHTTPRequestHandler httpd" ]
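
The server.py above (from the niharokz/website_archive repo) serves the ./public directory next to the script on port 8000; the trailing comment notes the roughly equivalent http.server one-liner. A quick smoke test, as a hypothetical snippet that is not part of the repository, is to fetch the root document from a second shell while the server runs:

# smoke test (hypothetical): fetch the index page served by server.py
import urllib.request

with urllib.request.urlopen("http://localhost:8000/") as resp:
    # expect HTTP 200 and the content type of public/index.html
    print(resp.status, resp.headers.get("Content-Type"))
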
[ "actual.pose, tolerance) elif type(goal) is geometry_msgs.msg.Pose: return all_close(pose_to_list(goal), pose_to_list(actual), tolerance) return True def", "if abs(actual[index] - goal[index]) > tolerance: return False elif type(goal) is geometry_msgs.msg.PoseStamped: return", "anonymous=True) rospy.Subscriber(\"/aruco_pose\",geometry_msgs.msg.Pose,callback,(ur_robot, ur_scene, ur_move_group, ur_planning_frame, ur_eef_link, ur_group_names)) rospy.spin() except rospy.ROSInterruptException: return except KeyboardInterrupt:", "a Pose or a PoseStamped @param: actual A list of floats, a Pose", "a list of values are within a tolerance of their counterparts in another", "#!/usr/bin/env python import sys import copy import rospy import moveit_commander import moveit_msgs.msg import", "if type(goal) is list: for index in range(len(goal)): if abs(actual[index] - goal[index]) >", "group_names = robot.get_group_names() robot.get_current_state() # Misc variables ur_robot = robot ur_scene = scene", "geometry_msgs.msg.Pose: return all_close(pose_to_list(goal), pose_to_list(actual), tolerance) return True def callback(data,args): print(\"hello there\") ur_robot =", "data.orientation.x = -0.0 data.orientation.y = 1.0 data.orientation.z = 0.0 data.orientation.w = -0.0 move_group.set_pose_target(data)", "or a PoseStamped @param: tolerance A float @returns: bool \"\"\" all_equal = True", "= args[4] ur_group_names = args[5] move_group = ur_move_group print(\"elo grab\") print(data) data.position.x =", "moveit_msgs.msg import geometry_msgs.msg from math import pi from std_msgs.msg import String from moveit_commander.conversions", "method for testing if a list of values are within a tolerance of", "data.position.x - 0.05 data.position.y = data.position.y - 0.03 data.position.z = 0.15 data.orientation.x =", "group_name = \"manipulator\" move_group = moveit_commander.MoveGroupCommander(group_name) planning_frame = move_group.get_planning_frame() eef_link = move_group.get_end_effector_link() group_names", "= move_group.get_planning_frame() eef_link = move_group.get_end_effector_link() group_names = robot.get_group_names() robot.get_current_state() # Misc variables ur_robot", "ur_planning_frame, ur_eef_link, ur_group_names)) rospy.spin() except rospy.ROSInterruptException: return except KeyboardInterrupt: return if __name__ ==", "ur_group_names = group_names rospy.init_node('move_ur_python_interface', anonymous=True) rospy.Subscriber(\"/aruco_pose\",geometry_msgs.msg.Pose,callback,(ur_robot, ur_scene, ur_move_group, ur_planning_frame, ur_eef_link, ur_group_names)) rospy.spin() except", "moveit_commander.RobotCommander() scene = moveit_commander.PlanningSceneInterface() group_name = \"manipulator\" move_group = moveit_commander.MoveGroupCommander(group_name) planning_frame = move_group.get_planning_frame()", "= moveit_commander.RobotCommander() scene = moveit_commander.PlanningSceneInterface() group_name = \"manipulator\" move_group = moveit_commander.MoveGroupCommander(group_name) planning_frame =", "abs(actual[index] - goal[index]) > tolerance: return False elif type(goal) is geometry_msgs.msg.PoseStamped: return all_close(goal.pose,", "\"manipulator\" move_group = moveit_commander.MoveGroupCommander(group_name) planning_frame = move_group.get_planning_frame() eef_link = move_group.get_end_effector_link() group_names = robot.get_group_names()", "list @param: goal A list of floats, a Pose or a PoseStamped @param:", "= 0.15 data.orientation.x = -0.0 data.orientation.y = 1.0 data.orientation.z = 0.0 
data.orientation.w =", "= True if type(goal) is list: for index in range(len(goal)): if abs(actual[index] -", "ur_group_names = args[5] move_group = ur_move_group print(\"elo grab\") print(data) data.position.x = data.position.x -", "list of floats, a Pose or a PoseStamped @param: actual A list of", "all_equal = True if type(goal) is list: for index in range(len(goal)): if abs(actual[index]", "ur_group_names)) rospy.spin() except rospy.ROSInterruptException: return except KeyboardInterrupt: return if __name__ == '__main__': main()", "pose_to_list import tf def all_close(goal, actual, tolerance): \"\"\" Convenience method for testing if", "moveit_commander.conversions import pose_to_list import tf def all_close(goal, actual, tolerance): \"\"\" Convenience method for", "is geometry_msgs.msg.Pose: return all_close(pose_to_list(goal), pose_to_list(actual), tolerance) return True def callback(data,args): print(\"hello there\") ur_robot", "all_close(pose_to_list(goal), pose_to_list(actual), tolerance) return True def callback(data,args): print(\"hello there\") ur_robot = args[0] ur_scene", "= 0.0 data.orientation.w = -0.0 move_group.set_pose_target(data) plan = move_group.go(wait=True) move_group.stop() move_group.clear_pose_targets() current_pose =", "ur_scene = scene ur_move_group = move_group ur_planning_frame = planning_frame ur_eef_link = eef_link ur_group_names", "index in range(len(goal)): if abs(actual[index] - goal[index]) > tolerance: return False elif type(goal)", "ur_scene = args[1] ur_move_group = args[2] ur_planning_frame = args[3] ur_eef_link = args[4] ur_group_names", "ur_scene, ur_move_group, ur_planning_frame, ur_eef_link, ur_group_names)) rospy.spin() except rospy.ROSInterruptException: return except KeyboardInterrupt: return if", "ur_robot = robot ur_scene = scene ur_move_group = move_group ur_planning_frame = planning_frame ur_eef_link", "- 0.05 data.position.y = data.position.y - 0.03 data.position.z = 0.15 data.orientation.x = -0.0", "ur_move_group = args[2] ur_planning_frame = args[3] ur_eef_link = args[4] ur_group_names = args[5] move_group", "actual, tolerance): \"\"\" Convenience method for testing if a list of values are", "= args[0] ur_scene = args[1] ur_move_group = args[2] ur_planning_frame = args[3] ur_eef_link =", "there\") ur_robot = args[0] ur_scene = args[1] ur_move_group = args[2] ur_planning_frame = args[3]", "- 0.03 data.position.z = 0.15 data.orientation.x = -0.0 data.orientation.y = 1.0 data.orientation.z =", "or a PoseStamped @param: actual A list of floats, a Pose or a", "return all_close(data, current_pose, 0.01) def main(): try: print(\"Grab\") moveit_commander.roscpp_initialize(sys.argv) robot = moveit_commander.RobotCommander() scene", "# Misc variables ur_robot = robot ur_scene = scene ur_move_group = move_group ur_planning_frame", "String from moveit_commander.conversions import pose_to_list import tf def all_close(goal, actual, tolerance): \"\"\" Convenience", "0.01) def main(): try: print(\"Grab\") moveit_commander.roscpp_initialize(sys.argv) robot = moveit_commander.RobotCommander() scene = moveit_commander.PlanningSceneInterface() group_name", "tf def all_close(goal, actual, tolerance): \"\"\" Convenience method for testing if a list", "ur_planning_frame = args[3] ur_eef_link = args[4] ur_group_names = args[5] move_group = ur_move_group print(\"elo", "0.15 data.orientation.x = -0.0 data.orientation.y = 1.0 data.orientation.z = 0.0 data.orientation.w = -0.0", "eef_link = move_group.get_end_effector_link() group_names = robot.get_group_names() 
robot.get_current_state() # Misc variables ur_robot = robot", "geometry_msgs.msg.PoseStamped: return all_close(goal.pose, actual.pose, tolerance) elif type(goal) is geometry_msgs.msg.Pose: return all_close(pose_to_list(goal), pose_to_list(actual), tolerance)", "args[2] ur_planning_frame = args[3] ur_eef_link = args[4] ur_group_names = args[5] move_group = ur_move_group", "robot.get_current_state() # Misc variables ur_robot = robot ur_scene = scene ur_move_group = move_group", "std_msgs.msg import String from moveit_commander.conversions import pose_to_list import tf def all_close(goal, actual, tolerance):", "import moveit_commander import moveit_msgs.msg import geometry_msgs.msg from math import pi from std_msgs.msg import", "import String from moveit_commander.conversions import pose_to_list import tf def all_close(goal, actual, tolerance): \"\"\"", "= moveit_commander.MoveGroupCommander(group_name) planning_frame = move_group.get_planning_frame() eef_link = move_group.get_end_effector_link() group_names = robot.get_group_names() robot.get_current_state() #", "planning_frame ur_eef_link = eef_link ur_group_names = group_names rospy.init_node('move_ur_python_interface', anonymous=True) rospy.Subscriber(\"/aruco_pose\",geometry_msgs.msg.Pose,callback,(ur_robot, ur_scene, ur_move_group, ur_planning_frame,", "= -0.0 move_group.set_pose_target(data) plan = move_group.go(wait=True) move_group.stop() move_group.clear_pose_targets() current_pose = move_group.get_current_pose().pose return all_close(data,", "0.0 data.orientation.w = -0.0 move_group.set_pose_target(data) plan = move_group.go(wait=True) move_group.stop() move_group.clear_pose_targets() current_pose = move_group.get_current_pose().pose", "= move_group.get_end_effector_link() group_names = robot.get_group_names() robot.get_current_state() # Misc variables ur_robot = robot ur_scene", "move_group.set_pose_target(data) plan = move_group.go(wait=True) move_group.stop() move_group.clear_pose_targets() current_pose = move_group.get_current_pose().pose return all_close(data, current_pose, 0.01)", "from std_msgs.msg import String from moveit_commander.conversions import pose_to_list import tf def all_close(goal, actual,", "move_group.go(wait=True) move_group.stop() move_group.clear_pose_targets() current_pose = move_group.get_current_pose().pose return all_close(data, current_pose, 0.01) def main(): try:", "import pi from std_msgs.msg import String from moveit_commander.conversions import pose_to_list import tf def", "from math import pi from std_msgs.msg import String from moveit_commander.conversions import pose_to_list import", "= planning_frame ur_eef_link = eef_link ur_group_names = group_names rospy.init_node('move_ur_python_interface', anonymous=True) rospy.Subscriber(\"/aruco_pose\",geometry_msgs.msg.Pose,callback,(ur_robot, ur_scene, ur_move_group,", "plan = move_group.go(wait=True) move_group.stop() move_group.clear_pose_targets() current_pose = move_group.get_current_pose().pose return all_close(data, current_pose, 0.01) def", "= group_names rospy.init_node('move_ur_python_interface', anonymous=True) rospy.Subscriber(\"/aruco_pose\",geometry_msgs.msg.Pose,callback,(ur_robot, ur_scene, ur_move_group, ur_planning_frame, ur_eef_link, ur_group_names)) rospy.spin() except rospy.ROSInterruptException:", "in another list @param: goal A list of floats, a Pose or a", "data.orientation.w = -0.0 move_group.set_pose_target(data) plan = move_group.go(wait=True) move_group.stop() move_group.clear_pose_targets() current_pose = 
move_group.get_current_pose().pose return", "are within a tolerance of their counterparts in another list @param: goal A", "ur_eef_link, ur_group_names)) rospy.spin() except rospy.ROSInterruptException: return except KeyboardInterrupt: return if __name__ == '__main__':", "float @returns: bool \"\"\" all_equal = True if type(goal) is list: for index", "return all_close(goal.pose, actual.pose, tolerance) elif type(goal) is geometry_msgs.msg.Pose: return all_close(pose_to_list(goal), pose_to_list(actual), tolerance) return", "a PoseStamped @param: tolerance A float @returns: bool \"\"\" all_equal = True if", "moveit_commander import moveit_msgs.msg import geometry_msgs.msg from math import pi from std_msgs.msg import String", "scene ur_move_group = move_group ur_planning_frame = planning_frame ur_eef_link = eef_link ur_group_names = group_names", "True if type(goal) is list: for index in range(len(goal)): if abs(actual[index] - goal[index])", "import copy import rospy import moveit_commander import moveit_msgs.msg import geometry_msgs.msg from math import", "args[0] ur_scene = args[1] ur_move_group = args[2] ur_planning_frame = args[3] ur_eef_link = args[4]", "another list @param: goal A list of floats, a Pose or a PoseStamped", "@returns: bool \"\"\" all_equal = True if type(goal) is list: for index in", "= args[5] move_group = ur_move_group print(\"elo grab\") print(data) data.position.x = data.position.x - 0.05", "print(\"Grab\") moveit_commander.roscpp_initialize(sys.argv) robot = moveit_commander.RobotCommander() scene = moveit_commander.PlanningSceneInterface() group_name = \"manipulator\" move_group =", "move_group.get_planning_frame() eef_link = move_group.get_end_effector_link() group_names = robot.get_group_names() robot.get_current_state() # Misc variables ur_robot =", "in range(len(goal)): if abs(actual[index] - goal[index]) > tolerance: return False elif type(goal) is", "data.position.z = 0.15 data.orientation.x = -0.0 data.orientation.y = 1.0 data.orientation.z = 0.0 data.orientation.w", "callback(data,args): print(\"hello there\") ur_robot = args[0] ur_scene = args[1] ur_move_group = args[2] ur_planning_frame", "floats, a Pose or a PoseStamped @param: tolerance A float @returns: bool \"\"\"", "move_group.clear_pose_targets() current_pose = move_group.get_current_pose().pose return all_close(data, current_pose, 0.01) def main(): try: print(\"Grab\") moveit_commander.roscpp_initialize(sys.argv)", "Pose or a PoseStamped @param: actual A list of floats, a Pose or", "sys import copy import rospy import moveit_commander import moveit_msgs.msg import geometry_msgs.msg from math", "tolerance of their counterparts in another list @param: goal A list of floats,", "= data.position.y - 0.03 data.position.z = 0.15 data.orientation.x = -0.0 data.orientation.y = 1.0", "move_group = moveit_commander.MoveGroupCommander(group_name) planning_frame = move_group.get_planning_frame() eef_link = move_group.get_end_effector_link() group_names = robot.get_group_names() robot.get_current_state()", "args[4] ur_group_names = args[5] move_group = ur_move_group print(\"elo grab\") print(data) data.position.x = data.position.x", "move_group = ur_move_group print(\"elo grab\") print(data) data.position.x = data.position.x - 0.05 data.position.y =", "goal[index]) > tolerance: return False elif type(goal) is geometry_msgs.msg.PoseStamped: return all_close(goal.pose, actual.pose, tolerance)", "for testing if a list of values are within a tolerance of their", "- goal[index]) > tolerance: return False elif 
type(goal) is geometry_msgs.msg.PoseStamped: return all_close(goal.pose, actual.pose,", "ur_robot = args[0] ur_scene = args[1] ur_move_group = args[2] ur_planning_frame = args[3] ur_eef_link", "return True def callback(data,args): print(\"hello there\") ur_robot = args[0] ur_scene = args[1] ur_move_group", "all_close(data, current_pose, 0.01) def main(): try: print(\"Grab\") moveit_commander.roscpp_initialize(sys.argv) robot = moveit_commander.RobotCommander() scene =", "args[3] ur_eef_link = args[4] ur_group_names = args[5] move_group = ur_move_group print(\"elo grab\") print(data)", "elif type(goal) is geometry_msgs.msg.PoseStamped: return all_close(goal.pose, actual.pose, tolerance) elif type(goal) is geometry_msgs.msg.Pose: return", "for index in range(len(goal)): if abs(actual[index] - goal[index]) > tolerance: return False elif", "def main(): try: print(\"Grab\") moveit_commander.roscpp_initialize(sys.argv) robot = moveit_commander.RobotCommander() scene = moveit_commander.PlanningSceneInterface() group_name =", "data.position.y = data.position.y - 0.03 data.position.z = 0.15 data.orientation.x = -0.0 data.orientation.y =", "= move_group.go(wait=True) move_group.stop() move_group.clear_pose_targets() current_pose = move_group.get_current_pose().pose return all_close(data, current_pose, 0.01) def main():", "type(goal) is list: for index in range(len(goal)): if abs(actual[index] - goal[index]) > tolerance:", "a Pose or a PoseStamped @param: tolerance A float @returns: bool \"\"\" all_equal", "ur_move_group = move_group ur_planning_frame = planning_frame ur_eef_link = eef_link ur_group_names = group_names rospy.init_node('move_ur_python_interface',", "= args[2] ur_planning_frame = args[3] ur_eef_link = args[4] ur_group_names = args[5] move_group =", "import geometry_msgs.msg from math import pi from std_msgs.msg import String from moveit_commander.conversions import", "= ur_move_group print(\"elo grab\") print(data) data.position.x = data.position.x - 0.05 data.position.y = data.position.y", "0.05 data.position.y = data.position.y - 0.03 data.position.z = 0.15 data.orientation.x = -0.0 data.orientation.y", "move_group.get_current_pose().pose return all_close(data, current_pose, 0.01) def main(): try: print(\"Grab\") moveit_commander.roscpp_initialize(sys.argv) robot = moveit_commander.RobotCommander()", "current_pose, 0.01) def main(): try: print(\"Grab\") moveit_commander.roscpp_initialize(sys.argv) robot = moveit_commander.RobotCommander() scene = moveit_commander.PlanningSceneInterface()", "-0.0 move_group.set_pose_target(data) plan = move_group.go(wait=True) move_group.stop() move_group.clear_pose_targets() current_pose = move_group.get_current_pose().pose return all_close(data, current_pose,", "robot ur_scene = scene ur_move_group = move_group ur_planning_frame = planning_frame ur_eef_link = eef_link", "group_names rospy.init_node('move_ur_python_interface', anonymous=True) rospy.Subscriber(\"/aruco_pose\",geometry_msgs.msg.Pose,callback,(ur_robot, ur_scene, ur_move_group, ur_planning_frame, ur_eef_link, ur_group_names)) rospy.spin() except rospy.ROSInterruptException: return", "elif type(goal) is geometry_msgs.msg.Pose: return all_close(pose_to_list(goal), pose_to_list(actual), tolerance) return True def callback(data,args): print(\"hello", "Pose or a PoseStamped @param: tolerance A float @returns: bool \"\"\" all_equal =", "of floats, a Pose or a PoseStamped @param: tolerance A float @returns: bool", "data.position.x = data.position.x - 0.05 data.position.y = data.position.y - 
0.03 data.position.z = 0.15", "floats, a Pose or a PoseStamped @param: actual A list of floats, a", "current_pose = move_group.get_current_pose().pose return all_close(data, current_pose, 0.01) def main(): try: print(\"Grab\") moveit_commander.roscpp_initialize(sys.argv) robot", "grab\") print(data) data.position.x = data.position.x - 0.05 data.position.y = data.position.y - 0.03 data.position.z", "try: print(\"Grab\") moveit_commander.roscpp_initialize(sys.argv) robot = moveit_commander.RobotCommander() scene = moveit_commander.PlanningSceneInterface() group_name = \"manipulator\" move_group", "moveit_commander.MoveGroupCommander(group_name) planning_frame = move_group.get_planning_frame() eef_link = move_group.get_end_effector_link() group_names = robot.get_group_names() robot.get_current_state() # Misc", "type(goal) is geometry_msgs.msg.Pose: return all_close(pose_to_list(goal), pose_to_list(actual), tolerance) return True def callback(data,args): print(\"hello there\")", "within a tolerance of their counterparts in another list @param: goal A list", "= eef_link ur_group_names = group_names rospy.init_node('move_ur_python_interface', anonymous=True) rospy.Subscriber(\"/aruco_pose\",geometry_msgs.msg.Pose,callback,(ur_robot, ur_scene, ur_move_group, ur_planning_frame, ur_eef_link, ur_group_names))", "ur_planning_frame = planning_frame ur_eef_link = eef_link ur_group_names = group_names rospy.init_node('move_ur_python_interface', anonymous=True) rospy.Subscriber(\"/aruco_pose\",geometry_msgs.msg.Pose,callback,(ur_robot, ur_scene,", "\"\"\" all_equal = True if type(goal) is list: for index in range(len(goal)): if", "import tf def all_close(goal, actual, tolerance): \"\"\" Convenience method for testing if a", "ur_move_group print(\"elo grab\") print(data) data.position.x = data.position.x - 0.05 data.position.y = data.position.y -", "list of values are within a tolerance of their counterparts in another list", "= move_group.get_current_pose().pose return all_close(data, current_pose, 0.01) def main(): try: print(\"Grab\") moveit_commander.roscpp_initialize(sys.argv) robot =", "= robot ur_scene = scene ur_move_group = move_group ur_planning_frame = planning_frame ur_eef_link =", "rospy.Subscriber(\"/aruco_pose\",geometry_msgs.msg.Pose,callback,(ur_robot, ur_scene, ur_move_group, ur_planning_frame, ur_eef_link, ur_group_names)) rospy.spin() except rospy.ROSInterruptException: return except KeyboardInterrupt: return", "of values are within a tolerance of their counterparts in another list @param:", "type(goal) is geometry_msgs.msg.PoseStamped: return all_close(goal.pose, actual.pose, tolerance) elif type(goal) is geometry_msgs.msg.Pose: return all_close(pose_to_list(goal),", "= -0.0 data.orientation.y = 1.0 data.orientation.z = 0.0 data.orientation.w = -0.0 move_group.set_pose_target(data) plan", "PoseStamped @param: actual A list of floats, a Pose or a PoseStamped @param:", "data.orientation.y = 1.0 data.orientation.z = 0.0 data.orientation.w = -0.0 move_group.set_pose_target(data) plan = move_group.go(wait=True)", "moveit_commander.roscpp_initialize(sys.argv) robot = moveit_commander.RobotCommander() scene = moveit_commander.PlanningSceneInterface() group_name = \"manipulator\" move_group = moveit_commander.MoveGroupCommander(group_name)", "move_group ur_planning_frame = planning_frame ur_eef_link = eef_link ur_group_names = group_names rospy.init_node('move_ur_python_interface', anonymous=True) rospy.Subscriber(\"/aruco_pose\",geometry_msgs.msg.Pose,callback,(ur_robot,", "is list: for 
index in range(len(goal)): if abs(actual[index] - goal[index]) > tolerance: return", "planning_frame = move_group.get_planning_frame() eef_link = move_group.get_end_effector_link() group_names = robot.get_group_names() robot.get_current_state() # Misc variables", "> tolerance: return False elif type(goal) is geometry_msgs.msg.PoseStamped: return all_close(goal.pose, actual.pose, tolerance) elif", "@param: goal A list of floats, a Pose or a PoseStamped @param: actual", "scene = moveit_commander.PlanningSceneInterface() group_name = \"manipulator\" move_group = moveit_commander.MoveGroupCommander(group_name) planning_frame = move_group.get_planning_frame() eef_link", "range(len(goal)): if abs(actual[index] - goal[index]) > tolerance: return False elif type(goal) is geometry_msgs.msg.PoseStamped:", "is geometry_msgs.msg.PoseStamped: return all_close(goal.pose, actual.pose, tolerance) elif type(goal) is geometry_msgs.msg.Pose: return all_close(pose_to_list(goal), pose_to_list(actual),", "args[1] ur_move_group = args[2] ur_planning_frame = args[3] ur_eef_link = args[4] ur_group_names = args[5]", "goal A list of floats, a Pose or a PoseStamped @param: actual A", "a tolerance of their counterparts in another list @param: goal A list of", "of their counterparts in another list @param: goal A list of floats, a", "robot.get_group_names() robot.get_current_state() # Misc variables ur_robot = robot ur_scene = scene ur_move_group =", "ur_move_group, ur_planning_frame, ur_eef_link, ur_group_names)) rospy.spin() except rospy.ROSInterruptException: return except KeyboardInterrupt: return if __name__", "PoseStamped @param: tolerance A float @returns: bool \"\"\" all_equal = True if type(goal)", "pose_to_list(actual), tolerance) return True def callback(data,args): print(\"hello there\") ur_robot = args[0] ur_scene =", "data.position.y - 0.03 data.position.z = 0.15 data.orientation.x = -0.0 data.orientation.y = 1.0 data.orientation.z", "\"\"\" Convenience method for testing if a list of values are within a", "A float @returns: bool \"\"\" all_equal = True if type(goal) is list: for", "Convenience method for testing if a list of values are within a tolerance", "@param: tolerance A float @returns: bool \"\"\" all_equal = True if type(goal) is", "False elif type(goal) is geometry_msgs.msg.PoseStamped: return all_close(goal.pose, actual.pose, tolerance) elif type(goal) is geometry_msgs.msg.Pose:", "data.orientation.z = 0.0 data.orientation.w = -0.0 move_group.set_pose_target(data) plan = move_group.go(wait=True) move_group.stop() move_group.clear_pose_targets() current_pose", "list of floats, a Pose or a PoseStamped @param: tolerance A float @returns:", "bool \"\"\" all_equal = True if type(goal) is list: for index in range(len(goal)):", "tolerance) return True def callback(data,args): print(\"hello there\") ur_robot = args[0] ur_scene = args[1]", "variables ur_robot = robot ur_scene = scene ur_move_group = move_group ur_planning_frame = planning_frame", "if a list of values are within a tolerance of their counterparts in", "a PoseStamped @param: actual A list of floats, a Pose or a PoseStamped", "0.03 data.position.z = 0.15 data.orientation.x = -0.0 data.orientation.y = 1.0 data.orientation.z = 0.0", "of floats, a Pose or a PoseStamped @param: actual A list of floats,", "move_group.stop() move_group.clear_pose_targets() current_pose = move_group.get_current_pose().pose return all_close(data, current_pose, 0.01) def main(): try: print(\"Grab\")", "import pose_to_list import tf def all_close(goal, actual, 
tolerance): \"\"\" Convenience method for testing", "import sys import copy import rospy import moveit_commander import moveit_msgs.msg import geometry_msgs.msg from", "actual A list of floats, a Pose or a PoseStamped @param: tolerance A", "robot = moveit_commander.RobotCommander() scene = moveit_commander.PlanningSceneInterface() group_name = \"manipulator\" move_group = moveit_commander.MoveGroupCommander(group_name) planning_frame", "print(\"elo grab\") print(data) data.position.x = data.position.x - 0.05 data.position.y = data.position.y - 0.03", "tolerance: return False elif type(goal) is geometry_msgs.msg.PoseStamped: return all_close(goal.pose, actual.pose, tolerance) elif type(goal)", "ur_eef_link = eef_link ur_group_names = group_names rospy.init_node('move_ur_python_interface', anonymous=True) rospy.Subscriber(\"/aruco_pose\",geometry_msgs.msg.Pose,callback,(ur_robot, ur_scene, ur_move_group, ur_planning_frame, ur_eef_link,", "= moveit_commander.PlanningSceneInterface() group_name = \"manipulator\" move_group = moveit_commander.MoveGroupCommander(group_name) planning_frame = move_group.get_planning_frame() eef_link =", "print(\"hello there\") ur_robot = args[0] ur_scene = args[1] ur_move_group = args[2] ur_planning_frame =", "their counterparts in another list @param: goal A list of floats, a Pose", "@param: actual A list of floats, a Pose or a PoseStamped @param: tolerance", "print(data) data.position.x = data.position.x - 0.05 data.position.y = data.position.y - 0.03 data.position.z =", "= args[1] ur_move_group = args[2] ur_planning_frame = args[3] ur_eef_link = args[4] ur_group_names =", "Misc variables ur_robot = robot ur_scene = scene ur_move_group = move_group ur_planning_frame =", "return all_close(pose_to_list(goal), pose_to_list(actual), tolerance) return True def callback(data,args): print(\"hello there\") ur_robot = args[0]", "tolerance A float @returns: bool \"\"\" all_equal = True if type(goal) is list:", "rospy.init_node('move_ur_python_interface', anonymous=True) rospy.Subscriber(\"/aruco_pose\",geometry_msgs.msg.Pose,callback,(ur_robot, ur_scene, ur_move_group, ur_planning_frame, ur_eef_link, ur_group_names)) rospy.spin() except rospy.ROSInterruptException: return except", "moveit_commander.PlanningSceneInterface() group_name = \"manipulator\" move_group = moveit_commander.MoveGroupCommander(group_name) planning_frame = move_group.get_planning_frame() eef_link = move_group.get_end_effector_link()", "list: for index in range(len(goal)): if abs(actual[index] - goal[index]) > tolerance: return False", "pi from std_msgs.msg import String from moveit_commander.conversions import pose_to_list import tf def all_close(goal,", "counterparts in another list @param: goal A list of floats, a Pose or", "True def callback(data,args): print(\"hello there\") ur_robot = args[0] ur_scene = args[1] ur_move_group =", "= data.position.x - 0.05 data.position.y = data.position.y - 0.03 data.position.z = 0.15 data.orientation.x", "= 1.0 data.orientation.z = 0.0 data.orientation.w = -0.0 move_group.set_pose_target(data) plan = move_group.go(wait=True) move_group.stop()", "= \"manipulator\" move_group = moveit_commander.MoveGroupCommander(group_name) planning_frame = move_group.get_planning_frame() eef_link = move_group.get_end_effector_link() group_names =", "geometry_msgs.msg from math import pi from std_msgs.msg import String from moveit_commander.conversions import pose_to_list", "python import sys import copy import rospy import moveit_commander import moveit_msgs.msg import 
geometry_msgs.msg", "values are within a tolerance of their counterparts in another list @param: goal", "A list of floats, a Pose or a PoseStamped @param: tolerance A float", "1.0 data.orientation.z = 0.0 data.orientation.w = -0.0 move_group.set_pose_target(data) plan = move_group.go(wait=True) move_group.stop() move_group.clear_pose_targets()", "def all_close(goal, actual, tolerance): \"\"\" Convenience method for testing if a list of", "= scene ur_move_group = move_group ur_planning_frame = planning_frame ur_eef_link = eef_link ur_group_names =", "= move_group ur_planning_frame = planning_frame ur_eef_link = eef_link ur_group_names = group_names rospy.init_node('move_ur_python_interface', anonymous=True)", "import moveit_msgs.msg import geometry_msgs.msg from math import pi from std_msgs.msg import String from", "copy import rospy import moveit_commander import moveit_msgs.msg import geometry_msgs.msg from math import pi", "= args[3] ur_eef_link = args[4] ur_group_names = args[5] move_group = ur_move_group print(\"elo grab\")", "import rospy import moveit_commander import moveit_msgs.msg import geometry_msgs.msg from math import pi from", "= robot.get_group_names() robot.get_current_state() # Misc variables ur_robot = robot ur_scene = scene ur_move_group", "tolerance): \"\"\" Convenience method for testing if a list of values are within", "from moveit_commander.conversions import pose_to_list import tf def all_close(goal, actual, tolerance): \"\"\" Convenience method", "all_close(goal.pose, actual.pose, tolerance) elif type(goal) is geometry_msgs.msg.Pose: return all_close(pose_to_list(goal), pose_to_list(actual), tolerance) return True", "return False elif type(goal) is geometry_msgs.msg.PoseStamped: return all_close(goal.pose, actual.pose, tolerance) elif type(goal) is", "all_close(goal, actual, tolerance): \"\"\" Convenience method for testing if a list of values", "testing if a list of values are within a tolerance of their counterparts", "-0.0 data.orientation.y = 1.0 data.orientation.z = 0.0 data.orientation.w = -0.0 move_group.set_pose_target(data) plan =", "rospy import moveit_commander import moveit_msgs.msg import geometry_msgs.msg from math import pi from std_msgs.msg", "args[5] move_group = ur_move_group print(\"elo grab\") print(data) data.position.x = data.position.x - 0.05 data.position.y", "math import pi from std_msgs.msg import String from moveit_commander.conversions import pose_to_list import tf", "move_group.get_end_effector_link() group_names = robot.get_group_names() robot.get_current_state() # Misc variables ur_robot = robot ur_scene =", "tolerance) elif type(goal) is geometry_msgs.msg.Pose: return all_close(pose_to_list(goal), pose_to_list(actual), tolerance) return True def callback(data,args):", "ur_eef_link = args[4] ur_group_names = args[5] move_group = ur_move_group print(\"elo grab\") print(data) data.position.x", "def callback(data,args): print(\"hello there\") ur_robot = args[0] ur_scene = args[1] ur_move_group = args[2]", "A list of floats, a Pose or a PoseStamped @param: actual A list", "eef_link ur_group_names = group_names rospy.init_node('move_ur_python_interface', anonymous=True) rospy.Subscriber(\"/aruco_pose\",geometry_msgs.msg.Pose,callback,(ur_robot, ur_scene, ur_move_group, ur_planning_frame, ur_eef_link, ur_group_names)) rospy.spin()", "main(): try: print(\"Grab\") moveit_commander.roscpp_initialize(sys.argv) robot = moveit_commander.RobotCommander() scene = moveit_commander.PlanningSceneInterface() group_name = \"manipulator\"" ]
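
This node subscribes to /aruco_pose and, for each received marker pose, plans and executes an offset grasp approach with MoveIt. A minimal way to exercise the callback without a camera is to publish a single Pose by hand; the following is a hypothetical test snippet (node name and coordinates are made up, only the topic and message type come from the script above):

# hypothetical test: drive the /aruco_pose subscriber without a marker detector
import rospy
import geometry_msgs.msg

rospy.init_node("aruco_pose_test", anonymous=True)
pub = rospy.Publisher("/aruco_pose", geometry_msgs.msg.Pose, queue_size=1)
rospy.sleep(1.0)  # allow the subscriber connection to establish

pose = geometry_msgs.msg.Pose()
pose.position.x, pose.position.y, pose.position.z = 0.4, 0.1, 0.3  # assumed reachable point
pose.orientation.w = 1.0  # identity; callback() overwrites the orientation anyway
pub.publish(pose)
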
[ "_exec_cmd(debugger, command, capture_output=False): if capture_output: cmdretobj = lldb.SBCommandReturnObject() debugger.GetCommandInterpreter().HandleCommand(command, cmdretobj) return cmdretobj else:", "assert 0 <= bpid <= 254 assert not vm.is_breakpoint_hit() vm.resume() time.sleep(0.100) vm.halt() assert", "do a full reattach (the kernel load address may differ) fdp_attach(debugger, vm.name, exe_ctx,", "global vm print(lldbagilityutils.LLDBAGILITY) print(\"* Attaching to the VM\") try: vm = stubvm.STUBVM(vm_stub, vm_name)", "command, exe_ctx, result, internal_dict): \"\"\" Save the current state of the attached macOS", "virtual memory\") vm.halt() data = vm.read_virtual_memory(vm.read_register(\"rsp\"), 0x8) new_data = b\"ABCDEFGH\" vm.write_virtual_memory(vm.read_register(\"rsp\"), new_data) assert", "vm.halt() assert vm.is_breakpoint_hit() vm.interrupt_and_restore_last_snapshot() vm.single_step() vm.set_hard_breakpoint(\"e\", 0x0, rip) assert not vm.is_breakpoint_hit() vm.resume() time.sleep(0.100)", "exe_ctx, result, internal_dict): \"\"\" Connect to a macOS VM via VMSN. Currently not", "and then re-enable them once the state # has been saved, so that", "address = 0x{:016x}\".format(vaddr)) else: print(\"* Invalid expression\") elif args.action == \"unset\": vm.unset_hard_breakpoint(args.nreg) print(\"*", "# modifications to RFLAGS should be disabled assert vm.read_register(\"rflags\") == orig_values[\"rflags\"] del new_values[\"rflags\"]", "can be active simultaneously. \"\"\" parser = argparse.ArgumentParser(prog=\"fdp-hbreakpoint\") subparsers = parser.add_subparsers(dest=\"action\") set_parser =", "vm.is_state_halted() def _t2(): print(\"* Read/write registers\") vm.halt() orig_values = vm.read_registers(regs) new_values = {reg:", "not attach! {}\".format(str(exc))) return print(\"* Resuming the VM execution until reaching kernel code\")", "\"rip\", \"rflags\", \"cs\", \"fs\", \"gs\", } def _t1(): print(\"* Halt/resume/single step\") vm.halt() assert", "if vm.interrupt_and_restore_last_snapshot(): print(\"* State restored\") # do a full reattach (the kernel load", "fdp_attach(debugger, vm.name, exe_ctx, result, internal_dict) else: print(\"* No saved state found\") @_attached def", "= argparse.ArgumentParser(prog=\"fdp-hbreakpoint\") subparsers = parser.add_subparsers(dest=\"action\") set_parser = subparsers.add_parser(\"set\") set_parser.add_argument( \"trigger\", choices={\"e\", \"rw\", \"w\"},", "kdpserver.KDPServer() th = threading.Thread(target=kdpsv.debug, args=(vm,)) th.daemon = True th.start() # connect LLDB to", "has been saved, so that LLDB sends again the KDP requests for setting", "== \"set\": vaddr = _evaluate_expression(exe_ctx, args.expression) if vaddr: vm.set_hard_breakpoint(args.trigger, args.nreg, vaddr) print(\"* Hardware", "0x2, 0x1234) assert vm.read_register(\"dr2\") == 0x1234 assert vm.read_register(\"dr7\") == 0b00000011001100010000000000101010 vm.set_hard_breakpoint(\"rw\", 0x3, 0x1234)", "\"r13\", \"r14\", \"r15\", \"rip\", \"rflags\", \"cs\", \"fs\", \"gs\", } def _t1(): print(\"* Halt/resume/single", "2, 3}, help=\"Breakpoint slot to free (corresponding to registers DR0, DR1, DR2 and", "else: raise AssertionError @_attached def fdp_test(debugger, command, exe_ctx, result, internal_dict): \"\"\" Run some", "internal_dict): \"\"\" Connect to a macOS VM via VMSN. Currently not maintained! 
Existing", "vm.read_registers((\"dr0\", \"dr1\", \"dr2\", \"dr3\", \"dr6\", \"dr7\")) # interrupt and save the VM state", "== 0b00000000000000000000000000000010 vm.set_hard_breakpoint(\"w\", 0x0, 0x1234) assert vm.read_register(\"dr7\") == 0b00000000000000010000000000000010 vm.set_hard_breakpoint(\"rw\", 0x1, 0x1234) assert", "breakpoints are deleted on attaching. Re-execute this command every time the VM is", "once the state # has been saved, so that LLDB sends again the", "Kernel slide: 0x{:x}\".format(vm.kernel_slide)) print(\"* Kernel cr3: 0x{:x}\".format(vm.kernel_cr3)) print(\"* Kernel version: {}\".format(vm.kernel_version)) print(\"* VM", "\"\"\" Run some tests. Warning: tests change the state of the machine and", "orig_data def _t5(): print(\"* Debug registers\") vm.halt() vm.write_register(\"dr7\", 0x0) vm.set_hard_breakpoint(\"rw\", 0x0, 0x1234) assert", "restore the VM state print(\"* Restoring the last saved VM state\") if vm.interrupt_and_restore_last_snapshot():", "vm.single_step() assert not vm.is_breakpoint_hit() vm.interrupt_and_take_snapshot() vm.single_step() vm.single_step() rip = vm.read_register(\"rip\") vm.interrupt_and_restore_last_snapshot() vm.single_step() bpid", "except (TypeError, ValueError): return None else: return vaddr def fdp_attach(debugger, command, exe_ctx, result,", "address: 0x{:016x}\".format(vm.kernel_load_vaddr)) print(\"* Kernel slide: 0x{:x}\".format(vm.kernel_slide)) print(\"* Kernel cr3: 0x{:x}\".format(vm.kernel_cr3)) print(\"* Kernel version:", "VM breakpoints deleted\") # detach the previous process (if any) exe_ctx.process.Detach() # remove", "elif args.action == \"unset\": vm.unset_hard_breakpoint(args.nreg) print(\"* Hardware breakpoint unset\") else: raise AssertionError @_attached", "exe_ctx, result, internal_dict): \"\"\" Save the current state of the attached macOS VM.", "then re-enable them once the state # has been saved, so that LLDB", "== \"unset\": vm.unset_hard_breakpoint(args.nreg) print(\"* Hardware breakpoint unset\") else: raise AssertionError @_attached def fdp_test(debugger,", "0x3, 0x1234) assert vm.read_register(\"dr3\") == 0x1234 assert vm.read_register(\"dr7\") == 0b00110011001100010000000010101010 vm.unset_hard_breakpoint(0x0) assert vm.read_register(\"dr7\")", "the state of the machine and modify the last saved state! \"\"\" regs", "the VM state process_was_stopped = exe_ctx.process.is_stopped print(\"* Saving the VM state\") vm.interrupt_and_take_snapshot() print(\"*", "step\") vm.halt() assert vm.is_state_halted() vm.resume() assert not vm.is_state_halted() vm.halt() for _ in range(100):", "print(\"* Kernel version: {}\".format(vm.kernel_version)) print(\"* VM breakpoints deleted\") # detach the previous process", "access to trap on: execute, read/write, or write only.\", ) set_parser.add_argument( \"nreg\", type=lambda", "0x100) == orig_data def _t5(): print(\"* Debug registers\") vm.halt() vm.write_register(\"dr7\", 0x0) vm.set_hard_breakpoint(\"rw\", 0x0,", "of the kdp struct vm.store_kdp_at_next_write_virtual_memory() if _exec_cmd(debugger, \"memory write &kdp 41\", capture_output=True).GetError(): print(\"*", "\"\"\" Connect to a macOS VM via VMSN. Currently not maintained! 
Existing breakpoints", "the fake KDP server kdpsv_addr, kdpsv_port = kdpsv.sv_sock.getsockname() _exec_cmd(debugger, \"kdp-remote '{}:{}'\".format(kdpsv_addr, kdpsv_port)) #", "vm.read_register(\"dr7\") == 0b00000000000000000000000000000010 vm.set_hard_breakpoint(\"w\", 0x0, 0x1234) assert vm.read_register(\"dr7\") == 0b00000000000000010000000000000010 vm.set_hard_breakpoint(\"rw\", 0x1, 0x1234)", "vm.set_hard_breakpoint(\"e\", 0x0, rip) assert not vm.is_breakpoint_hit() vm.resume() time.sleep(0.100) vm.halt() assert vm.is_breakpoint_hit() if exe_ctx.process.is_running:", "KDP requests for setting them exe_ctx.target.DisableAllBreakpoints() # similarly, for hard breakpoints we save", "41\", capture_output=True).GetError(): print(\"* Unable to find the 'kdp' symbol. Did you specify the", "vm.resume() time.sleep(0.100) vm.interrupt_and_restore_last_snapshot() assert vm.is_state_halted() assert not vm.is_breakpoint_hit() assert vm.read_registers(regs) == orig_values assert", "exe_ctx, result, internal_dict): \"\"\" Interrupt (pause) the execution of the attached macOS VM.", "vm.halt() data = vm.read_virtual_memory(vm.read_register(\"rsp\"), 0x8) new_data = b\"ABCDEFGH\" vm.write_virtual_memory(vm.read_register(\"rsp\"), new_data) assert vm.read_virtual_memory(vm.read_register(\"rsp\"), 0x8)", "vm.single_step() bpid = vm.set_soft_exec_breakpoint(rip) assert 0 <= bpid <= 254 assert not vm.is_breakpoint_hit()", "ValueError): return None else: return vaddr def fdp_attach(debugger, command, exe_ctx, result, internal_dict): \"\"\"", "bpid <= 254 assert not vm.is_breakpoint_hit() vm.resume() time.sleep(0.100) vm.halt() assert vm.is_breakpoint_hit() vm.interrupt_and_restore_last_snapshot() vm.single_step()", "# connect LLDB to the fake KDP server kdpsv_addr, kdpsv_port = kdpsv.sv_sock.getsockname() _exec_cmd(debugger,", "_t6): _t() print(\"* All tests passed!\") def __lldb_init_module(debugger, internal_dict): # FDP debugger.HandleCommand(\"command script", "at least for the current session # we disable soft breakpoints before saving", "vm.set_hard_breakpoint(\"e\", 0x0, 0x1234) assert vm.read_register(\"dr7\") == 0b00000000000000000000000000000010 vm.set_hard_breakpoint(\"w\", 0x0, 0x1234) assert vm.read_register(\"dr7\") ==", "result, internal_dict): \"\"\" Connect to a macOS VM via VMSN. Currently not maintained!", "assert vm.read_register(\"dr3\") == 0x1234 assert vm.read_register(\"dr7\") == 0b00110011001100010000000010101010 vm.unset_hard_breakpoint(0x0) assert vm.read_register(\"dr7\") == 0b00110011001100010000000010101000", "# saving the state causes all breakpoints (soft and hard) to be unset,", "deleted\") # detach the previous process (if any) exe_ctx.process.Detach() # remove all LLDB", "lldbagility.fdp_save fdp-save\") debugger.HandleCommand(\"command script add -f lldbagility.fdp_restore fdp-restore\") debugger.HandleCommand( \"command script add -f", "registers DR0, DR1, DR2 and DR3. 
Consequently, a maximum of four hardware breakpoints", "reg in regs: assert vm.read_register(reg) == orig_values[reg] def _t3(): print(\"* Read/write virtual memory\")", "assert not vm.is_breakpoint_hit() assert vm.read_registers(regs) == orig_values assert vm.read_virtual_memory(vm.read_register(\"rsp\"), 0x100) == orig_data def", "_t5, _t6): _t() print(\"* All tests passed!\") def __lldb_init_module(debugger, internal_dict): # FDP debugger.HandleCommand(\"command", "the target to debug?\") vm.abort_store_kdp_at_next_write_virtual_memory() def _attached(f): @functools.wraps(f) def _wrapper(*args, **kwargs): global vm", "_attached(f): @functools.wraps(f) def _wrapper(*args, **kwargs): global vm if not vm: print(\"* Not attached", "parser.parse_args(shlex.split(command)) _attach(debugger, exe_ctx, stubvm.FDPSTUB, args.vm_name) def vmsn_attach(debugger, command, exe_ctx, result, internal_dict): \"\"\" Connect", "args.vm_name) def vmsn_attach(debugger, command, exe_ctx, result, internal_dict): \"\"\" Connect to a macOS VM", "result, internal_dict): \"\"\" Restore the attached macOS VM to the last saved state.", "Restoring the last saved VM state\") if vm.interrupt_and_restore_last_snapshot(): print(\"* State restored\") # do", "keep in mind that FDP soft and page breakpoints do not work just", "page breakpoints do not work just after a restore # (see VMR3AddSoftBreakpoint()) vm.unset_all_breakpoints()", "lldbagility.fdp_test fdp-test\") debugger.HandleCommand(\"command alias fa fdp-attach\") debugger.HandleCommand(\"command alias fs fdp-save\") debugger.HandleCommand(\"command alias fr", "assert not vm.is_state_halted() vm.halt() for _ in range(100): vm.single_step() assert vm.is_state_halted() def _t2():", "result, internal_dict): \"\"\" Connect to a macOS VM via FDP. The VM must", "b\"A\" * 0x100) vm.single_step() vm.resume() time.sleep(0.100) vm.interrupt_and_restore_last_snapshot() assert vm.is_state_halted() assert not vm.is_breakpoint_hit() assert", "in regs: vm.write_register(reg, new_values[reg]) # modifications to RFLAGS should be disabled assert vm.read_register(\"rflags\")", "# do a full reattach (the kernel load address may differ) fdp_attach(debugger, vm.name,", "Connect to a macOS VM via VMSN. Currently not maintained! Existing breakpoints are", "free (corresponding to registers DR0, DR1, DR2 and DR3).\", ) args = parser.parse_args(shlex.split(command))", "if args.action == \"set\": vaddr = _evaluate_expression(exe_ctx, args.expression) if vaddr: vm.set_hard_breakpoint(args.trigger, args.nreg, vaddr)", "vm.is_breakpoint_hit() if exe_ctx.process.is_running: vm.interrupt() vm.unset_all_breakpoints() for _t in (_t1, _t2, _t3, _t4, _t5,", "assert vm.read_virtual_memory(vm.read_register(\"rsp\"), 0x100) == orig_data def _t5(): print(\"* Debug registers\") vm.halt() vm.write_register(\"dr7\", 0x0)", "a VM!\") return return f(*args, **kwargs) return _wrapper @_attached def fdp_save(debugger, command, exe_ctx,", "Attaching to the VM\") try: vm = stubvm.STUBVM(vm_stub, vm_name) except Exception as exc:", "import argparse import functools import re import shlex import threading import time import", "the VM execution until reaching kernel code\") vm.complete_attach() print(\"* Kernel load address: 0x{:016x}\".format(vm.kernel_load_vaddr))", "The VM must have already been started. 
#!/usr/bin/env python2
# -*- coding: utf-8 -*-
from __future__ import print_function

import argparse
import functools
import re
import shlex
import threading
import time
import traceback

import kdpserver
import lldb
import lldbagilityutils
import stubvm

vm = None


def _exec_cmd(debugger, command, capture_output=False):
    # execute an LLDB command, optionally capturing its output
    if capture_output:
        cmdretobj = lldb.SBCommandReturnObject()
        debugger.GetCommandInterpreter().HandleCommand(command, cmdretobj)
        return cmdretobj
    else:
        debugger.HandleCommand(command)
        return None


def _evaluate_expression(exe_ctx, expression):
    # evaluate the expression in the current frame and return its value as an
    # integer address, or None if evaluation fails
    res = exe_ctx.frame.EvaluateExpression(expression)
    try:
        vaddr = int(res.GetValue(), 0)
    except (TypeError, ValueError):
        return None
    else:
        return vaddr
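# Illustrative note (not part of the original source): _evaluate_expression is
# what lets the commands below accept either a literal address
# ("0xffffff80001234") or a symbolic expression ("&kdp", "$rip+0x10"); both
# reduce to an int via int(value, 0), so hex, octal and decimal all work.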
def fdp_attach(debugger, command, exe_ctx, result, internal_dict):
    """
    Connect to a macOS VM via FDP.
    The VM must have already been started.
    Existing breakpoints are deleted on attaching.
    Re-execute this command every time the VM is rebooted.
    """
    parser = argparse.ArgumentParser(prog="fdp-attach")
    parser.add_argument("vm_name")
    args = parser.parse_args(shlex.split(command))
    _attach(debugger, exe_ctx, stubvm.FDPSTUB, args.vm_name)


def vmsn_attach(debugger, command, exe_ctx, result, internal_dict):
    """
    Connect to a macOS VM via VMSN. Currently not maintained!
    Existing breakpoints are deleted on attaching.
    """
    parser = argparse.ArgumentParser(prog="vmsn-attach")
    parser.add_argument("vm_name")
    args = parser.parse_args(shlex.split(command))
    _attach(debugger, exe_ctx, stubvm.VMSNSTUB, args.vm_name)


def _attach(debugger, exe_ctx, vm_stub, vm_name):
    global vm
    print(lldbagilityutils.LLDBAGILITY)

    print("* Attaching to the VM")
    try:
        vm = stubvm.STUBVM(vm_stub, vm_name)
    except Exception as exc:
        print("* Could not attach! {}".format(str(exc)))
        return

    print("* Resuming the VM execution until reaching kernel code")
    vm.complete_attach()

    print("* Kernel load address: 0x{:016x}".format(vm.kernel_load_vaddr))
    print("* Kernel slide: 0x{:x}".format(vm.kernel_slide))
    print("* Kernel cr3: 0x{:x}".format(vm.kernel_cr3))
    print("* Kernel version: {}".format(vm.kernel_version))
    print("* VM breakpoints deleted")

    # detach the previous process (if any)
    exe_ctx.process.Detach()

    # remove all LLDB breakpoints
    exe_ctx.target.DeleteAllBreakpoints()
    print("* LLDB breakpoints deleted")

    # start the fake KDP server
    kdpsv = kdpserver.KDPServer()
    th = threading.Thread(target=kdpsv.debug, args=(vm,))
    th.daemon = True
    th.start()

    # connect LLDB to the fake KDP server
    kdpsv_addr, kdpsv_port = kdpsv.sv_sock.getsockname()
    _exec_cmd(debugger, "kdp-remote '{}:{}'".format(kdpsv_addr, kdpsv_port))

    # trigger a memory write to find out the address of the kdp struct
    vm.store_kdp_at_next_write_virtual_memory()
    if _exec_cmd(debugger, "memory write &kdp 41", capture_output=True).GetError():
        print("* Unable to find the 'kdp' symbol. Did you specify the target to debug?")
        vm.abort_store_kdp_at_next_write_virtual_memory()
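# Typical attach flow from the LLDB prompt (illustrative; the kernel path and
# VM name are placeholders):
#   (lldb) target create /path/to/kernel.development
#   (lldb) fdp-attach macos-vm
# A kernel target should be selected before attaching: the "memory write
# &kdp 41" probe above only works if LLDB can resolve the 'kdp' symbol.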
\"\"\" parser = argparse.ArgumentParser(prog=\"fdp-hbreakpoint\")", "such.\" ) unset_parser = subparsers.add_parser(\"unset\") unset_parser.add_argument( \"nreg\", type=lambda i: int(i, 0), choices={0, 1,", "# FDP debugger.HandleCommand(\"command script add -f lldbagility.fdp_attach fdp-attach\") debugger.HandleCommand(\"command script add -f lldbagility.fdp_save", "data) assert vm.read_virtual_memory(vm.read_register(\"rsp\"), 0x8) == data def _t4(): print(\"* Save/restore\") vm.halt() orig_values =", "\"command script add -f lldbagility.fdp_interrupt fdp-interrupt\" ) debugger.HandleCommand( \"command script add -f lldbagility.fdp_hbreakpoint", "code\") vm.complete_attach() print(\"* Kernel load address: 0x{:016x}\".format(vm.kernel_load_vaddr)) print(\"* Kernel slide: 0x{:x}\".format(vm.kernel_slide)) print(\"* Kernel", "work just after a restore # (see VMR3AddSoftBreakpoint()) vm.unset_all_breakpoints() vm.single_step() assert not vm.is_breakpoint_hit()", "def _t2(): print(\"* Read/write registers\") vm.halt() orig_values = vm.read_registers(regs) new_values = {reg: 0x1337", "any) exe_ctx.process.Detach() # remove all LLDB breakpoints exe_ctx.target.DeleteAllBreakpoints() print(\"* LLDB breakpoints deleted\") #", "vm.read_virtual_memory(vm.read_register(\"rsp\"), 0x8) == new_data vm.write_virtual_memory(vm.read_register(\"rsp\"), data) assert vm.read_virtual_memory(vm.read_register(\"rsp\"), 0x8) == data def _t4():", "_exec_cmd(debugger, \"kdp-remote '{}:{}'\".format(kdpsv_addr, kdpsv_port)) # trigger a memory write to find out the", "\"rbx\", \"rcx\", \"rdx\", \"rdi\", \"rsi\", \"rbp\", \"rsp\", \"r8\", \"r9\", \"r10\", \"r11\", \"r12\", \"r13\",", "orig_values assert vm.read_virtual_memory(vm.read_register(\"rsp\"), 0x100) == orig_data def _t5(): print(\"* Debug registers\") vm.halt() vm.write_register(\"dr7\",", "internal_dict): \"\"\" Run some tests. Warning: tests change the state of the machine", "debugger.HandleCommand(\"command alias fa fdp-attach\") debugger.HandleCommand(\"command alias fs fdp-save\") debugger.HandleCommand(\"command alias fr fdp-restore\") debugger.HandleCommand(\"command", "# we disable soft breakpoints before saving and then re-enable them once the", "internal_dict): \"\"\" Set or unset hardware breakpoints. Hardware breakpoints are implemented using the", "regs} for reg in regs: vm.write_register(reg, new_values[reg]) # modifications to RFLAGS should be", "hard breakpoints vm.write_registers(dbgregs) if not process_was_stopped: # display stop info _exec_cmd(debugger, \"process status\")", "vm.read_registers(regs) orig_data = vm.read_virtual_memory(vm.read_register(\"rsp\"), 0x100) vm.interrupt_and_take_snapshot() assert vm.is_state_halted() vm.write_virtual_memory(vm.read_register(\"rsp\"), b\"A\" * 0x100) vm.single_step()", "help=\"Breakpoint address or expression to be evaluated as such.\" ) unset_parser = subparsers.add_parser(\"unset\")", "modifications to RFLAGS should be disabled assert vm.read_register(\"rflags\") == orig_values[\"rflags\"] del new_values[\"rflags\"] assert", "Connect to a macOS VM via FDP. 
The VM must have already been", "fh fdp-hbreakpoint\") # VMSN debugger.HandleCommand(\"command script add -f lldbagility.vmsn_attach vmsn-attach\") debugger.HandleCommand(\"command alias va", "breakpoints exe_ctx.target.DeleteAllBreakpoints() print(\"* LLDB breakpoints deleted\") # start the fake KDP server kdpsv", "== 0x1234 assert vm.read_register(\"dr7\") == 0b00000011001100010000000000101010 vm.set_hard_breakpoint(\"rw\", 0x3, 0x1234) assert vm.read_register(\"dr3\") == 0x1234", "assert vm.read_register(\"dr0\") == 0x1234 assert vm.read_register(\"dr7\") == 0b00000000000000110000000000000010 vm.set_hard_breakpoint(\"e\", 0x0, 0x1234) assert vm.read_register(\"dr7\")", "not vm.is_breakpoint_hit() vm.interrupt_and_take_snapshot() vm.single_step() vm.single_step() rip = vm.read_register(\"rip\") vm.interrupt_and_restore_last_snapshot() vm.single_step() bpid = vm.set_soft_exec_breakpoint(rip)", "breakpoints. Hardware breakpoints are implemented using the debug registers DR0, DR1, DR2 and", "or unset hardware breakpoints. Hardware breakpoints are implemented using the debug registers DR0,", "to find out the address of the kdp struct vm.store_kdp_at_next_write_virtual_memory() if _exec_cmd(debugger, \"memory", "debug registers before saving, # and restore it afterwards dbgregs = vm.read_registers((\"dr0\", \"dr1\",", "VM\") try: vm = stubvm.STUBVM(vm_stub, vm_name) except Exception as exc: print(\"* Could not", "to the last saved state. Breakpoints are deleted on restoring. \"\"\" # interrupt", "== 0b00110011001100010000000010100000 vm.unset_hard_breakpoint(0x2) assert vm.read_register(\"dr7\") == 0b00110011001100010000000010000000 vm.unset_hard_breakpoint(0x3) assert vm.read_register(\"dr7\") == 0b00110011001100010000000000000000 def", "command, exe_ctx, result, internal_dict): \"\"\" Connect to a macOS VM via FDP. The", "Hardware breakpoint unset\") else: raise AssertionError @_attached def fdp_test(debugger, command, exe_ctx, result, internal_dict):", "setting them exe_ctx.target.DisableAllBreakpoints() # similarly, for hard breakpoints we save the state of", "res = exe_ctx.frame.EvaluateExpression(expression) try: vaddr = int(res.GetValue(), 0) except (TypeError, ValueError): return None", "the KDP requests for setting them exe_ctx.target.DisableAllBreakpoints() # similarly, for hard breakpoints we", "VM. \"\"\" vm.interrupt() @_attached def fdp_hbreakpoint(debugger, command, exe_ctx, result, internal_dict): \"\"\" Set or", "{}\".format(vm.kernel_version)) print(\"* VM breakpoints deleted\") # detach the previous process (if any) exe_ctx.process.Detach()", "new_data = b\"ABCDEFGH\" vm.write_virtual_memory(vm.read_register(\"rsp\"), new_data) assert vm.read_virtual_memory(vm.read_register(\"rsp\"), 0x8) == new_data vm.write_virtual_memory(vm.read_register(\"rsp\"), data) assert", "fdp-interrupt\" ) debugger.HandleCommand( \"command script add -f lldbagility.fdp_hbreakpoint fdp-hbreakpoint\" ) debugger.HandleCommand(\"command script add", "be unset, but # we can preserve them at least for the current", "def fdp_test(debugger, command, exe_ctx, result, internal_dict): \"\"\" Run some tests. Warning: tests change", "last saved state. Breakpoints are deleted on restoring. \"\"\" # interrupt and restore", "change the state of the machine and modify the last saved state! 
\"\"\"", "print(\"* Saving the VM state\") vm.interrupt_and_take_snapshot() print(\"* State saved\") # restore soft breakpoints", "vaddr = int(res.GetValue(), 0) except (TypeError, ValueError): return None else: return vaddr def", "info _exec_cmd(debugger, \"process status\") @_attached def fdp_restore(debugger, command, exe_ctx, result, internal_dict): \"\"\" Restore", "vm.single_step() assert vm.is_state_halted() def _t2(): print(\"* Read/write registers\") vm.halt() orig_values = vm.read_registers(regs) new_values", "vm.write_registers(orig_values) for reg in regs: assert vm.read_register(reg) == orig_values[reg] def _t3(): print(\"* Read/write", "Save/restore\") vm.halt() orig_values = vm.read_registers(regs) orig_data = vm.read_virtual_memory(vm.read_register(\"rsp\"), 0x100) vm.interrupt_and_take_snapshot() assert vm.is_state_halted() vm.write_virtual_memory(vm.read_register(\"rsp\"),", ") debugger.HandleCommand(\"command script add -f lldbagility.fdp_test fdp-test\") debugger.HandleCommand(\"command alias fa fdp-attach\") debugger.HandleCommand(\"command alias", "tests. Warning: tests change the state of the machine and modify the last", "== 0b00110011001100010000000010101000 vm.unset_hard_breakpoint(0x1) assert vm.read_register(\"dr7\") == 0b00110011001100010000000010100000 vm.unset_hard_breakpoint(0x2) assert vm.read_register(\"dr7\") == 0b00110011001100010000000010000000 vm.unset_hard_breakpoint(0x3)", "import re import shlex import threading import time import traceback import kdpserver import", "before saving and then re-enable them once the state # has been saved,", "result, internal_dict): \"\"\" Interrupt (pause) the execution of the attached macOS VM. \"\"\"", "remove all LLDB breakpoints exe_ctx.target.DeleteAllBreakpoints() print(\"* LLDB breakpoints deleted\") # start the fake", "= parser.parse_args(shlex.split(command)) _attach(debugger, exe_ctx, stubvm.FDPSTUB, args.vm_name) def vmsn_attach(debugger, command, exe_ctx, result, internal_dict): \"\"\"", "lldbagility.fdp_attach fdp-attach\") debugger.HandleCommand(\"command script add -f lldbagility.fdp_save fdp-save\") debugger.HandleCommand(\"command script add -f lldbagility.fdp_restore", "expression to be evaluated as such.\" ) unset_parser = subparsers.add_parser(\"unset\") unset_parser.add_argument( \"nreg\", type=lambda", "DR1, DR2 and DR3. Consequently, a maximum of four hardware breakpoints can be", "macOS VM to the last saved state. Breakpoints are deleted on restoring. \"\"\"", "differ) fdp_attach(debugger, vm.name, exe_ctx, result, internal_dict) else: print(\"* No saved state found\") @_attached", "(but retained for the current session). \"\"\" # saving the state causes all", "\"\"\" Set or unset hardware breakpoints. 
@_attached
def fdp_restore(debugger, command, exe_ctx, result, internal_dict):
    """
    Restore the attached macOS VM to the last saved state.
    Breakpoints are deleted on restoring.
    """
    # interrupt and restore the VM state
    print("* Restoring the last saved VM state")
    if vm.interrupt_and_restore_last_snapshot():
        print("* State restored")
        # do a full reattach (the kernel load address may differ)
        fdp_attach(debugger, vm.name, exe_ctx, result, internal_dict)
    else:
        print("* No saved state found")


@_attached
def fdp_interrupt(debugger, command, exe_ctx, result, internal_dict):
    """
    Interrupt (pause) the execution of the attached macOS VM.
    """
    vm.interrupt()
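# Save/restore round-trip (illustrative):
#   (lldb) fdp-save        # take a snapshot
#   (lldb) c               # let the guest run and mutate state
#   (lldb) fdp-restore     # rewind to the snapshot
# fdp-restore performs a full reattach because the restored kernel may be
# loaded at a different address, which would invalidate the current target.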
\"\"\" parser = argparse.ArgumentParser(prog=\"fdp-hbreakpoint\") subparsers", "re import shlex import threading import time import traceback import kdpserver import lldb", "regs: assert vm.read_register(reg) == orig_values[reg] def _t3(): print(\"* Read/write virtual memory\") vm.halt() data", "# trigger a memory write to find out the address of the kdp", "print(\"* Hardware breakpoint set: address = 0x{:016x}\".format(vaddr)) else: print(\"* Invalid expression\") elif args.action", "start the fake KDP server kdpsv = kdpserver.KDPServer() th = threading.Thread(target=kdpsv.debug, args=(vm,)) th.daemon", "after a restore # (see VMR3AddSoftBreakpoint()) vm.unset_all_breakpoints() vm.single_step() assert not vm.is_breakpoint_hit() vm.interrupt_and_take_snapshot() vm.single_step()", "capture_output=True).GetError(): print(\"* Unable to find the 'kdp' symbol. Did you specify the target", "\"\"\" Interrupt (pause) the execution of the attached macOS VM. \"\"\" vm.interrupt() @_attached", "= parser.add_subparsers(dest=\"action\") set_parser = subparsers.add_parser(\"set\") set_parser.add_argument( \"trigger\", choices={\"e\", \"rw\", \"w\"}, help=\"Type of memory", "disabled assert vm.read_register(\"rflags\") == orig_values[\"rflags\"] del new_values[\"rflags\"] assert vm.read_registers(regs - {\"rflags\"}) == new_values", "reattach (the kernel load address may differ) fdp_attach(debugger, vm.name, exe_ctx, result, internal_dict) else:", "0b00110011001100010000000010100000 vm.unset_hard_breakpoint(0x2) assert vm.read_register(\"dr7\") == 0b00110011001100010000000010000000 vm.unset_hard_breakpoint(0x3) assert vm.read_register(\"dr7\") == 0b00110011001100010000000000000000 def _t6():", "import stubvm vm = None def _exec_cmd(debugger, command, capture_output=False): if capture_output: cmdretobj =", "parser = argparse.ArgumentParser(prog=\"vmsn-attach\") parser.add_argument(\"vm_name\") args = parser.parse_args(shlex.split(command)) _attach(debugger, exe_ctx, stubvm.VMSNSTUB, args.vm_name) def _attach(debugger,", "exe_ctx, result, internal_dict): \"\"\" Connect to a macOS VM via FDP. The VM", "Kernel version: {}\".format(vm.kernel_version)) print(\"* VM breakpoints deleted\") # detach the previous process (if", "macOS VM. \"\"\" vm.interrupt() @_attached def fdp_hbreakpoint(debugger, command, exe_ctx, result, internal_dict): \"\"\" Set", "assert vm.read_register(\"dr7\") == 0b00110011001100010000000010000000 vm.unset_hard_breakpoint(0x3) assert vm.read_register(\"dr7\") == 0b00110011001100010000000000000000 def _t6(): print(\"* Soft/hard", "some tests. Warning: tests change the state of the machine and modify the", "on attaching. Re-execute this command every time the VM is rebooted. \"\"\" parser", "add -f lldbagility.fdp_hbreakpoint fdp-hbreakpoint\" ) debugger.HandleCommand(\"command script add -f lldbagility.fdp_test fdp-test\") debugger.HandleCommand(\"command alias", "script add -f lldbagility.fdp_test fdp-test\") debugger.HandleCommand(\"command alias fa fdp-attach\") debugger.HandleCommand(\"command alias fs fdp-save\")", "fi fdp-interrupt\") debugger.HandleCommand(\"command alias fh fdp-hbreakpoint\") # VMSN debugger.HandleCommand(\"command script add -f lldbagility.vmsn_attach", "and modify the last saved state! \"\"\" regs = { \"rax\", \"rbx\", \"rcx\",", "_evaluate_expression(exe_ctx, args.expression) if vaddr: vm.set_hard_breakpoint(args.trigger, args.nreg, vaddr) print(\"* Hardware breakpoint set: address =", "state. Breakpoints are deleted on restoring. 
\"\"\" # interrupt and restore the VM", "print(\"* Unable to find the 'kdp' symbol. Did you specify the target to", "set_parser.add_argument( \"trigger\", choices={\"e\", \"rw\", \"w\"}, help=\"Type of memory access to trap on: execute,", "preserve them at least for the current session # we disable soft breakpoints", "interrupt and save the VM state process_was_stopped = exe_ctx.process.is_stopped print(\"* Saving the VM", "def _attached(f): @functools.wraps(f) def _wrapper(*args, **kwargs): global vm if not vm: print(\"* Not", "exe_ctx.target.EnableAllBreakpoints() # restore hard breakpoints vm.write_registers(dbgregs) if not process_was_stopped: # display stop info", "just after a restore # (see VMR3AddSoftBreakpoint()) vm.unset_all_breakpoints() vm.single_step() assert not vm.is_breakpoint_hit() vm.interrupt_and_take_snapshot()", "\"rw\", \"w\"}, help=\"Type of memory access to trap on: execute, read/write, or write", "attaching. Re-execute this command every time the VM is rebooted. \"\"\" parser =", "int(res.GetValue(), 0) except (TypeError, ValueError): return None else: return vaddr def fdp_attach(debugger, command,", "def __lldb_init_module(debugger, internal_dict): # FDP debugger.HandleCommand(\"command script add -f lldbagility.fdp_attach fdp-attach\") debugger.HandleCommand(\"command script", "debugger.HandleCommand(\"command script add -f lldbagility.fdp_save fdp-save\") debugger.HandleCommand(\"command script add -f lldbagility.fdp_restore fdp-restore\") debugger.HandleCommand(", "deleted on attaching. \"\"\" parser = argparse.ArgumentParser(prog=\"vmsn-attach\") parser.add_argument(\"vm_name\") args = parser.parse_args(shlex.split(command)) _attach(debugger, exe_ctx,", "@functools.wraps(f) def _wrapper(*args, **kwargs): global vm if not vm: print(\"* Not attached to", "vm.unset_hard_breakpoint(0x3) assert vm.read_register(\"dr7\") == 0b00110011001100010000000000000000 def _t6(): print(\"* Soft/hard exec breakpoint\") vm.halt() #", "<= bpid <= 254 assert not vm.is_breakpoint_hit() vm.resume() time.sleep(0.100) vm.halt() assert vm.is_breakpoint_hit() vm.interrupt_and_restore_last_snapshot()", "to use (corresponding to registers ).\", ) set_parser.add_argument( \"expression\", help=\"Breakpoint address or expression", "new_values[\"rflags\"] assert vm.read_registers(regs - {\"rflags\"}) == new_values vm.write_registers(orig_values) for reg in regs: assert", "print(lldbagilityutils.LLDBAGILITY) print(\"* Attaching to the VM\") try: vm = stubvm.STUBVM(vm_stub, vm_name) except Exception", "re-enable them once the state # has been saved, so that LLDB sends", "0b00110011001100010000000010000000 vm.unset_hard_breakpoint(0x3) assert vm.read_register(\"dr7\") == 0b00110011001100010000000000000000 def _t6(): print(\"* Soft/hard exec breakpoint\") vm.halt()", "del new_values[\"rflags\"] assert vm.read_registers(regs - {\"rflags\"}) == new_values vm.write_registers(orig_values) for reg in regs:", "state\") vm.interrupt_and_take_snapshot() print(\"* State saved\") # restore soft breakpoints exe_ctx.target.EnableAllBreakpoints() # restore hard", "via VMSN. Currently not maintained! Existing breakpoints are deleted on attaching. \"\"\" parser", "# interrupt and restore the VM state print(\"* Restoring the last saved VM", "is rebooted. 
\"\"\" parser = argparse.ArgumentParser(prog=\"fdp-attach\") parser.add_argument(\"vm_name\") args = parser.parse_args(shlex.split(command)) _attach(debugger, exe_ctx, stubvm.FDPSTUB,", "_attach(debugger, exe_ctx, vm_stub, vm_name): global vm print(lldbagilityutils.LLDBAGILITY) print(\"* Attaching to the VM\") try:", "saved (but retained for the current session). \"\"\" # saving the state causes", "fdp-test\") debugger.HandleCommand(\"command alias fa fdp-attach\") debugger.HandleCommand(\"command alias fs fdp-save\") debugger.HandleCommand(\"command alias fr fdp-restore\")", "fdp_test(debugger, command, exe_ctx, result, internal_dict): \"\"\" Run some tests. Warning: tests change the", "assert not vm.is_breakpoint_hit() vm.interrupt_and_take_snapshot() vm.single_step() vm.single_step() rip = vm.read_register(\"rip\") vm.interrupt_and_restore_last_snapshot() vm.single_step() bpid =", "to RFLAGS should be disabled assert vm.read_register(\"rflags\") == orig_values[\"rflags\"] del new_values[\"rflags\"] assert vm.read_registers(regs", "vm.read_register(\"dr0\") == 0x1234 assert vm.read_register(\"dr7\") == 0b00000000000000110000000000000010 vm.set_hard_breakpoint(\"e\", 0x0, 0x1234) assert vm.read_register(\"dr7\") ==", "fr fdp-restore\") debugger.HandleCommand(\"command alias fi fdp-interrupt\") debugger.HandleCommand(\"command alias fh fdp-hbreakpoint\") # VMSN debugger.HandleCommand(\"command", "\"\"\" Connect to a macOS VM via FDP. The VM must have already", "in mind that FDP soft and page breakpoints do not work just after", "attached macOS VM. \"\"\" vm.interrupt() @_attached def fdp_hbreakpoint(debugger, command, exe_ctx, result, internal_dict): \"\"\"", "be evaluated as such.\" ) unset_parser = subparsers.add_parser(\"unset\") unset_parser.add_argument( \"nreg\", type=lambda i: int(i,", "unset_parser.add_argument( \"nreg\", type=lambda i: int(i, 0), choices={0, 1, 2, 3}, help=\"Breakpoint slot to", "kdpserver import lldb import lldbagilityutils import stubvm vm = None def _exec_cmd(debugger, command,", "import kdpserver import lldb import lldbagilityutils import stubvm vm = None def _exec_cmd(debugger,", "exe_ctx.process.Detach() # remove all LLDB breakpoints exe_ctx.target.DeleteAllBreakpoints() print(\"* LLDB breakpoints deleted\") # start", "vm.is_breakpoint_hit() vm.resume() time.sleep(0.100) vm.halt() assert vm.is_breakpoint_hit() if exe_ctx.process.is_running: vm.interrupt() vm.unset_all_breakpoints() for _t in", "assert vm.read_register(\"dr7\") == 0b00110011001100010000000010101010 vm.unset_hard_breakpoint(0x0) assert vm.read_register(\"dr7\") == 0b00110011001100010000000010101000 vm.unset_hard_breakpoint(0x1) assert vm.read_register(\"dr7\") ==", "\"\"\" # interrupt and restore the VM state print(\"* Restoring the last saved", "debugger.HandleCommand( \"command script add -f lldbagility.fdp_interrupt fdp-interrupt\" ) debugger.HandleCommand( \"command script add -f", "vm.read_register(\"rflags\") == orig_values[\"rflags\"] del new_values[\"rflags\"] assert vm.read_registers(regs - {\"rflags\"}) == new_values vm.write_registers(orig_values) for", "argparse.ArgumentParser(prog=\"fdp-attach\") parser.add_argument(\"vm_name\") args = parser.parse_args(shlex.split(command)) _attach(debugger, exe_ctx, stubvm.FDPSTUB, args.vm_name) def vmsn_attach(debugger, command, exe_ctx,", "assert vm.read_register(\"rflags\") == orig_values[\"rflags\"] del new_values[\"rflags\"] assert vm.read_registers(regs - {\"rflags\"}) == new_values vm.write_registers(orig_values)", "fdp-hbreakpoint\" ) 
@_attached
def fdp_test(debugger, command, exe_ctx, result, internal_dict):
    """
    Run some tests.
    Warning: tests change the state of the machine and modify the last saved state!
    """
    regs = {
        "rax", "rbx", "rcx", "rdx", "rdi", "rsi", "rbp", "rsp",
        "r8", "r9", "r10", "r11", "r12", "r13", "r14", "r15",
        "rip", "rflags", "cs", "fs", "gs",
    }

    def _t1():
        print("* Halt/resume/single step")
        vm.halt()
        assert vm.is_state_halted()
        vm.resume()
        assert not vm.is_state_halted()
        vm.halt()
        for _ in range(100):
            vm.single_step()
        assert vm.is_state_halted()

    def _t2():
        print("* Read/write registers")
        vm.halt()
        orig_values = vm.read_registers(regs)
        new_values = {reg: 0x1337 for reg in regs}
        for reg in regs:
            vm.write_register(reg, new_values[reg])
        # modifications to RFLAGS should have been ignored
        assert vm.read_register("rflags") == orig_values["rflags"]
        del new_values["rflags"]
        assert vm.read_registers(regs - {"rflags"}) == new_values
        vm.write_registers(orig_values)
        for reg in regs:
            assert vm.read_register(reg) == orig_values[reg]

    def _t3():
        print("* Read/write virtual memory")
        vm.halt()
        data = vm.read_virtual_memory(vm.read_register("rsp"), 0x8)
        new_data = b"ABCDEFGH"
        vm.write_virtual_memory(vm.read_register("rsp"), new_data)
        assert vm.read_virtual_memory(vm.read_register("rsp"), 0x8) == new_data
        vm.write_virtual_memory(vm.read_register("rsp"), data)
        assert vm.read_virtual_memory(vm.read_register("rsp"), 0x8) == data

    def _t4():
        print("* Save/restore")
        vm.halt()
        orig_values = vm.read_registers(regs)
        orig_data = vm.read_virtual_memory(vm.read_register("rsp"), 0x100)
        vm.interrupt_and_take_snapshot()
        assert vm.is_state_halted()
        vm.write_virtual_memory(vm.read_register("rsp"), b"A" * 0x100)
        vm.single_step()
        vm.resume()
        time.sleep(0.100)
        vm.interrupt_and_restore_last_snapshot()
        assert vm.is_state_halted()
        assert not vm.is_breakpoint_hit()
        assert vm.read_registers(regs) == orig_values
        assert vm.read_virtual_memory(vm.read_register("rsp"), 0x100) == orig_data
\"\"\" parser = argparse.ArgumentParser(prog=\"fdp-hbreakpoint\") subparsers =", "regs = { \"rax\", \"rbx\", \"rcx\", \"rdx\", \"rdi\", \"rsi\", \"rbp\", \"rsp\", \"r8\", \"r9\",", "= None def _exec_cmd(debugger, command, capture_output=False): if capture_output: cmdretobj = lldb.SBCommandReturnObject() debugger.GetCommandInterpreter().HandleCommand(command, cmdretobj)", "target to debug?\") vm.abort_store_kdp_at_next_write_virtual_memory() def _attached(f): @functools.wraps(f) def _wrapper(*args, **kwargs): global vm if", "stop info _exec_cmd(debugger, \"process status\") @_attached def fdp_restore(debugger, command, exe_ctx, result, internal_dict): \"\"\"", "0 <= bpid <= 254 assert not vm.is_breakpoint_hit() vm.resume() time.sleep(0.100) vm.halt() assert vm.is_breakpoint_hit()", "to registers ).\", ) set_parser.add_argument( \"expression\", help=\"Breakpoint address or expression to be evaluated", "debugger.HandleCommand(command) return None def _evaluate_expression(exe_ctx, expression): res = exe_ctx.frame.EvaluateExpression(expression) try: vaddr = int(res.GetValue(),", "def fdp_save(debugger, command, exe_ctx, result, internal_dict): \"\"\" Save the current state of the", "add -f lldbagility.fdp_attach fdp-attach\") debugger.HandleCommand(\"command script add -f lldbagility.fdp_save fdp-save\") debugger.HandleCommand(\"command script add", "0x0, 0x1234) assert vm.read_register(\"dr7\") == 0b00000000000000010000000000000010 vm.set_hard_breakpoint(\"rw\", 0x1, 0x1234) assert vm.read_register(\"dr1\") == 0x1234", "argparse.ArgumentParser(prog=\"fdp-hbreakpoint\") subparsers = parser.add_subparsers(dest=\"action\") set_parser = subparsers.add_parser(\"set\") set_parser.add_argument( \"trigger\", choices={\"e\", \"rw\", \"w\"}, help=\"Type", "least for the current session # we disable soft breakpoints before saving and", "== orig_data def _t5(): print(\"* Debug registers\") vm.halt() vm.write_register(\"dr7\", 0x0) vm.set_hard_breakpoint(\"rw\", 0x0, 0x1234)", "vm.interrupt() @_attached def fdp_hbreakpoint(debugger, command, exe_ctx, result, internal_dict): \"\"\" Set or unset hardware", "\"nreg\", type=lambda i: int(i, 0), choices={0, 1, 2, 3}, help=\"Breakpoint slot to use", "load address: 0x{:016x}\".format(vm.kernel_load_vaddr)) print(\"* Kernel slide: 0x{:x}\".format(vm.kernel_slide)) print(\"* Kernel cr3: 0x{:x}\".format(vm.kernel_cr3)) print(\"* Kernel", "VM. Breakpoints are not saved (but retained for the current session). \"\"\" #", "AssertionError @_attached def fdp_test(debugger, command, exe_ctx, result, internal_dict): \"\"\" Run some tests. Warning:", "fdp_attach(debugger, command, exe_ctx, result, internal_dict): \"\"\" Connect to a macOS VM via FDP.", "# restore hard breakpoints vm.write_registers(dbgregs) if not process_was_stopped: # display stop info _exec_cmd(debugger,", "kdp struct vm.store_kdp_at_next_write_virtual_memory() if _exec_cmd(debugger, \"memory write &kdp 41\", capture_output=True).GetError(): print(\"* Unable to", "command, capture_output=False): if capture_output: cmdretobj = lldb.SBCommandReturnObject() debugger.GetCommandInterpreter().HandleCommand(command, cmdretobj) return cmdretobj else: debugger.HandleCommand(command)", "vm.is_state_halted() vm.halt() for _ in range(100): vm.single_step() assert vm.is_state_halted() def _t2(): print(\"* Read/write", "i: int(i, 0), choices={0, 1, 2, 3}, help=\"Breakpoint slot to free (corresponding to", "\"memory write &kdp 41\", capture_output=True).GetError(): print(\"* Unable to find the 'kdp' symbol. 
Did", "raise AssertionError @_attached def fdp_test(debugger, command, exe_ctx, result, internal_dict): \"\"\" Run some tests.", "breakpoints we save the state of the debug registers before saving, # and", "breakpoints vm.write_registers(dbgregs) if not process_was_stopped: # display stop info _exec_cmd(debugger, \"process status\") @_attached", "internal_dict): \"\"\" Restore the attached macOS VM to the last saved state. Breakpoints", "of the attached macOS VM. \"\"\" vm.interrupt() @_attached def fdp_hbreakpoint(debugger, command, exe_ctx, result,", "choices={\"e\", \"rw\", \"w\"}, help=\"Type of memory access to trap on: execute, read/write, or", "vm.set_hard_breakpoint(\"rw\", 0x2, 0x1234) assert vm.read_register(\"dr2\") == 0x1234 assert vm.read_register(\"dr7\") == 0b00000011001100010000000000101010 vm.set_hard_breakpoint(\"rw\", 0x3,", "and DR3. Consequently, a maximum of four hardware breakpoints can be active simultaneously.", "the state of the debug registers before saving, # and restore it afterwards", "assert vm.read_register(\"dr7\") == 0b00110011001100010000000010100000 vm.unset_hard_breakpoint(0x2) assert vm.read_register(\"dr7\") == 0b00110011001100010000000010000000 vm.unset_hard_breakpoint(0x3) assert vm.read_register(\"dr7\") ==", "3}, help=\"Breakpoint slot to use (corresponding to registers ).\", ) set_parser.add_argument( \"expression\", help=\"Breakpoint", "exe_ctx, result, internal_dict): \"\"\" Run some tests. Warning: tests change the state of", "restoring. \"\"\" # interrupt and restore the VM state print(\"* Restoring the last", "0x1234 assert vm.read_register(\"dr7\") == 0b00000000000000110000000000000010 vm.set_hard_breakpoint(\"e\", 0x0, 0x1234) assert vm.read_register(\"dr7\") == 0b00000000000000000000000000000010 vm.set_hard_breakpoint(\"w\",", "**kwargs): global vm if not vm: print(\"* Not attached to a VM!\") return", "the last saved state. Breakpoints are deleted on restoring. \"\"\" # interrupt and", "print(\"* Not attached to a VM!\") return return f(*args, **kwargs) return _wrapper @_attached", "symbol. Did you specify the target to debug?\") vm.abort_store_kdp_at_next_write_virtual_memory() def _attached(f): @functools.wraps(f) def", "the last saved VM state\") if vm.interrupt_and_restore_last_snapshot(): print(\"* State restored\") # do a", "internal_dict) else: print(\"* No saved state found\") @_attached def fdp_interrupt(debugger, command, exe_ctx, result,", "= vm.read_register(\"rip\") vm.interrupt_and_restore_last_snapshot() vm.single_step() bpid = vm.set_soft_exec_breakpoint(rip) assert 0 <= bpid <= 254", "lldb import lldbagilityutils import stubvm vm = None def _exec_cmd(debugger, command, capture_output=False): if", "VM state\") if vm.interrupt_and_restore_last_snapshot(): print(\"* State restored\") # do a full reattach (the", "breakpoints are implemented using the debug registers DR0, DR1, DR2 and DR3. 
Consequently,", "{\"rflags\"}) == new_values vm.write_registers(orig_values) for reg in regs: assert vm.read_register(reg) == orig_values[reg] def", "import lldbagilityutils import stubvm vm = None def _exec_cmd(debugger, command, capture_output=False): if capture_output:", "0x1234) assert vm.read_register(\"dr1\") == 0x1234 assert vm.read_register(\"dr7\") == 0b00000000001100010000000000001010 vm.set_hard_breakpoint(\"rw\", 0x2, 0x1234) assert", ") debugger.HandleCommand( \"command script add -f lldbagility.fdp_hbreakpoint fdp-hbreakpoint\" ) debugger.HandleCommand(\"command script add -f", "exe_ctx, result, internal_dict) else: print(\"* No saved state found\") @_attached def fdp_interrupt(debugger, command,", "must have already been started. Existing breakpoints are deleted on attaching. Re-execute this", "(TypeError, ValueError): return None else: return vaddr def fdp_attach(debugger, command, exe_ctx, result, internal_dict):", "\"rax\", \"rbx\", \"rcx\", \"rdx\", \"rdi\", \"rsi\", \"rbp\", \"rsp\", \"r8\", \"r9\", \"r10\", \"r11\", \"r12\",", "= vm.read_registers(regs) orig_data = vm.read_virtual_memory(vm.read_register(\"rsp\"), 0x100) vm.interrupt_and_take_snapshot() assert vm.is_state_halted() vm.write_virtual_memory(vm.read_register(\"rsp\"), b\"A\" * 0x100)", "bpid = vm.set_soft_exec_breakpoint(rip) assert 0 <= bpid <= 254 assert not vm.is_breakpoint_hit() vm.resume()", "vm.is_state_halted() vm.write_virtual_memory(vm.read_register(\"rsp\"), b\"A\" * 0x100) vm.single_step() vm.resume() time.sleep(0.100) vm.interrupt_and_restore_last_snapshot() assert vm.is_state_halted() assert not", "you specify the target to debug?\") vm.abort_store_kdp_at_next_write_virtual_memory() def _attached(f): @functools.wraps(f) def _wrapper(*args, **kwargs):", "-f lldbagility.fdp_attach fdp-attach\") debugger.HandleCommand(\"command script add -f lldbagility.fdp_save fdp-save\") debugger.HandleCommand(\"command script add -f", "command, exe_ctx, result, internal_dict): \"\"\" Set or unset hardware breakpoints. Hardware breakpoints are", "def fdp_hbreakpoint(debugger, command, exe_ctx, result, internal_dict): \"\"\" Set or unset hardware breakpoints. Hardware", "lldbagilityutils import stubvm vm = None def _exec_cmd(debugger, command, capture_output=False): if capture_output: cmdretobj", "-f lldbagility.fdp_save fdp-save\") debugger.HandleCommand(\"command script add -f lldbagility.fdp_restore fdp-restore\") debugger.HandleCommand( \"command script add", "fdp_interrupt(debugger, command, exe_ctx, result, internal_dict): \"\"\" Interrupt (pause) the execution of the attached", "assert vm.read_virtual_memory(vm.read_register(\"rsp\"), 0x8) == data def _t4(): print(\"* Save/restore\") vm.halt() orig_values = vm.read_registers(regs)", "deleted on attaching. Re-execute this command every time the VM is rebooted. \"\"\"", "them once the state # has been saved, so that LLDB sends again", "the current state of the attached macOS VM. Breakpoints are not saved (but", "the VM state\") vm.interrupt_and_take_snapshot() print(\"* State saved\") # restore soft breakpoints exe_ctx.target.EnableAllBreakpoints() #", "\"rdx\", \"rdi\", \"rsi\", \"rbp\", \"rsp\", \"r8\", \"r9\", \"r10\", \"r11\", \"r12\", \"r13\", \"r14\", \"r15\",", "{reg: 0x1337 for reg in regs} for reg in regs: vm.write_register(reg, new_values[reg]) #", "def fdp_interrupt(debugger, command, exe_ctx, result, internal_dict): \"\"\" Interrupt (pause) the execution of the", "state! 
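# Running the self-tests (illustrative):
#   (lldb) fdp-test
# Only do this on a disposable VM: as the docstring warns, the tests clobber
# both the live machine state and the last saved snapshot.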
\"\"\" regs = { \"rax\", \"rbx\", \"rcx\", \"rdx\", \"rdi\", \"rsi\", \"rbp\", \"rsp\",", "macOS VM via FDP. The VM must have already been started. Existing breakpoints", "Breakpoints are not saved (but retained for the current session). \"\"\" # saving", "server kdpsv = kdpserver.KDPServer() th = threading.Thread(target=kdpsv.debug, args=(vm,)) th.daemon = True th.start() #", "restore it afterwards dbgregs = vm.read_registers((\"dr0\", \"dr1\", \"dr2\", \"dr3\", \"dr6\", \"dr7\")) # interrupt", "= exe_ctx.process.is_stopped print(\"* Saving the VM state\") vm.interrupt_and_take_snapshot() print(\"* State saved\") # restore", "@_attached def fdp_save(debugger, command, exe_ctx, result, internal_dict): \"\"\" Save the current state of", "restore hard breakpoints vm.write_registers(dbgregs) if not process_was_stopped: # display stop info _exec_cmd(debugger, \"process", "hardware breakpoints. Hardware breakpoints are implemented using the debug registers DR0, DR1, DR2", "FDP debugger.HandleCommand(\"command script add -f lldbagility.fdp_attach fdp-attach\") debugger.HandleCommand(\"command script add -f lldbagility.fdp_save fdp-save\")", "parser.parse_args(shlex.split(command)) _attach(debugger, exe_ctx, stubvm.VMSNSTUB, args.vm_name) def _attach(debugger, exe_ctx, vm_stub, vm_name): global vm print(lldbagilityutils.LLDBAGILITY)", "State saved\") # restore soft breakpoints exe_ctx.target.EnableAllBreakpoints() # restore hard breakpoints vm.write_registers(dbgregs) if", "* 0x100) vm.single_step() vm.resume() time.sleep(0.100) vm.interrupt_and_restore_last_snapshot() assert vm.is_state_halted() assert not vm.is_breakpoint_hit() assert vm.read_registers(regs)", "debug registers DR0, DR1, DR2 and DR3. Consequently, a maximum of four hardware", "a memory write to find out the address of the kdp struct vm.store_kdp_at_next_write_virtual_memory()", "#!/usr/bin/env python2 # -*- coding: utf-8 -*- from __future__ import print_function import argparse", "print_function import argparse import functools import re import shlex import threading import time", "print(\"* Debug registers\") vm.halt() vm.write_register(\"dr7\", 0x0) vm.set_hard_breakpoint(\"rw\", 0x0, 0x1234) assert vm.read_register(\"dr0\") == 0x1234", "lldbagility.fdp_restore fdp-restore\") debugger.HandleCommand( \"command script add -f lldbagility.fdp_interrupt fdp-interrupt\" ) debugger.HandleCommand( \"command script", "VM state\") vm.interrupt_and_take_snapshot() print(\"* State saved\") # restore soft breakpoints exe_ctx.target.EnableAllBreakpoints() # restore", ") unset_parser = subparsers.add_parser(\"unset\") unset_parser.add_argument( \"nreg\", type=lambda i: int(i, 0), choices={0, 1, 2,", "alias fr fdp-restore\") debugger.HandleCommand(\"command alias fi fdp-interrupt\") debugger.HandleCommand(\"command alias fh fdp-hbreakpoint\") # VMSN", "saving the state causes all breakpoints (soft and hard) to be unset, but", "new_values = {reg: 0x1337 for reg in regs} for reg in regs: vm.write_register(reg,", "internal_dict): \"\"\" Interrupt (pause) the execution of the attached macOS VM. 
\"\"\" vm.interrupt()", "DR2 and DR3).\", ) args = parser.parse_args(shlex.split(command)) if args.action == \"set\": vaddr =", "for setting them exe_ctx.target.DisableAllBreakpoints() # similarly, for hard breakpoints we save the state", ") set_parser.add_argument( \"nreg\", type=lambda i: int(i, 0), choices={0, 1, 2, 3}, help=\"Breakpoint slot", "data def _t4(): print(\"* Save/restore\") vm.halt() orig_values = vm.read_registers(regs) orig_data = vm.read_virtual_memory(vm.read_register(\"rsp\"), 0x100)", "save the state of the debug registers before saving, # and restore it", "interrupt and restore the VM state print(\"* Restoring the last saved VM state\")", "debugger.HandleCommand(\"command alias fr fdp-restore\") debugger.HandleCommand(\"command alias fi fdp-interrupt\") debugger.HandleCommand(\"command alias fh fdp-hbreakpoint\") #", "vm.halt() assert vm.is_breakpoint_hit() if exe_ctx.process.is_running: vm.interrupt() vm.unset_all_breakpoints() for _t in (_t1, _t2, _t3,", "(_t1, _t2, _t3, _t4, _t5, _t6): _t() print(\"* All tests passed!\") def __lldb_init_module(debugger,", "load address may differ) fdp_attach(debugger, vm.name, exe_ctx, result, internal_dict) else: print(\"* No saved", "Did you specify the target to debug?\") vm.abort_store_kdp_at_next_write_virtual_memory() def _attached(f): @functools.wraps(f) def _wrapper(*args,", "internal_dict): \"\"\" Save the current state of the attached macOS VM. Breakpoints are", "_evaluate_expression(exe_ctx, expression): res = exe_ctx.frame.EvaluateExpression(expression) try: vaddr = int(res.GetValue(), 0) except (TypeError, ValueError):", "not maintained! Existing breakpoints are deleted on attaching. \"\"\" parser = argparse.ArgumentParser(prog=\"vmsn-attach\") parser.add_argument(\"vm_name\")", "= threading.Thread(target=kdpsv.debug, args=(vm,)) th.daemon = True th.start() # connect LLDB to the fake", "(see VMR3AddSoftBreakpoint()) vm.unset_all_breakpoints() vm.single_step() assert not vm.is_breakpoint_hit() vm.interrupt_and_take_snapshot() vm.single_step() vm.single_step() rip = vm.read_register(\"rip\")", "server kdpsv_addr, kdpsv_port = kdpsv.sv_sock.getsockname() _exec_cmd(debugger, \"kdp-remote '{}:{}'\".format(kdpsv_addr, kdpsv_port)) # trigger a memory", "found\") @_attached def fdp_interrupt(debugger, command, exe_ctx, result, internal_dict): \"\"\" Interrupt (pause) the execution", "registers DR0, DR1, DR2 and DR3).\", ) args = parser.parse_args(shlex.split(command)) if args.action ==", "0x1234) assert vm.read_register(\"dr3\") == 0x1234 assert vm.read_register(\"dr7\") == 0b00110011001100010000000010101010 vm.unset_hard_breakpoint(0x0) assert vm.read_register(\"dr7\") ==", "hard breakpoints we save the state of the debug registers before saving, #", "rebooted. \"\"\" parser = argparse.ArgumentParser(prog=\"fdp-attach\") parser.add_argument(\"vm_name\") args = parser.parse_args(shlex.split(command)) _attach(debugger, exe_ctx, stubvm.FDPSTUB, args.vm_name)", "VM state process_was_stopped = exe_ctx.process.is_stopped print(\"* Saving the VM state\") vm.interrupt_and_take_snapshot() print(\"* State", "reg in regs: vm.write_register(reg, new_values[reg]) # modifications to RFLAGS should be disabled assert", "Interrupt (pause) the execution of the attached macOS VM. 
\"\"\" vm.interrupt() @_attached def", "state process_was_stopped = exe_ctx.process.is_stopped print(\"* Saving the VM state\") vm.interrupt_and_take_snapshot() print(\"* State saved\")", "_t3, _t4, _t5, _t6): _t() print(\"* All tests passed!\") def __lldb_init_module(debugger, internal_dict): #", "print(\"* Resuming the VM execution until reaching kernel code\") vm.complete_attach() print(\"* Kernel load", "the state # has been saved, so that LLDB sends again the KDP", "display stop info _exec_cmd(debugger, \"process status\") @_attached def fdp_restore(debugger, command, exe_ctx, result, internal_dict):", "else: print(\"* No saved state found\") @_attached def fdp_interrupt(debugger, command, exe_ctx, result, internal_dict):", "trap on: execute, read/write, or write only.\", ) set_parser.add_argument( \"nreg\", type=lambda i: int(i,", "assert vm.is_breakpoint_hit() vm.interrupt_and_restore_last_snapshot() vm.single_step() vm.set_hard_breakpoint(\"e\", 0x0, rip) assert not vm.is_breakpoint_hit() vm.resume() time.sleep(0.100) vm.halt()", "state found\") @_attached def fdp_interrupt(debugger, command, exe_ctx, result, internal_dict): \"\"\" Interrupt (pause) the", "@_attached def fdp_interrupt(debugger, command, exe_ctx, result, internal_dict): \"\"\" Interrupt (pause) the execution of", "in range(100): vm.single_step() assert vm.is_state_halted() def _t2(): print(\"* Read/write registers\") vm.halt() orig_values =", "def _exec_cmd(debugger, command, capture_output=False): if capture_output: cmdretobj = lldb.SBCommandReturnObject() debugger.GetCommandInterpreter().HandleCommand(command, cmdretobj) return cmdretobj", "parser = argparse.ArgumentParser(prog=\"fdp-attach\") parser.add_argument(\"vm_name\") args = parser.parse_args(shlex.split(command)) _attach(debugger, exe_ctx, stubvm.FDPSTUB, args.vm_name) def vmsn_attach(debugger,", "kernel code\") vm.complete_attach() print(\"* Kernel load address: 0x{:016x}\".format(vm.kernel_load_vaddr)) print(\"* Kernel slide: 0x{:x}\".format(vm.kernel_slide)) print(\"*", "# restore soft breakpoints exe_ctx.target.EnableAllBreakpoints() # restore hard breakpoints vm.write_registers(dbgregs) if not process_was_stopped:", "stubvm vm = None def _exec_cmd(debugger, command, capture_output=False): if capture_output: cmdretobj = lldb.SBCommandReturnObject()", "the execution of the attached macOS VM. 
\"\"\" vm.interrupt() @_attached def fdp_hbreakpoint(debugger, command,", "vm.set_hard_breakpoint(\"w\", 0x0, 0x1234) assert vm.read_register(\"dr7\") == 0b00000000000000010000000000000010 vm.set_hard_breakpoint(\"rw\", 0x1, 0x1234) assert vm.read_register(\"dr1\") ==", "in (_t1, _t2, _t3, _t4, _t5, _t6): _t() print(\"* All tests passed!\") def", "debugger.HandleCommand(\"command script add -f lldbagility.fdp_restore fdp-restore\") debugger.HandleCommand( \"command script add -f lldbagility.fdp_interrupt fdp-interrupt\"", "script add -f lldbagility.fdp_save fdp-save\") debugger.HandleCommand(\"command script add -f lldbagility.fdp_restore fdp-restore\") debugger.HandleCommand( \"command", "alias fh fdp-hbreakpoint\") # VMSN debugger.HandleCommand(\"command script add -f lldbagility.vmsn_attach vmsn-attach\") debugger.HandleCommand(\"command alias", "= argparse.ArgumentParser(prog=\"fdp-attach\") parser.add_argument(\"vm_name\") args = parser.parse_args(shlex.split(command)) _attach(debugger, exe_ctx, stubvm.FDPSTUB, args.vm_name) def vmsn_attach(debugger, command,", "assert vm.read_registers(regs) == orig_values assert vm.read_virtual_memory(vm.read_register(\"rsp\"), 0x100) == orig_data def _t5(): print(\"* Debug", "assert vm.read_register(\"dr7\") == 0b00000000001100010000000000001010 vm.set_hard_breakpoint(\"rw\", 0x2, 0x1234) assert vm.read_register(\"dr2\") == 0x1234 assert vm.read_register(\"dr7\")", "print(\"* VM breakpoints deleted\") # detach the previous process (if any) exe_ctx.process.Detach() #", "DR3).\", ) args = parser.parse_args(shlex.split(command)) if args.action == \"set\": vaddr = _evaluate_expression(exe_ctx, args.expression)", "expression): res = exe_ctx.frame.EvaluateExpression(expression) try: vaddr = int(res.GetValue(), 0) except (TypeError, ValueError): return", "not vm: print(\"* Not attached to a VM!\") return return f(*args, **kwargs) return", "the kdp struct vm.store_kdp_at_next_write_virtual_memory() if _exec_cmd(debugger, \"memory write &kdp 41\", capture_output=True).GetError(): print(\"* Unable", "we save the state of the debug registers before saving, # and restore", "debugger.GetCommandInterpreter().HandleCommand(command, cmdretobj) return cmdretobj else: debugger.HandleCommand(command) return None def _evaluate_expression(exe_ctx, expression): res =", "VM via FDP. The VM must have already been started. Existing breakpoints are", "vaddr: vm.set_hard_breakpoint(args.trigger, args.nreg, vaddr) print(\"* Hardware breakpoint set: address = 0x{:016x}\".format(vaddr)) else: print(\"*", "argparse import functools import re import shlex import threading import time import traceback", "import traceback import kdpserver import lldb import lldbagilityutils import stubvm vm = None", "vaddr) print(\"* Hardware breakpoint set: address = 0x{:016x}\".format(vaddr)) else: print(\"* Invalid expression\") elif", "the VM is rebooted. 
\"\"\" parser = argparse.ArgumentParser(prog=\"fdp-attach\") parser.add_argument(\"vm_name\") args = parser.parse_args(shlex.split(command)) _attach(debugger,", "process_was_stopped = exe_ctx.process.is_stopped print(\"* Saving the VM state\") vm.interrupt_and_take_snapshot() print(\"* State saved\") #", "saved state found\") @_attached def fdp_interrupt(debugger, command, exe_ctx, result, internal_dict): \"\"\" Interrupt (pause)", "vm.read_register(\"rip\") vm.interrupt_and_restore_last_snapshot() vm.single_step() bpid = vm.set_soft_exec_breakpoint(rip) assert 0 <= bpid <= 254 assert", "# start the fake KDP server kdpsv = kdpserver.KDPServer() th = threading.Thread(target=kdpsv.debug, args=(vm,))", "current state of the attached macOS VM. Breakpoints are not saved (but retained", "on: execute, read/write, or write only.\", ) set_parser.add_argument( \"nreg\", type=lambda i: int(i, 0),", "\"r10\", \"r11\", \"r12\", \"r13\", \"r14\", \"r15\", \"rip\", \"rflags\", \"cs\", \"fs\", \"gs\", } def", "assert vm.read_register(\"dr7\") == 0b00110011001100010000000000000000 def _t6(): print(\"* Soft/hard exec breakpoint\") vm.halt() # keep", "0b00110011001100010000000010101000 vm.unset_hard_breakpoint(0x1) assert vm.read_register(\"dr7\") == 0b00110011001100010000000010100000 vm.unset_hard_breakpoint(0x2) assert vm.read_register(\"dr7\") == 0b00110011001100010000000010000000 vm.unset_hard_breakpoint(0x3) assert", "vm.halt() for _ in range(100): vm.single_step() assert vm.is_state_halted() def _t2(): print(\"* Read/write registers\")", "the fake KDP server kdpsv = kdpserver.KDPServer() th = threading.Thread(target=kdpsv.debug, args=(vm,)) th.daemon =", "to find the 'kdp' symbol. Did you specify the target to debug?\") vm.abort_store_kdp_at_next_write_virtual_memory()", "add -f lldbagility.fdp_restore fdp-restore\") debugger.HandleCommand( \"command script add -f lldbagility.fdp_interrupt fdp-interrupt\" ) debugger.HandleCommand(", "Exception as exc: print(\"* Could not attach! {}\".format(str(exc))) return print(\"* Resuming the VM", "\"\"\" Save the current state of the attached macOS VM. Breakpoints are not", "\"\"\" # saving the state causes all breakpoints (soft and hard) to be", "print(\"* No saved state found\") @_attached def fdp_interrupt(debugger, command, exe_ctx, result, internal_dict): \"\"\"", "in regs: assert vm.read_register(reg) == orig_values[reg] def _t3(): print(\"* Read/write virtual memory\") vm.halt()", "def _t4(): print(\"* Save/restore\") vm.halt() orig_values = vm.read_registers(regs) orig_data = vm.read_virtual_memory(vm.read_register(\"rsp\"), 0x100) vm.interrupt_and_take_snapshot()", "0x0) vm.set_hard_breakpoint(\"rw\", 0x0, 0x1234) assert vm.read_register(\"dr0\") == 0x1234 assert vm.read_register(\"dr7\") == 0b00000000000000110000000000000010 vm.set_hard_breakpoint(\"e\",", "\"w\"}, help=\"Type of memory access to trap on: execute, read/write, or write only.\",", "for _ in range(100): vm.single_step() assert vm.is_state_halted() def _t2(): print(\"* Read/write registers\") vm.halt()", "set_parser.add_argument( \"expression\", help=\"Breakpoint address or expression to be evaluated as such.\" ) unset_parser", "result, internal_dict): \"\"\" Run some tests. 
Warning: tests change the state of the", "utf-8 -*- from __future__ import print_function import argparse import functools import re import", "connect LLDB to the fake KDP server kdpsv_addr, kdpsv_port = kdpsv.sv_sock.getsockname() _exec_cmd(debugger, \"kdp-remote", "command, exe_ctx, result, internal_dict): \"\"\" Restore the attached macOS VM to the last", "kernel load address may differ) fdp_attach(debugger, vm.name, exe_ctx, result, internal_dict) else: print(\"* No", "to free (corresponding to registers DR0, DR1, DR2 and DR3).\", ) args =", "the current session # we disable soft breakpoints before saving and then re-enable", "stubvm.STUBVM(vm_stub, vm_name) except Exception as exc: print(\"* Could not attach! {}\".format(str(exc))) return print(\"*", "assert vm.is_state_halted() assert not vm.is_breakpoint_hit() assert vm.read_registers(regs) == orig_values assert vm.read_virtual_memory(vm.read_register(\"rsp\"), 0x100) ==", "_t2(): print(\"* Read/write registers\") vm.halt() orig_values = vm.read_registers(regs) new_values = {reg: 0x1337 for", "VMSN. Currently not maintained! Existing breakpoints are deleted on attaching. \"\"\" parser =", "not vm.is_breakpoint_hit() assert vm.read_registers(regs) == orig_values assert vm.read_virtual_memory(vm.read_register(\"rsp\"), 0x100) == orig_data def _t5():", "or write only.\", ) set_parser.add_argument( \"nreg\", type=lambda i: int(i, 0), choices={0, 1, 2,", "vm.interrupt_and_take_snapshot() vm.single_step() vm.single_step() rip = vm.read_register(\"rip\") vm.interrupt_and_restore_last_snapshot() vm.single_step() bpid = vm.set_soft_exec_breakpoint(rip) assert 0", "return None else: return vaddr def fdp_attach(debugger, command, exe_ctx, result, internal_dict): \"\"\" Connect", "\"kdp-remote '{}:{}'\".format(kdpsv_addr, kdpsv_port)) # trigger a memory write to find out the address", "every time the VM is rebooted. \"\"\" parser = argparse.ArgumentParser(prog=\"fdp-attach\") parser.add_argument(\"vm_name\") args =", "for the current session). \"\"\" # saving the state causes all breakpoints (soft", "soft breakpoints exe_ctx.target.EnableAllBreakpoints() # restore hard breakpoints vm.write_registers(dbgregs) if not process_was_stopped: # display", "= vm.set_soft_exec_breakpoint(rip) assert 0 <= bpid <= 254 assert not vm.is_breakpoint_hit() vm.resume() time.sleep(0.100)", "-f lldbagility.fdp_interrupt fdp-interrupt\" ) debugger.HandleCommand( \"command script add -f lldbagility.fdp_hbreakpoint fdp-hbreakpoint\" ) debugger.HandleCommand(\"command", "are implemented using the debug registers DR0, DR1, DR2 and DR3. Consequently, a", "kdpsv = kdpserver.KDPServer() th = threading.Thread(target=kdpsv.debug, args=(vm,)) th.daemon = True th.start() # connect", "time.sleep(0.100) vm.interrupt_and_restore_last_snapshot() assert vm.is_state_halted() assert not vm.is_breakpoint_hit() assert vm.read_registers(regs) == orig_values assert vm.read_virtual_memory(vm.read_register(\"rsp\"),", "\"r11\", \"r12\", \"r13\", \"r14\", \"r15\", \"rip\", \"rflags\", \"cs\", \"fs\", \"gs\", } def _t1():", "parser.add_argument(\"vm_name\") args = parser.parse_args(shlex.split(command)) _attach(debugger, exe_ctx, stubvm.FDPSTUB, args.vm_name) def vmsn_attach(debugger, command, exe_ctx, result,", "the current session). 
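
# Example attach session (a hedged sketch: the kernel path and VM name are
# placeholders, output abbreviated):
#
#   $ lldb kernel.development
#   (lldb) command script import /path/to/lldbagility.py
#   (lldb) fdp-attach macos-vm
#   * Attaching to the VM
#   * Resuming the VM execution until reaching kernel code
#   ...
#
# Note that a target must be set before attaching: the "memory write &kdp 41"
# probe above only succeeds if LLDB can resolve the 'kdp' symbol.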
\"\"\" # saving the state causes all breakpoints (soft and", "== 0b00110011001100010000000010101010 vm.unset_hard_breakpoint(0x0) assert vm.read_register(\"dr7\") == 0b00110011001100010000000010101000 vm.unset_hard_breakpoint(0x1) assert vm.read_register(\"dr7\") == 0b00110011001100010000000010100000 vm.unset_hard_breakpoint(0x2)", "DR1, DR2 and DR3).\", ) args = parser.parse_args(shlex.split(command)) if args.action == \"set\": vaddr", "= stubvm.STUBVM(vm_stub, vm_name) except Exception as exc: print(\"* Could not attach! {}\".format(str(exc))) return", "Warning: tests change the state of the machine and modify the last saved", "<= 254 assert not vm.is_breakpoint_hit() vm.resume() time.sleep(0.100) vm.halt() assert vm.is_breakpoint_hit() vm.interrupt_and_restore_last_snapshot() vm.single_step() vm.set_hard_breakpoint(\"e\",", "to the VM\") try: vm = stubvm.STUBVM(vm_stub, vm_name) except Exception as exc: print(\"*", "fdp-save\") debugger.HandleCommand(\"command script add -f lldbagility.fdp_restore fdp-restore\") debugger.HandleCommand( \"command script add -f lldbagility.fdp_interrupt", "0b00110011001100010000000010101010 vm.unset_hard_breakpoint(0x0) assert vm.read_register(\"dr7\") == 0b00110011001100010000000010101000 vm.unset_hard_breakpoint(0x1) assert vm.read_register(\"dr7\") == 0b00110011001100010000000010100000 vm.unset_hard_breakpoint(0x2) assert", "_attach(debugger, exe_ctx, stubvm.VMSNSTUB, args.vm_name) def _attach(debugger, exe_ctx, vm_stub, vm_name): global vm print(lldbagilityutils.LLDBAGILITY) print(\"*", "script add -f lldbagility.fdp_hbreakpoint fdp-hbreakpoint\" ) debugger.HandleCommand(\"command script add -f lldbagility.fdp_test fdp-test\") debugger.HandleCommand(\"command", "VM must have already been started. Existing breakpoints are deleted on attaching. Re-execute", "vm.set_hard_breakpoint(\"rw\", 0x0, 0x1234) assert vm.read_register(\"dr0\") == 0x1234 assert vm.read_register(\"dr7\") == 0b00000000000000110000000000000010 vm.set_hard_breakpoint(\"e\", 0x0,", "breakpoints are deleted on attaching. \"\"\" parser = argparse.ArgumentParser(prog=\"vmsn-attach\") parser.add_argument(\"vm_name\") args = parser.parse_args(shlex.split(command))", "_attach(debugger, exe_ctx, stubvm.FDPSTUB, args.vm_name) def vmsn_attach(debugger, command, exe_ctx, result, internal_dict): \"\"\" Connect to", "time.sleep(0.100) vm.halt() assert vm.is_breakpoint_hit() if exe_ctx.process.is_running: vm.interrupt() vm.unset_all_breakpoints() for _t in (_t1, _t2,", "_wrapper @_attached def fdp_save(debugger, command, exe_ctx, result, internal_dict): \"\"\" Save the current state", "for the current session # we disable soft breakpoints before saving and then", "# remove all LLDB breakpoints exe_ctx.target.DeleteAllBreakpoints() print(\"* LLDB breakpoints deleted\") # start the", "th.daemon = True th.start() # connect LLDB to the fake KDP server kdpsv_addr,", "address of the kdp struct vm.store_kdp_at_next_write_virtual_memory() if _exec_cmd(debugger, \"memory write &kdp 41\", capture_output=True).GetError():", "None def _exec_cmd(debugger, command, capture_output=False): if capture_output: cmdretobj = lldb.SBCommandReturnObject() debugger.GetCommandInterpreter().HandleCommand(command, cmdretobj) return", "traceback import kdpserver import lldb import lldbagilityutils import stubvm vm = None def", "Restore the attached macOS VM to the last saved state. Breakpoints are deleted", "exe_ctx, result, internal_dict): \"\"\" Set or unset hardware breakpoints. 
Hardware breakpoints are implemented", "to be evaluated as such.\" ) unset_parser = subparsers.add_parser(\"unset\") unset_parser.add_argument( \"nreg\", type=lambda i:", "modify the last saved state! \"\"\" regs = { \"rax\", \"rbx\", \"rcx\", \"rdx\",", "vm.read_register(\"dr7\") == 0b00110011001100010000000010000000 vm.unset_hard_breakpoint(0x3) assert vm.read_register(\"dr7\") == 0b00110011001100010000000000000000 def _t6(): print(\"* Soft/hard exec", "fdp-interrupt\") debugger.HandleCommand(\"command alias fh fdp-hbreakpoint\") # VMSN debugger.HandleCommand(\"command script add -f lldbagility.vmsn_attach vmsn-attach\")", ").\", ) set_parser.add_argument( \"expression\", help=\"Breakpoint address or expression to be evaluated as such.\"", "VM via VMSN. Currently not maintained! Existing breakpoints are deleted on attaching. \"\"\"", "print(\"* Kernel cr3: 0x{:x}\".format(vm.kernel_cr3)) print(\"* Kernel version: {}\".format(vm.kernel_version)) print(\"* VM breakpoints deleted\") #", "vm.set_hard_breakpoint(\"rw\", 0x1, 0x1234) assert vm.read_register(\"dr1\") == 0x1234 assert vm.read_register(\"dr7\") == 0b00000000001100010000000000001010 vm.set_hard_breakpoint(\"rw\", 0x2,", "__lldb_init_module(debugger, internal_dict): # FDP debugger.HandleCommand(\"command script add -f lldbagility.fdp_attach fdp-attach\") debugger.HandleCommand(\"command script add", "fdp-restore\") debugger.HandleCommand( \"command script add -f lldbagility.fdp_interrupt fdp-interrupt\" ) debugger.HandleCommand( \"command script add", "0) except (TypeError, ValueError): return None else: return vaddr def fdp_attach(debugger, command, exe_ctx,", "lldbagility.fdp_hbreakpoint fdp-hbreakpoint\" ) debugger.HandleCommand(\"command script add -f lldbagility.fdp_test fdp-test\") debugger.HandleCommand(\"command alias fa fdp-attach\")", "memory\") vm.halt() data = vm.read_virtual_memory(vm.read_register(\"rsp\"), 0x8) new_data = b\"ABCDEFGH\" vm.write_virtual_memory(vm.read_register(\"rsp\"), new_data) assert vm.read_virtual_memory(vm.read_register(\"rsp\"),", "DR3. Consequently, a maximum of four hardware breakpoints can be active simultaneously. \"\"\"", "the state causes all breakpoints (soft and hard) to be unset, but #", "_exec_cmd(debugger, \"memory write &kdp 41\", capture_output=True).GetError(): print(\"* Unable to find the 'kdp' symbol.", "a full reattach (the kernel load address may differ) fdp_attach(debugger, vm.name, exe_ctx, result,", "(pause) the execution of the attached macOS VM. \"\"\" vm.interrupt() @_attached def fdp_hbreakpoint(debugger,", "except Exception as exc: print(\"* Could not attach! {}\".format(str(exc))) return print(\"* Resuming the", "add -f lldbagility.fdp_test fdp-test\") debugger.HandleCommand(\"command alias fa fdp-attach\") debugger.HandleCommand(\"command alias fs fdp-save\") debugger.HandleCommand(\"command", "-*- from __future__ import print_function import argparse import functools import re import shlex", "= exe_ctx.frame.EvaluateExpression(expression) try: vaddr = int(res.GetValue(), 0) except (TypeError, ValueError): return None else:", "deleted on restoring. 
\"\"\" # interrupt and restore the VM state print(\"* Restoring", "regs: vm.write_register(reg, new_values[reg]) # modifications to RFLAGS should be disabled assert vm.read_register(\"rflags\") ==", "LLDB to the fake KDP server kdpsv_addr, kdpsv_port = kdpsv.sv_sock.getsockname() _exec_cmd(debugger, \"kdp-remote '{}:{}'\".format(kdpsv_addr,", "exe_ctx.frame.EvaluateExpression(expression) try: vaddr = int(res.GetValue(), 0) except (TypeError, ValueError): return None else: return", "all breakpoints (soft and hard) to be unset, but # we can preserve", "Invalid expression\") elif args.action == \"unset\": vm.unset_hard_breakpoint(args.nreg) print(\"* Hardware breakpoint unset\") else: raise", "and page breakpoints do not work just after a restore # (see VMR3AddSoftBreakpoint())", "time.sleep(0.100) vm.halt() assert vm.is_breakpoint_hit() vm.interrupt_and_restore_last_snapshot() vm.single_step() vm.set_hard_breakpoint(\"e\", 0x0, rip) assert not vm.is_breakpoint_hit() vm.resume()", "- {\"rflags\"}) == new_values vm.write_registers(orig_values) for reg in regs: assert vm.read_register(reg) == orig_values[reg]", "Existing breakpoints are deleted on attaching. \"\"\" parser = argparse.ArgumentParser(prog=\"vmsn-attach\") parser.add_argument(\"vm_name\") args =", "_t() print(\"* All tests passed!\") def __lldb_init_module(debugger, internal_dict): # FDP debugger.HandleCommand(\"command script add", "to a macOS VM via VMSN. Currently not maintained! Existing breakpoints are deleted", "return _wrapper @_attached def fdp_save(debugger, command, exe_ctx, result, internal_dict): \"\"\" Save the current", "\"process status\") @_attached def fdp_restore(debugger, command, exe_ctx, result, internal_dict): \"\"\" Restore the attached", "'kdp' symbol. Did you specify the target to debug?\") vm.abort_store_kdp_at_next_write_virtual_memory() def _attached(f): @functools.wraps(f)", "exe_ctx.target.DisableAllBreakpoints() # similarly, for hard breakpoints we save the state of the debug", "vm.read_virtual_memory(vm.read_register(\"rsp\"), 0x100) == orig_data def _t5(): print(\"* Debug registers\") vm.halt() vm.write_register(\"dr7\", 0x0) vm.set_hard_breakpoint(\"rw\",", "assert vm.read_register(reg) == orig_values[reg] def _t3(): print(\"* Read/write virtual memory\") vm.halt() data =", "threading import time import traceback import kdpserver import lldb import lldbagilityutils import stubvm", "assert vm.read_register(\"dr7\") == 0b00000000000000000000000000000010 vm.set_hard_breakpoint(\"w\", 0x0, 0x1234) assert vm.read_register(\"dr7\") == 0b00000000000000010000000000000010 vm.set_hard_breakpoint(\"rw\", 0x1,", "def _attach(debugger, exe_ctx, vm_stub, vm_name): global vm print(lldbagilityutils.LLDBAGILITY) print(\"* Attaching to the VM\")", "import print_function import argparse import functools import re import shlex import threading import", "args = parser.parse_args(shlex.split(command)) _attach(debugger, exe_ctx, stubvm.VMSNSTUB, args.vm_name) def _attach(debugger, exe_ctx, vm_stub, vm_name): global", "not vm.is_state_halted() vm.halt() for _ in range(100): vm.single_step() assert vm.is_state_halted() def _t2(): print(\"*", "lldb.SBCommandReturnObject() debugger.GetCommandInterpreter().HandleCommand(command, cmdretobj) return cmdretobj else: debugger.HandleCommand(command) return None def _evaluate_expression(exe_ctx, expression): res", "vm.interrupt_and_restore_last_snapshot(): print(\"* State restored\") # do a full reattach (the kernel load address", "registers ).\", ) set_parser.add_argument( 
\"expression\", help=\"Breakpoint address or expression to be evaluated as", "vm.single_step() rip = vm.read_register(\"rip\") vm.interrupt_and_restore_last_snapshot() vm.single_step() bpid = vm.set_soft_exec_breakpoint(rip) assert 0 <= bpid", "== orig_values[\"rflags\"] del new_values[\"rflags\"] assert vm.read_registers(regs - {\"rflags\"}) == new_values vm.write_registers(orig_values) for reg", "vm.read_register(\"dr3\") == 0x1234 assert vm.read_register(\"dr7\") == 0b00110011001100010000000010101010 vm.unset_hard_breakpoint(0x0) assert vm.read_register(\"dr7\") == 0b00110011001100010000000010101000 vm.unset_hard_breakpoint(0x1)", "help=\"Breakpoint slot to use (corresponding to registers ).\", ) set_parser.add_argument( \"expression\", help=\"Breakpoint address", "mind that FDP soft and page breakpoints do not work just after a", "fdp_save(debugger, command, exe_ctx, result, internal_dict): \"\"\" Save the current state of the attached", "set: address = 0x{:016x}\".format(vaddr)) else: print(\"* Invalid expression\") elif args.action == \"unset\": vm.unset_hard_breakpoint(args.nreg)", "if not vm: print(\"* Not attached to a VM!\") return return f(*args, **kwargs)", "0x{:016x}\".format(vm.kernel_load_vaddr)) print(\"* Kernel slide: 0x{:x}\".format(vm.kernel_slide)) print(\"* Kernel cr3: 0x{:x}\".format(vm.kernel_cr3)) print(\"* Kernel version: {}\".format(vm.kernel_version))", "reg in regs} for reg in regs: vm.write_register(reg, new_values[reg]) # modifications to RFLAGS", "(corresponding to registers ).\", ) set_parser.add_argument( \"expression\", help=\"Breakpoint address or expression to be", "vm.halt() orig_values = vm.read_registers(regs) orig_data = vm.read_virtual_memory(vm.read_register(\"rsp\"), 0x100) vm.interrupt_and_take_snapshot() assert vm.is_state_halted() vm.write_virtual_memory(vm.read_register(\"rsp\"), b\"A\"", "LLDB breakpoints exe_ctx.target.DeleteAllBreakpoints() print(\"* LLDB breakpoints deleted\") # start the fake KDP server", "0x1234 assert vm.read_register(\"dr7\") == 0b00000011001100010000000000101010 vm.set_hard_breakpoint(\"rw\", 0x3, 0x1234) assert vm.read_register(\"dr3\") == 0x1234 assert", "== 0b00110011001100010000000000000000 def _t6(): print(\"* Soft/hard exec breakpoint\") vm.halt() # keep in mind", "so that LLDB sends again the KDP requests for setting them exe_ctx.target.DisableAllBreakpoints() #", "python2 # -*- coding: utf-8 -*- from __future__ import print_function import argparse import", "slide: 0x{:x}\".format(vm.kernel_slide)) print(\"* Kernel cr3: 0x{:x}\".format(vm.kernel_cr3)) print(\"* Kernel version: {}\".format(vm.kernel_version)) print(\"* VM breakpoints", "print(\"* Could not attach! 
{}\".format(str(exc))) return print(\"* Resuming the VM execution until reaching", "previous process (if any) exe_ctx.process.Detach() # remove all LLDB breakpoints exe_ctx.target.DeleteAllBreakpoints() print(\"* LLDB", "int(i, 0), choices={0, 1, 2, 3}, help=\"Breakpoint slot to use (corresponding to registers", "afterwards dbgregs = vm.read_registers((\"dr0\", \"dr1\", \"dr2\", \"dr3\", \"dr6\", \"dr7\")) # interrupt and save", "restored\") # do a full reattach (the kernel load address may differ) fdp_attach(debugger,", "vm.read_register(\"dr7\") == 0b00110011001100010000000000000000 def _t6(): print(\"* Soft/hard exec breakpoint\") vm.halt() # keep in", "disable soft breakpoints before saving and then re-enable them once the state #", "memory access to trap on: execute, read/write, or write only.\", ) set_parser.add_argument( \"nreg\",", "vm.is_state_halted() vm.resume() assert not vm.is_state_halted() vm.halt() for _ in range(100): vm.single_step() assert vm.is_state_halted()", "write to find out the address of the kdp struct vm.store_kdp_at_next_write_virtual_memory() if _exec_cmd(debugger,", "= 0x{:016x}\".format(vaddr)) else: print(\"* Invalid expression\") elif args.action == \"unset\": vm.unset_hard_breakpoint(args.nreg) print(\"* Hardware", "vm.interrupt_and_take_snapshot() print(\"* State saved\") # restore soft breakpoints exe_ctx.target.EnableAllBreakpoints() # restore hard breakpoints", "@_attached def fdp_test(debugger, command, exe_ctx, result, internal_dict): \"\"\" Run some tests. Warning: tests", "= parser.parse_args(shlex.split(command)) if args.action == \"set\": vaddr = _evaluate_expression(exe_ctx, args.expression) if vaddr: vm.set_hard_breakpoint(args.trigger,", "0x100) vm.interrupt_and_take_snapshot() assert vm.is_state_halted() vm.write_virtual_memory(vm.read_register(\"rsp\"), b\"A\" * 0x100) vm.single_step() vm.resume() time.sleep(0.100) vm.interrupt_and_restore_last_snapshot() assert", "_t6(): print(\"* Soft/hard exec breakpoint\") vm.halt() # keep in mind that FDP soft", "exe_ctx, stubvm.FDPSTUB, args.vm_name) def vmsn_attach(debugger, command, exe_ctx, result, internal_dict): \"\"\" Connect to a", "debugger.HandleCommand(\"command alias fi fdp-interrupt\") debugger.HandleCommand(\"command alias fh fdp-hbreakpoint\") # VMSN debugger.HandleCommand(\"command script add", "unset_parser = subparsers.add_parser(\"unset\") unset_parser.add_argument( \"nreg\", type=lambda i: int(i, 0), choices={0, 1, 2, 3},", ") set_parser.add_argument( \"expression\", help=\"Breakpoint address or expression to be evaluated as such.\" )", "only.\", ) set_parser.add_argument( \"nreg\", type=lambda i: int(i, 0), choices={0, 1, 2, 3}, help=\"Breakpoint", "All tests passed!\") def __lldb_init_module(debugger, internal_dict): # FDP debugger.HandleCommand(\"command script add -f lldbagility.fdp_attach", "the attached macOS VM. \"\"\" vm.interrupt() @_attached def fdp_hbreakpoint(debugger, command, exe_ctx, result, internal_dict):", "breakpoints can be active simultaneously. \"\"\" parser = argparse.ArgumentParser(prog=\"fdp-hbreakpoint\") subparsers = parser.add_subparsers(dest=\"action\") set_parser", "on attaching. 
\"\"\" parser = argparse.ArgumentParser(prog=\"vmsn-attach\") parser.add_argument(\"vm_name\") args = parser.parse_args(shlex.split(command)) _attach(debugger, exe_ctx, stubvm.VMSNSTUB,", "address may differ) fdp_attach(debugger, vm.name, exe_ctx, result, internal_dict) else: print(\"* No saved state", "\"gs\", } def _t1(): print(\"* Halt/resume/single step\") vm.halt() assert vm.is_state_halted() vm.resume() assert not", "vm.interrupt_and_restore_last_snapshot() vm.single_step() vm.set_hard_breakpoint(\"e\", 0x0, rip) assert not vm.is_breakpoint_hit() vm.resume() time.sleep(0.100) vm.halt() assert vm.is_breakpoint_hit()", "until reaching kernel code\") vm.complete_attach() print(\"* Kernel load address: 0x{:016x}\".format(vm.kernel_load_vaddr)) print(\"* Kernel slide:", "vm.abort_store_kdp_at_next_write_virtual_memory() def _attached(f): @functools.wraps(f) def _wrapper(*args, **kwargs): global vm if not vm: print(\"*", "print(\"* Soft/hard exec breakpoint\") vm.halt() # keep in mind that FDP soft and", "print(\"* State saved\") # restore soft breakpoints exe_ctx.target.EnableAllBreakpoints() # restore hard breakpoints vm.write_registers(dbgregs)", "Halt/resume/single step\") vm.halt() assert vm.is_state_halted() vm.resume() assert not vm.is_state_halted() vm.halt() for _ in", "_exec_cmd(debugger, \"process status\") @_attached def fdp_restore(debugger, command, exe_ctx, result, internal_dict): \"\"\" Restore the", "0x{:x}\".format(vm.kernel_slide)) print(\"* Kernel cr3: 0x{:x}\".format(vm.kernel_cr3)) print(\"* Kernel version: {}\".format(vm.kernel_version)) print(\"* VM breakpoints deleted\")", "as exc: print(\"* Could not attach! {}\".format(str(exc))) return print(\"* Resuming the VM execution", "Run some tests. Warning: tests change the state of the machine and modify", "else: debugger.HandleCommand(command) return None def _evaluate_expression(exe_ctx, expression): res = exe_ctx.frame.EvaluateExpression(expression) try: vaddr =", "vm_stub, vm_name): global vm print(lldbagilityutils.LLDBAGILITY) print(\"* Attaching to the VM\") try: vm =", "macOS VM via VMSN. Currently not maintained! Existing breakpoints are deleted on attaching.", "vm.read_registers(regs - {\"rflags\"}) == new_values vm.write_registers(orig_values) for reg in regs: assert vm.read_register(reg) ==", "the previous process (if any) exe_ctx.process.Detach() # remove all LLDB breakpoints exe_ctx.target.DeleteAllBreakpoints() print(\"*", "soft and page breakpoints do not work just after a restore # (see", "alias fs fdp-save\") debugger.HandleCommand(\"command alias fr fdp-restore\") debugger.HandleCommand(\"command alias fi fdp-interrupt\") debugger.HandleCommand(\"command alias", "find the 'kdp' symbol. Did you specify the target to debug?\") vm.abort_store_kdp_at_next_write_virtual_memory() def", "DR0, DR1, DR2 and DR3).\", ) args = parser.parse_args(shlex.split(command)) if args.action == \"set\":", "LLDB breakpoints deleted\") # start the fake KDP server kdpsv = kdpserver.KDPServer() th", "saved state. Breakpoints are deleted on restoring. \"\"\" # interrupt and restore the", "import shlex import threading import time import traceback import kdpserver import lldb import", "macOS VM. Breakpoints are not saved (but retained for the current session). 
\"\"\"", "saving and then re-enable them once the state # has been saved, so", "vm.read_register(\"dr2\") == 0x1234 assert vm.read_register(\"dr7\") == 0b00000011001100010000000000101010 vm.set_hard_breakpoint(\"rw\", 0x3, 0x1234) assert vm.read_register(\"dr3\") ==", "soft breakpoints before saving and then re-enable them once the state # has", "choices={0, 1, 2, 3}, help=\"Breakpoint slot to use (corresponding to registers ).\", )", "= lldb.SBCommandReturnObject() debugger.GetCommandInterpreter().HandleCommand(command, cmdretobj) return cmdretobj else: debugger.HandleCommand(command) return None def _evaluate_expression(exe_ctx, expression):", "the machine and modify the last saved state! \"\"\" regs = { \"rax\",", "simultaneously. \"\"\" parser = argparse.ArgumentParser(prog=\"fdp-hbreakpoint\") subparsers = parser.add_subparsers(dest=\"action\") set_parser = subparsers.add_parser(\"set\") set_parser.add_argument( \"trigger\",", "else: return vaddr def fdp_attach(debugger, command, exe_ctx, result, internal_dict): \"\"\" Connect to a", "= vm.read_registers((\"dr0\", \"dr1\", \"dr2\", \"dr3\", \"dr6\", \"dr7\")) # interrupt and save the VM", "0), choices={0, 1, 2, 3}, help=\"Breakpoint slot to free (corresponding to registers DR0,", "if exe_ctx.process.is_running: vm.interrupt() vm.unset_all_breakpoints() for _t in (_t1, _t2, _t3, _t4, _t5, _t6):", "new_values[reg]) # modifications to RFLAGS should be disabled assert vm.read_register(\"rflags\") == orig_values[\"rflags\"] del", "vm: print(\"* Not attached to a VM!\") return return f(*args, **kwargs) return _wrapper", "\"unset\": vm.unset_hard_breakpoint(args.nreg) print(\"* Hardware breakpoint unset\") else: raise AssertionError @_attached def fdp_test(debugger, command,", "assert vm.is_state_halted() vm.write_virtual_memory(vm.read_register(\"rsp\"), b\"A\" * 0x100) vm.single_step() vm.resume() time.sleep(0.100) vm.interrupt_and_restore_last_snapshot() assert vm.is_state_halted() assert", "state of the debug registers before saving, # and restore it afterwards dbgregs", "address or expression to be evaluated as such.\" ) unset_parser = subparsers.add_parser(\"unset\") unset_parser.add_argument(", "breakpoints deleted\") # start the fake KDP server kdpsv = kdpserver.KDPServer() th =", "VM to the last saved state. Breakpoints are deleted on restoring. 
\"\"\" #", "-f lldbagility.fdp_hbreakpoint fdp-hbreakpoint\" ) debugger.HandleCommand(\"command script add -f lldbagility.fdp_test fdp-test\") debugger.HandleCommand(\"command alias fa", "True th.start() # connect LLDB to the fake KDP server kdpsv_addr, kdpsv_port =", "to registers DR0, DR1, DR2 and DR3).\", ) args = parser.parse_args(shlex.split(command)) if args.action", "\"r14\", \"r15\", \"rip\", \"rflags\", \"cs\", \"fs\", \"gs\", } def _t1(): print(\"* Halt/resume/single step\")", "be disabled assert vm.read_register(\"rflags\") == orig_values[\"rflags\"] del new_values[\"rflags\"] assert vm.read_registers(regs - {\"rflags\"}) ==", "them at least for the current session # we disable soft breakpoints before", "0x1234 assert vm.read_register(\"dr7\") == 0b00000000001100010000000000001010 vm.set_hard_breakpoint(\"rw\", 0x2, 0x1234) assert vm.read_register(\"dr2\") == 0x1234 assert", "VMR3AddSoftBreakpoint()) vm.unset_all_breakpoints() vm.single_step() assert not vm.is_breakpoint_hit() vm.interrupt_and_take_snapshot() vm.single_step() vm.single_step() rip = vm.read_register(\"rip\") vm.interrupt_and_restore_last_snapshot()", "fdp-attach\") debugger.HandleCommand(\"command alias fs fdp-save\") debugger.HandleCommand(\"command alias fr fdp-restore\") debugger.HandleCommand(\"command alias fi fdp-interrupt\")", "kdpsv_port)) # trigger a memory write to find out the address of the", "saved state! \"\"\" regs = { \"rax\", \"rbx\", \"rcx\", \"rdx\", \"rdi\", \"rsi\", \"rbp\",", "vm.unset_all_breakpoints() vm.single_step() assert not vm.is_breakpoint_hit() vm.interrupt_and_take_snapshot() vm.single_step() vm.single_step() rip = vm.read_register(\"rip\") vm.interrupt_and_restore_last_snapshot() vm.single_step()", "\"\"\" vm.interrupt() @_attached def fdp_hbreakpoint(debugger, command, exe_ctx, result, internal_dict): \"\"\" Set or unset", "= subparsers.add_parser(\"set\") set_parser.add_argument( \"trigger\", choices={\"e\", \"rw\", \"w\"}, help=\"Type of memory access to trap", "= b\"ABCDEFGH\" vm.write_virtual_memory(vm.read_register(\"rsp\"), new_data) assert vm.read_virtual_memory(vm.read_register(\"rsp\"), 0x8) == new_data vm.write_virtual_memory(vm.read_register(\"rsp\"), data) assert vm.read_virtual_memory(vm.read_register(\"rsp\"),", "0x1234) assert vm.read_register(\"dr0\") == 0x1234 assert vm.read_register(\"dr7\") == 0b00000000000000110000000000000010 vm.set_hard_breakpoint(\"e\", 0x0, 0x1234) assert", "vm.read_register(\"dr7\") == 0b00110011001100010000000010100000 vm.unset_hard_breakpoint(0x2) assert vm.read_register(\"dr7\") == 0b00110011001100010000000010000000 vm.unset_hard_breakpoint(0x3) assert vm.read_register(\"dr7\") == 0b00110011001100010000000000000000", "# we can preserve them at least for the current session # we", "full reattach (the kernel load address may differ) fdp_attach(debugger, vm.name, exe_ctx, result, internal_dict)", "command, exe_ctx, result, internal_dict): \"\"\" Run some tests. Warning: tests change the state", "hardware breakpoints can be active simultaneously. 
\"\"\" parser = argparse.ArgumentParser(prog=\"fdp-hbreakpoint\") subparsers = parser.add_subparsers(dest=\"action\")", "_t1(): print(\"* Halt/resume/single step\") vm.halt() assert vm.is_state_halted() vm.resume() assert not vm.is_state_halted() vm.halt() for", "import functools import re import shlex import threading import time import traceback import", "Kernel cr3: 0x{:x}\".format(vm.kernel_cr3)) print(\"* Kernel version: {}\".format(vm.kernel_version)) print(\"* VM breakpoints deleted\") # detach", "that LLDB sends again the KDP requests for setting them exe_ctx.target.DisableAllBreakpoints() # similarly,", "print(\"* Restoring the last saved VM state\") if vm.interrupt_and_restore_last_snapshot(): print(\"* State restored\") #", "vm = None def _exec_cmd(debugger, command, capture_output=False): if capture_output: cmdretobj = lldb.SBCommandReturnObject() debugger.GetCommandInterpreter().HandleCommand(command,", "== 0b00000000000000010000000000000010 vm.set_hard_breakpoint(\"rw\", 0x1, 0x1234) assert vm.read_register(\"dr1\") == 0x1234 assert vm.read_register(\"dr7\") == 0b00000000001100010000000000001010", "__future__ import print_function import argparse import functools import re import shlex import threading", "DR0, DR1, DR2 and DR3. Consequently, a maximum of four hardware breakpoints can", "fdp_restore(debugger, command, exe_ctx, result, internal_dict): \"\"\" Restore the attached macOS VM to the", "memory write to find out the address of the kdp struct vm.store_kdp_at_next_write_virtual_memory() if", "'{}:{}'\".format(kdpsv_addr, kdpsv_port)) # trigger a memory write to find out the address of", "\"\"\" parser = argparse.ArgumentParser(prog=\"fdp-attach\") parser.add_argument(\"vm_name\") args = parser.parse_args(shlex.split(command)) _attach(debugger, exe_ctx, stubvm.FDPSTUB, args.vm_name) def", "assert vm.read_register(\"dr2\") == 0x1234 assert vm.read_register(\"dr7\") == 0b00000011001100010000000000101010 vm.set_hard_breakpoint(\"rw\", 0x3, 0x1234) assert vm.read_register(\"dr3\")", "def fdp_restore(debugger, command, exe_ctx, result, internal_dict): \"\"\" Restore the attached macOS VM to", "0x8) new_data = b\"ABCDEFGH\" vm.write_virtual_memory(vm.read_register(\"rsp\"), new_data) assert vm.read_virtual_memory(vm.read_register(\"rsp\"), 0x8) == new_data vm.write_virtual_memory(vm.read_register(\"rsp\"), data)", "vm.read_registers(regs) == orig_values assert vm.read_virtual_memory(vm.read_register(\"rsp\"), 0x100) == orig_data def _t5(): print(\"* Debug registers\")", "argparse.ArgumentParser(prog=\"vmsn-attach\") parser.add_argument(\"vm_name\") args = parser.parse_args(shlex.split(command)) _attach(debugger, exe_ctx, stubvm.VMSNSTUB, args.vm_name) def _attach(debugger, exe_ctx, vm_stub,", "again the KDP requests for setting them exe_ctx.target.DisableAllBreakpoints() # similarly, for hard breakpoints", "0x0, 0x1234) assert vm.read_register(\"dr0\") == 0x1234 assert vm.read_register(\"dr7\") == 0b00000000000000110000000000000010 vm.set_hard_breakpoint(\"e\", 0x0, 0x1234)", "vm.read_register(\"dr7\") == 0b00110011001100010000000010101010 vm.unset_hard_breakpoint(0x0) assert vm.read_register(\"dr7\") == 0b00110011001100010000000010101000 vm.unset_hard_breakpoint(0x1) assert vm.read_register(\"dr7\") == 0b00110011001100010000000010100000", "VM!\") return return f(*args, **kwargs) return _wrapper @_attached def fdp_save(debugger, command, exe_ctx, result,", "if not process_was_stopped: # display stop info _exec_cmd(debugger, \"process status\") @_attached def 
fdp_restore(debugger,", "command, exe_ctx, result, internal_dict): \"\"\" Interrupt (pause) the execution of the attached macOS", "Soft/hard exec breakpoint\") vm.halt() # keep in mind that FDP soft and page", "fdp-attach\") debugger.HandleCommand(\"command script add -f lldbagility.fdp_save fdp-save\") debugger.HandleCommand(\"command script add -f lldbagility.fdp_restore fdp-restore\")", "_t4, _t5, _t6): _t() print(\"* All tests passed!\") def __lldb_init_module(debugger, internal_dict): # FDP", "as such.\" ) unset_parser = subparsers.add_parser(\"unset\") unset_parser.add_argument( \"nreg\", type=lambda i: int(i, 0), choices={0,", "0x1337 for reg in regs} for reg in regs: vm.write_register(reg, new_values[reg]) # modifications", "maintained! Existing breakpoints are deleted on attaching. \"\"\" parser = argparse.ArgumentParser(prog=\"vmsn-attach\") parser.add_argument(\"vm_name\") args", "for hard breakpoints we save the state of the debug registers before saving,", "def _t3(): print(\"* Read/write virtual memory\") vm.halt() data = vm.read_virtual_memory(vm.read_register(\"rsp\"), 0x8) new_data =", "VM execution until reaching kernel code\") vm.complete_attach() print(\"* Kernel load address: 0x{:016x}\".format(vm.kernel_load_vaddr)) print(\"*", "the attached macOS VM. Breakpoints are not saved (but retained for the current", "KDP server kdpsv = kdpserver.KDPServer() th = threading.Thread(target=kdpsv.debug, args=(vm,)) th.daemon = True th.start()", "but # we can preserve them at least for the current session #", "causes all breakpoints (soft and hard) to be unset, but # we can", "been started. Existing breakpoints are deleted on attaching. Re-execute this command every time", "are not saved (but retained for the current session). \"\"\" # saving the", "int(i, 0), choices={0, 1, 2, 3}, help=\"Breakpoint slot to free (corresponding to registers", "orig_values = vm.read_registers(regs) new_values = {reg: 0x1337 for reg in regs} for reg", ") args = parser.parse_args(shlex.split(command)) if args.action == \"set\": vaddr = _evaluate_expression(exe_ctx, args.expression) if", "result, internal_dict) else: print(\"* No saved state found\") @_attached def fdp_interrupt(debugger, command, exe_ctx,", "{}\".format(str(exc))) return print(\"* Resuming the VM execution until reaching kernel code\") vm.complete_attach() print(\"*", "print(\"* Read/write registers\") vm.halt() orig_values = vm.read_registers(regs) new_values = {reg: 0x1337 for reg", "== orig_values assert vm.read_virtual_memory(vm.read_register(\"rsp\"), 0x100) == orig_data def _t5(): print(\"* Debug registers\") vm.halt()", "time the VM is rebooted. \"\"\" parser = argparse.ArgumentParser(prog=\"fdp-attach\") parser.add_argument(\"vm_name\") args = parser.parse_args(shlex.split(command))", "the 'kdp' symbol. Did you specify the target to debug?\") vm.abort_store_kdp_at_next_write_virtual_memory() def _attached(f):", "result, internal_dict): \"\"\" Set or unset hardware breakpoints. Hardware breakpoints are implemented using", "to trap on: execute, read/write, or write only.\", ) set_parser.add_argument( \"nreg\", type=lambda i:", "unset\") else: raise AssertionError @_attached def fdp_test(debugger, command, exe_ctx, result, internal_dict): \"\"\" Run", "state\") if vm.interrupt_and_restore_last_snapshot(): print(\"* State restored\") # do a full reattach (the kernel", "of the machine and modify the last saved state! 
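
    # For reference when reading _t5 below (my gloss, per the Intel SDM vol. 3,
    # not part of the original tests): in DR7, bits 0-7 are the per-slot L/G
    # enable flags, and bit pairs 16-17, 20-21, 24-25 and 28-29 select each
    # slot's trigger (00 = execute, 01 = write, 11 = read/write); the LEN
    # fields stay 00 (1 byte) in these assertions.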
\"\"\" regs = {", "} def _t1(): print(\"* Halt/resume/single step\") vm.halt() assert vm.is_state_halted() vm.resume() assert not vm.is_state_halted()", "== new_values vm.write_registers(orig_values) for reg in regs: assert vm.read_register(reg) == orig_values[reg] def _t3():", "attach! {}\".format(str(exc))) return print(\"* Resuming the VM execution until reaching kernel code\") vm.complete_attach()", "for reg in regs: assert vm.read_register(reg) == orig_values[reg] def _t3(): print(\"* Read/write virtual", "vm.set_hard_breakpoint(\"rw\", 0x3, 0x1234) assert vm.read_register(\"dr3\") == 0x1234 assert vm.read_register(\"dr7\") == 0b00110011001100010000000010101010 vm.unset_hard_breakpoint(0x0) assert", "fdp-hbreakpoint\") # VMSN debugger.HandleCommand(\"command script add -f lldbagility.vmsn_attach vmsn-attach\") debugger.HandleCommand(\"command alias va vmsn-attach\")", "add -f lldbagility.fdp_save fdp-save\") debugger.HandleCommand(\"command script add -f lldbagility.fdp_restore fdp-restore\") debugger.HandleCommand( \"command script", "Resuming the VM execution until reaching kernel code\") vm.complete_attach() print(\"* Kernel load address:", "fdp_hbreakpoint(debugger, command, exe_ctx, result, internal_dict): \"\"\" Set or unset hardware breakpoints. Hardware breakpoints", "print(\"* Invalid expression\") elif args.action == \"unset\": vm.unset_hard_breakpoint(args.nreg) print(\"* Hardware breakpoint unset\") else:", "registers before saving, # and restore it afterwards dbgregs = vm.read_registers((\"dr0\", \"dr1\", \"dr2\",", "assert vm.read_register(\"dr7\") == 0b00110011001100010000000010101000 vm.unset_hard_breakpoint(0x1) assert vm.read_register(\"dr7\") == 0b00110011001100010000000010100000 vm.unset_hard_breakpoint(0x2) assert vm.read_register(\"dr7\") ==", "orig_data = vm.read_virtual_memory(vm.read_register(\"rsp\"), 0x100) vm.interrupt_and_take_snapshot() assert vm.is_state_halted() vm.write_virtual_memory(vm.read_register(\"rsp\"), b\"A\" * 0x100) vm.single_step() vm.resume()", "process_was_stopped: # display stop info _exec_cmd(debugger, \"process status\") @_attached def fdp_restore(debugger, command, exe_ctx,", "0x1234 assert vm.read_register(\"dr7\") == 0b00110011001100010000000010101010 vm.unset_hard_breakpoint(0x0) assert vm.read_register(\"dr7\") == 0b00110011001100010000000010101000 vm.unset_hard_breakpoint(0x1) assert vm.read_register(\"dr7\")", "exe_ctx, vm_stub, vm_name): global vm print(lldbagilityutils.LLDBAGILITY) print(\"* Attaching to the VM\") try: vm", "choices={0, 1, 2, 3}, help=\"Breakpoint slot to free (corresponding to registers DR0, DR1,", "execute, read/write, or write only.\", ) set_parser.add_argument( \"nreg\", type=lambda i: int(i, 0), choices={0,", "data = vm.read_virtual_memory(vm.read_register(\"rsp\"), 0x8) new_data = b\"ABCDEFGH\" vm.write_virtual_memory(vm.read_register(\"rsp\"), new_data) assert vm.read_virtual_memory(vm.read_register(\"rsp\"), 0x8) ==", "= {reg: 0x1337 for reg in regs} for reg in regs: vm.write_register(reg, new_values[reg])", "vm.read_virtual_memory(vm.read_register(\"rsp\"), 0x8) new_data = b\"ABCDEFGH\" vm.write_virtual_memory(vm.read_register(\"rsp\"), new_data) assert vm.read_virtual_memory(vm.read_register(\"rsp\"), 0x8) == new_data vm.write_virtual_memory(vm.read_register(\"rsp\"),", "fs fdp-save\") debugger.HandleCommand(\"command alias fr fdp-restore\") debugger.HandleCommand(\"command alias fi fdp-interrupt\") debugger.HandleCommand(\"command alias fh", "using the debug registers DR0, DR1, DR2 and DR3. 
Consequently, a maximum of", "None else: return vaddr def fdp_attach(debugger, command, exe_ctx, result, internal_dict): \"\"\" Connect to", "== 0b00110011001100010000000010000000 vm.unset_hard_breakpoint(0x3) assert vm.read_register(\"dr7\") == 0b00110011001100010000000000000000 def _t6(): print(\"* Soft/hard exec breakpoint\")", "attaching. \"\"\" parser = argparse.ArgumentParser(prog=\"vmsn-attach\") parser.add_argument(\"vm_name\") args = parser.parse_args(shlex.split(command)) _attach(debugger, exe_ctx, stubvm.VMSNSTUB, args.vm_name)", "return return f(*args, **kwargs) return _wrapper @_attached def fdp_save(debugger, command, exe_ctx, result, internal_dict):", "args = parser.parse_args(shlex.split(command)) if args.action == \"set\": vaddr = _evaluate_expression(exe_ctx, args.expression) if vaddr:", "reaching kernel code\") vm.complete_attach() print(\"* Kernel load address: 0x{:016x}\".format(vm.kernel_load_vaddr)) print(\"* Kernel slide: 0x{:x}\".format(vm.kernel_slide))", "_wrapper(*args, **kwargs): global vm if not vm: print(\"* Not attached to a VM!\")", "args.nreg, vaddr) print(\"* Hardware breakpoint set: address = 0x{:016x}\".format(vaddr)) else: print(\"* Invalid expression\")", "vm.read_register(\"dr7\") == 0b00000000001100010000000000001010 vm.set_hard_breakpoint(\"rw\", 0x2, 0x1234) assert vm.read_register(\"dr2\") == 0x1234 assert vm.read_register(\"dr7\") ==", "vm.resume() time.sleep(0.100) vm.halt() assert vm.is_breakpoint_hit() vm.interrupt_and_restore_last_snapshot() vm.single_step() vm.set_hard_breakpoint(\"e\", 0x0, rip) assert not vm.is_breakpoint_hit()", "cmdretobj = lldb.SBCommandReturnObject() debugger.GetCommandInterpreter().HandleCommand(command, cmdretobj) return cmdretobj else: debugger.HandleCommand(command) return None def _evaluate_expression(exe_ctx,", "b\"ABCDEFGH\" vm.write_virtual_memory(vm.read_register(\"rsp\"), new_data) assert vm.read_virtual_memory(vm.read_register(\"rsp\"), 0x8) == new_data vm.write_virtual_memory(vm.read_register(\"rsp\"), data) assert vm.read_virtual_memory(vm.read_register(\"rsp\"), 0x8)", "the debug registers before saving, # and restore it afterwards dbgregs = vm.read_registers((\"dr0\",", "import lldb import lldbagilityutils import stubvm vm = None def _exec_cmd(debugger, command, capture_output=False):", "print(\"* LLDB breakpoints deleted\") # start the fake KDP server kdpsv = kdpserver.KDPServer()", "on restoring. \"\"\" # interrupt and restore the VM state print(\"* Restoring the", "stubvm.FDPSTUB, args.vm_name) def vmsn_attach(debugger, command, exe_ctx, result, internal_dict): \"\"\" Connect to a macOS", "if capture_output: cmdretobj = lldb.SBCommandReturnObject() debugger.GetCommandInterpreter().HandleCommand(command, cmdretobj) return cmdretobj else: debugger.HandleCommand(command) return None", "use (corresponding to registers ).\", ) set_parser.add_argument( \"expression\", help=\"Breakpoint address or expression to", "vm.read_register(\"dr7\") == 0b00000000000000110000000000000010 vm.set_hard_breakpoint(\"e\", 0x0, 0x1234) assert vm.read_register(\"dr7\") == 0b00000000000000000000000000000010 vm.set_hard_breakpoint(\"w\", 0x0, 0x1234)", "Existing breakpoints are deleted on attaching. 
Re-execute this command every time the VM", "State restored\") # do a full reattach (the kernel load address may differ)", "a restore # (see VMR3AddSoftBreakpoint()) vm.unset_all_breakpoints() vm.single_step() assert not vm.is_breakpoint_hit() vm.interrupt_and_take_snapshot() vm.single_step() vm.single_step()", "args = parser.parse_args(shlex.split(command)) _attach(debugger, exe_ctx, stubvm.FDPSTUB, args.vm_name) def vmsn_attach(debugger, command, exe_ctx, result, internal_dict):", "args.vm_name) def _attach(debugger, exe_ctx, vm_stub, vm_name): global vm print(lldbagilityutils.LLDBAGILITY) print(\"* Attaching to the", "before saving, # and restore it afterwards dbgregs = vm.read_registers((\"dr0\", \"dr1\", \"dr2\", \"dr3\",", "state causes all breakpoints (soft and hard) to be unset, but # we", "implemented using the debug registers DR0, DR1, DR2 and DR3. Consequently, a maximum", "orig_values[reg] def _t3(): print(\"* Read/write virtual memory\") vm.halt() data = vm.read_virtual_memory(vm.read_register(\"rsp\"), 0x8) new_data", "current session # we disable soft breakpoints before saving and then re-enable them", "vm.read_registers(regs) new_values = {reg: 0x1337 for reg in regs} for reg in regs:", "if vaddr: vm.set_hard_breakpoint(args.trigger, args.nreg, vaddr) print(\"* Hardware breakpoint set: address = 0x{:016x}\".format(vaddr)) else:", "new_data) assert vm.read_virtual_memory(vm.read_register(\"rsp\"), 0x8) == new_data vm.write_virtual_memory(vm.read_register(\"rsp\"), data) assert vm.read_virtual_memory(vm.read_register(\"rsp\"), 0x8) == data", "vm.write_register(\"dr7\", 0x0) vm.set_hard_breakpoint(\"rw\", 0x0, 0x1234) assert vm.read_register(\"dr0\") == 0x1234 assert vm.read_register(\"dr7\") == 0b00000000000000110000000000000010", "vm.read_register(\"dr7\") == 0b00000011001100010000000000101010 vm.set_hard_breakpoint(\"rw\", 0x3, 0x1234) assert vm.read_register(\"dr3\") == 0x1234 assert vm.read_register(\"dr7\") ==", "not vm.is_breakpoint_hit() vm.resume() time.sleep(0.100) vm.halt() assert vm.is_breakpoint_hit() vm.interrupt_and_restore_last_snapshot() vm.single_step() vm.set_hard_breakpoint(\"e\", 0x0, rip) assert", "# similarly, for hard breakpoints we save the state of the debug registers", "== 0x1234 assert vm.read_register(\"dr7\") == 0b00000000000000110000000000000010 vm.set_hard_breakpoint(\"e\", 0x0, 0x1234) assert vm.read_register(\"dr7\") == 0b00000000000000000000000000000010", "last saved state! \"\"\" regs = { \"rax\", \"rbx\", \"rcx\", \"rdx\", \"rdi\", \"rsi\",", "(soft and hard) to be unset, but # we can preserve them at", "vm.halt() assert vm.is_state_halted() vm.resume() assert not vm.is_state_halted() vm.halt() for _ in range(100): vm.single_step()", "exe_ctx, stubvm.VMSNSTUB, args.vm_name) def _attach(debugger, exe_ctx, vm_stub, vm_name): global vm print(lldbagilityutils.LLDBAGILITY) print(\"* Attaching", "are deleted on attaching. 
Re-execute this command every time the VM is rebooted.", "read/write, or write only.\", ) set_parser.add_argument( \"nreg\", type=lambda i: int(i, 0), choices={0, 1,", "(corresponding to registers DR0, DR1, DR2 and DR3).\", ) args = parser.parse_args(shlex.split(command)) if", "vm.write_virtual_memory(vm.read_register(\"rsp\"), b\"A\" * 0x100) vm.single_step() vm.resume() time.sleep(0.100) vm.interrupt_and_restore_last_snapshot() assert vm.is_state_halted() assert not vm.is_breakpoint_hit()", "attached to a VM!\") return return f(*args, **kwargs) return _wrapper @_attached def fdp_save(debugger,", "set_parser = subparsers.add_parser(\"set\") set_parser.add_argument( \"trigger\", choices={\"e\", \"rw\", \"w\"}, help=\"Type of memory access to", "# detach the previous process (if any) exe_ctx.process.Detach() # remove all LLDB breakpoints", "set_parser.add_argument( \"nreg\", type=lambda i: int(i, 0), choices={0, 1, 2, 3}, help=\"Breakpoint slot to", "session # we disable soft breakpoints before saving and then re-enable them once", "1, 2, 3}, help=\"Breakpoint slot to free (corresponding to registers DR0, DR1, DR2", "status\") @_attached def fdp_restore(debugger, command, exe_ctx, result, internal_dict): \"\"\" Restore the attached macOS", "Not attached to a VM!\") return return f(*args, **kwargs) return _wrapper @_attached def", "vm_name): global vm print(lldbagilityutils.LLDBAGILITY) print(\"* Attaching to the VM\") try: vm = stubvm.STUBVM(vm_stub,", "Currently not maintained! Existing breakpoints are deleted on attaching. \"\"\" parser = argparse.ArgumentParser(prog=\"vmsn-attach\")", "\"cs\", \"fs\", \"gs\", } def _t1(): print(\"* Halt/resume/single step\") vm.halt() assert vm.is_state_halted() vm.resume()", "are deleted on attaching. \"\"\" parser = argparse.ArgumentParser(prog=\"vmsn-attach\") parser.add_argument(\"vm_name\") args = parser.parse_args(shlex.split(command)) _attach(debugger,", "Re-execute this command every time the VM is rebooted. 
\"\"\" parser = argparse.ArgumentParser(prog=\"fdp-attach\")", "save the VM state process_was_stopped = exe_ctx.process.is_stopped print(\"* Saving the VM state\") vm.interrupt_and_take_snapshot()", "script add -f lldbagility.fdp_interrupt fdp-interrupt\" ) debugger.HandleCommand( \"command script add -f lldbagility.fdp_hbreakpoint fdp-hbreakpoint\"", "assert not vm.is_breakpoint_hit() vm.resume() time.sleep(0.100) vm.halt() assert vm.is_breakpoint_hit() if exe_ctx.process.is_running: vm.interrupt() vm.unset_all_breakpoints() for", "internal_dict): # FDP debugger.HandleCommand(\"command script add -f lldbagility.fdp_attach fdp-attach\") debugger.HandleCommand(\"command script add -f", "print(\"* Save/restore\") vm.halt() orig_values = vm.read_registers(regs) orig_data = vm.read_virtual_memory(vm.read_register(\"rsp\"), 0x100) vm.interrupt_and_take_snapshot() assert vm.is_state_halted()", "parser.parse_args(shlex.split(command)) if args.action == \"set\": vaddr = _evaluate_expression(exe_ctx, args.expression) if vaddr: vm.set_hard_breakpoint(args.trigger, args.nreg,", "vm.set_hard_breakpoint(args.trigger, args.nreg, vaddr) print(\"* Hardware breakpoint set: address = 0x{:016x}\".format(vaddr)) else: print(\"* Invalid", "do not work just after a restore # (see VMR3AddSoftBreakpoint()) vm.unset_all_breakpoints() vm.single_step() assert", "\"rflags\", \"cs\", \"fs\", \"gs\", } def _t1(): print(\"* Halt/resume/single step\") vm.halt() assert vm.is_state_halted()", "tests change the state of the machine and modify the last saved state!", "Save the current state of the attached macOS VM. Breakpoints are not saved", "fake KDP server kdpsv = kdpserver.KDPServer() th = threading.Thread(target=kdpsv.debug, args=(vm,)) th.daemon = True", "exec breakpoint\") vm.halt() # keep in mind that FDP soft and page breakpoints", "vm.write_virtual_memory(vm.read_register(\"rsp\"), data) assert vm.read_virtual_memory(vm.read_register(\"rsp\"), 0x8) == data def _t4(): print(\"* Save/restore\") vm.halt() orig_values", "0x8) == data def _t4(): print(\"* Save/restore\") vm.halt() orig_values = vm.read_registers(regs) orig_data =", "vm.halt() orig_values = vm.read_registers(regs) new_values = {reg: 0x1337 for reg in regs} for", "def _t1(): print(\"* Halt/resume/single step\") vm.halt() assert vm.is_state_halted() vm.resume() assert not vm.is_state_halted() vm.halt()", "subparsers.add_parser(\"set\") set_parser.add_argument( \"trigger\", choices={\"e\", \"rw\", \"w\"}, help=\"Type of memory access to trap on:", "to a macOS VM via FDP. 
The VM must have already been started.", "print(\"* Kernel load address: 0x{:016x}\".format(vm.kernel_load_vaddr)) print(\"* Kernel slide: 0x{:x}\".format(vm.kernel_slide)) print(\"* Kernel cr3: 0x{:x}\".format(vm.kernel_cr3))", "\"\"\" parser = argparse.ArgumentParser(prog=\"fdp-hbreakpoint\") subparsers = parser.add_subparsers(dest=\"action\") set_parser = subparsers.add_parser(\"set\") set_parser.add_argument( \"trigger\", choices={\"e\",", "capture_output: cmdretobj = lldb.SBCommandReturnObject() debugger.GetCommandInterpreter().HandleCommand(command, cmdretobj) return cmdretobj else: debugger.HandleCommand(command) return None def", "2, 3}, help=\"Breakpoint slot to use (corresponding to registers ).\", ) set_parser.add_argument( \"expression\",", "threading.Thread(target=kdpsv.debug, args=(vm,)) th.daemon = True th.start() # connect LLDB to the fake KDP", "Hardware breakpoint set: address = 0x{:016x}\".format(vaddr)) else: print(\"* Invalid expression\") elif args.action ==", "= parser.parse_args(shlex.split(command)) _attach(debugger, exe_ctx, stubvm.VMSNSTUB, args.vm_name) def _attach(debugger, exe_ctx, vm_stub, vm_name): global vm", "try: vm = stubvm.STUBVM(vm_stub, vm_name) except Exception as exc: print(\"* Could not attach!", "vm.resume() time.sleep(0.100) vm.halt() assert vm.is_breakpoint_hit() if exe_ctx.process.is_running: vm.interrupt() vm.unset_all_breakpoints() for _t in (_t1,", "active simultaneously. \"\"\" parser = argparse.ArgumentParser(prog=\"fdp-hbreakpoint\") subparsers = parser.add_subparsers(dest=\"action\") set_parser = subparsers.add_parser(\"set\") set_parser.add_argument(", "in regs} for reg in regs: vm.write_register(reg, new_values[reg]) # modifications to RFLAGS should", "print(\"* Halt/resume/single step\") vm.halt() assert vm.is_state_halted() vm.resume() assert not vm.is_state_halted() vm.halt() for _", "vm.is_breakpoint_hit() assert vm.read_registers(regs) == orig_values assert vm.read_virtual_memory(vm.read_register(\"rsp\"), 0x100) == orig_data def _t5(): print(\"*", "VM is rebooted. \"\"\" parser = argparse.ArgumentParser(prog=\"fdp-attach\") parser.add_argument(\"vm_name\") args = parser.parse_args(shlex.split(command)) _attach(debugger, exe_ctx,", "current session). \"\"\" # saving the state causes all breakpoints (soft and hard)", "restore soft breakpoints exe_ctx.target.EnableAllBreakpoints() # restore hard breakpoints vm.write_registers(dbgregs) if not process_was_stopped: #", "@_attached def fdp_restore(debugger, command, exe_ctx, result, internal_dict): \"\"\" Restore the attached macOS VM", "requests for setting them exe_ctx.target.DisableAllBreakpoints() # similarly, for hard breakpoints we save the", "vm.is_breakpoint_hit() vm.interrupt_and_take_snapshot() vm.single_step() vm.single_step() rip = vm.read_register(\"rip\") vm.interrupt_and_restore_last_snapshot() vm.single_step() bpid = vm.set_soft_exec_breakpoint(rip) assert", "fdp-save\") debugger.HandleCommand(\"command alias fr fdp-restore\") debugger.HandleCommand(\"command alias fi fdp-interrupt\") debugger.HandleCommand(\"command alias fh fdp-hbreakpoint\")", "unset, but # we can preserve them at least for the current session", "attached macOS VM to the last saved state. 
Breakpoints are deleted on restoring.", "registers\") vm.halt() vm.write_register(\"dr7\", 0x0) vm.set_hard_breakpoint(\"rw\", 0x0, 0x1234) assert vm.read_register(\"dr0\") == 0x1234 assert vm.read_register(\"dr7\")", "state # has been saved, so that LLDB sends again the KDP requests", "execution until reaching kernel code\") vm.complete_attach() print(\"* Kernel load address: 0x{:016x}\".format(vm.kernel_load_vaddr)) print(\"* Kernel", "\"\"\" Restore the attached macOS VM to the last saved state. Breakpoints are", "vm.read_register(\"dr7\") == 0b00000000000000010000000000000010 vm.set_hard_breakpoint(\"rw\", 0x1, 0x1234) assert vm.read_register(\"dr1\") == 0x1234 assert vm.read_register(\"dr7\") ==", "vm.store_kdp_at_next_write_virtual_memory() if _exec_cmd(debugger, \"memory write &kdp 41\", capture_output=True).GetError(): print(\"* Unable to find the", "fa fdp-attach\") debugger.HandleCommand(\"command alias fs fdp-save\") debugger.HandleCommand(\"command alias fr fdp-restore\") debugger.HandleCommand(\"command alias fi", "vm.unset_hard_breakpoint(args.nreg) print(\"* Hardware breakpoint unset\") else: raise AssertionError @_attached def fdp_test(debugger, command, exe_ctx,", "write only.\", ) set_parser.add_argument( \"nreg\", type=lambda i: int(i, 0), choices={0, 1, 2, 3},", "may differ) fdp_attach(debugger, vm.name, exe_ctx, result, internal_dict) else: print(\"* No saved state found\")", "0x1234) assert vm.read_register(\"dr7\") == 0b00000000000000010000000000000010 vm.set_hard_breakpoint(\"rw\", 0x1, 0x1234) assert vm.read_register(\"dr1\") == 0x1234 assert", "not work just after a restore # (see VMR3AddSoftBreakpoint()) vm.unset_all_breakpoints() vm.single_step() assert not", "out the address of the kdp struct vm.store_kdp_at_next_write_virtual_memory() if _exec_cmd(debugger, \"memory write &kdp", "script add -f lldbagility.fdp_attach fdp-attach\") debugger.HandleCommand(\"command script add -f lldbagility.fdp_save fdp-save\") debugger.HandleCommand(\"command script", "print(\"* Attaching to the VM\") try: vm = stubvm.STUBVM(vm_stub, vm_name) except Exception as", "RFLAGS should be disabled assert vm.read_register(\"rflags\") == orig_values[\"rflags\"] del new_values[\"rflags\"] assert vm.read_registers(regs -", "0b00000000000000010000000000000010 vm.set_hard_breakpoint(\"rw\", 0x1, 0x1234) assert vm.read_register(\"dr1\") == 0x1234 assert vm.read_register(\"dr7\") == 0b00000000001100010000000000001010 vm.set_hard_breakpoint(\"rw\",", "rip) assert not vm.is_breakpoint_hit() vm.resume() time.sleep(0.100) vm.halt() assert vm.is_breakpoint_hit() if exe_ctx.process.is_running: vm.interrupt() vm.unset_all_breakpoints()", "trigger a memory write to find out the address of the kdp struct", "and save the VM state process_was_stopped = exe_ctx.process.is_stopped print(\"* Saving the VM state\")", "# has been saved, so that LLDB sends again the KDP requests for", "{ \"rax\", \"rbx\", \"rcx\", \"rdx\", \"rdi\", \"rsi\", \"rbp\", \"rsp\", \"r8\", \"r9\", \"r10\", \"r11\",", "0), choices={0, 1, 2, 3}, help=\"Breakpoint slot to use (corresponding to registers ).\",", "hard) to be unset, but # we can preserve them at least for", "0x8) == new_data vm.write_virtual_memory(vm.read_register(\"rsp\"), data) assert vm.read_virtual_memory(vm.read_register(\"rsp\"), 0x8) == data def _t4(): print(\"*", "fdp-restore\") debugger.HandleCommand(\"command alias fi fdp-interrupt\") debugger.HandleCommand(\"command alias fh fdp-hbreakpoint\") # VMSN debugger.HandleCommand(\"command script", "retained for the 
current session). \"\"\" # saving the state causes all breakpoints", "breakpoints exe_ctx.target.EnableAllBreakpoints() # restore hard breakpoints vm.write_registers(dbgregs) if not process_was_stopped: # display stop", "should be disabled assert vm.read_register(\"rflags\") == orig_values[\"rflags\"] del new_values[\"rflags\"] assert vm.read_registers(regs - {\"rflags\"})", "# interrupt and save the VM state process_was_stopped = exe_ctx.process.is_stopped print(\"* Saving the", "vm.name, exe_ctx, result, internal_dict) else: print(\"* No saved state found\") @_attached def fdp_interrupt(debugger,", "kdpsv_port = kdpsv.sv_sock.getsockname() _exec_cmd(debugger, \"kdp-remote '{}:{}'\".format(kdpsv_addr, kdpsv_port)) # trigger a memory write to", "vm.is_breakpoint_hit() vm.interrupt_and_restore_last_snapshot() vm.single_step() vm.set_hard_breakpoint(\"e\", 0x0, rip) assert not vm.is_breakpoint_hit() vm.resume() time.sleep(0.100) vm.halt() assert", "Could not attach! {}\".format(str(exc))) return print(\"* Resuming the VM execution until reaching kernel", "breakpoints deleted\") # detach the previous process (if any) exe_ctx.process.Detach() # remove all", "import threading import time import traceback import kdpserver import lldb import lldbagilityutils import", "specify the target to debug?\") vm.abort_store_kdp_at_next_write_virtual_memory() def _attached(f): @functools.wraps(f) def _wrapper(*args, **kwargs): global", "similarly, for hard breakpoints we save the state of the debug registers before", "\"dr2\", \"dr3\", \"dr6\", \"dr7\")) # interrupt and save the VM state process_was_stopped =", "Consequently, a maximum of four hardware breakpoints can be active simultaneously. \"\"\" parser", "= kdpserver.KDPServer() th = threading.Thread(target=kdpsv.debug, args=(vm,)) th.daemon = True th.start() # connect LLDB", "capture_output=False): if capture_output: cmdretobj = lldb.SBCommandReturnObject() debugger.GetCommandInterpreter().HandleCommand(command, cmdretobj) return cmdretobj else: debugger.HandleCommand(command) return", "= vm.read_registers(regs) new_values = {reg: 0x1337 for reg in regs} for reg in", "# keep in mind that FDP soft and page breakpoints do not work", "if _exec_cmd(debugger, \"memory write &kdp 41\", capture_output=True).GetError(): print(\"* Unable to find the 'kdp'", "**kwargs) return _wrapper @_attached def fdp_save(debugger, command, exe_ctx, result, internal_dict): \"\"\" Save the", "orig_values = vm.read_registers(regs) orig_data = vm.read_virtual_memory(vm.read_register(\"rsp\"), 0x100) vm.interrupt_and_take_snapshot() assert vm.is_state_halted() vm.write_virtual_memory(vm.read_register(\"rsp\"), b\"A\" *", "vm.unset_hard_breakpoint(0x0) assert vm.read_register(\"dr7\") == 0b00110011001100010000000010101000 vm.unset_hard_breakpoint(0x1) assert vm.read_register(\"dr7\") == 0b00110011001100010000000010100000 vm.unset_hard_breakpoint(0x2) assert vm.read_register(\"dr7\")", "0b00000000000000110000000000000010 vm.set_hard_breakpoint(\"e\", 0x0, 0x1234) assert vm.read_register(\"dr7\") == 0b00000000000000000000000000000010 vm.set_hard_breakpoint(\"w\", 0x0, 0x1234) assert vm.read_register(\"dr7\")", "cmdretobj else: debugger.HandleCommand(command) return None def _evaluate_expression(exe_ctx, expression): res = exe_ctx.frame.EvaluateExpression(expression) try: vaddr", "vm.interrupt_and_restore_last_snapshot() vm.single_step() bpid = vm.set_soft_exec_breakpoint(rip) assert 0 <= bpid <= 254 assert not", "assert vm.is_state_halted() def _t2(): print(\"* Read/write 
registers\") vm.halt() orig_values = vm.read_registers(regs) new_values =", "# display stop info _exec_cmd(debugger, \"process status\") @_attached def fdp_restore(debugger, command, exe_ctx, result,", "execution of the attached macOS VM. \"\"\" vm.interrupt() @_attached def fdp_hbreakpoint(debugger, command, exe_ctx,", "already been started. Existing breakpoints are deleted on attaching. Re-execute this command every", "assert vm.read_register(\"dr7\") == 0b00000000000000110000000000000010 vm.set_hard_breakpoint(\"e\", 0x0, 0x1234) assert vm.read_register(\"dr7\") == 0b00000000000000000000000000000010 vm.set_hard_breakpoint(\"w\", 0x0,", "0b00000000000000000000000000000010 vm.set_hard_breakpoint(\"w\", 0x0, 0x1234) assert vm.read_register(\"dr7\") == 0b00000000000000010000000000000010 vm.set_hard_breakpoint(\"rw\", 0x1, 0x1234) assert vm.read_register(\"dr1\")", "exc: print(\"* Could not attach! {}\".format(str(exc))) return print(\"* Resuming the VM execution until", "vm.write_register(reg, new_values[reg]) # modifications to RFLAGS should be disabled assert vm.read_register(\"rflags\") == orig_values[\"rflags\"]", "args.expression) if vaddr: vm.set_hard_breakpoint(args.trigger, args.nreg, vaddr) print(\"* Hardware breakpoint set: address = 0x{:016x}\".format(vaddr))", "script add -f lldbagility.fdp_restore fdp-restore\") debugger.HandleCommand( \"command script add -f lldbagility.fdp_interrupt fdp-interrupt\" )" ]
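# Usage sketch (not part of the original file): fdp-hbreakpoint parses its own
# argument string, so its grammar can be sanity-checked outside LLDB by rebuilding
# the same argparse parser; the command string below is illustrative only.
import argparse
import shlex

_parser = argparse.ArgumentParser(prog="fdp-hbreakpoint")
_subparsers = _parser.add_subparsers(dest="action")
_set = _subparsers.add_parser("set")
_set.add_argument("trigger", choices={"e", "rw", "w"})
_set.add_argument("nreg", type=lambda i: int(i, 0), choices={0, 1, 2, 3})
_set.add_argument("expression")

_args = _parser.parse_args(shlex.split("set rw 2 0xffffff80deadbeef"))
print(_args.action, _args.trigger, _args.nreg, _args.expression)
# -> set rw 2 0xffffff80deadbeef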
from ..utils import Object


class WriteGeneratedFilePart(Object):
    """
    Writes a part of a generated file.
    This method is intended to be used only if the client has no direct access to
    TDLib's file system, because it is usually slower than a direct write to the
    destination file

    Attributes:
        ID (:obj:`str`): ``WriteGeneratedFilePart``

    Args:
        generation_id (:obj:`int`):
            The identifier of the generation process
        offset (:obj:`int`):
            The offset from which to write the data to the file
        data (:obj:`bytes`):
            The data to write

    Returns:
        Ok

    Raises:
        :class:`telegram.Error`
    """
    ID = "writeGeneratedFilePart"

    def __init__(self, generation_id, offset, data, extra=None, **kwargs):
        self.extra = extra
        self.generation_id = generation_id  # int
        self.offset = offset  # int
        self.data = data  # bytes

    @staticmethod
    def read(q: dict, *args) -> "WriteGeneratedFilePart":
        generation_id = q.get('generation_id')
        offset = q.get('offset')
        data = q.get('data')
        return WriteGeneratedFilePart(generation_id, offset, data)
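# Usage sketch (not part of the original file; the values are hypothetical and
# the class above must be importable): the object round-trips through the dict
# form consumed by read().
q = {"generation_id": 123, "offset": 0, "data": b"\x00" * 16}
part = WriteGeneratedFilePart.read(q)
assert (part.generation_id, part.offset, part.data) == (123, 0, b"\x00" * 16)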
<filename>Month 03/Week 04/Day 03/c.py
# Merge k Sorted Lists: https://leetcode.com/problems/merge-k-sorted-lists/
# You are given an array of k linked-lists lists, each linked-list is sorted in ascending order.
# Merge all the linked-lists into one sorted linked-list and return it.
from typing import List, Optional  # fixed: was "from types import ...", which has no List/Optional


# Definition for singly-linked list.
class ListNode:
    def __init__(self, val=0, next=None):
        self.val = val
        self.next = next


# This problem is actually just a merge sort, no need to explain really.
# There are two solutions: one with a for loop that creates a new list of ll,
# and one that simply merges into the current list, which saves us O(N) space
# to become O(1).
class Solution:
    def mergeKLists(self, lists: List[Optional[ListNode]]) -> Optional[ListNode]:
        if lists is None or len(lists) == 0:
            return None

        interval = 1
        total = len(lists)
        while interval < total:
            for i in range(0, total - interval, interval * 2):
                lists[i] = self.merge2(lists[i], lists[i + interval])
            interval *= 2

        return lists[0] if total > 0 else []

    def merge2(self, l1, l2):
        dummy = ListNode()
        cur = dummy
        while l1 and l2:
            if l1.val < l2.val:
                cur.next = l1
                l1 = l1.next
            else:
                cur.next = l2
                l2 = l2.next
            cur = cur.next
        if l1:
            cur.next = l1
        else:
            cur.next = l2
        return dummy.next


# This works, although I forgot that my range needs to decrease the number of nodes to review
# by the number of merged (going up 2 at a time).
# This runs in O(n log k) where k is the number of ll, and uses O(1) additional space.

# Score Card
# Did I need hints? Nope
# Did you finish within 30 min? 12
# Was the solution optimal? Yup
# Were there any bugs? None
# 5 5 5 5 = 5
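# Quick check (not part of the original file): build three sorted lists, merge
# them with the interval-doubling pass above, and flatten the result.
def _build(vals):
    head = None
    for v in reversed(vals):
        head = ListNode(v, head)
    return head

_merged = Solution().mergeKLists([_build([1, 4, 5]), _build([1, 3, 4]), _build([2, 6])])
_out = []
while _merged:
    _out.append(_merged.val)
    _merged = _merged.next
print(_out)  # [1, 1, 2, 3, 4, 4, 5, 6]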
<reponame>holodon/CS50
##
# Cracks up to 4 letter alphabetical passwords by bruteforce
##
import sys
import crypt
import itertools


def main():
    # check if called with exactly one argument
    if len(sys.argv) != 2:
        show_usage()
        exit(1)

    # all possible characters
    aA = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ"

    h = sys.argv[1]

    # check the hash length
    if len(h) != 13:
        show_usage()
        exit(1)

    # crack with itertools
    for i in range(1, 5):
        for c in itertools.product(aA, repeat=i):
            p = ''.join(c)
            if crypt.crypt(p, h) == h:  # p is already a str; the original double join was redundant
                print(p)
                exit(0)

    # not cracked
    print("All variations tested - none matched!")
    exit(1)


# shows usage
def show_usage():
    print("Usage: ./crack hash")


if __name__ == "__main__":
    main()
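# End-to-end sketch (not part of the original file): generate a crackable hash.
# Requires a platform where Python's crypt module provides classic DES crypt
# (e.g. Linux; crypt was removed from the stdlib in Python 3.13). The password
# "Ab" and salt "50" are arbitrary choices.
import crypt
print(crypt.crypt("Ab", "50"))  # 13-char DES hash; pass it to: python crack.py <hash>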
[ "the signal from django.dispatch import receiver from .models import Review @receiver(post_save,sender=Post) def create_review(sender,instance,created,**kwargs):", ".models import Post #reciever of the signal from django.dispatch import receiver from .models", "#signal fired after an obj is saved in this cas when a user", "post_save #post to sende the signal from .models import Post #reciever of the", "#reciever of the signal from django.dispatch import receiver from .models import Review @receiver(post_save,sender=Post)", "and performs some task instance:is the instance of Post class created : if", "performs some task instance:is the instance of Post class created : if a", "is saved in this cas when a user is created from django.db.models.signals import", "that fetches the signal and performs some task instance:is the instance of Post", "obj is saved in this cas when a user is created from django.db.models.signals", "from .models import Review @receiver(post_save,sender=Post) def create_review(sender,instance,created,**kwargs): ''' post_save:is the signal that is", "function that fetches the signal and performs some task instance:is the instance of", "of the signal receiver:is the create rating function that fetches the signal and", "is the sender of the signal receiver:is the create rating function that fetches", "''' post_save:is the signal that is fired after and object is saved Post:model", "the signal from .models import Post #reciever of the signal from django.dispatch import", "from .models import Post #reciever of the signal from django.dispatch import receiver from", "Post #reciever of the signal from django.dispatch import receiver from .models import Review", "instance:is the instance of Post class created : if a post was created", "some task instance:is the instance of Post class created : if a post", "fired after an obj is saved in this cas when a user is", "task instance:is the instance of Post class created : if a post was", "after an obj is saved in this cas when a user is created", "is created from django.db.models.signals import post_save #post to sende the signal from .models", "from django.dispatch import receiver from .models import Review @receiver(post_save,sender=Post) def create_review(sender,instance,created,**kwargs): ''' post_save:is", "Post class created : if a post was created ''' if created: Review.objects.create(post=instance)", "the signal receiver:is the create rating function that fetches the signal and performs", "fetches the signal and performs some task instance:is the instance of Post class", "cas when a user is created from django.db.models.signals import post_save #post to sende", "from django.db.models.signals import post_save #post to sende the signal from .models import Post", "signal that is fired after and object is saved Post:model is the sender", "and object is saved Post:model is the sender of the signal receiver:is the", "in this cas when a user is created from django.db.models.signals import post_save #post", "fired after and object is saved Post:model is the sender of the signal", "is saved Post:model is the sender of the signal receiver:is the create rating", "saved Post:model is the sender of the signal receiver:is the create rating function", "is fired after and object is saved Post:model is the sender of the", "@receiver(post_save,sender=Post) def create_review(sender,instance,created,**kwargs): ''' post_save:is the signal that is fired after and object", "to sende the signal from .models import Post #reciever of the signal from", 
"django.dispatch import receiver from .models import Review @receiver(post_save,sender=Post) def create_review(sender,instance,created,**kwargs): ''' post_save:is the", "import Review @receiver(post_save,sender=Post) def create_review(sender,instance,created,**kwargs): ''' post_save:is the signal that is fired after", "signal from .models import Post #reciever of the signal from django.dispatch import receiver", "saved in this cas when a user is created from django.db.models.signals import post_save", "that is fired after and object is saved Post:model is the sender of", "signal receiver:is the create rating function that fetches the signal and performs some", "object is saved Post:model is the sender of the signal receiver:is the create", "when a user is created from django.db.models.signals import post_save #post to sende the", "of Post class created : if a post was created ''' if created:", "import post_save #post to sende the signal from .models import Post #reciever of", "the sender of the signal receiver:is the create rating function that fetches the", "the instance of Post class created : if a post was created '''", "this cas when a user is created from django.db.models.signals import post_save #post to", "import receiver from .models import Review @receiver(post_save,sender=Post) def create_review(sender,instance,created,**kwargs): ''' post_save:is the signal", "user is created from django.db.models.signals import post_save #post to sende the signal from", "the signal and performs some task instance:is the instance of Post class created", "receiver:is the create rating function that fetches the signal and performs some task", "#post to sende the signal from .models import Post #reciever of the signal", "a user is created from django.db.models.signals import post_save #post to sende the signal", "Post:model is the sender of the signal receiver:is the create rating function that", "create_review(sender,instance,created,**kwargs): ''' post_save:is the signal that is fired after and object is saved", "sender of the signal receiver:is the create rating function that fetches the signal", "sende the signal from .models import Post #reciever of the signal from django.dispatch", "signal and performs some task instance:is the instance of Post class created :", "the signal that is fired after and object is saved Post:model is the", "instance of Post class created : if a post was created ''' if", ".models import Review @receiver(post_save,sender=Post) def create_review(sender,instance,created,**kwargs): ''' post_save:is the signal that is fired", "create rating function that fetches the signal and performs some task instance:is the", "receiver from .models import Review @receiver(post_save,sender=Post) def create_review(sender,instance,created,**kwargs): ''' post_save:is the signal that", "an obj is saved in this cas when a user is created from", "def create_review(sender,instance,created,**kwargs): ''' post_save:is the signal that is fired after and object is", "django.db.models.signals import post_save #post to sende the signal from .models import Post #reciever", "Review @receiver(post_save,sender=Post) def create_review(sender,instance,created,**kwargs): ''' post_save:is the signal that is fired after and", "signal from django.dispatch import receiver from .models import Review @receiver(post_save,sender=Post) def create_review(sender,instance,created,**kwargs): '''", "of the signal from django.dispatch import receiver from .models import Review @receiver(post_save,sender=Post) def", 
"post_save:is the signal that is fired after and object is saved Post:model is", "rating function that fetches the signal and performs some task instance:is the instance", "import Post #reciever of the signal from django.dispatch import receiver from .models import", "after and object is saved Post:model is the sender of the signal receiver:is", "the create rating function that fetches the signal and performs some task instance:is", "created from django.db.models.signals import post_save #post to sende the signal from .models import" ]
[ "Gaussian elimination \"\"\" A = deepcopy(A) A.dtype = np.float if isinstance(A, CoordinateSparseMatrix): A", "n): Ajk = A.get(j, k) if Ajk == 0: continue for i in", "range(k + 1, n): Aki = A.get(k, i) if Aki == 0: continue", "of shape (n, n) :return: matrix A after Gaussian elimination \"\"\" n =", "CoordinateSparseMatrix, CSRMatrix def sparse_gauss_elimination_row(A: Union[CoordinateSparseMatrix, CSRMatrix]) \\ -> Union[CoordinateSparseMatrix, CSRMatrix]: \"\"\" Performs Gaussian", "0: continue Aji, ji_index = A.get(j, i, index=True) val = (-1) * (Aki", "i in range(k + 1, n): Aki, ki_index = A.get(k, i, index=True) if", "format of shape (n, n) :return: matrix A after Gaussian elimination \"\"\" n", "Allows either coordinate format or CSR format. :param A: sparse square matrix of", "i) if Aki == 0: continue Aji, ji_index = A.get(j, i, index=True) val", "on sparse matrix A in the coordinate format row-wise. :param A: sparse square", "import numpy as np from .sparse_matrices import CoordinateSparseMatrix, CSRMatrix def sparse_gauss_elimination_row(A: Union[CoordinateSparseMatrix, CSRMatrix])", "= np.float if isinstance(A, CoordinateSparseMatrix): A = _coordinate_row(A) elif isinstance(A, CSRMatrix): pass #", "coordinate format or CSR format. :param A: sparse square matrix of shape (n,", "= A.shape[0] for k in range(n - 1): Akk = A.get(k, k) assert", "+ 1, n): Ajk = A.get(j, k) if Ajk == 0: continue for", "== 0: continue Aji, ji_index = A.get(j, i, index=True) val = (-1) *", "A: sparse square matrix of shape (n, n) :return: matrix A after Gaussian", "Union[CoordinateSparseMatrix, CSRMatrix]: \"\"\" Performs Gaussian elimination on sparse matrix A row-wise. Allows either", "# not yet implemented return A def _coordinate_row(A: CoordinateSparseMatrix) -> CoordinateSparseMatrix: \"\"\" Performs", "Aji, ji_index = A.get(j, i, index=True) val = (-1) * (Aki * Ajk)", "else: # we have to insert new non-zero value A.insert(j, i, val) return", "A.get(k, k) assert Akk != 0, \"Akk = 0\" for i in range(k", "Ajk == 0: continue for i in range(k + 1, n): Aki =", "A after Gaussian elimination \"\"\" A = deepcopy(A) A.dtype = np.float if isinstance(A,", "square matrix of shape (n, n) :return: matrix A after Gaussian elimination \"\"\"", "pass # not yet implemented return A def _coordinate_row(A: CoordinateSparseMatrix) -> CoordinateSparseMatrix: \"\"\"", "yet implemented return A def _coordinate_row(A: CoordinateSparseMatrix) -> CoordinateSparseMatrix: \"\"\" Performs Gaussian elimination", "- 1): Akk = A.get(k, k) assert Akk != 0, \"Akk = 0\"", "n) :return: matrix A after Gaussian elimination \"\"\" A = deepcopy(A) A.dtype =", "np from .sparse_matrices import CoordinateSparseMatrix, CSRMatrix def sparse_gauss_elimination_row(A: Union[CoordinateSparseMatrix, CSRMatrix]) \\ -> Union[CoordinateSparseMatrix,", "Union[CoordinateSparseMatrix, CSRMatrix]) \\ -> Union[CoordinateSparseMatrix, CSRMatrix]: \"\"\" Performs Gaussian elimination on sparse matrix", "coordinate format of shape (n, n) :return: matrix A after Gaussian elimination \"\"\"", "0: continue for i in range(k + 1, n): Aki = A.get(k, i)", "Gaussian elimination on sparse matrix A row-wise. 
Allows either coordinate format or CSR", "A after Gaussian elimination \"\"\" n = A.shape[0] for k in range(n -", "already exists A.vals[ji_index] += val else: # we have to insert new non-zero", "elif isinstance(A, CSRMatrix): pass # not yet implemented return A def _coordinate_row(A: CoordinateSparseMatrix)", "Akk for j in range(k + 1, n): Ajk = A.get(j, k) if", "Gaussian elimination \"\"\" n = A.shape[0] for k in range(n - 1): Akk", "ki_index = A.get(k, i, index=True) if ki_index != -1: A.vals[ki_index] /= Akk for", "= A.get(k, i, index=True) if ki_index != -1: A.vals[ki_index] /= Akk for j", "Performs Gaussian elimination on sparse matrix A in the coordinate format row-wise. :param", "deepcopy from typing import Union import numpy as np from .sparse_matrices import CoordinateSparseMatrix,", "isinstance(A, CSRMatrix): pass # not yet implemented return A def _coordinate_row(A: CoordinateSparseMatrix) ->", "of shape (n, n) :return: matrix A after Gaussian elimination \"\"\" A =", ":param A: sparse square matrix in the coordinate format of shape (n, n)", "shape (n, n) :return: matrix A after Gaussian elimination \"\"\" n = A.shape[0]", "if ki_index != -1: A.vals[ki_index] /= Akk for j in range(k + 1,", "= _coordinate_row(A) elif isinstance(A, CSRMatrix): pass # not yet implemented return A def", "np.float if isinstance(A, CoordinateSparseMatrix): A = _coordinate_row(A) elif isinstance(A, CSRMatrix): pass # not", "in range(k + 1, n): Aki, ki_index = A.get(k, i, index=True) if ki_index", "CSRMatrix]) \\ -> Union[CoordinateSparseMatrix, CSRMatrix]: \"\"\" Performs Gaussian elimination on sparse matrix A", "elimination \"\"\" A = deepcopy(A) A.dtype = np.float if isinstance(A, CoordinateSparseMatrix): A =", "assert Akk != 0, \"Akk = 0\" for i in range(k + 1,", "+ 1, n): Aki, ki_index = A.get(k, i, index=True) if ki_index != -1:", "CSR format. :param A: sparse square matrix of shape (n, n) :return: matrix", "sparse square matrix of shape (n, n) :return: matrix A after Gaussian elimination", "A = _coordinate_row(A) elif isinstance(A, CSRMatrix): pass # not yet implemented return A", "index=True) val = (-1) * (Aki * Ajk) if ji_index >= 0: #", "# value already exists A.vals[ji_index] += val else: # we have to insert", "n = A.shape[0] for k in range(n - 1): Akk = A.get(k, k)", "not yet implemented return A def _coordinate_row(A: CoordinateSparseMatrix) -> CoordinateSparseMatrix: \"\"\" Performs Gaussian", "= A.get(j, i, index=True) val = (-1) * (Aki * Ajk) if ji_index", "square matrix in the coordinate format of shape (n, n) :return: matrix A", "in range(k + 1, n): Aki = A.get(k, i) if Aki == 0:", "ji_index = A.get(j, i, index=True) val = (-1) * (Aki * Ajk) if", ">= 0: # value already exists A.vals[ji_index] += val else: # we have", "!= 0, \"Akk = 0\" for i in range(k + 1, n): Aki,", "A: sparse square matrix in the coordinate format of shape (n, n) :return:", "= A.get(k, k) assert Akk != 0, \"Akk = 0\" for i in", "CSRMatrix]: \"\"\" Performs Gaussian elimination on sparse matrix A row-wise. Allows either coordinate", "i, index=True) if ki_index != -1: A.vals[ki_index] /= Akk for j in range(k", "value already exists A.vals[ji_index] += val else: # we have to insert new", "\"\"\" Performs Gaussian elimination on sparse matrix A in the coordinate format row-wise.", "A in the coordinate format row-wise. :param A: sparse square matrix in the", "Aki = A.get(k, i) if Aki == 0: continue Aji, ji_index = A.get(j,", "from copy import deepcopy from typing import Union import numpy as np from", "or CSR format. 
:param A: sparse square matrix of shape (n, n) :return:", "Performs Gaussian elimination on sparse matrix A row-wise. Allows either coordinate format or", "A = deepcopy(A) A.dtype = np.float if isinstance(A, CoordinateSparseMatrix): A = _coordinate_row(A) elif", "range(k + 1, n): Aki, ki_index = A.get(k, i, index=True) if ki_index !=", "for i in range(k + 1, n): Aki, ki_index = A.get(k, i, index=True)", "A row-wise. Allows either coordinate format or CSR format. :param A: sparse square", "typing import Union import numpy as np from .sparse_matrices import CoordinateSparseMatrix, CSRMatrix def", ":return: matrix A after Gaussian elimination \"\"\" A = deepcopy(A) A.dtype = np.float", "A.shape[0] for k in range(n - 1): Akk = A.get(k, k) assert Akk", "continue Aji, ji_index = A.get(j, i, index=True) val = (-1) * (Aki *", "row-wise. Allows either coordinate format or CSR format. :param A: sparse square matrix", "shape (n, n) :return: matrix A after Gaussian elimination \"\"\" A = deepcopy(A)", "0\" for i in range(k + 1, n): Aki, ki_index = A.get(k, i,", "import deepcopy from typing import Union import numpy as np from .sparse_matrices import", "A.get(k, i, index=True) if ki_index != -1: A.vals[ki_index] /= Akk for j in", "sparse_gauss_elimination_row(A: Union[CoordinateSparseMatrix, CSRMatrix]) \\ -> Union[CoordinateSparseMatrix, CSRMatrix]: \"\"\" Performs Gaussian elimination on sparse", "n): Aki = A.get(k, i) if Aki == 0: continue Aji, ji_index =", "matrix A after Gaussian elimination \"\"\" n = A.shape[0] for k in range(n", "row-wise. :param A: sparse square matrix in the coordinate format of shape (n,", "coordinate format row-wise. :param A: sparse square matrix in the coordinate format of", "the coordinate format of shape (n, n) :return: matrix A after Gaussian elimination", "k) assert Akk != 0, \"Akk = 0\" for i in range(k +", "A.dtype = np.float if isinstance(A, CoordinateSparseMatrix): A = _coordinate_row(A) elif isinstance(A, CSRMatrix): pass", "val else: # we have to insert new non-zero value A.insert(j, i, val)", "in range(k + 1, n): Ajk = A.get(j, k) if Ajk == 0:", "* Ajk) if ji_index >= 0: # value already exists A.vals[ji_index] += val", "def _coordinate_row(A: CoordinateSparseMatrix) -> CoordinateSparseMatrix: \"\"\" Performs Gaussian elimination on sparse matrix A", "matrix A row-wise. Allows either coordinate format or CSR format. :param A: sparse", "import CoordinateSparseMatrix, CSRMatrix def sparse_gauss_elimination_row(A: Union[CoordinateSparseMatrix, CSRMatrix]) \\ -> Union[CoordinateSparseMatrix, CSRMatrix]: \"\"\" Performs", "CSRMatrix): pass # not yet implemented return A def _coordinate_row(A: CoordinateSparseMatrix) -> CoordinateSparseMatrix:", "\\ -> Union[CoordinateSparseMatrix, CSRMatrix]: \"\"\" Performs Gaussian elimination on sparse matrix A row-wise.", "-> Union[CoordinateSparseMatrix, CSRMatrix]: \"\"\" Performs Gaussian elimination on sparse matrix A row-wise. 
Allows", "CSRMatrix def sparse_gauss_elimination_row(A: Union[CoordinateSparseMatrix, CSRMatrix]) \\ -> Union[CoordinateSparseMatrix, CSRMatrix]: \"\"\" Performs Gaussian elimination", "for j in range(k + 1, n): Ajk = A.get(j, k) if Ajk", "continue for i in range(k + 1, n): Aki = A.get(k, i) if", "from typing import Union import numpy as np from .sparse_matrices import CoordinateSparseMatrix, CSRMatrix", "j in range(k + 1, n): Ajk = A.get(j, k) if Ajk ==", "index=True) if ki_index != -1: A.vals[ki_index] /= Akk for j in range(k +", "(-1) * (Aki * Ajk) if ji_index >= 0: # value already exists", "Akk = A.get(k, k) assert Akk != 0, \"Akk = 0\" for i", "n) :return: matrix A after Gaussian elimination \"\"\" n = A.shape[0] for k", "0, \"Akk = 0\" for i in range(k + 1, n): Aki, ki_index", "k) if Ajk == 0: continue for i in range(k + 1, n):", "from .sparse_matrices import CoordinateSparseMatrix, CSRMatrix def sparse_gauss_elimination_row(A: Union[CoordinateSparseMatrix, CSRMatrix]) \\ -> Union[CoordinateSparseMatrix, CSRMatrix]:", "deepcopy(A) A.dtype = np.float if isinstance(A, CoordinateSparseMatrix): A = _coordinate_row(A) elif isinstance(A, CSRMatrix):", "= 0\" for i in range(k + 1, n): Aki, ki_index = A.get(k,", "1): Akk = A.get(k, k) assert Akk != 0, \"Akk = 0\" for", "elimination \"\"\" n = A.shape[0] for k in range(n - 1): Akk =", "range(n - 1): Akk = A.get(k, k) assert Akk != 0, \"Akk =", "def sparse_gauss_elimination_row(A: Union[CoordinateSparseMatrix, CSRMatrix]) \\ -> Union[CoordinateSparseMatrix, CSRMatrix]: \"\"\" Performs Gaussian elimination on", "matrix in the coordinate format of shape (n, n) :return: matrix A after", "for k in range(n - 1): Akk = A.get(k, k) assert Akk !=", "ji_index >= 0: # value already exists A.vals[ji_index] += val else: # we", "in the coordinate format row-wise. :param A: sparse square matrix in the coordinate", "A.get(k, i) if Aki == 0: continue Aji, ji_index = A.get(j, i, index=True)", "matrix A in the coordinate format row-wise. :param A: sparse square matrix in", "on sparse matrix A row-wise. Allows either coordinate format or CSR format. :param", "CoordinateSparseMatrix): A = _coordinate_row(A) elif isinstance(A, CSRMatrix): pass # not yet implemented return", "(n, n) :return: matrix A after Gaussian elimination \"\"\" A = deepcopy(A) A.dtype", "after Gaussian elimination \"\"\" A = deepcopy(A) A.dtype = np.float if isinstance(A, CoordinateSparseMatrix):", "after Gaussian elimination \"\"\" n = A.shape[0] for k in range(n - 1):", "\"Akk = 0\" for i in range(k + 1, n): Aki, ki_index =", "A.vals[ki_index] /= Akk for j in range(k + 1, n): Ajk = A.get(j,", "1, n): Ajk = A.get(j, k) if Ajk == 0: continue for i", "+ 1, n): Aki = A.get(k, i) if Aki == 0: continue Aji,", "i, index=True) val = (-1) * (Aki * Ajk) if ji_index >= 0:", "if ji_index >= 0: # value already exists A.vals[ji_index] += val else: #", "1, n): Aki, ki_index = A.get(k, i, index=True) if ki_index != -1: A.vals[ki_index]", "A.get(j, k) if Ajk == 0: continue for i in range(k + 1,", "= (-1) * (Aki * Ajk) if ji_index >= 0: # value already", "as np from .sparse_matrices import CoordinateSparseMatrix, CSRMatrix def sparse_gauss_elimination_row(A: Union[CoordinateSparseMatrix, CSRMatrix]) \\ ->", "if Ajk == 0: continue for i in range(k + 1, n): Aki", "format. 
:param A: sparse square matrix of shape (n, n) :return: matrix A", "ki_index != -1: A.vals[ki_index] /= Akk for j in range(k + 1, n):", "/= Akk for j in range(k + 1, n): Ajk = A.get(j, k)", "in range(n - 1): Akk = A.get(k, k) assert Akk != 0, \"Akk", "A.get(j, i, index=True) val = (-1) * (Aki * Ajk) if ji_index >=", "matrix A after Gaussian elimination \"\"\" A = deepcopy(A) A.dtype = np.float if", "exists A.vals[ji_index] += val else: # we have to insert new non-zero value", "= deepcopy(A) A.dtype = np.float if isinstance(A, CoordinateSparseMatrix): A = _coordinate_row(A) elif isinstance(A,", "return A def _coordinate_row(A: CoordinateSparseMatrix) -> CoordinateSparseMatrix: \"\"\" Performs Gaussian elimination on sparse", "k in range(n - 1): Akk = A.get(k, k) assert Akk != 0,", "import Union import numpy as np from .sparse_matrices import CoordinateSparseMatrix, CSRMatrix def sparse_gauss_elimination_row(A:", "elimination on sparse matrix A row-wise. Allows either coordinate format or CSR format.", "the coordinate format row-wise. :param A: sparse square matrix in the coordinate format", "i in range(k + 1, n): Aki = A.get(k, i) if Aki ==", "\"\"\" Performs Gaussian elimination on sparse matrix A row-wise. Allows either coordinate format", "= A.get(j, k) if Ajk == 0: continue for i in range(k +", "-1: A.vals[ki_index] /= Akk for j in range(k + 1, n): Ajk =", "sparse square matrix in the coordinate format of shape (n, n) :return: matrix", "\"\"\" n = A.shape[0] for k in range(n - 1): Akk = A.get(k,", "isinstance(A, CoordinateSparseMatrix): A = _coordinate_row(A) elif isinstance(A, CSRMatrix): pass # not yet implemented", ":param A: sparse square matrix of shape (n, n) :return: matrix A after", "(n, n) :return: matrix A after Gaussian elimination \"\"\" n = A.shape[0] for", "matrix of shape (n, n) :return: matrix A after Gaussian elimination \"\"\" A", "-> CoordinateSparseMatrix: \"\"\" Performs Gaussian elimination on sparse matrix A in the coordinate", "= A.get(k, i) if Aki == 0: continue Aji, ji_index = A.get(j, i,", "# we have to insert new non-zero value A.insert(j, i, val) return A", "A def _coordinate_row(A: CoordinateSparseMatrix) -> CoordinateSparseMatrix: \"\"\" Performs Gaussian elimination on sparse matrix", "sparse matrix A in the coordinate format row-wise. 
:param A: sparse square matrix", "1, n): Aki = A.get(k, i) if Aki == 0: continue Aji, ji_index", "numpy as np from .sparse_matrices import CoordinateSparseMatrix, CSRMatrix def sparse_gauss_elimination_row(A: Union[CoordinateSparseMatrix, CSRMatrix]) \\", "if isinstance(A, CoordinateSparseMatrix): A = _coordinate_row(A) elif isinstance(A, CSRMatrix): pass # not yet", "CoordinateSparseMatrix: \"\"\" Performs Gaussian elimination on sparse matrix A in the coordinate format", "implemented return A def _coordinate_row(A: CoordinateSparseMatrix) -> CoordinateSparseMatrix: \"\"\" Performs Gaussian elimination on", "Ajk = A.get(j, k) if Ajk == 0: continue for i in range(k", "for i in range(k + 1, n): Aki = A.get(k, i) if Aki", "* (Aki * Ajk) if ji_index >= 0: # value already exists A.vals[ji_index]", "== 0: continue for i in range(k + 1, n): Aki = A.get(k,", "Union import numpy as np from .sparse_matrices import CoordinateSparseMatrix, CSRMatrix def sparse_gauss_elimination_row(A: Union[CoordinateSparseMatrix,", "_coordinate_row(A: CoordinateSparseMatrix) -> CoordinateSparseMatrix: \"\"\" Performs Gaussian elimination on sparse matrix A in", ":return: matrix A after Gaussian elimination \"\"\" n = A.shape[0] for k in", "\"\"\" A = deepcopy(A) A.dtype = np.float if isinstance(A, CoordinateSparseMatrix): A = _coordinate_row(A)", "Aki, ki_index = A.get(k, i, index=True) if ki_index != -1: A.vals[ki_index] /= Akk", "val = (-1) * (Aki * Ajk) if ji_index >= 0: # value", "if Aki == 0: continue Aji, ji_index = A.get(j, i, index=True) val =", "format or CSR format. :param A: sparse square matrix of shape (n, n)", "(Aki * Ajk) if ji_index >= 0: # value already exists A.vals[ji_index] +=", "CoordinateSparseMatrix) -> CoordinateSparseMatrix: \"\"\" Performs Gaussian elimination on sparse matrix A in the", "in the coordinate format of shape (n, n) :return: matrix A after Gaussian", "format row-wise. :param A: sparse square matrix in the coordinate format of shape", "Ajk) if ji_index >= 0: # value already exists A.vals[ji_index] += val else:", "range(k + 1, n): Ajk = A.get(j, k) if Ajk == 0: continue", "+= val else: # we have to insert new non-zero value A.insert(j, i,", "sparse matrix A row-wise. Allows either coordinate format or CSR format. :param A:", "!= -1: A.vals[ki_index] /= Akk for j in range(k + 1, n): Ajk", "n): Aki, ki_index = A.get(k, i, index=True) if ki_index != -1: A.vals[ki_index] /=", "A.vals[ji_index] += val else: # we have to insert new non-zero value A.insert(j,", ".sparse_matrices import CoordinateSparseMatrix, CSRMatrix def sparse_gauss_elimination_row(A: Union[CoordinateSparseMatrix, CSRMatrix]) \\ -> Union[CoordinateSparseMatrix, CSRMatrix]: \"\"\"", "either coordinate format or CSR format. :param A: sparse square matrix of shape", "Gaussian elimination on sparse matrix A in the coordinate format row-wise. :param A:", "Aki == 0: continue Aji, ji_index = A.get(j, i, index=True) val = (-1)", "copy import deepcopy from typing import Union import numpy as np from .sparse_matrices", "0: # value already exists A.vals[ji_index] += val else: # we have to", "<gh_stars>1-10 from copy import deepcopy from typing import Union import numpy as np", "elimination on sparse matrix A in the coordinate format row-wise. :param A: sparse", "Akk != 0, \"Akk = 0\" for i in range(k + 1, n):", "_coordinate_row(A) elif isinstance(A, CSRMatrix): pass # not yet implemented return A def _coordinate_row(A:" ]
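Since the CSR branch is a stub and the coordinate version manipulates index arrays directly, a dense NumPy mirror of the same update pattern is handy as a test oracle. This sketch is written for this note, not part of the original module; it applies the identical row operations to a plain array and, like _coordinate_row, leaves the sub-diagonal entries untouched.

import numpy as np


def dense_gauss_elimination_row(A):
    """Dense mirror of _coordinate_row, for cross-checking the sparse result."""
    A = np.asarray(A, dtype=float).copy()
    n = A.shape[0]
    for k in range(n - 1):
        assert A[k, k] != 0, "Akk = 0"
        A[k, k + 1:] /= A[k, k]                     # scale pivot row right of the pivot
        for j in range(k + 1, n):
            A[j, k + 1:] -= A[j, k] * A[k, k + 1:]  # eliminate into the trailing block
    return A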
[ "'MultiResUNet implementation in PyTorch; MultiResUNet: Rethinking the U-Net Architecture for Multimodal', author =", "implementation in PyTorch; MultiResUNet: Rethinking the U-Net Architecture for Multimodal', author = '<NAME>',", "in PyTorch; MultiResUNet: Rethinking the U-Net Architecture for Multimodal', author = '<NAME>', author_email", "from setuptools import setup, find_packages setup( name = 'multiresunet', version = '0.1', description", "Architecture for Multimodal', author = '<NAME>', author_email = '<EMAIL>', install_requires= [], packages =", "author = '<NAME>', author_email = '<EMAIL>', install_requires= [], packages = find_packages(), python_requires =", "= '0.1', description = 'MultiResUNet implementation in PyTorch; MultiResUNet: Rethinking the U-Net Architecture", "'multiresunet', version = '0.1', description = 'MultiResUNet implementation in PyTorch; MultiResUNet: Rethinking the", "Multimodal', author = '<NAME>', author_email = '<EMAIL>', install_requires= [], packages = find_packages(), python_requires", "setuptools import setup, find_packages setup( name = 'multiresunet', version = '0.1', description =", "PyTorch; MultiResUNet: Rethinking the U-Net Architecture for Multimodal', author = '<NAME>', author_email =", "for Multimodal', author = '<NAME>', author_email = '<EMAIL>', install_requires= [], packages = find_packages(),", "'<NAME>', author_email = '<EMAIL>', install_requires= [], packages = find_packages(), python_requires = '>=3.6' )", "Rethinking the U-Net Architecture for Multimodal', author = '<NAME>', author_email = '<EMAIL>', install_requires=", "= 'MultiResUNet implementation in PyTorch; MultiResUNet: Rethinking the U-Net Architecture for Multimodal', author", "version = '0.1', description = 'MultiResUNet implementation in PyTorch; MultiResUNet: Rethinking the U-Net", "MultiResUNet: Rethinking the U-Net Architecture for Multimodal', author = '<NAME>', author_email = '<EMAIL>',", "setup( name = 'multiresunet', version = '0.1', description = 'MultiResUNet implementation in PyTorch;", "the U-Net Architecture for Multimodal', author = '<NAME>', author_email = '<EMAIL>', install_requires= [],", "= '<NAME>', author_email = '<EMAIL>', install_requires= [], packages = find_packages(), python_requires = '>=3.6'", "description = 'MultiResUNet implementation in PyTorch; MultiResUNet: Rethinking the U-Net Architecture for Multimodal',", "'0.1', description = 'MultiResUNet implementation in PyTorch; MultiResUNet: Rethinking the U-Net Architecture for", "setup, find_packages setup( name = 'multiresunet', version = '0.1', description = 'MultiResUNet implementation", "name = 'multiresunet', version = '0.1', description = 'MultiResUNet implementation in PyTorch; MultiResUNet:", "find_packages setup( name = 'multiresunet', version = '0.1', description = 'MultiResUNet implementation in", "import setup, find_packages setup( name = 'multiresunet', version = '0.1', description = 'MultiResUNet", "= 'multiresunet', version = '0.1', description = 'MultiResUNet implementation in PyTorch; MultiResUNet: Rethinking", "U-Net Architecture for Multimodal', author = '<NAME>', author_email = '<EMAIL>', install_requires= [], packages" ]
[ "# Find each shade html_shades = color_group.find_all('li') shades = [] for shade in", "foreground = foreground_color_dark if has_class(shade, 'dark') else foreground_color_light shades.append({ 'name': shade_name, 'hex': hex,", "for color_group in color_groups: name_span = color_group.find(attrs={'class', 'name'}) # We skip black +", "= '${var_name}: {indent}{value};\\n' for name, value in zip(names, values): output_handle.write(fill_placeholders(pattern, { 'var_name': name,", "colors]) print(output_file + ' created, containing ' + str(colors_count) + ' colors and", "parsed_html = BeautifulSoup(raw_html, 'html.parser') # Parse it! html_palette = parsed_html.body.find('div', attrs={'class': 'color-palette'}) color_groups", "len(name)) })) def has_class(element, classname): class_attr = element.get('class') if class_attr is None: return", "-*- import urllib.request from bs4 import BeautifulSoup import re material_guidelines_url = 'http://www.google.com/design/spec/style/color.html#color-color-palette' output_file", "{indent}{value},\\n' for key, value in zip(keys, values): output_handle.write(fill_placeholders(pattern, { 'key': key, 'value': value,", "color_slug, 'shades': shades }) # Print vars & map definitions to output file", "for key in keys) pattern = ' \"{key}\": {indent}{value},\\n' for key, value in", "is None: continue color_name = name_span.text color_slug = slugify(color_name) # Find each shade", "guidelines HTML response = urllib.request.urlopen(material_guidelines_url) data = response.read() raw_html = data.decode('utf-8') parsed_html =", "re.sub(r'[-\\s]+', '-', (re.sub(r'[^\\w\\s-]', '', string).strip().lower())) def print_scss_map(output_handle, name, keys, values): output_handle.write('$' + name", "'colors', [color['slug'] for color in colors], ['$color-' + color['slug'] + '-list' for color", "in shades], [shade['hex'] for shade in shades]) # Foreground color output.writelines('\\n'.join([ '', '//", "with_what in dict.items(): string = string.replace('{' + what + '}', with_what) return string", "shade.find(attrs={'class', 'hex'}).text foreground = foreground_color_dark if has_class(shade, 'dark') else foreground_color_light shades.append({ 'name': shade_name,", "what + '}', with_what) return string # Download & parse guidelines HTML response", "color_group in color_groups: name_span = color_group.find(attrs={'class', 'name'}) # We skip black + white", "value, 'indent': ' ' * (longest_key - len(key)) })) output_handle.write(');\\n') def print_scss_vars(output_handle, names,", "shades]) # Foreground color output.writelines('\\n'.join([ '', '// Foreground', '' ])) print_scss_map(output, 'color-' +", "shade['name'] + '-foreground' for shade in shades], [shade['hex'] for shade in shades]) output.write('\\n\\n')", "pattern = '${var_name}: {indent}{value};\\n' for name, value in zip(names, values): output_handle.write(fill_placeholders(pattern, { 'var_name':", "= BeautifulSoup(raw_html, 'html.parser') # Parse it! 
html_palette = parsed_html.body.find('div', attrs={'class': 'color-palette'}) color_groups =", "skip black + white colors if name_span is None: continue color_name = name_span.text", "'color-group'}) colors = [] for color_group in color_groups: name_span = color_group.find(attrs={'class', 'name'}) #", "else foreground_color_light shades.append({ 'name': shade_name, 'hex': hex, 'foreground': foreground, }) colors.append({ 'name': color_name,", "string.replace('{' + what + '}', with_what) return string # Download & parse guidelines", "shade['name'] == '500') print_scss_vars(output, ['color-' + color_slug], [main_shade['hex']]) output.write('\\n') # All shades print_scss_vars(output,", "print_scss_vars(output, ['color-' + color_slug + '-' + shade['name'] for shade in shades], [shade['hex']", "values): output_handle.write('$' + name + ': (\\n') longest_key = max(len(key) for key in", "for key, value in zip(keys, values): output_handle.write(fill_placeholders(pattern, { 'key': key, 'value': value, 'indent':", "shade['name'] for shade in shades], [shade['hex'] for shade in shades]) # Foreground color", "= len(colors) shades_count = sum([len(color['shades']) for color in colors]) print(output_file + ' created,", "-*- coding: utf-8 -*- import urllib.request from bs4 import BeautifulSoup import re material_guidelines_url", "shades], [shade['foreground'] for shade in shades]) output.write('\\n') # Separate colors # Main shade", "for name, value in zip(names, values): output_handle.write(fill_placeholders(pattern, { 'var_name': name, 'value': value, 'indent':", "Write to file output.writelines('\\n'.join([ '//', '// ' + color_name, '//', '' ])) #", "{ 'key': key, 'value': value, 'indent': ' ' * (longest_key - len(key)) }))", "'-list' for color in colors]) colors_count = len(colors) shades_count = sum([len(color['shades']) for color", "coding: utf-8 -*- import urllib.request from bs4 import BeautifulSoup import re material_guidelines_url =", "name + ': (\\n') longest_key = max(len(key) for key in keys) pattern =", "- len(name)) })) def has_class(element, classname): class_attr = element.get('class') if class_attr is None:", "output.truncate() output.writelines('\\n'.join([ \"/**\", \" * Material-Colors-SCSS-Variables\", \" * https://github.com/chteuchteu/Material-Colors-SCSS-Variables\", \" */\\n\\n\" ])) for", "= shade.find(attrs={'class', 'shade'}).text hex = shade.find(attrs={'class', 'hex'}).text foreground = foreground_color_dark if has_class(shade, 'dark')", "# Write to file output.writelines('\\n'.join([ '//', '// ' + color_name, '//', '' ]))", "vars & map definitions to output file with open(output_file, 'w') as output: output.truncate()", "color_group.find(attrs={'class', 'name'}) # We skip black + white colors if name_span is None:", "for shade in shades]) output.write('\\n\\n') # Print a map of all colors print_scss_map(output,", "color_groups: name_span = color_group.find(attrs={'class', 'name'}) # We skip black + white colors if", "= [] for shade in html_shades: if has_class(shade, 'main-color'): continue shade_name = shade.find(attrs={'class',", "containing ' + str(colors_count) + ' colors and ' + str(shades_count) + '", "output.writelines('\\n'.join([ \"/**\", \" * Material-Colors-SCSS-Variables\", \" * https://github.com/chteuchteu/Material-Colors-SCSS-Variables\", \" */\\n\\n\" ])) for color", "+ '-foreground-list', [shade['name'] for shade in shades], [shade['foreground'] for shade in shades]) output.write('\\n')", "colors], ['$color-' + color['slug'] + '-list' for color in colors]) 
colors_count = len(colors)", "shades if shade['name'] == '500') print_scss_vars(output, ['color-' + color_slug + '-foreground'], [main_shade['hex']]) output.write('\\n')", "max(len(key) for key in keys) pattern = ' \"{key}\": {indent}{value},\\n' for key, value", "print_scss_vars(output_handle, names, values): indent = max(len(name) for name in names) pattern = '${var_name}:", "# Print vars & map definitions to output file with open(output_file, 'w') as", "fill_placeholders(string, dict): for what, with_what in dict.items(): string = string.replace('{' + what +", "bs4 import BeautifulSoup import re material_guidelines_url = 'http://www.google.com/design/spec/style/color.html#color-color-palette' output_file = 'dist/_material-colors.scss' foreground_color_light =", "= foreground_color_dark if has_class(shade, 'dark') else foreground_color_light shades.append({ 'name': shade_name, 'hex': hex, 'foreground':", "response.read() raw_html = data.decode('utf-8') parsed_html = BeautifulSoup(raw_html, 'html.parser') # Parse it! html_palette =", "\" * Material-Colors-SCSS-Variables\", \" * https://github.com/chteuchteu/Material-Colors-SCSS-Variables\", \" */\\n\\n\" ])) for color in colors:", "# Foreground color output.writelines('\\n'.join([ '', '// Foreground', '' ])) print_scss_map(output, 'color-' + color_slug", "Foreground', '' ])) print_scss_map(output, 'color-' + color_slug + '-foreground-list', [shade['name'] for shade in", "output.write('\\n\\n') # Print a map of all colors print_scss_map(output, 'colors', [color['slug'] for color", "colors print_scss_map(output, 'colors', [color['slug'] for color in colors], ['$color-' + color['slug'] + '-list'", "+ shade['name'] for shade in shades], [shade['hex'] for shade in shades]) # Foreground", "'foreground': foreground, }) colors.append({ 'name': color_name, 'slug': color_slug, 'shades': shades }) # Print", "in colors]) print(output_file + ' created, containing ' + str(colors_count) + ' colors", "html_palette = parsed_html.body.find('div', attrs={'class': 'color-palette'}) color_groups = html_palette.find_all('section', attrs={'class', 'color-group'}) colors = []", "'-list', [shade['name'] for shade in shades], [shade['hex'] for shade in shades]) output.write('\\n') #", "{ 'var_name': name, 'value': value, 'indent': ' ' * (indent - len(name)) }))", "'', string).strip().lower())) def print_scss_map(output_handle, name, keys, values): output_handle.write('$' + name + ': (\\n')", "= '#000000' def slugify(string): return re.sub(r'[-\\s]+', '-', (re.sub(r'[^\\w\\s-]', '', string).strip().lower())) def print_scss_map(output_handle, name,", "* Material-Colors-SCSS-Variables\", \" * https://github.com/chteuchteu/Material-Colors-SCSS-Variables\", \" */\\n\\n\" ])) for color in colors: color_name", "+ '-list' for color in colors]) colors_count = len(colors) shades_count = sum([len(color['shades']) for", "value, 'indent': ' ' * (indent - len(name)) })) def has_class(element, classname): class_attr", "if has_class(shade, 'dark') else foreground_color_light shades.append({ 'name': shade_name, 'hex': hex, 'foreground': foreground, })", "shade html_shades = color_group.find_all('li') shades = [] for shade in html_shades: if has_class(shade,", "for color in colors]) print(output_file + ' created, containing ' + str(colors_count) +", "for shade in shades], [shade['hex'] for shade in shades]) output.write('\\n\\n') # Print a", "# Print a map of all colors print_scss_map(output, 'colors', [color['slug'] for color in", "for shade in shades], [shade['hex'] for shade in 
shades]) output.write('\\n') # Separate colors", "colors # Main shade main_shade = next(shade for shade in shades if shade['name']", "== '500') print_scss_vars(output, ['color-' + color_slug], [main_shade['hex']]) output.write('\\n') # All shades print_scss_vars(output, ['color-'", "raw_html = data.decode('utf-8') parsed_html = BeautifulSoup(raw_html, 'html.parser') # Parse it! html_palette = parsed_html.body.find('div',", "[main_shade['hex']]) output.write('\\n') print_scss_vars(output, ['color-' + color_slug + '-' + shade['name'] + '-foreground' for", "output: output.truncate() output.writelines('\\n'.join([ \"/**\", \" * Material-Colors-SCSS-Variables\", \" * https://github.com/chteuchteu/Material-Colors-SCSS-Variables\", \" */\\n\\n\" ]))", "= 'http://www.google.com/design/spec/style/color.html#color-color-palette' output_file = 'dist/_material-colors.scss' foreground_color_light = '#ffffff' foreground_color_dark = '#000000' def slugify(string):", "' ' * (indent - len(name)) })) def has_class(element, classname): class_attr = element.get('class')", "for shade in shades if shade['name'] == '500') print_scss_vars(output, ['color-' + color_slug +", "'http://www.google.com/design/spec/style/color.html#color-color-palette' output_file = 'dist/_material-colors.scss' foreground_color_light = '#ffffff' foreground_color_dark = '#000000' def slugify(string): return", "max(len(name) for name in names) pattern = '${var_name}: {indent}{value};\\n' for name, value in", "Download & parse guidelines HTML response = urllib.request.urlopen(material_guidelines_url) data = response.read() raw_html =", "classname in class_attr def fill_placeholders(string, dict): for what, with_what in dict.items(): string =", "print_scss_map(output, 'color-' + color_slug + '-foreground-list', [shade['name'] for shade in shades], [shade['foreground'] for", "'' ])) print_scss_map(output, 'color-' + color_slug + '-foreground-list', [shade['name'] for shade in shades],", "def print_scss_vars(output_handle, names, values): indent = max(len(name) for name in names) pattern =", "shades = [] for shade in html_shades: if has_class(shade, 'main-color'): continue shade_name =", "= element.get('class') if class_attr is None: return False return classname in class_attr def", "for name in names) pattern = '${var_name}: {indent}{value};\\n' for name, value in zip(names,", "foreground, }) colors.append({ 'name': color_name, 'slug': color_slug, 'shades': shades }) # Print vars", "color in colors], ['$color-' + color['slug'] + '-list' for color in colors]) colors_count", "'dist/_material-colors.scss' foreground_color_light = '#ffffff' foreground_color_dark = '#000000' def slugify(string): return re.sub(r'[-\\s]+', '-', (re.sub(r'[^\\w\\s-]',", "black + white colors if name_span is None: continue color_name = name_span.text color_slug", "in colors]) colors_count = len(colors) shades_count = sum([len(color['shades']) for color in colors]) print(output_file", "attrs={'class', 'color-group'}) colors = [] for color_group in color_groups: name_span = color_group.find(attrs={'class', 'name'})", "+ shade['name'] + '-foreground' for shade in shades], [shade['hex'] for shade in shades])", "what, with_what in dict.items(): string = string.replace('{' + what + '}', with_what) return", "+ '}', with_what) return string # Download & parse guidelines HTML response =", "for shade in shades]) # Foreground color output.writelines('\\n'.join([ '', '// Foreground', '' ]))", "])) for color in colors: color_name = color['name'] color_slug = color['slug'] shades 
=", "foreground_color_light shades.append({ 'name': shade_name, 'hex': hex, 'foreground': foreground, }) colors.append({ 'name': color_name, 'slug':", "[main_shade['hex']]) output.write('\\n') # All shades print_scss_vars(output, ['color-' + color_slug + '-' + shade['name']", "return classname in class_attr def fill_placeholders(string, dict): for what, with_what in dict.items(): string", "continue color_name = name_span.text color_slug = slugify(color_name) # Find each shade html_shades =", "next(shade for shade in shades if shade['name'] == '500') print_scss_vars(output, ['color-' + color_slug],", "white colors if name_span is None: continue color_name = name_span.text color_slug = slugify(color_name)", "string = string.replace('{' + what + '}', with_what) return string # Download &", "hex, 'foreground': foreground, }) colors.append({ 'name': color_name, 'slug': color_slug, 'shades': shades }) #", "# Download & parse guidelines HTML response = urllib.request.urlopen(material_guidelines_url) data = response.read() raw_html", "color_slug + '-foreground'], [main_shade['hex']]) output.write('\\n') print_scss_vars(output, ['color-' + color_slug + '-' + shade['name']", "(indent - len(name)) })) def has_class(element, classname): class_attr = element.get('class') if class_attr is", "(re.sub(r'[^\\w\\s-]', '', string).strip().lower())) def print_scss_map(output_handle, name, keys, values): output_handle.write('$' + name + ':", "False return classname in class_attr def fill_placeholders(string, dict): for what, with_what in dict.items():", "colors]) colors_count = len(colors) shades_count = sum([len(color['shades']) for color in colors]) print(output_file +", "def has_class(element, classname): class_attr = element.get('class') if class_attr is None: return False return", "Print a map of all colors print_scss_map(output, 'colors', [color['slug'] for color in colors],", "color_name, 'slug': color_slug, 'shades': shades }) # Print vars & map definitions to", "'${var_name}: {indent}{value};\\n' for name, value in zip(names, values): output_handle.write(fill_placeholders(pattern, { 'var_name': name, 'value':", "for shade in shades], [shade['foreground'] for shade in shades]) output.write('\\n') # Separate colors", "dict.items(): string = string.replace('{' + what + '}', with_what) return string # Download", "'indent': ' ' * (longest_key - len(key)) })) output_handle.write(');\\n') def print_scss_vars(output_handle, names, values):", "'value': value, 'indent': ' ' * (longest_key - len(key)) })) output_handle.write(');\\n') def print_scss_vars(output_handle,", "slugify(color_name) # Find each shade html_shades = color_group.find_all('li') shades = [] for shade", "pattern = ' \"{key}\": {indent}{value},\\n' for key, value in zip(keys, values): output_handle.write(fill_placeholders(pattern, {", "+ '-' + shade['name'] + '-foreground' for shade in shades], [shade['hex'] for shade", "len(colors) shades_count = sum([len(color['shades']) for color in colors]) print(output_file + ' created, containing", "of all colors print_scss_map(output, 'colors', [color['slug'] for color in colors], ['$color-' + color['slug']", "for color in colors: color_name = color['name'] color_slug = color['slug'] shades = color['shades']", "keys) pattern = ' \"{key}\": {indent}{value},\\n' for key, value in zip(keys, values): output_handle.write(fill_placeholders(pattern,", "* (indent - len(name)) })) def has_class(element, classname): class_attr = element.get('class') if class_attr", "shade.find(attrs={'class', 'shade'}).text hex = 
shade.find(attrs={'class', 'hex'}).text foreground = foreground_color_dark if has_class(shade, 'dark') else", "file with open(output_file, 'w') as output: output.truncate() output.writelines('\\n'.join([ \"/**\", \" * Material-Colors-SCSS-Variables\", \"", "}) colors.append({ 'name': color_name, 'slug': color_slug, 'shades': shades }) # Print vars &", "\"/**\", \" * Material-Colors-SCSS-Variables\", \" * https://github.com/chteuchteu/Material-Colors-SCSS-Variables\", \" */\\n\\n\" ])) for color in", "in keys) pattern = ' \"{key}\": {indent}{value},\\n' for key, value in zip(keys, values):", "classname): class_attr = element.get('class') if class_attr is None: return False return classname in", "output_handle.write('$' + name + ': (\\n') longest_key = max(len(key) for key in keys)", "color_slug + '-list', [shade['name'] for shade in shades], [shade['hex'] for shade in shades])", "class_attr = element.get('class') if class_attr is None: return False return classname in class_attr", "names) pattern = '${var_name}: {indent}{value};\\n' for name, value in zip(names, values): output_handle.write(fill_placeholders(pattern, {", "is None: return False return classname in class_attr def fill_placeholders(string, dict): for what,", "color['shades'] # Write to file output.writelines('\\n'.join([ '//', '// ' + color_name, '//', ''", "def slugify(string): return re.sub(r'[-\\s]+', '-', (re.sub(r'[^\\w\\s-]', '', string).strip().lower())) def print_scss_map(output_handle, name, keys, values):", "shades], [shade['hex'] for shade in shades]) # Foreground color output.writelines('\\n'.join([ '', '// Foreground',", "key, value in zip(keys, values): output_handle.write(fill_placeholders(pattern, { 'key': key, 'value': value, 'indent': '", "values): indent = max(len(name) for name in names) pattern = '${var_name}: {indent}{value};\\n' for", "value in zip(keys, values): output_handle.write(fill_placeholders(pattern, { 'key': key, 'value': value, 'indent': ' '", "https://github.com/chteuchteu/Material-Colors-SCSS-Variables\", \" */\\n\\n\" ])) for color in colors: color_name = color['name'] color_slug =", "'' ])) # Map print_scss_map(output, 'color-' + color_slug + '-list', [shade['name'] for shade", "in colors], ['$color-' + color['slug'] + '-list' for color in colors]) colors_count =", "# All shades print_scss_vars(output, ['color-' + color_slug + '-' + shade['name'] for shade", "'name': shade_name, 'hex': hex, 'foreground': foreground, }) colors.append({ 'name': color_name, 'slug': color_slug, 'shades':", "for color in colors], ['$color-' + color['slug'] + '-list' for color in colors])", "to file output.writelines('\\n'.join([ '//', '// ' + color_name, '//', '' ])) # Map", "'500') print_scss_vars(output, ['color-' + color_slug + '-foreground'], [main_shade['hex']]) output.write('\\n') print_scss_vars(output, ['color-' + color_slug", "output_handle.write(fill_placeholders(pattern, { 'key': key, 'value': value, 'indent': ' ' * (longest_key - len(key))", "colors_count = len(colors) shades_count = sum([len(color['shades']) for color in colors]) print(output_file + '", "+ name + ': (\\n') longest_key = max(len(key) for key in keys) pattern", "'value': value, 'indent': ' ' * (indent - len(name)) })) def has_class(element, classname):", "+ color_slug + '-' + shade['name'] + '-foreground' for shade in shades], [shade['hex']", "+ what + '}', with_what) return string # Download & parse guidelines HTML", "'// ' + color_name, '//', '' ])) # Map print_scss_map(output, 'color-' + color_slug", 
"Material-Colors-SCSS-Variables\", \" * https://github.com/chteuchteu/Material-Colors-SCSS-Variables\", \" */\\n\\n\" ])) for color in colors: color_name =", "+ '-foreground' for shade in shades], [shade['hex'] for shade in shades]) output.write('\\n\\n') #", "+ ' created, containing ' + str(colors_count) + ' colors and ' +", "output_handle.write(');\\n') def print_scss_vars(output_handle, names, values): indent = max(len(name) for name in names) pattern", "+ color['slug'] + '-list' for color in colors]) colors_count = len(colors) shades_count =", "}) # Print vars & map definitions to output file with open(output_file, 'w')", "shade['name'] == '500') print_scss_vars(output, ['color-' + color_slug + '-foreground'], [main_shade['hex']]) output.write('\\n') print_scss_vars(output, ['color-'", "longest_key = max(len(key) for key in keys) pattern = ' \"{key}\": {indent}{value},\\n' for", "None: return False return classname in class_attr def fill_placeholders(string, dict): for what, with_what", "': (\\n') longest_key = max(len(key) for key in keys) pattern = ' \"{key}\":", "created, containing ' + str(colors_count) + ' colors and ' + str(shades_count) +", "'hex'}).text foreground = foreground_color_dark if has_class(shade, 'dark') else foreground_color_light shades.append({ 'name': shade_name, 'hex':", "shades }) # Print vars & map definitions to output file with open(output_file,", "in shades if shade['name'] == '500') print_scss_vars(output, ['color-' + color_slug], [main_shade['hex']]) output.write('\\n') #", "[shade['name'] for shade in shades], [shade['hex'] for shade in shades]) output.write('\\n') # Separate", "[shade['foreground'] for shade in shades]) output.write('\\n') # Separate colors # Main shade main_shade", "in shades], [shade['foreground'] for shade in shades]) output.write('\\n') # Separate colors # Main", "html_shades: if has_class(shade, 'main-color'): continue shade_name = shade.find(attrs={'class', 'shade'}).text hex = shade.find(attrs={'class', 'hex'}).text", "<reponame>chteuchteu/Material-Colors-SCSS-Variables #!/usr/bin/python3 # -*- coding: utf-8 -*- import urllib.request from bs4 import BeautifulSoup", "key in keys) pattern = ' \"{key}\": {indent}{value},\\n' for key, value in zip(keys,", "foreground_color_light = '#ffffff' foreground_color_dark = '#000000' def slugify(string): return re.sub(r'[-\\s]+', '-', (re.sub(r'[^\\w\\s-]', '',", "* https://github.com/chteuchteu/Material-Colors-SCSS-Variables\", \" */\\n\\n\" ])) for color in colors: color_name = color['name'] color_slug", "'key': key, 'value': value, 'indent': ' ' * (longest_key - len(key)) })) output_handle.write(');\\n')", "def print_scss_map(output_handle, name, keys, values): output_handle.write('$' + name + ': (\\n') longest_key =", "name_span.text color_slug = slugify(color_name) # Find each shade html_shades = color_group.find_all('li') shades =", "'', '// Foreground', '' ])) print_scss_map(output, 'color-' + color_slug + '-foreground-list', [shade['name'] for", "})) def has_class(element, classname): class_attr = element.get('class') if class_attr is None: return False", "])) # Map print_scss_map(output, 'color-' + color_slug + '-list', [shade['name'] for shade in", "= html_palette.find_all('section', attrs={'class', 'color-group'}) colors = [] for color_group in color_groups: name_span =", "[shade['hex'] for shade in shades]) output.write('\\n') # Separate colors # Main shade main_shade", "re material_guidelines_url = 'http://www.google.com/design/spec/style/color.html#color-color-palette' 
output_file = 'dist/_material-colors.scss' foreground_color_light = '#ffffff' foreground_color_dark = '#000000'", "shades]) output.write('\\n') # Separate colors # Main shade main_shade = next(shade for shade", "data.decode('utf-8') parsed_html = BeautifulSoup(raw_html, 'html.parser') # Parse it! html_palette = parsed_html.body.find('div', attrs={'class': 'color-palette'})", "in shades]) output.write('\\n\\n') # Print a map of all colors print_scss_map(output, 'colors', [color['slug']", "for shade in html_shades: if has_class(shade, 'main-color'): continue shade_name = shade.find(attrs={'class', 'shade'}).text hex", "# We skip black + white colors if name_span is None: continue color_name", "'name'}) # We skip black + white colors if name_span is None: continue", "len(key)) })) output_handle.write(');\\n') def print_scss_vars(output_handle, names, values): indent = max(len(name) for name in", "'color-palette'}) color_groups = html_palette.find_all('section', attrs={'class', 'color-group'}) colors = [] for color_group in color_groups:", "shades print_scss_vars(output, ['color-' + color_slug + '-' + shade['name'] for shade in shades],", "urllib.request from bs4 import BeautifulSoup import re material_guidelines_url = 'http://www.google.com/design/spec/style/color.html#color-color-palette' output_file = 'dist/_material-colors.scss'", "color['name'] color_slug = color['slug'] shades = color['shades'] # Write to file output.writelines('\\n'.join([ '//',", "['color-' + color_slug + '-' + shade['name'] + '-foreground' for shade in shades],", "shades], [shade['hex'] for shade in shades]) output.write('\\n\\n') # Print a map of all", "[color['slug'] for color in colors], ['$color-' + color['slug'] + '-list' for color in", "'var_name': name, 'value': value, 'indent': ' ' * (indent - len(name)) })) def", "in zip(keys, values): output_handle.write(fill_placeholders(pattern, { 'key': key, 'value': value, 'indent': ' ' *", "[] for shade in html_shades: if has_class(shade, 'main-color'): continue shade_name = shade.find(attrs={'class', 'shade'}).text", "'color-' + color_slug + '-list', [shade['name'] for shade in shades], [shade['hex'] for shade", "in color_groups: name_span = color_group.find(attrs={'class', 'name'}) # We skip black + white colors", "*/\\n\\n\" ])) for color in colors: color_name = color['name'] color_slug = color['slug'] shades", "shade in shades]) output.write('\\n') # Separate colors # Main shade main_shade = next(shade", "& map definitions to output file with open(output_file, 'w') as output: output.truncate() output.writelines('\\n'.join([", "'color-' + color_slug + '-foreground-list', [shade['name'] for shade in shades], [shade['foreground'] for shade", "+ '-' + shade['name'] for shade in shades], [shade['hex'] for shade in shades])", "a map of all colors print_scss_map(output, 'colors', [color['slug'] for color in colors], ['$color-'", "with open(output_file, 'w') as output: output.truncate() output.writelines('\\n'.join([ \"/**\", \" * Material-Colors-SCSS-Variables\", \" *", "shades = color['shades'] # Write to file output.writelines('\\n'.join([ '//', '// ' + color_name,", "color_name, '//', '' ])) # Map print_scss_map(output, 'color-' + color_slug + '-list', [shade['name']", "= next(shade for shade in shades if shade['name'] == '500') print_scss_vars(output, ['color-' +", "color in colors]) colors_count = len(colors) shades_count = sum([len(color['shades']) for color in colors])", "values): output_handle.write(fill_placeholders(pattern, { 'key': key, 'value': value, 'indent': 
' ' * (longest_key -", "' ' * (longest_key - len(key)) })) output_handle.write(');\\n') def print_scss_vars(output_handle, names, values): indent", "all colors print_scss_map(output, 'colors', [color['slug'] for color in colors], ['$color-' + color['slug'] +", "['color-' + color_slug], [main_shade['hex']]) output.write('\\n') # All shades print_scss_vars(output, ['color-' + color_slug +", "'-foreground'], [main_shade['hex']]) output.write('\\n') print_scss_vars(output, ['color-' + color_slug + '-' + shade['name'] + '-foreground'", "in names) pattern = '${var_name}: {indent}{value};\\n' for name, value in zip(names, values): output_handle.write(fill_placeholders(pattern,", "return re.sub(r'[-\\s]+', '-', (re.sub(r'[^\\w\\s-]', '', string).strip().lower())) def print_scss_map(output_handle, name, keys, values): output_handle.write('$' +", "key, 'value': value, 'indent': ' ' * (longest_key - len(key)) })) output_handle.write(');\\n') def", "' * (longest_key - len(key)) })) output_handle.write(');\\n') def print_scss_vars(output_handle, names, values): indent =", "output_handle.write(fill_placeholders(pattern, { 'var_name': name, 'value': value, 'indent': ' ' * (indent - len(name))", "= 'dist/_material-colors.scss' foreground_color_light = '#ffffff' foreground_color_dark = '#000000' def slugify(string): return re.sub(r'[-\\s]+', '-',", "shade in html_shades: if has_class(shade, 'main-color'): continue shade_name = shade.find(attrs={'class', 'shade'}).text hex =", "['$color-' + color['slug'] + '-list' for color in colors]) colors_count = len(colors) shades_count", "with_what) return string # Download & parse guidelines HTML response = urllib.request.urlopen(material_guidelines_url) data", "string # Download & parse guidelines HTML response = urllib.request.urlopen(material_guidelines_url) data = response.read()", "shade in shades], [shade['hex'] for shade in shades]) # Foreground color output.writelines('\\n'.join([ '',", "for shade in shades]) output.write('\\n') # Separate colors # Main shade main_shade =", "+ color_slug], [main_shade['hex']]) output.write('\\n') # All shades print_scss_vars(output, ['color-' + color_slug + '-'", "import BeautifulSoup import re material_guidelines_url = 'http://www.google.com/design/spec/style/color.html#color-color-palette' output_file = 'dist/_material-colors.scss' foreground_color_light = '#ffffff'", "map definitions to output file with open(output_file, 'w') as output: output.truncate() output.writelines('\\n'.join([ \"/**\",", "'indent': ' ' * (indent - len(name)) })) def has_class(element, classname): class_attr =", "' * (indent - len(name)) })) def has_class(element, classname): class_attr = element.get('class') if", "\" * https://github.com/chteuchteu/Material-Colors-SCSS-Variables\", \" */\\n\\n\" ])) for color in colors: color_name = color['name']", "import urllib.request from bs4 import BeautifulSoup import re material_guidelines_url = 'http://www.google.com/design/spec/style/color.html#color-color-palette' output_file =", "= max(len(key) for key in keys) pattern = ' \"{key}\": {indent}{value},\\n' for key,", "= color['slug'] shades = color['shades'] # Write to file output.writelines('\\n'.join([ '//', '// '", "+ color_slug + '-list', [shade['name'] for shade in shades], [shade['hex'] for shade in", "+ color_slug + '-foreground-list', [shade['name'] for shade in shades], [shade['foreground'] for shade in", "next(shade for shade in shades if shade['name'] == '500') print_scss_vars(output, ['color-' + color_slug", "class_attr def 
fill_placeholders(string, dict): for what, with_what in dict.items(): string = string.replace('{' +", "print_scss_map(output, 'colors', [color['slug'] for color in colors], ['$color-' + color['slug'] + '-list' for", "parsed_html.body.find('div', attrs={'class': 'color-palette'}) color_groups = html_palette.find_all('section', attrs={'class', 'color-group'}) colors = [] for color_group", "'name': color_name, 'slug': color_slug, 'shades': shades }) # Print vars & map definitions", "parse guidelines HTML response = urllib.request.urlopen(material_guidelines_url) data = response.read() raw_html = data.decode('utf-8') parsed_html", "+ ': (\\n') longest_key = max(len(key) for key in keys) pattern = '", "html_palette.find_all('section', attrs={'class', 'color-group'}) colors = [] for color_group in color_groups: name_span = color_group.find(attrs={'class',", "sum([len(color['shades']) for color in colors]) print(output_file + ' created, containing ' + str(colors_count)", "+ color_name, '//', '' ])) # Map print_scss_map(output, 'color-' + color_slug + '-list',", "it! html_palette = parsed_html.body.find('div', attrs={'class': 'color-palette'}) color_groups = html_palette.find_all('section', attrs={'class', 'color-group'}) colors =", "'#ffffff' foreground_color_dark = '#000000' def slugify(string): return re.sub(r'[-\\s]+', '-', (re.sub(r'[^\\w\\s-]', '', string).strip().lower())) def", "def fill_placeholders(string, dict): for what, with_what in dict.items(): string = string.replace('{' + what", "shade in shades], [shade['hex'] for shade in shades]) output.write('\\n\\n') # Print a map", "We skip black + white colors if name_span is None: continue color_name =", "in class_attr def fill_placeholders(string, dict): for what, with_what in dict.items(): string = string.replace('{'", "color output.writelines('\\n'.join([ '', '// Foreground', '' ])) print_scss_map(output, 'color-' + color_slug + '-foreground-list',", "Map print_scss_map(output, 'color-' + color_slug + '-list', [shade['name'] for shade in shades], [shade['hex']", "Find each shade html_shades = color_group.find_all('li') shades = [] for shade in html_shades:", "string).strip().lower())) def print_scss_map(output_handle, name, keys, values): output_handle.write('$' + name + ': (\\n') longest_key", "has_class(shade, 'dark') else foreground_color_light shades.append({ 'name': shade_name, 'hex': hex, 'foreground': foreground, }) colors.append({", "name, value in zip(names, values): output_handle.write(fill_placeholders(pattern, { 'var_name': name, 'value': value, 'indent': '", "+ '-list', [shade['name'] for shade in shades], [shade['hex'] for shade in shades]) output.write('\\n')", "- len(key)) })) output_handle.write(');\\n') def print_scss_vars(output_handle, names, values): indent = max(len(name) for name", "in html_shades: if has_class(shade, 'main-color'): continue shade_name = shade.find(attrs={'class', 'shade'}).text hex = shade.find(attrs={'class',", "shade in shades if shade['name'] == '500') print_scss_vars(output, ['color-' + color_slug], [main_shade['hex']]) output.write('\\n')", "output_file = 'dist/_material-colors.scss' foreground_color_light = '#ffffff' foreground_color_dark = '#000000' def slugify(string): return re.sub(r'[-\\s]+',", "output.writelines('\\n'.join([ '//', '// ' + color_name, '//', '' ])) # Map print_scss_map(output, 'color-'", "shade in shades], [shade['foreground'] for shade in shades]) output.write('\\n') # Separate colors #", "'-foreground' for shade in shades], [shade['hex'] for shade in shades]) 
output.write('\\n\\n') # Print", "})) output_handle.write(');\\n') def print_scss_vars(output_handle, names, values): indent = max(len(name) for name in names)", "for what, with_what in dict.items(): string = string.replace('{' + what + '}', with_what)", "foreground_color_dark if has_class(shade, 'dark') else foreground_color_light shades.append({ 'name': shade_name, 'hex': hex, 'foreground': foreground,", "return string # Download & parse guidelines HTML response = urllib.request.urlopen(material_guidelines_url) data =", "= '#ffffff' foreground_color_dark = '#000000' def slugify(string): return re.sub(r'[-\\s]+', '-', (re.sub(r'[^\\w\\s-]', '', string).strip().lower()))", "'-', (re.sub(r'[^\\w\\s-]', '', string).strip().lower())) def print_scss_map(output_handle, name, keys, values): output_handle.write('$' + name +", "' created, containing ' + str(colors_count) + ' colors and ' + str(shades_count)", "color_slug + '-' + shade['name'] + '-foreground' for shade in shades], [shade['hex'] for", "urllib.request.urlopen(material_guidelines_url) data = response.read() raw_html = data.decode('utf-8') parsed_html = BeautifulSoup(raw_html, 'html.parser') # Parse", "['color-' + color_slug + '-foreground'], [main_shade['hex']]) output.write('\\n') print_scss_vars(output, ['color-' + color_slug + '-'", "= [] for color_group in color_groups: name_span = color_group.find(attrs={'class', 'name'}) # We skip", "color['slug'] shades = color['shades'] # Write to file output.writelines('\\n'.join([ '//', '// ' +", "in shades]) output.write('\\n') # Separate colors # Main shade main_shade = next(shade for", "data = response.read() raw_html = data.decode('utf-8') parsed_html = BeautifulSoup(raw_html, 'html.parser') # Parse it!", "# Separate colors # Main shade main_shade = next(shade for shade in shades", "colors.append({ 'name': color_name, 'slug': color_slug, 'shades': shades }) # Print vars & map", "# -*- coding: utf-8 -*- import urllib.request from bs4 import BeautifulSoup import re", "zip(names, values): output_handle.write(fill_placeholders(pattern, { 'var_name': name, 'value': value, 'indent': ' ' * (indent", "material_guidelines_url = 'http://www.google.com/design/spec/style/color.html#color-color-palette' output_file = 'dist/_material-colors.scss' foreground_color_light = '#ffffff' foreground_color_dark = '#000000' def", "foreground_color_dark = '#000000' def slugify(string): return re.sub(r'[-\\s]+', '-', (re.sub(r'[^\\w\\s-]', '', string).strip().lower())) def print_scss_map(output_handle,", "BeautifulSoup(raw_html, 'html.parser') # Parse it! 
html_palette = parsed_html.body.find('div', attrs={'class': 'color-palette'}) color_groups = html_palette.find_all('section',", "All shades print_scss_vars(output, ['color-' + color_slug + '-' + shade['name'] for shade in", "'slug': color_slug, 'shades': shades }) # Print vars & map definitions to output", "hex = shade.find(attrs={'class', 'hex'}).text foreground = foreground_color_dark if has_class(shade, 'dark') else foreground_color_light shades.append({", "in colors: color_name = color['name'] color_slug = color['slug'] shades = color['shades'] # Write", "= color_group.find_all('li') shades = [] for shade in html_shades: if has_class(shade, 'main-color'): continue", "print_scss_map(output, 'color-' + color_slug + '-list', [shade['name'] for shade in shades], [shade['hex'] for", "import re material_guidelines_url = 'http://www.google.com/design/spec/style/color.html#color-color-palette' output_file = 'dist/_material-colors.scss' foreground_color_light = '#ffffff' foreground_color_dark =", "'500') print_scss_vars(output, ['color-' + color_slug], [main_shade['hex']]) output.write('\\n') # All shades print_scss_vars(output, ['color-' +", "print(output_file + ' created, containing ' + str(colors_count) + ' colors and '", "output file with open(output_file, 'w') as output: output.truncate() output.writelines('\\n'.join([ \"/**\", \" * Material-Colors-SCSS-Variables\",", "name, 'value': value, 'indent': ' ' * (indent - len(name)) })) def has_class(element,", "' + color_name, '//', '' ])) # Map print_scss_map(output, 'color-' + color_slug +", "color_slug = color['slug'] shades = color['shades'] # Write to file output.writelines('\\n'.join([ '//', '//", "'// Foreground', '' ])) print_scss_map(output, 'color-' + color_slug + '-foreground-list', [shade['name'] for shade", "(longest_key - len(key)) })) output_handle.write(');\\n') def print_scss_vars(output_handle, names, values): indent = max(len(name) for", "each shade html_shades = color_group.find_all('li') shades = [] for shade in html_shades: if", "'shade'}).text hex = shade.find(attrs={'class', 'hex'}).text foreground = foreground_color_dark if has_class(shade, 'dark') else foreground_color_light", "has_class(shade, 'main-color'): continue shade_name = shade.find(attrs={'class', 'shade'}).text hex = shade.find(attrs={'class', 'hex'}).text foreground =", "from bs4 import BeautifulSoup import re material_guidelines_url = 'http://www.google.com/design/spec/style/color.html#color-color-palette' output_file = 'dist/_material-colors.scss' foreground_color_light", "'}', with_what) return string # Download & parse guidelines HTML response = urllib.request.urlopen(material_guidelines_url)", "[shade['name'] for shade in shades], [shade['foreground'] for shade in shades]) output.write('\\n') # Separate", "color_slug = slugify(color_name) # Find each shade html_shades = color_group.find_all('li') shades = []", "indent = max(len(name) for name in names) pattern = '${var_name}: {indent}{value};\\n' for name,", "for color in colors]) colors_count = len(colors) shades_count = sum([len(color['shades']) for color in", "# Main shade main_shade = next(shade for shade in shades if shade['name'] ==", "definitions to output file with open(output_file, 'w') as output: output.truncate() output.writelines('\\n'.join([ \"/**\", \"", "colors: color_name = color['name'] color_slug = color['slug'] shades = color['shades'] # Write to", "['color-' + color_slug + '-' + shade['name'] for shade in shades], [shade['hex'] for", "'-foreground-list', [shade['name'] for shade in 
shades], [shade['foreground'] for shade in shades]) output.write('\\n') #", "colors if name_span is None: continue color_name = name_span.text color_slug = slugify(color_name) #", "shade in shades if shade['name'] == '500') print_scss_vars(output, ['color-' + color_slug + '-foreground'],", "HTML response = urllib.request.urlopen(material_guidelines_url) data = response.read() raw_html = data.decode('utf-8') parsed_html = BeautifulSoup(raw_html,", "'//', '' ])) # Map print_scss_map(output, 'color-' + color_slug + '-list', [shade['name'] for", "output.writelines('\\n'.join([ '', '// Foreground', '' ])) print_scss_map(output, 'color-' + color_slug + '-foreground-list', [shade['name']", "names, values): indent = max(len(name) for name in names) pattern = '${var_name}: {indent}{value};\\n'", "attrs={'class': 'color-palette'}) color_groups = html_palette.find_all('section', attrs={'class', 'color-group'}) colors = [] for color_group in", "[] for color_group in color_groups: name_span = color_group.find(attrs={'class', 'name'}) # We skip black", "output.write('\\n') # Separate colors # Main shade main_shade = next(shade for shade in", "#!/usr/bin/python3 # -*- coding: utf-8 -*- import urllib.request from bs4 import BeautifulSoup import", "Main shade main_shade = next(shade for shade in shades if shade['name'] == '500')", "print_scss_vars(output, ['color-' + color_slug + '-' + shade['name'] + '-foreground' for shade in", "= color_group.find(attrs={'class', 'name'}) # We skip black + white colors if name_span is", "'#000000' def slugify(string): return re.sub(r'[-\\s]+', '-', (re.sub(r'[^\\w\\s-]', '', string).strip().lower())) def print_scss_map(output_handle, name, keys,", "' + str(colors_count) + ' colors and ' + str(shades_count) + ' shades')", "html_shades = color_group.find_all('li') shades = [] for shade in html_shades: if has_class(shade, 'main-color'):", "= sum([len(color['shades']) for color in colors]) print(output_file + ' created, containing ' +", "print_scss_map(output_handle, name, keys, values): output_handle.write('$' + name + ': (\\n') longest_key = max(len(key)", "'w') as output: output.truncate() output.writelines('\\n'.join([ \"/**\", \" * Material-Colors-SCSS-Variables\", \" * https://github.com/chteuchteu/Material-Colors-SCSS-Variables\", \"", "= response.read() raw_html = data.decode('utf-8') parsed_html = BeautifulSoup(raw_html, 'html.parser') # Parse it! 
html_palette", "if name_span is None: continue color_name = name_span.text color_slug = slugify(color_name) # Find", "shades.append({ 'name': shade_name, 'hex': hex, 'foreground': foreground, }) colors.append({ 'name': color_name, 'slug': color_slug,", "color_group.find_all('li') shades = [] for shade in html_shades: if has_class(shade, 'main-color'): continue shade_name", "'main-color'): continue shade_name = shade.find(attrs={'class', 'shade'}).text hex = shade.find(attrs={'class', 'hex'}).text foreground = foreground_color_dark", "print_scss_vars(output, ['color-' + color_slug + '-foreground'], [main_shade['hex']]) output.write('\\n') print_scss_vars(output, ['color-' + color_slug +", "slugify(string): return re.sub(r'[-\\s]+', '-', (re.sub(r'[^\\w\\s-]', '', string).strip().lower())) def print_scss_map(output_handle, name, keys, values): output_handle.write('$'", "if has_class(shade, 'main-color'): continue shade_name = shade.find(attrs={'class', 'shade'}).text hex = shade.find(attrs={'class', 'hex'}).text foreground", "= string.replace('{' + what + '}', with_what) return string # Download & parse", "color in colors]) print(output_file + ' created, containing ' + str(colors_count) + '", "continue shade_name = shade.find(attrs={'class', 'shade'}).text hex = shade.find(attrs={'class', 'hex'}).text foreground = foreground_color_dark if", "# Map print_scss_map(output, 'color-' + color_slug + '-list', [shade['name'] for shade in shades],", "for shade in shades], [shade['hex'] for shade in shades]) # Foreground color output.writelines('\\n'.join([", "zip(keys, values): output_handle.write(fill_placeholders(pattern, { 'key': key, 'value': value, 'indent': ' ' * (longest_key", "= slugify(color_name) # Find each shade html_shades = color_group.find_all('li') shades = [] for", "main_shade = next(shade for shade in shades if shade['name'] == '500') print_scss_vars(output, ['color-'", "& parse guidelines HTML response = urllib.request.urlopen(material_guidelines_url) data = response.read() raw_html = data.decode('utf-8')", "== '500') print_scss_vars(output, ['color-' + color_slug + '-foreground'], [main_shade['hex']]) output.write('\\n') print_scss_vars(output, ['color-' +", "print_scss_vars(output, ['color-' + color_slug], [main_shade['hex']]) output.write('\\n') # All shades print_scss_vars(output, ['color-' + color_slug", "shade in shades]) # Foreground color output.writelines('\\n'.join([ '', '// Foreground', '' ])) print_scss_map(output,", "to output file with open(output_file, 'w') as output: output.truncate() output.writelines('\\n'.join([ \"/**\", \" *", "Foreground color output.writelines('\\n'.join([ '', '// Foreground', '' ])) print_scss_map(output, 'color-' + color_slug +", "in shades]) # Foreground color output.writelines('\\n'.join([ '', '// Foreground', '' ])) print_scss_map(output, 'color-'", "+ color_slug + '-foreground'], [main_shade['hex']]) output.write('\\n') print_scss_vars(output, ['color-' + color_slug + '-' +", "color_name = color['name'] color_slug = color['slug'] shades = color['shades'] # Write to file", "in zip(names, values): output_handle.write(fill_placeholders(pattern, { 'var_name': name, 'value': value, 'indent': ' ' *", "dict): for what, with_what in dict.items(): string = string.replace('{' + what + '}',", "None: continue color_name = name_span.text color_slug = slugify(color_name) # Find each shade html_shades", "color in colors: color_name = color['name'] color_slug = color['slug'] shades = color['shades'] #", "name, keys, values): output_handle.write('$' + 
name + ': (\\n') longest_key = max(len(key) for", "file output.writelines('\\n'.join([ '//', '// ' + color_name, '//', '' ])) # Map print_scss_map(output,", "Parse it! html_palette = parsed_html.body.find('div', attrs={'class': 'color-palette'}) color_groups = html_palette.find_all('section', attrs={'class', 'color-group'}) colors", "values): output_handle.write(fill_placeholders(pattern, { 'var_name': name, 'value': value, 'indent': ' ' * (indent -", "response = urllib.request.urlopen(material_guidelines_url) data = response.read() raw_html = data.decode('utf-8') parsed_html = BeautifulSoup(raw_html, 'html.parser')", "value in zip(names, values): output_handle.write(fill_placeholders(pattern, { 'var_name': name, 'value': value, 'indent': ' '", "keys, values): output_handle.write('$' + name + ': (\\n') longest_key = max(len(key) for key", "in dict.items(): string = string.replace('{' + what + '}', with_what) return string #", "= color['name'] color_slug = color['slug'] shades = color['shades'] # Write to file output.writelines('\\n'.join([", "= color['shades'] # Write to file output.writelines('\\n'.join([ '//', '// ' + color_name, '//',", "shade_name, 'hex': hex, 'foreground': foreground, }) colors.append({ 'name': color_name, 'slug': color_slug, 'shades': shades", "output.write('\\n') print_scss_vars(output, ['color-' + color_slug + '-' + shade['name'] + '-foreground' for shade", "= name_span.text color_slug = slugify(color_name) # Find each shade html_shades = color_group.find_all('li') shades", "'html.parser') # Parse it! html_palette = parsed_html.body.find('div', attrs={'class': 'color-palette'}) color_groups = html_palette.find_all('section', attrs={'class',", "shades if shade['name'] == '500') print_scss_vars(output, ['color-' + color_slug], [main_shade['hex']]) output.write('\\n') # All", "Print vars & map definitions to output file with open(output_file, 'w') as output:", "'hex': hex, 'foreground': foreground, }) colors.append({ 'name': color_name, 'slug': color_slug, 'shades': shades })", "{indent}{value};\\n' for name, value in zip(names, values): output_handle.write(fill_placeholders(pattern, { 'var_name': name, 'value': value,", "open(output_file, 'w') as output: output.truncate() output.writelines('\\n'.join([ \"/**\", \" * Material-Colors-SCSS-Variables\", \" * https://github.com/chteuchteu/Material-Colors-SCSS-Variables\",", "= max(len(name) for name in names) pattern = '${var_name}: {indent}{value};\\n' for name, value", "if class_attr is None: return False return classname in class_attr def fill_placeholders(string, dict):", "color_name = name_span.text color_slug = slugify(color_name) # Find each shade html_shades = color_group.find_all('li')", "element.get('class') if class_attr is None: return False return classname in class_attr def fill_placeholders(string,", "= ' \"{key}\": {indent}{value},\\n' for key, value in zip(keys, values): output_handle.write(fill_placeholders(pattern, { 'key':", "shade in shades], [shade['hex'] for shade in shades]) output.write('\\n') # Separate colors #", "+ color_slug + '-' + shade['name'] for shade in shades], [shade['hex'] for shade", "for shade in shades if shade['name'] == '500') print_scss_vars(output, ['color-' + color_slug], [main_shade['hex']])", "'shades': shades }) # Print vars & map definitions to output file with", "color_slug + '-' + shade['name'] for shade in shades], [shade['hex'] for shade in", "shades_count = sum([len(color['shades']) for color in colors]) print(output_file + ' created, containing '", "in shades], 
[shade['hex'] for shade in shades]) output.write('\\n\\n') # Print a map of", "color_slug + '-foreground-list', [shade['name'] for shade in shades], [shade['foreground'] for shade in shades])", "in shades], [shade['hex'] for shade in shades]) output.write('\\n') # Separate colors # Main", "shade main_shade = next(shade for shade in shades if shade['name'] == '500') print_scss_vars(output,", "])) print_scss_map(output, 'color-' + color_slug + '-foreground-list', [shade['name'] for shade in shades], [shade['foreground']", "'-' + shade['name'] for shade in shades], [shade['hex'] for shade in shades]) #", "in shades if shade['name'] == '500') print_scss_vars(output, ['color-' + color_slug + '-foreground'], [main_shade['hex']])", "= parsed_html.body.find('div', attrs={'class': 'color-palette'}) color_groups = html_palette.find_all('section', attrs={'class', 'color-group'}) colors = [] for", "name_span = color_group.find(attrs={'class', 'name'}) # We skip black + white colors if name_span", "= shade.find(attrs={'class', 'hex'}).text foreground = foreground_color_dark if has_class(shade, 'dark') else foreground_color_light shades.append({ 'name':", "\" */\\n\\n\" ])) for color in colors: color_name = color['name'] color_slug = color['slug']", "name in names) pattern = '${var_name}: {indent}{value};\\n' for name, value in zip(names, values):", "'dark') else foreground_color_light shades.append({ 'name': shade_name, 'hex': hex, 'foreground': foreground, }) colors.append({ 'name':", "[shade['hex'] for shade in shades]) # Foreground color output.writelines('\\n'.join([ '', '// Foreground', ''", "shades], [shade['hex'] for shade in shades]) output.write('\\n') # Separate colors # Main shade", "Separate colors # Main shade main_shade = next(shade for shade in shades if", "utf-8 -*- import urllib.request from bs4 import BeautifulSoup import re material_guidelines_url = 'http://www.google.com/design/spec/style/color.html#color-color-palette'", "' \"{key}\": {indent}{value},\\n' for key, value in zip(keys, values): output_handle.write(fill_placeholders(pattern, { 'key': key,", "shade_name = shade.find(attrs={'class', 'shade'}).text hex = shade.find(attrs={'class', 'hex'}).text foreground = foreground_color_dark if has_class(shade,", "(\\n') longest_key = max(len(key) for key in keys) pattern = ' \"{key}\": {indent}{value},\\n'", "+ white colors if name_span is None: continue color_name = name_span.text color_slug =", "\"{key}\": {indent}{value},\\n' for key, value in zip(keys, values): output_handle.write(fill_placeholders(pattern, { 'key': key, 'value':", "output.write('\\n') # All shades print_scss_vars(output, ['color-' + color_slug + '-' + shade['name'] for", "shades]) output.write('\\n\\n') # Print a map of all colors print_scss_map(output, 'colors', [color['slug'] for", "class_attr is None: return False return classname in class_attr def fill_placeholders(string, dict): for", "as output: output.truncate() output.writelines('\\n'.join([ \"/**\", \" * Material-Colors-SCSS-Variables\", \" * https://github.com/chteuchteu/Material-Colors-SCSS-Variables\", \" */\\n\\n\"", "has_class(element, classname): class_attr = element.get('class') if class_attr is None: return False return classname", "'-' + shade['name'] + '-foreground' for shade in shades], [shade['hex'] for shade in", "map of all colors print_scss_map(output, 'colors', [color['slug'] for color in colors], ['$color-' +", "BeautifulSoup import re material_guidelines_url = 'http://www.google.com/design/spec/style/color.html#color-color-palette' 
output_file = 'dist/_material-colors.scss' foreground_color_light = '#ffffff' foreground_color_dark", "color_slug], [main_shade['hex']]) output.write('\\n') # All shades print_scss_vars(output, ['color-' + color_slug + '-' +", "shade in shades]) output.write('\\n\\n') # Print a map of all colors print_scss_map(output, 'colors',", "colors = [] for color_group in color_groups: name_span = color_group.find(attrs={'class', 'name'}) # We", "color_groups = html_palette.find_all('section', attrs={'class', 'color-group'}) colors = [] for color_group in color_groups: name_span", "= urllib.request.urlopen(material_guidelines_url) data = response.read() raw_html = data.decode('utf-8') parsed_html = BeautifulSoup(raw_html, 'html.parser') #", "if shade['name'] == '500') print_scss_vars(output, ['color-' + color_slug], [main_shade['hex']]) output.write('\\n') # All shades", "= data.decode('utf-8') parsed_html = BeautifulSoup(raw_html, 'html.parser') # Parse it! html_palette = parsed_html.body.find('div', attrs={'class':", "if shade['name'] == '500') print_scss_vars(output, ['color-' + color_slug + '-foreground'], [main_shade['hex']]) output.write('\\n') print_scss_vars(output,", "# Parse it! html_palette = parsed_html.body.find('div', attrs={'class': 'color-palette'}) color_groups = html_palette.find_all('section', attrs={'class', 'color-group'})", "+ '-foreground'], [main_shade['hex']]) output.write('\\n') print_scss_vars(output, ['color-' + color_slug + '-' + shade['name'] +", "name_span is None: continue color_name = name_span.text color_slug = slugify(color_name) # Find each", "[shade['hex'] for shade in shades]) output.write('\\n\\n') # Print a map of all colors", "color['slug'] + '-list' for color in colors]) colors_count = len(colors) shades_count = sum([len(color['shades'])", "'//', '// ' + color_name, '//', '' ])) # Map print_scss_map(output, 'color-' +", "* (longest_key - len(key)) })) output_handle.write(');\\n') def print_scss_vars(output_handle, names, values): indent = max(len(name)", "return False return classname in class_attr def fill_placeholders(string, dict): for what, with_what in" ]
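# For reference, a sketch of the SCSS this script emits, following the
# print_scss_map / print_scss_vars patterns above. The shade values shown
# here are illustrative placeholders, not actual scraped output:
#
#   $color-red-list: (
#       "50":  #ffebee,
#       "500": #f44336,
#   );
#
#   // Foreground
#
#   $color-red-foreground-list: (
#       "50":  #000000,
#       "500": #ffffff,
#   );
#
#   $color-red:     #f44336;
#   $color-red-50:  #ffebee;
#   $color-red-500: #f44336;
#   $color-red-foreground: #ffffff;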
[ "__init__(self, settings): self.money = settings['money'] self.apples = settings['apples'] self.price = settings['price'] def buy_apples(self,", "self.apples = settings['apples'] self.price = settings['price'] def buy_apples(self, num_apples): needed = calc.multiply(self.price, num_apples)", "+= num_apples else: raise Exception(\"Not enough money to buy \" + str(num_apples) +", "settings['money'] self.apples = settings['apples'] self.price = settings['price'] def buy_apples(self, num_apples): needed = calc.multiply(self.price,", "calc.subtract(self.money, needed) if would_be_left >= 0: self.money -= self.price * num_apples self.apples +=", "* num_apples self.apples += num_apples else: raise Exception(\"Not enough money to buy \"", "num_apples self.apples += num_apples else: raise Exception(\"Not enough money to buy \" +", "def __init__(self, settings): self.money = settings['money'] self.apples = settings['apples'] self.price = settings['price'] def", "else: raise Exception(\"Not enough money to buy \" + str(num_apples) + \" apples\")", "self.apples += num_apples else: raise Exception(\"Not enough money to buy \" + str(num_apples)", "def buy_apples(self, num_apples): needed = calc.multiply(self.price, num_apples) would_be_left = calc.subtract(self.money, needed) if would_be_left", "needed = calc.multiply(self.price, num_apples) would_be_left = calc.subtract(self.money, needed) if would_be_left >= 0: self.money", "0: self.money -= self.price * num_apples self.apples += num_apples else: raise Exception(\"Not enough", "settings): self.money = settings['money'] self.apples = settings['apples'] self.price = settings['price'] def buy_apples(self, num_apples):", "would_be_left = calc.subtract(self.money, needed) if would_be_left >= 0: self.money -= self.price * num_apples", "needed) if would_be_left >= 0: self.money -= self.price * num_apples self.apples += num_apples", "if would_be_left >= 0: self.money -= self.price * num_apples self.apples += num_apples else:", "would_be_left >= 0: self.money -= self.price * num_apples self.apples += num_apples else: raise", "= calc.multiply(self.price, num_apples) would_be_left = calc.subtract(self.money, needed) if would_be_left >= 0: self.money -=", "self.price = settings['price'] def buy_apples(self, num_apples): needed = calc.multiply(self.price, num_apples) would_be_left = calc.subtract(self.money,", "settings['price'] def buy_apples(self, num_apples): needed = calc.multiply(self.price, num_apples) would_be_left = calc.subtract(self.money, needed) if", "Pocket: def __init__(self, settings): self.money = settings['money'] self.apples = settings['apples'] self.price = settings['price']", "num_apples else: raise Exception(\"Not enough money to buy \" + str(num_apples) + \"", "self.price * num_apples self.apples += num_apples else: raise Exception(\"Not enough money to buy", "settings['apples'] self.price = settings['price'] def buy_apples(self, num_apples): needed = calc.multiply(self.price, num_apples) would_be_left =", "num_apples): needed = calc.multiply(self.price, num_apples) would_be_left = calc.subtract(self.money, needed) if would_be_left >= 0:", ">= 0: self.money -= self.price * num_apples self.apples += num_apples else: raise Exception(\"Not", "buy_apples(self, num_apples): needed = calc.multiply(self.price, num_apples) would_be_left = calc.subtract(self.money, needed) if would_be_left >=", "self.money -= self.price * num_apples self.apples += num_apples else: raise Exception(\"Not enough money", "calc class Pocket: def __init__(self, settings): 
self.money = settings['money'] self.apples = settings['apples'] self.price", "<filename>buy_stuff.py import calc class Pocket: def __init__(self, settings): self.money = settings['money'] self.apples =", "= settings['price'] def buy_apples(self, num_apples): needed = calc.multiply(self.price, num_apples) would_be_left = calc.subtract(self.money, needed)", "num_apples) would_be_left = calc.subtract(self.money, needed) if would_be_left >= 0: self.money -= self.price *", "= settings['apples'] self.price = settings['price'] def buy_apples(self, num_apples): needed = calc.multiply(self.price, num_apples) would_be_left", "self.money = settings['money'] self.apples = settings['apples'] self.price = settings['price'] def buy_apples(self, num_apples): needed", "import calc class Pocket: def __init__(self, settings): self.money = settings['money'] self.apples = settings['apples']", "-= self.price * num_apples self.apples += num_apples else: raise Exception(\"Not enough money to", "calc.multiply(self.price, num_apples) would_be_left = calc.subtract(self.money, needed) if would_be_left >= 0: self.money -= self.price", "class Pocket: def __init__(self, settings): self.money = settings['money'] self.apples = settings['apples'] self.price =", "= settings['money'] self.apples = settings['apples'] self.price = settings['price'] def buy_apples(self, num_apples): needed =", "= calc.subtract(self.money, needed) if would_be_left >= 0: self.money -= self.price * num_apples self.apples" ]
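# buy_stuff.py imports a local `calc` module that is not included in this
# dump. A minimal sketch of what it presumably provides, inferred only from
# the two calls above (calc.multiply, calc.subtract) -- an assumption, not
# the actual module:

# calc.py (hypothetical)
def multiply(a, b):
    # product of two numbers; buy_apples uses it to compute the total price
    return a * b


def subtract(a, b):
    # difference of two numbers; buy_apples uses it to check remaining money
    return a - b

# Hypothetical usage of Pocket, assuming the calc sketch above:
#   pocket = Pocket({'money': 10, 'apples': 0, 'price': 2})
#   pocket.buy_apples(3)  # ok: money 10 -> 4, apples 0 -> 3
#   pocket.buy_apples(5)  # raises: 5 apples cost 10, only 4 left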
[ "in range(len(dd)): ri = dd[i] / 2 make_block_mesh(mesh, [x0, 0, 0], [d3 -", "= 2 # slope 30 degree d3 = d1 / np.cos(theta) mesh =", "as np # version3 slope 15 degree theta = np.pi / 12.0 #", "d5 = d4 * np.cos(theta) d6 = d2 * np.cos(theta) d7 = 40", "0], [d4, 0, 0], [0, 0, 8*d2]) om.write_mesh(\"coin_sorter_v5_p1.obj\", mesh) # part 2 holder", "0, 8*d2]) om.write_mesh(\"coin_sorter_v5_p1.obj\", mesh) # part 2 holder mesh = om.TriMesh() d5 =", "[d6, 0, 0], [0, d1, 0], [0, 0, d7 + d4 * np.sin(theta)])", "= 40 + 2*d2 d8 = 1.0 x1 = 0 make_block_mesh(mesh, [0, 0,", "= d9 - 2*d8 d11 = d7 - 2*d2 make_rod_mesh(mesh, [0, 0, 0],", "dd[i] make_pipe_square_mesh(mesh, [x0 + ri, ri, 0], [x0 + ri, ri, d2], p2=[x0", "sorter # Nickel 5 cents 21.2mm (1.76mm) # Dime 10 cents 18.03mm (1.22mm)", "0, d7 + d4 * np.sin(theta)]) make_block_mesh(mesh, [x1, 0, 0], [d6 + 2*d2,", "0, d2]) x0 = x0 + d3 - dd[i] make_pipe_square_mesh(mesh, [x0 + ri,", "mesh) # part 2 holder mesh = om.TriMesh() d5 = d4 * np.cos(theta)", "for i in range(len(dd)): x1 = x1 + d1 dh = d7 +", "for coin sorter # Nickel 5 cents 21.2mm (1.76mm) # Dime 10 cents", "sortor d4 = x0 make_block_mesh(mesh, [0, 0, 0], [0, -d2, 0], [d4, 0,", "/ 2.0, n=256) make_pipe_mesh(mesh, [0,0,d8], [0,0,d11], p2=[1.0, 0.0, 0.0], r1=d10/2.0, r2=d9/2.0, n=256) om.write_mesh(\"coin_sorter_v5_p3.obj\",", "dd[i] / 2 make_block_mesh(mesh, [x0, 0, 0], [d3 - dd[i], 0, 0], [0,", "[0, d1, 0], [d8, 0, 0]) om.write_mesh(\"coin_sorter_v5_p2.obj\", mesh) # part 2 coin collector", "0, d2]) make_block_mesh(mesh, [x1, 0, 0], [d6, 0, 0], [0, d1, 0], [0,", "d1, 0], [0, 0, d7 + d4 * np.sin(theta)]) make_block_mesh(mesh, [x1, 0, 0],", "[0, 0, dh + 6*d2]) make_block_mesh(mesh, [x1 + d8, 0, 0], [d8, 0,", "https://en.wikipedia.org/wiki/Coins_of_the_Canadian_dollar # create mesh for coin sorter # Nickel 5 cents 21.2mm (1.76mm)", "np.pi / 12.0 # https://en.wikipedia.org/wiki/Coins_of_the_Canadian_dollar # create mesh for coin sorter # Nickel", "[0, 0, 0], [d5, 0, 0], [0, d1, 0], [0, 0, d2]) make_block_mesh(mesh,", "0], [0, d8, 0], [0, 0, dh + 6*d2]) make_block_mesh(mesh, [x1 + d8,", "d6 = d2 * np.cos(theta) d7 = 40 + 2*d2 d8 = 1.0", "0], [d8, 0, 0], [0, d1, 0], [0, 0, dh]) make_block_triangle_mesh(mesh, [x1, 0,", "= 0 make_block_mesh(mesh, [0, 0, 0], [d5, 0, 0], [0, d1, 0], [0,", "0], [d8 + d8, 0, 0], [0, d8, 0], [0, 0, dh +", "ri, ri, d2], p2=[x0 + ri, 0.0, 0.0], r1=ri, r2=ri, n=256) make_block_mesh(mesh, [x0,", "0, 0]) x1 = x1 + d6 for i in range(len(dd)): x1 =", "om.TriMesh() # part 1 sorter x0 = 0 # screen layer for i", "x0 make_block_mesh(mesh, [0, 0, 0], [0, -d2, 0], [d4, 0, 0], [0, 0,", "make_block_mesh(mesh, [x1 + d8, 0, 0], [d8, 0, 0], [0, d1, 0], [0,", "d1, 0], [0, 0, d2]) x0 = x0 + d2 # wall block", "* np.sin(theta)]) make_block_mesh(mesh, [x1, 0, 0], [d6 + 2*d2, 0, 0], [0, d8,", "0], [0, 0, d8], p2=[1.0, 0.0, 0.0], r=d9 / 2.0, n=256) make_pipe_mesh(mesh, [0,0,d8],", "[0, d1, 0], [0, 0, d2]) x0 = x0 + d3 - dd[i]", "0, 0], [0, 0, d8], p2=[1.0, 0.0, 0.0], r=d9 / 2.0, n=256) make_pipe_mesh(mesh,", "x0 + d3 - dd[i] make_pipe_square_mesh(mesh, [x0 + ri, ri, 0], [x0 +", "= d1 - d8 d10 = d9 - 2*d8 d11 = d7 -", "make_block_mesh(mesh, [x0, 0, 0], [d3 - dd[i], 0, 0], [0, d1, 0], [0,", "= np.pi / 12.0 # https://en.wikipedia.org/wiki/Coins_of_the_Canadian_dollar # create mesh for coin sorter #", "2 holder mesh = om.TriMesh() d5 = d4 * np.cos(theta) d6 = d2", "dd[i], 0], [dd[i], 0, 0], [0, d1 - dd[i], 0], [0, 0, d2])", "+ d8, 0, dh + 6*d2], [0, d1, d1*np.tan(theta)], [0, d1, 0], [d8,", 
"(1.75mm) # Toonie $2 28mm (1.8mm) dd0 = [18.03, 21.2, 23.88, 26.5, 28]", "0], [0, 0, d2]) x0 = x0 + d2 # wall block #", "d1, d1*np.tan(theta)], [0, d1, 0], [d8, 0, 0]) if i < len(dd) -", "0], [0, d1, 0], [0, 0, dh + 6*d2]) make_block_triangle_mesh(mesh, [x1 + d8,", "numpy as np # version3 slope 15 degree theta = np.pi / 12.0", "[x0 + ri, ri, 0], [x0 + ri, ri, d2], p2=[x0 + ri,", "np.sin(theta) + 6*d2]) make_block_triangle_mesh(mesh, [x1, 0, d7 + d4 * np.sin(theta)], [0, d1,", "0, 0], [0, d8, 0], [0, 0, dh + 6*d2]) else: make_block_mesh(mesh, [x1,", "np.cos(theta) d7 = 40 + 2*d2 d8 = 1.0 x1 = 0 make_block_mesh(mesh,", "make_block_triangle_mesh(mesh, [x1, 0, dh], [0, d1, d1*np.tan(theta)], [0, d1, 0], [d8, 0, 0])", "x1 = 0 make_block_mesh(mesh, [0, 0, 0], [d5, 0, 0], [0, d1, 0],", "make_block_mesh(mesh, [x1, 0, 0], [d8 + d8, 0, 0], [0, d8, 0], [0,", "d0 = 0.8 dd = [d + d0 for d in dd0] d1", "in dd0] d1 = 35 d2 = 2 # slope 30 degree d3", "sorter x0 = 0 # screen layer for i in range(len(dd)): ri =", "2*d2 make_rod_mesh(mesh, [0, 0, 0], [0, 0, d8], p2=[1.0, 0.0, 0.0], r=d9 /", "slope 30 degree d3 = d1 / np.cos(theta) mesh = om.TriMesh() # part", "dh = d7 + (d4 - (i + 1) * d3) * np.sin(theta)", "+ 6*d2]) make_block_triangle_mesh(mesh, [x1 + d8, 0, dh + 6*d2], [0, d1, d1*np.tan(theta)],", "import openmesh as om import numpy as np # version3 slope 15 degree", "dh + 6*d2]) make_block_mesh(mesh, [x1 + d8, 0, 0], [d8, 0, 0], [0,", "-d2, 0], [d4, 0, 0], [0, 0, 8*d2]) om.write_mesh(\"coin_sorter_v5_p1.obj\", mesh) # part 2", "[0, d1, 0], [0, 0, dh]) make_block_triangle_mesh(mesh, [x1, 0, dh], [0, d1, d1*np.tan(theta)],", "= x0 + d2 # wall block # total length of sortor d4", "om.TriMesh() d5 = d4 * np.cos(theta) d6 = d2 * np.cos(theta) d7 =", "26.5mm (1.75mm) # Toonie $2 28mm (1.8mm) dd0 = [18.03, 21.2, 23.88, 26.5,", "make_block_mesh(mesh, [x0, dd[i], 0], [dd[i], 0, 0], [0, d1 - dd[i], 0], [0,", "x1 + d1 dh = d7 + (d4 - (i + 1) *", "= d1 / np.cos(theta) mesh = om.TriMesh() # part 1 sorter x0 =", "+ d8, 0, 0], [0, d8, 0], [0, 0, dh + 6*d2]) make_block_mesh(mesh,", "d2 = 2 # slope 30 degree d3 = d1 / np.cos(theta) mesh", "dd0] d1 = 35 d2 = 2 # slope 30 degree d3 =", "d1*np.tan(theta)], [0, d1, 0], [d8, 0, 0]) if i < len(dd) - 1:", "- (i + 1) * d3) * np.sin(theta) make_block_mesh(mesh, [x1, 0, 0], [d8,", "2 coin collector mesh = om.TriMesh() d9 = d1 - d8 d10 =", "d1 / np.cos(theta) mesh = om.TriMesh() # part 1 sorter x0 = 0", "= 35 d2 = 2 # slope 30 degree d3 = d1 /", "[d4, 0, 0], [0, 0, 8*d2]) om.write_mesh(\"coin_sorter_v5_p1.obj\", mesh) # part 2 holder mesh", "0], [x0 + ri, ri, d2], p2=[x0 + ri, 0.0, 0.0], r1=ri, r2=ri,", "1) * d3) * np.sin(theta) make_block_mesh(mesh, [x1, 0, 0], [d8, 0, 0], [0,", "n=256) make_block_mesh(mesh, [x0, dd[i], 0], [dd[i], 0, 0], [0, d1 - dd[i], 0],", "= d7 + (d4 - (i + 1) * d3) * np.sin(theta) make_block_mesh(mesh,", "+ d4 * np.sin(theta)], [0, d1, d1*np.tan(theta)], [0, d1, 0], [d6, 0, 0])", "len(dd) - 1: make_block_mesh(mesh, [x1, 0, 0], [d8 + 2*d2, 0, 0], [0,", "[0, 0, dh + 6*d2]) else: make_block_mesh(mesh, [x1, 0, 0], [d8 + d8,", "d9 - 2*d8 d11 = d7 - 2*d2 make_rod_mesh(mesh, [0, 0, 0], [0,", "d2]) x0 = x0 + d3 - dd[i] make_pipe_square_mesh(mesh, [x0 + ri, ri,", "cents 23.88mm (1.58mm) # Loonie $1 26.5mm (1.75mm) # Toonie $2 28mm (1.8mm)", "x0 = x0 + d2 # wall block # total length of sortor", "= om.TriMesh() d9 = d1 - d8 d10 = d9 - 2*d8 d11", "[0, d1 - dd[i], 0], [0, 0, d2]) x0 = x0 + dd[i]", "[d3 - dd[i], 0, 0], [0, d1, 0], [0, 0, d2]) x0 =", "coin collector mesh = 
om.TriMesh() d9 = d1 - d8 d10 = d9", "[0, d1, 0], [0, 0, d2]) make_block_mesh(mesh, [x1, 0, 0], [d6, 0, 0],", "d11 = d7 - 2*d2 make_rod_mesh(mesh, [0, 0, 0], [0, 0, d8], p2=[1.0,", "create mesh for coin sorter # Nickel 5 cents 21.2mm (1.76mm) # Dime", "35 d2 = 2 # slope 30 degree d3 = d1 / np.cos(theta)", "r=d9 / 2.0, n=256) make_pipe_mesh(mesh, [0,0,d8], [0,0,d11], p2=[1.0, 0.0, 0.0], r1=d10/2.0, r2=d9/2.0, n=256)", "d10 = d9 - 2*d8 d11 = d7 - 2*d2 make_rod_mesh(mesh, [0, 0,", "from vector import * import openmesh as om import numpy as np #", "0.0], r1=ri, r2=ri, n=256) make_block_mesh(mesh, [x0, dd[i], 0], [dd[i], 0, 0], [0, d1", "make_block_mesh(mesh, [0, 0, 0], [d5, 0, 0], [0, d1, 0], [0, 0, d2])", "0, 0], [0, d1, 0], [0, 0, dh]) make_block_triangle_mesh(mesh, [x1, 0, dh], [0,", "[0, d8, 0], [0, 0, dh + 6*d2]) make_block_mesh(mesh, [x1 + d8, 0,", "0, 0], [d8, 0, 0], [0, d1, 0], [0, 0, dh + 6*d2])", "d1 = 35 d2 = 2 # slope 30 degree d3 = d1", "+ 2*d2, 0, 0], [0, d8, 0], [0, 0, dh + 6*d2]) else:", "[d8 + 2*d2, 0, 0], [0, d8, 0], [0, 0, dh + 6*d2])", "d1, 0], [d6, 0, 0]) x1 = x1 + d6 for i in", "0], [d3 - dd[i], 0, 0], [0, d1, 0], [0, 0, d2]) x0", "ri, 0], [x0 + ri, ri, d2], p2=[x0 + ri, 0.0, 0.0], r1=ri,", "d1, 0], [0, 0, dh + 6*d2]) make_block_triangle_mesh(mesh, [x1 + d8, 0, dh", "# Nickel 5 cents 21.2mm (1.76mm) # Dime 10 cents 18.03mm (1.22mm) #", "0, 0], [0, d8, 0], [0, 0, dh + 6*d2]) make_block_mesh(mesh, [x1 +", "coin sorter # Nickel 5 cents 21.2mm (1.76mm) # Dime 10 cents 18.03mm", "1: make_block_mesh(mesh, [x1, 0, 0], [d8 + 2*d2, 0, 0], [0, d8, 0],", "[x1 + d8, 0, dh + 6*d2], [0, d1, d1*np.tan(theta)], [0, d1, 0],", "d in dd0] d1 = 35 d2 = 2 # slope 30 degree", "d1, 0], [0, 0, d2]) make_block_mesh(mesh, [x1, 0, 0], [d6, 0, 0], [0,", "dh + 6*d2]) else: make_block_mesh(mesh, [x1, 0, 0], [d8 + d8, 0, 0],", "+ d2 # wall block # total length of sortor d4 = x0", "* np.cos(theta) d7 = 40 + 2*d2 d8 = 1.0 x1 = 0", "0, 0], [0, d8, 0], [0, 0, d7 + d4 * np.sin(theta) +", "# Quatoer 25 cents 23.88mm (1.58mm) # Loonie $1 26.5mm (1.75mm) # Toonie", "(i + 1) * d3) * np.sin(theta) make_block_mesh(mesh, [x1, 0, 0], [d8, 0,", "# part 2 holder mesh = om.TriMesh() d5 = d4 * np.cos(theta) d6", "- 2*d2 make_rod_mesh(mesh, [0, 0, 0], [0, 0, d8], p2=[1.0, 0.0, 0.0], r=d9", "[x1, 0, 0], [d6 + 2*d2, 0, 0], [0, d8, 0], [0, 0,", "[0, d1, d1*np.tan(theta)], [0, d1, 0], [d8, 0, 0]) if i < len(dd)", "+ ri, 0.0, 0.0], r1=ri, r2=ri, n=256) make_block_mesh(mesh, [x0, dd[i], 0], [dd[i], 0,", "else: make_block_mesh(mesh, [x1, 0, 0], [d8 + d8, 0, 0], [0, d8, 0],", "- d8 d10 = d9 - 2*d8 d11 = d7 - 2*d2 make_rod_mesh(mesh,", "[18.03, 21.2, 23.88, 26.5, 28] d0 = 0.8 dd = [d + d0", "+ 6*d2]) make_block_mesh(mesh, [x1 + d8, 0, 0], [d8, 0, 0], [0, d1,", "r1=ri, r2=ri, n=256) make_block_mesh(mesh, [x0, dd[i], 0], [dd[i], 0, 0], [0, d1 -", "0, d2]) x0 = x0 + d2 # wall block # total length", "[x1, 0, d7 + d4 * np.sin(theta)], [0, d1, d1*np.tan(theta)], [0, d1, 0],", "6*d2]) make_block_mesh(mesh, [x1 + d8, 0, 0], [d8, 0, 0], [0, d1, 0],", "0, 0]) if i < len(dd) - 1: make_block_mesh(mesh, [x1, 0, 0], [d8", "collector mesh = om.TriMesh() d9 = d1 - d8 d10 = d9 -", "Loonie $1 26.5mm (1.75mm) # Toonie $2 28mm (1.8mm) dd0 = [18.03, 21.2,", "x0 + d2 # wall block # total length of sortor d4 =", "d8 d10 = d9 - 2*d8 d11 = d7 - 2*d2 make_rod_mesh(mesh, [0,", "= 1.0 x1 = 0 make_block_mesh(mesh, [0, 0, 0], [d5, 0, 0], [0,", "= x0 + dd[i] make_block_mesh(mesh, [x0, 0, 0], [d2, 0, 0], [0, d1,", "mesh) # part 2 coin collector mesh = 
om.TriMesh() d9 = d1 -", "d8, 0, 0], [0, d8, 0], [0, 0, dh + 6*d2]) make_block_mesh(mesh, [x1", "10 cents 18.03mm (1.22mm) # Quatoer 25 cents 23.88mm (1.58mm) # Loonie $1", "+ d8, 0, 0], [d8, 0, 0], [0, d1, 0], [0, 0, dh", "2*d2, 0, 0], [0, d8, 0], [0, 0, d7 + d4 * np.sin(theta)", "0], [0, 0, dh + 6*d2]) make_block_triangle_mesh(mesh, [x1 + d8, 0, dh +", "6*d2]) make_block_triangle_mesh(mesh, [x1 + d8, 0, dh + 6*d2], [0, d1, d1*np.tan(theta)], [0,", "make_rod_mesh(mesh, [0, 0, 0], [0, 0, d8], p2=[1.0, 0.0, 0.0], r=d9 / 2.0,", "0], [0, d8, 0], [0, 0, d7 + d4 * np.sin(theta) + 6*d2])", "[0, 0, 8*d2]) om.write_mesh(\"coin_sorter_v5_p1.obj\", mesh) # part 2 holder mesh = om.TriMesh() d5", "d1 dh = d7 + (d4 - (i + 1) * d3) *", "[0, d1, 0], [0, 0, d7 + d4 * np.sin(theta)]) make_block_mesh(mesh, [x1, 0,", "[0, 0, d7 + d4 * np.sin(theta) + 6*d2]) make_block_triangle_mesh(mesh, [x1, 0, d7", "30 degree d3 = d1 / np.cos(theta) mesh = om.TriMesh() # part 1", "d4 * np.sin(theta)], [0, d1, d1*np.tan(theta)], [0, d1, 0], [d6, 0, 0]) x1", "Toonie $2 28mm (1.8mm) dd0 = [18.03, 21.2, 23.88, 26.5, 28] d0 =", "0 # screen layer for i in range(len(dd)): ri = dd[i] / 2", "0, dh + 6*d2], [0, d1, d1*np.tan(theta)], [0, d1, 0], [d8, 0, 0])", "x1 + d6 for i in range(len(dd)): x1 = x1 + d1 dh", "= x0 + d3 - dd[i] make_pipe_square_mesh(mesh, [x0 + ri, ri, 0], [x0", "version3 slope 15 degree theta = np.pi / 12.0 # https://en.wikipedia.org/wiki/Coins_of_the_Canadian_dollar # create", "om.write_mesh(\"coin_sorter_v5_p2.obj\", mesh) # part 2 coin collector mesh = om.TriMesh() d9 = d1", "d1, d1*np.tan(theta)], [0, d1, 0], [d8, 0, 0]) om.write_mesh(\"coin_sorter_v5_p2.obj\", mesh) # part 2", "= [18.03, 21.2, 23.88, 26.5, 28] d0 = 0.8 dd = [d +", "in range(len(dd)): x1 = x1 + d1 dh = d7 + (d4 -", "5 cents 21.2mm (1.76mm) # Dime 10 cents 18.03mm (1.22mm) # Quatoer 25", "x1 = x1 + d1 dh = d7 + (d4 - (i +", "d8, 0], [0, 0, d7 + d4 * np.sin(theta) + 6*d2]) make_block_triangle_mesh(mesh, [x1,", "p2=[x0 + ri, 0.0, 0.0], r1=ri, r2=ri, n=256) make_block_mesh(mesh, [x0, dd[i], 0], [dd[i],", "range(len(dd)): ri = dd[i] / 2 make_block_mesh(mesh, [x0, 0, 0], [d3 - dd[i],", "+ d4 * np.sin(theta) + 6*d2]) make_block_triangle_mesh(mesh, [x1, 0, d7 + d4 *", "# total length of sortor d4 = x0 make_block_mesh(mesh, [0, 0, 0], [0,", "[0, 0, d2]) x0 = x0 + d3 - dd[i] make_pipe_square_mesh(mesh, [x0 +", "d7 + d4 * np.sin(theta)]) make_block_mesh(mesh, [x1, 0, 0], [d6 + 2*d2, 0,", "d3 = d1 / np.cos(theta) mesh = om.TriMesh() # part 1 sorter x0", "ri, d2], p2=[x0 + ri, 0.0, 0.0], r1=ri, r2=ri, n=256) make_block_mesh(mesh, [x0, dd[i],", "[x0, 0, 0], [d3 - dd[i], 0, 0], [0, d1, 0], [0, 0,", "ri, ri, 0], [x0 + ri, ri, d2], p2=[x0 + ri, 0.0, 0.0],", "x0 + dd[i] make_block_mesh(mesh, [x0, 0, 0], [d2, 0, 0], [0, d1, 0],", "dd[i] make_block_mesh(mesh, [x0, 0, 0], [d2, 0, 0], [0, d1, 0], [0, 0,", "d1, 0], [d8, 0, 0]) if i < len(dd) - 1: make_block_mesh(mesh, [x1,", "[0, 0, d8], p2=[1.0, 0.0, 0.0], r=d9 / 2.0, n=256) make_pipe_mesh(mesh, [0,0,d8], [0,0,d11],", "part 2 holder mesh = om.TriMesh() d5 = d4 * np.cos(theta) d6 =", "< len(dd) - 1: make_block_mesh(mesh, [x1, 0, 0], [d8 + 2*d2, 0, 0],", "make_block_mesh(mesh, [x0, 0, 0], [d2, 0, 0], [0, d1, 0], [0, 0, d2])", "for i in range(len(dd)): ri = dd[i] / 2 make_block_mesh(mesh, [x0, 0, 0],", "[0, d1, 0], [0, 0, d2]) x0 = x0 + d2 # wall", "dh]) make_block_triangle_mesh(mesh, [x1, 0, dh], [0, d1, d1*np.tan(theta)], [0, d1, 0], [d8, 0,", "6*d2], [0, d1, d1*np.tan(theta)], [0, d1, 0], [d8, 0, 0]) 
om.write_mesh(\"coin_sorter_v5_p2.obj\", mesh) #", "import numpy as np # version3 slope 15 degree theta = np.pi /", "0], [0, d1 - dd[i], 0], [0, 0, d2]) x0 = x0 +", "[0, d1, 0], [d6, 0, 0]) x1 = x1 + d6 for i", "np # version3 slope 15 degree theta = np.pi / 12.0 # https://en.wikipedia.org/wiki/Coins_of_the_Canadian_dollar", "0, 0], [d5, 0, 0], [0, d1, 0], [0, 0, d2]) make_block_mesh(mesh, [x1,", "0], [0, 0, d2]) make_block_mesh(mesh, [x1, 0, 0], [d6, 0, 0], [0, d1,", "d9 = d1 - d8 d10 = d9 - 2*d8 d11 = d7", "$1 26.5mm (1.75mm) # Toonie $2 28mm (1.8mm) dd0 = [18.03, 21.2, 23.88,", "[x1, 0, 0], [d8, 0, 0], [0, d1, 0], [0, 0, dh]) make_block_triangle_mesh(mesh,", "+ d1 dh = d7 + (d4 - (i + 1) * d3)", "d0 for d in dd0] d1 = 35 d2 = 2 # slope", "d1 - d8 d10 = d9 - 2*d8 d11 = d7 - 2*d2", "[0, d1, 0], [0, 0, dh + 6*d2]) make_block_triangle_mesh(mesh, [x1 + d8, 0,", "6*d2]) else: make_block_mesh(mesh, [x1, 0, 0], [d8 + d8, 0, 0], [0, d8,", "degree theta = np.pi / 12.0 # https://en.wikipedia.org/wiki/Coins_of_the_Canadian_dollar # create mesh for coin", "cents 18.03mm (1.22mm) # Quatoer 25 cents 23.88mm (1.58mm) # Loonie $1 26.5mm", "part 2 coin collector mesh = om.TriMesh() d9 = d1 - d8 d10", "[d8, 0, 0]) if i < len(dd) - 1: make_block_mesh(mesh, [x1, 0, 0],", "[0, 0, d2]) x0 = x0 + d2 # wall block # total", "dd = [d + d0 for d in dd0] d1 = 35 d2", "0, 0]) om.write_mesh(\"coin_sorter_v5_p2.obj\", mesh) # part 2 coin collector mesh = om.TriMesh() d9", "slope 15 degree theta = np.pi / 12.0 # https://en.wikipedia.org/wiki/Coins_of_the_Canadian_dollar # create mesh", "range(len(dd)): x1 = x1 + d1 dh = d7 + (d4 - (i", "0]) if i < len(dd) - 1: make_block_mesh(mesh, [x1, 0, 0], [d8 +", "= [d + d0 for d in dd0] d1 = 35 d2 =", "[0, 0, 0], [0, -d2, 0], [d4, 0, 0], [0, 0, 8*d2]) om.write_mesh(\"coin_sorter_v5_p1.obj\",", "vector import * import openmesh as om import numpy as np # version3", "2 make_block_mesh(mesh, [x0, 0, 0], [d3 - dd[i], 0, 0], [0, d1, 0],", "d1, 0], [0, 0, d2]) x0 = x0 + d3 - dd[i] make_pipe_square_mesh(mesh,", "Nickel 5 cents 21.2mm (1.76mm) # Dime 10 cents 18.03mm (1.22mm) # Quatoer", "d4 * np.cos(theta) d6 = d2 * np.cos(theta) d7 = 40 + 2*d2", "0], [0, 0, d2]) x0 = x0 + dd[i] make_block_mesh(mesh, [x0, 0, 0],", "26.5, 28] d0 = 0.8 dd = [d + d0 for d in", "0], [0, -d2, 0], [d4, 0, 0], [0, 0, 8*d2]) om.write_mesh(\"coin_sorter_v5_p1.obj\", mesh) #", "mesh for coin sorter # Nickel 5 cents 21.2mm (1.76mm) # Dime 10", "[d2, 0, 0], [0, d1, 0], [0, 0, d2]) x0 = x0 +", "0.8 dd = [d + d0 for d in dd0] d1 = 35", "0, 0], [0, d1, 0], [0, 0, dh + 6*d2]) make_block_triangle_mesh(mesh, [x1 +", "- 1: make_block_mesh(mesh, [x1, 0, 0], [d8 + 2*d2, 0, 0], [0, d8,", "0], [d8 + 2*d2, 0, 0], [0, d8, 0], [0, 0, dh +", "d2], p2=[x0 + ri, 0.0, 0.0], r1=ri, r2=ri, n=256) make_block_mesh(mesh, [x0, dd[i], 0],", "0], [0, d1, 0], [0, 0, d7 + d4 * np.sin(theta)]) make_block_mesh(mesh, [x1,", "* np.sin(theta) + 6*d2]) make_block_triangle_mesh(mesh, [x1, 0, d7 + d4 * np.sin(theta)], [0,", "[x0 + ri, ri, d2], p2=[x0 + ri, 0.0, 0.0], r1=ri, r2=ri, n=256)", "dh + 6*d2]) make_block_triangle_mesh(mesh, [x1 + d8, 0, dh + 6*d2], [0, d1,", "1.0 x1 = 0 make_block_mesh(mesh, [0, 0, 0], [d5, 0, 0], [0, d1,", "d4 * np.sin(theta) + 6*d2]) make_block_triangle_mesh(mesh, [x1, 0, d7 + d4 * np.sin(theta)],", "[d8, 0, 0]) om.write_mesh(\"coin_sorter_v5_p2.obj\", mesh) # part 2 coin collector mesh = om.TriMesh()", "d7 = 40 + 2*d2 d8 = 1.0 x1 = 0 make_block_mesh(mesh, [0,", "12.0 # 
https://en.wikipedia.org/wiki/Coins_of_the_Canadian_dollar # create mesh for coin sorter # Nickel 5 cents", "= x0 make_block_mesh(mesh, [0, 0, 0], [0, -d2, 0], [d4, 0, 0], [0,", "Quatoer 25 cents 23.88mm (1.58mm) # Loonie $1 26.5mm (1.75mm) # Toonie $2", "make_block_mesh(mesh, [x1, 0, 0], [d8, 0, 0], [0, d1, 0], [0, 0, dh])", "r2=ri, n=256) make_block_mesh(mesh, [x0, dd[i], 0], [dd[i], 0, 0], [0, d1 - dd[i],", "as om import numpy as np # version3 slope 15 degree theta =", "d8, 0], [0, 0, dh + 6*d2]) make_block_mesh(mesh, [x1 + d8, 0, 0],", "= om.TriMesh() # part 1 sorter x0 = 0 # screen layer for", "d3) * np.sin(theta) make_block_mesh(mesh, [x1, 0, 0], [d8, 0, 0], [0, d1, 0],", "(1.76mm) # Dime 10 cents 18.03mm (1.22mm) # Quatoer 25 cents 23.88mm (1.58mm)", "make_block_triangle_mesh(mesh, [x1, 0, d7 + d4 * np.sin(theta)], [0, d1, d1*np.tan(theta)], [0, d1,", "= x1 + d1 dh = d7 + (d4 - (i + 1)", "i < len(dd) - 1: make_block_mesh(mesh, [x1, 0, 0], [d8 + 2*d2, 0,", "Dime 10 cents 18.03mm (1.22mm) # Quatoer 25 cents 23.88mm (1.58mm) # Loonie", "[d8, 0, 0], [0, d1, 0], [0, 0, dh + 6*d2]) make_block_triangle_mesh(mesh, [x1", "d1, 0], [d8, 0, 0]) om.write_mesh(\"coin_sorter_v5_p2.obj\", mesh) # part 2 coin collector mesh", "0 make_block_mesh(mesh, [0, 0, 0], [d5, 0, 0], [0, d1, 0], [0, 0,", "ri = dd[i] / 2 make_block_mesh(mesh, [x0, 0, 0], [d3 - dd[i], 0,", "p2=[1.0, 0.0, 0.0], r=d9 / 2.0, n=256) make_pipe_mesh(mesh, [0,0,d8], [0,0,d11], p2=[1.0, 0.0, 0.0],", "0], [dd[i], 0, 0], [0, d1 - dd[i], 0], [0, 0, d2]) x0", "+ ri, ri, d2], p2=[x0 + ri, 0.0, 0.0], r1=ri, r2=ri, n=256) make_block_mesh(mesh,", "0]) x1 = x1 + d6 for i in range(len(dd)): x1 = x1", "0], [0, d1, 0], [0, 0, d2]) x0 = x0 + d3 -", "d2]) x0 = x0 + d2 # wall block # total length of", "0], [d8, 0, 0], [0, d1, 0], [0, 0, dh + 6*d2]) make_block_triangle_mesh(mesh,", "23.88, 26.5, 28] d0 = 0.8 dd = [d + d0 for d", "0]) om.write_mesh(\"coin_sorter_v5_p2.obj\", mesh) # part 2 coin collector mesh = om.TriMesh() d9 =", "0], [d8, 0, 0]) if i < len(dd) - 1: make_block_mesh(mesh, [x1, 0,", "* np.sin(theta)], [0, d1, d1*np.tan(theta)], [0, d1, 0], [d6, 0, 0]) x1 =", "np.sin(theta)]) make_block_mesh(mesh, [x1, 0, 0], [d6 + 2*d2, 0, 0], [0, d8, 0],", "+ d6 for i in range(len(dd)): x1 = x1 + d1 dh =", "[x0, 0, 0], [d2, 0, 0], [0, d1, 0], [0, 0, d2]) x0", "- dd[i] make_pipe_square_mesh(mesh, [x0 + ri, ri, 0], [x0 + ri, ri, d2],", "d8], p2=[1.0, 0.0, 0.0], r=d9 / 2.0, n=256) make_pipe_mesh(mesh, [0,0,d8], [0,0,d11], p2=[1.0, 0.0,", "d4 = x0 make_block_mesh(mesh, [0, 0, 0], [0, -d2, 0], [d4, 0, 0],", "[0, -d2, 0], [d4, 0, 0], [0, 0, 8*d2]) om.write_mesh(\"coin_sorter_v5_p1.obj\", mesh) # part", "d8, 0, 0], [d8, 0, 0], [0, d1, 0], [0, 0, dh +", "[d8 + d8, 0, 0], [0, d8, 0], [0, 0, dh + 6*d2])", "part 1 sorter x0 = 0 # screen layer for i in range(len(dd)):", "40 + 2*d2 d8 = 1.0 x1 = 0 make_block_mesh(mesh, [0, 0, 0],", "0.0, 0.0], r1=ri, r2=ri, n=256) make_block_mesh(mesh, [x0, dd[i], 0], [dd[i], 0, 0], [0,", "0, dh]) make_block_triangle_mesh(mesh, [x1, 0, dh], [0, d1, d1*np.tan(theta)], [0, d1, 0], [d8,", "0], [0, d8, 0], [0, 0, dh + 6*d2]) else: make_block_mesh(mesh, [x1, 0,", "cents 21.2mm (1.76mm) # Dime 10 cents 18.03mm (1.22mm) # Quatoer 25 cents", "0, d8], p2=[1.0, 0.0, 0.0], r=d9 / 2.0, n=256) make_pipe_mesh(mesh, [0,0,d8], [0,0,d11], p2=[1.0,", "0, d7 + d4 * np.sin(theta) + 6*d2]) make_block_triangle_mesh(mesh, [x1, 0, d7 +", "(1.8mm) dd0 = [18.03, 21.2, 23.88, 26.5, 28] d0 = 0.8 dd =", "0], [0, 0, d2]) x0 = x0 + d3 - dd[i] 
make_pipe_square_mesh(mesh, [x0", "d1*np.tan(theta)], [0, d1, 0], [d8, 0, 0]) om.write_mesh(\"coin_sorter_v5_p2.obj\", mesh) # part 2 coin", "np.sin(theta) make_block_mesh(mesh, [x1, 0, 0], [d8, 0, 0], [0, d1, 0], [0, 0,", "ri, 0.0, 0.0], r1=ri, r2=ri, n=256) make_block_mesh(mesh, [x0, dd[i], 0], [dd[i], 0, 0],", "[0, 0, d2]) make_block_mesh(mesh, [x1, 0, 0], [d6, 0, 0], [0, d1, 0],", "d1, d1*np.tan(theta)], [0, d1, 0], [d6, 0, 0]) x1 = x1 + d6", "0, dh + 6*d2]) else: make_block_mesh(mesh, [x1, 0, 0], [d8 + d8, 0,", "om import numpy as np # version3 slope 15 degree theta = np.pi", "[dd[i], 0, 0], [0, d1 - dd[i], 0], [0, 0, d2]) x0 =", "+ dd[i] make_block_mesh(mesh, [x0, 0, 0], [d2, 0, 0], [0, d1, 0], [0,", "make_block_mesh(mesh, [x1, 0, 0], [d6, 0, 0], [0, d1, 0], [0, 0, d7", "mesh = om.TriMesh() d9 = d1 - d8 d10 = d9 - 2*d8", "layer for i in range(len(dd)): ri = dd[i] / 2 make_block_mesh(mesh, [x0, 0,", "# https://en.wikipedia.org/wiki/Coins_of_the_Canadian_dollar # create mesh for coin sorter # Nickel 5 cents 21.2mm", "0], [0, d1, 0], [0, 0, dh]) make_block_triangle_mesh(mesh, [x1, 0, dh], [0, d1,", "- 2*d8 d11 = d7 - 2*d2 make_rod_mesh(mesh, [0, 0, 0], [0, 0,", "2 # slope 30 degree d3 = d1 / np.cos(theta) mesh = om.TriMesh()", "0, 0], [0, -d2, 0], [d4, 0, 0], [0, 0, 8*d2]) om.write_mesh(\"coin_sorter_v5_p1.obj\", mesh)", "d1 - dd[i], 0], [0, 0, d2]) x0 = x0 + dd[i] make_block_mesh(mesh,", "= d4 * np.cos(theta) d6 = d2 * np.cos(theta) d7 = 40 +", "of sortor d4 = x0 make_block_mesh(mesh, [0, 0, 0], [0, -d2, 0], [d4,", "= d2 * np.cos(theta) d7 = 40 + 2*d2 d8 = 1.0 x1", "d2]) make_block_mesh(mesh, [x1, 0, 0], [d6, 0, 0], [0, d1, 0], [0, 0,", "screen layer for i in range(len(dd)): ri = dd[i] / 2 make_block_mesh(mesh, [x0,", "0], [d5, 0, 0], [0, d1, 0], [0, 0, d2]) make_block_mesh(mesh, [x1, 0,", "# create mesh for coin sorter # Nickel 5 cents 21.2mm (1.76mm) #", "28mm (1.8mm) dd0 = [18.03, 21.2, 23.88, 26.5, 28] d0 = 0.8 dd", "0], [d6, 0, 0], [0, d1, 0], [0, 0, d7 + d4 *", "[0, 0, d2]) x0 = x0 + dd[i] make_block_mesh(mesh, [x0, 0, 0], [d2,", "d6 for i in range(len(dd)): x1 = x1 + d1 dh = d7", "2*d8 d11 = d7 - 2*d2 make_rod_mesh(mesh, [0, 0, 0], [0, 0, d8],", "om.TriMesh() d9 = d1 - d8 d10 = d9 - 2*d8 d11 =", "d7 + d4 * np.sin(theta) + 6*d2]) make_block_triangle_mesh(mesh, [x1, 0, d7 + d4", "[0, 0, dh]) make_block_triangle_mesh(mesh, [x1, 0, dh], [0, d1, d1*np.tan(theta)], [0, d1, 0],", "0, d7 + d4 * np.sin(theta)], [0, d1, d1*np.tan(theta)], [0, d1, 0], [d6,", "wall block # total length of sortor d4 = x0 make_block_mesh(mesh, [0, 0,", "make_block_mesh(mesh, [x1, 0, 0], [d6 + 2*d2, 0, 0], [0, d8, 0], [0,", "np.sin(theta)], [0, d1, d1*np.tan(theta)], [0, d1, 0], [d6, 0, 0]) x1 = x1", "0, 0], [0, d1, 0], [0, 0, d2]) x0 = x0 + d2", "d8 = 1.0 x1 = 0 make_block_mesh(mesh, [0, 0, 0], [d5, 0, 0],", "[x1, 0, 0], [d8 + d8, 0, 0], [0, d8, 0], [0, 0,", "1 sorter x0 = 0 # screen layer for i in range(len(dd)): ri", "25 cents 23.88mm (1.58mm) # Loonie $1 26.5mm (1.75mm) # Toonie $2 28mm", "+ 6*d2]) else: make_block_mesh(mesh, [x1, 0, 0], [d8 + d8, 0, 0], [0,", "$2 28mm (1.8mm) dd0 = [18.03, 21.2, 23.88, 26.5, 28] d0 = 0.8", "[0, d1, d1*np.tan(theta)], [0, d1, 0], [d8, 0, 0]) om.write_mesh(\"coin_sorter_v5_p2.obj\", mesh) # part", "[0, 0, 0], [0, 0, d8], p2=[1.0, 0.0, 0.0], r=d9 / 2.0, n=256)", "(1.22mm) # Quatoer 25 cents 23.88mm (1.58mm) # Loonie $1 26.5mm (1.75mm) #", "= om.TriMesh() d5 = d4 * np.cos(theta) d6 = d2 * np.cos(theta) d7", "0, 0], [0, 0, 8*d2]) om.write_mesh(\"coin_sorter_v5_p1.obj\", mesh) 
# part 2 holder mesh =", "total length of sortor d4 = x0 make_block_mesh(mesh, [0, 0, 0], [0, -d2,", "0], [d6 + 2*d2, 0, 0], [0, d8, 0], [0, 0, d7 +", "0, 0], [0, d1, 0], [0, 0, d2]) make_block_mesh(mesh, [x1, 0, 0], [d6,", "<gh_stars>0 from vector import * import openmesh as om import numpy as np", "i in range(len(dd)): ri = dd[i] / 2 make_block_mesh(mesh, [x0, 0, 0], [d3", "np.cos(theta) mesh = om.TriMesh() # part 1 sorter x0 = 0 # screen", "holder mesh = om.TriMesh() d5 = d4 * np.cos(theta) d6 = d2 *", "np.cos(theta) d6 = d2 * np.cos(theta) d7 = 40 + 2*d2 d8 =", "# wall block # total length of sortor d4 = x0 make_block_mesh(mesh, [0,", "[x1, 0, 0], [d8 + 2*d2, 0, 0], [0, d8, 0], [0, 0,", "mesh = om.TriMesh() d5 = d4 * np.cos(theta) d6 = d2 * np.cos(theta)", "x1 = x1 + d6 for i in range(len(dd)): x1 = x1 +", "+ 2*d2, 0, 0], [0, d8, 0], [0, 0, d7 + d4 *", "# Loonie $1 26.5mm (1.75mm) # Toonie $2 28mm (1.8mm) dd0 = [18.03,", "make_block_triangle_mesh(mesh, [x1 + d8, 0, dh + 6*d2], [0, d1, d1*np.tan(theta)], [0, d1,", "x0 = 0 # screen layer for i in range(len(dd)): ri = dd[i]", "d8, 0, dh + 6*d2], [0, d1, d1*np.tan(theta)], [0, d1, 0], [d8, 0,", "# part 1 sorter x0 = 0 # screen layer for i in", "degree d3 = d1 / np.cos(theta) mesh = om.TriMesh() # part 1 sorter", "8*d2]) om.write_mesh(\"coin_sorter_v5_p1.obj\", mesh) # part 2 holder mesh = om.TriMesh() d5 = d4", "[d + d0 for d in dd0] d1 = 35 d2 = 2", "[x1, 0, dh], [0, d1, d1*np.tan(theta)], [0, d1, 0], [d8, 0, 0]) if", "+ 1) * d3) * np.sin(theta) make_block_mesh(mesh, [x1, 0, 0], [d8, 0, 0],", "d2]) x0 = x0 + dd[i] make_block_mesh(mesh, [x0, 0, 0], [d2, 0, 0],", "[0, d1, d1*np.tan(theta)], [0, d1, 0], [d6, 0, 0]) x1 = x1 +", "21.2mm (1.76mm) # Dime 10 cents 18.03mm (1.22mm) # Quatoer 25 cents 23.88mm", "mesh = om.TriMesh() # part 1 sorter x0 = 0 # screen layer", "x0 = x0 + d3 - dd[i] make_pipe_square_mesh(mesh, [x0 + ri, ri, 0],", "d2 # wall block # total length of sortor d4 = x0 make_block_mesh(mesh,", "0, 0], [0, d1, 0], [0, 0, d2]) x0 = x0 + d3", "0], [d8, 0, 0]) om.write_mesh(\"coin_sorter_v5_p2.obj\", mesh) # part 2 coin collector mesh =", "2*d2, 0, 0], [0, d8, 0], [0, 0, dh + 6*d2]) else: make_block_mesh(mesh,", "0, dh], [0, d1, d1*np.tan(theta)], [0, d1, 0], [d8, 0, 0]) if i", "0, 0], [d6 + 2*d2, 0, 0], [0, d8, 0], [0, 0, d7", "0], [0, 0, dh]) make_block_triangle_mesh(mesh, [x1, 0, dh], [0, d1, d1*np.tan(theta)], [0, d1,", "21.2, 23.88, 26.5, 28] d0 = 0.8 dd = [d + d0 for", "length of sortor d4 = x0 make_block_mesh(mesh, [0, 0, 0], [0, -d2, 0],", "d1*np.tan(theta)], [0, d1, 0], [d6, 0, 0]) x1 = x1 + d6 for", "= 0.8 dd = [d + d0 for d in dd0] d1 =", "d7 + d4 * np.sin(theta)], [0, d1, d1*np.tan(theta)], [0, d1, 0], [d6, 0,", "openmesh as om import numpy as np # version3 slope 15 degree theta", "dd[i], 0, 0], [0, d1, 0], [0, 0, d2]) x0 = x0 +", "* np.cos(theta) d6 = d2 * np.cos(theta) d7 = 40 + 2*d2 d8", "[0, d8, 0], [0, 0, dh + 6*d2]) else: make_block_mesh(mesh, [x1, 0, 0],", "dh], [0, d1, d1*np.tan(theta)], [0, d1, 0], [d8, 0, 0]) if i <", "[0, 0, dh + 6*d2]) make_block_triangle_mesh(mesh, [x1 + d8, 0, dh + 6*d2],", "i in range(len(dd)): x1 = x1 + d1 dh = d7 + (d4", "0, 0], [0, d1 - dd[i], 0], [0, 0, d2]) x0 = x0", "0], [0, 0, dh + 6*d2]) make_block_mesh(mesh, [x1 + d8, 0, 0], [d8,", "0, 0], [d8, 0, 0], [0, d1, 0], [0, 0, dh]) make_block_triangle_mesh(mesh, [x1,", "0], [0, 0, d7 + d4 * np.sin(theta)]) make_block_mesh(mesh, [x1, 0, 0], [d6", "if i < len(dd) - 1: make_block_mesh(mesh, [x1, 0, 0], [d8 + 2*d2,", "[x0, dd[i], 0], [dd[i], 
0, 0], [0, d1 - dd[i], 0], [0, 0,", "[d6, 0, 0]) x1 = x1 + d6 for i in range(len(dd)): x1", "# version3 slope 15 degree theta = np.pi / 12.0 # https://en.wikipedia.org/wiki/Coins_of_the_Canadian_dollar #", "2.0, n=256) make_pipe_mesh(mesh, [0,0,d8], [0,0,d11], p2=[1.0, 0.0, 0.0], r1=d10/2.0, r2=d9/2.0, n=256) om.write_mesh(\"coin_sorter_v5_p3.obj\", mesh)", "0], [0, d1, 0], [0, 0, d2]) x0 = x0 + d2 #", "/ 12.0 # https://en.wikipedia.org/wiki/Coins_of_the_Canadian_dollar # create mesh for coin sorter # Nickel 5", "* import openmesh as om import numpy as np # version3 slope 15", "23.88mm (1.58mm) # Loonie $1 26.5mm (1.75mm) # Toonie $2 28mm (1.8mm) dd0", "0], [d2, 0, 0], [0, d1, 0], [0, 0, d2]) x0 = x0", "- dd[i], 0], [0, 0, d2]) x0 = x0 + dd[i] make_block_mesh(mesh, [x0,", "[0, d1, 0], [d8, 0, 0]) if i < len(dd) - 1: make_block_mesh(mesh,", "0, 0], [d8 + d8, 0, 0], [0, d8, 0], [0, 0, dh", "(d4 - (i + 1) * d3) * np.sin(theta) make_block_mesh(mesh, [x1, 0, 0],", "d1, 0], [0, 0, dh]) make_block_triangle_mesh(mesh, [x1, 0, dh], [0, d1, d1*np.tan(theta)], [0,", "+ ri, ri, 0], [x0 + ri, ri, d2], p2=[x0 + ri, 0.0,", "dd[i], 0], [0, 0, d2]) x0 = x0 + dd[i] make_block_mesh(mesh, [x0, 0,", "# Toonie $2 28mm (1.8mm) dd0 = [18.03, 21.2, 23.88, 26.5, 28] d0", "+ 6*d2]) make_block_triangle_mesh(mesh, [x1, 0, d7 + d4 * np.sin(theta)], [0, d1, d1*np.tan(theta)],", "dd0 = [18.03, 21.2, 23.88, 26.5, 28] d0 = 0.8 dd = [d", "(1.58mm) # Loonie $1 26.5mm (1.75mm) # Toonie $2 28mm (1.8mm) dd0 =", "make_block_mesh(mesh, [0, 0, 0], [0, -d2, 0], [d4, 0, 0], [0, 0, 8*d2])", "0], [0, 0, d7 + d4 * np.sin(theta) + 6*d2]) make_block_triangle_mesh(mesh, [x1, 0,", "+ 6*d2], [0, d1, d1*np.tan(theta)], [0, d1, 0], [d8, 0, 0]) om.write_mesh(\"coin_sorter_v5_p2.obj\", mesh)", "dh + 6*d2], [0, d1, d1*np.tan(theta)], [0, d1, 0], [d8, 0, 0]) om.write_mesh(\"coin_sorter_v5_p2.obj\",", "18.03mm (1.22mm) # Quatoer 25 cents 23.88mm (1.58mm) # Loonie $1 26.5mm (1.75mm)", "om.write_mesh(\"coin_sorter_v5_p1.obj\", mesh) # part 2 holder mesh = om.TriMesh() d5 = d4 *", "0, 0], [d3 - dd[i], 0, 0], [0, d1, 0], [0, 0, d2])", "d7 + (d4 - (i + 1) * d3) * np.sin(theta) make_block_mesh(mesh, [x1,", "[0, d8, 0], [0, 0, d7 + d4 * np.sin(theta) + 6*d2]) make_block_triangle_mesh(mesh,", "0, 0], [d2, 0, 0], [0, d1, 0], [0, 0, d2]) x0 =", "0, d2]) x0 = x0 + dd[i] make_block_mesh(mesh, [x0, 0, 0], [d2, 0,", "+ d0 for d in dd0] d1 = 35 d2 = 2 #", "[x1, 0, 0], [d6, 0, 0], [0, d1, 0], [0, 0, d7 +", "+ (d4 - (i + 1) * d3) * np.sin(theta) make_block_mesh(mesh, [x1, 0,", "2*d2 d8 = 1.0 x1 = 0 make_block_mesh(mesh, [0, 0, 0], [d5, 0,", "+ d4 * np.sin(theta)]) make_block_mesh(mesh, [x1, 0, 0], [d6 + 2*d2, 0, 0],", "0, 0], [d6, 0, 0], [0, d1, 0], [0, 0, d7 + d4", "0.0], r=d9 / 2.0, n=256) make_pipe_mesh(mesh, [0,0,d8], [0,0,d11], p2=[1.0, 0.0, 0.0], r1=d10/2.0, r2=d9/2.0,", "d2 * np.cos(theta) d7 = 40 + 2*d2 d8 = 1.0 x1 =", "= x1 + d6 for i in range(len(dd)): x1 = x1 + d1", "/ 2 make_block_mesh(mesh, [x0, 0, 0], [d3 - dd[i], 0, 0], [0, d1,", "+ 2*d2 d8 = 1.0 x1 = 0 make_block_mesh(mesh, [0, 0, 0], [d5,", "# screen layer for i in range(len(dd)): ri = dd[i] / 2 make_block_mesh(mesh,", "/ np.cos(theta) mesh = om.TriMesh() # part 1 sorter x0 = 0 #", "28] d0 = 0.8 dd = [d + d0 for d in dd0]", "make_pipe_square_mesh(mesh, [x0 + ri, ri, 0], [x0 + ri, ri, d2], p2=[x0 +", "0], [0, 0, dh + 6*d2]) else: make_block_mesh(mesh, [x1, 0, 0], [d8 +", "* np.sin(theta) make_block_mesh(mesh, [x1, 0, 0], [d8, 0, 0], [0, d1, 0], [0,", "0.0, 0.0], r=d9 / 2.0, n=256) 
make_pipe_mesh(mesh, [0,0,d8], [0,0,d11], p2=[1.0, 0.0, 0.0], r1=d10/2.0,", "[d8, 0, 0], [0, d1, 0], [0, 0, dh]) make_block_triangle_mesh(mesh, [x1, 0, dh],", "d7 - 2*d2 make_rod_mesh(mesh, [0, 0, 0], [0, 0, d8], p2=[1.0, 0.0, 0.0],", "import * import openmesh as om import numpy as np # version3 slope", "+ d3 - dd[i] make_pipe_square_mesh(mesh, [x0 + ri, ri, 0], [x0 + ri,", "= 0 # screen layer for i in range(len(dd)): ri = dd[i] /", "0], [0, 0, 8*d2]) om.write_mesh(\"coin_sorter_v5_p1.obj\", mesh) # part 2 holder mesh = om.TriMesh()", "6*d2]) make_block_triangle_mesh(mesh, [x1, 0, d7 + d4 * np.sin(theta)], [0, d1, d1*np.tan(theta)], [0,", "= dd[i] / 2 make_block_mesh(mesh, [x0, 0, 0], [d3 - dd[i], 0, 0],", "= d7 - 2*d2 make_rod_mesh(mesh, [0, 0, 0], [0, 0, d8], p2=[1.0, 0.0,", "d8, 0], [0, 0, dh + 6*d2]) else: make_block_mesh(mesh, [x1, 0, 0], [d8", "0], [0, d1, 0], [0, 0, d2]) make_block_mesh(mesh, [x1, 0, 0], [d6, 0,", "x0 = x0 + dd[i] make_block_mesh(mesh, [x0, 0, 0], [d2, 0, 0], [0,", "[d5, 0, 0], [0, d1, 0], [0, 0, d2]) make_block_mesh(mesh, [x1, 0, 0],", "* d3) * np.sin(theta) make_block_mesh(mesh, [x1, 0, 0], [d8, 0, 0], [0, d1,", "0, 0], [d8 + 2*d2, 0, 0], [0, d8, 0], [0, 0, dh", "[x1 + d8, 0, 0], [d8, 0, 0], [0, d1, 0], [0, 0,", "block # total length of sortor d4 = x0 make_block_mesh(mesh, [0, 0, 0],", "# part 2 coin collector mesh = om.TriMesh() d9 = d1 - d8", "[0, 0, d7 + d4 * np.sin(theta)]) make_block_mesh(mesh, [x1, 0, 0], [d6 +", "for d in dd0] d1 = 35 d2 = 2 # slope 30", "make_block_mesh(mesh, [x1, 0, 0], [d8 + 2*d2, 0, 0], [0, d8, 0], [0,", "15 degree theta = np.pi / 12.0 # https://en.wikipedia.org/wiki/Coins_of_the_Canadian_dollar # create mesh for", "0, dh + 6*d2]) make_block_triangle_mesh(mesh, [x1 + d8, 0, dh + 6*d2], [0,", "d3 - dd[i] make_pipe_square_mesh(mesh, [x0 + ri, ri, 0], [x0 + ri, ri,", "[d6 + 2*d2, 0, 0], [0, d8, 0], [0, 0, d7 + d4", "- dd[i], 0, 0], [0, d1, 0], [0, 0, d2]) x0 = x0", "# slope 30 degree d3 = d1 / np.cos(theta) mesh = om.TriMesh() #", "0, 0], [0, d1, 0], [0, 0, d7 + d4 * np.sin(theta)]) make_block_mesh(mesh,", "theta = np.pi / 12.0 # https://en.wikipedia.org/wiki/Coins_of_the_Canadian_dollar # create mesh for coin sorter", "d4 * np.sin(theta)]) make_block_mesh(mesh, [x1, 0, 0], [d6 + 2*d2, 0, 0], [0,", "0, dh + 6*d2]) make_block_mesh(mesh, [x1 + d8, 0, 0], [d8, 0, 0],", "# Dime 10 cents 18.03mm (1.22mm) # Quatoer 25 cents 23.88mm (1.58mm) #", "0], [d6, 0, 0]) x1 = x1 + d6 for i in range(len(dd)):" ]
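The make_block_mesh, make_block_triangle_mesh, make_pipe_square_mesh, make_rod_mesh and make_pipe_mesh constructors come from the local vector module, which is not part of this dump. To make the convention they appear to follow concrete (an origin plus three edge vectors, with triangles appended to an existing TriMesh), here is a minimal sketch of what such a block helper could look like; the body is an illustrative assumption, not the module's actual code.

import openmesh as om
import numpy as np

def make_block_mesh(mesh, origin, u, v, w):
    # Hypothetical stand-in for the helper imported from `vector`:
    # triangulate the parallelepiped origin + a*u + b*v + c*w, a, b, c in {0, 1}.
    o, u, v, w = (np.asarray(p, dtype=float) for p in (origin, u, v, w))
    corner = [mesh.add_vertex(o + a * u + b * v + c * w)
              for a in (0, 1) for b in (0, 1) for c in (0, 1)]
    # corner index is a*4 + b*2 + c; each face is a quad split into two
    # triangles, wound outward for a right-handed (u, v, w)
    quads = [(0, 1, 3, 2), (4, 6, 7, 5),   # a = 0 and a = 1 faces
             (0, 4, 5, 1), (2, 3, 7, 6),   # b = 0 and b = 1 faces
             (0, 2, 6, 4), (1, 5, 7, 3)]   # c = 0 and c = 1 faces
    for i, j, k, l in quads:
        mesh.add_face(corner[i], corner[j], corner[k])
        mesh.add_face(corner[i], corner[k], corner[l])

mesh = om.TriMesh()
make_block_mesh(mesh, [0, 0, 0], [10, 0, 0], [0, 5, 0], [0, 0, 2])
om.write_mesh("block.obj", mesh)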
[ "consisting of 50 univariate time series of length 150. The Markov transition field", "# Load the GunPoint dataset X, _, _, _ = load_gunpoint(return_X_y=True) # Get", "dataset X, _, _, _ = load_gunpoint(return_X_y=True) # Get the recurrence plots for", "probabilities for a discretized time series. Different strategies can be used to bin", "transition field is an image obtained from a time series, representing a field", "grid[0].get_xaxis().set_ticks([]) plt.colorbar(im, cax=grid.cbar_axes[0]) ax.cax.toggle_label(True) fig.suptitle(\"Markov transition fields for the 50 time series in", "50 Gramian angular fields fig = plt.figure(figsize=(10, 5)) grid = ImageGrid(fig, 111, nrows_ncols=(5,", "transition fields for the 50 time series in the \" \"'GunPoint' dataset\", y=0.92)", "of the `GunPoint dataset <http://timeseriesclassification.com/description.php?Dataset=GunPoint>`_, consisting of 50 univariate time series of length", "strategies can be used to bin time series. It is implemented as :class:`pyts.image.MarkovTransitionField`.", "transition field of each time series is independently computed and the 50 Markov", "\"\"\" ==================================== Data set of Markov transition fields ==================================== A Markov transition field", "of 50 univariate time series of length 150. The Markov transition field of", "==================================== Data set of Markov transition fields ==================================== A Markov transition field is", "field of transition probabilities for a discretized time series. Different strategies can be", "transition fields ==================================== A Markov transition field is an image obtained from a", "ax in enumerate(grid): im = ax.imshow(X_mtf[i], cmap='rainbow', origin='lower', vmin=0., vmax=1.) grid[0].get_yaxis().set_ticks([]) grid[0].get_xaxis().set_ticks([]) plt.colorbar(im,", "= ax.imshow(X_mtf[i], cmap='rainbow', origin='lower', vmin=0., vmax=1.) grid[0].get_yaxis().set_ticks([]) grid[0].get_xaxis().set_ticks([]) plt.colorbar(im, cax=grid.cbar_axes[0]) ax.cax.toggle_label(True) fig.suptitle(\"Markov transition", "X, _, _, _ = load_gunpoint(return_X_y=True) # Get the recurrence plots for all", "from a time series, representing a field of transition probabilities for a discretized", "\"\"\" # noqa:E501 # Author: <NAME> <<EMAIL>> # License: BSD-3-Clause import matplotlib.pyplot as", "can be used to bin time series. It is implemented as :class:`pyts.image.MarkovTransitionField`. In", "time series is independently computed and the 50 Markov transition fields are plotted.", "Markov transition field of each time series is independently computed and the 50", "import ImageGrid from pyts.image import MarkovTransitionField from pyts.datasets import load_gunpoint # Load the", "50 univariate time series of length 150. The Markov transition field of each", "= mtf.fit_transform(X) # Plot the 50 Gramian angular fields fig = plt.figure(figsize=(10, 5))", "Get the recurrence plots for all the time series mtf = MarkovTransitionField(n_bins=8) X_mtf", "of Markov transition fields ==================================== A Markov transition field is an image obtained", "this example, we consider the training samples of the `GunPoint dataset <http://timeseriesclassification.com/description.php?Dataset=GunPoint>`_, consisting", "vmin=0., vmax=1.) 
grid[0].get_yaxis().set_ticks([]) grid[0].get_xaxis().set_ticks([]) plt.colorbar(im, cax=grid.cbar_axes[0]) ax.cax.toggle_label(True) fig.suptitle(\"Markov transition fields for the 50", "angular fields fig = plt.figure(figsize=(10, 5)) grid = ImageGrid(fig, 111, nrows_ncols=(5, 10), axes_pad=0.1,", "load_gunpoint(return_X_y=True) # Get the recurrence plots for all the time series mtf =", "series mtf = MarkovTransitionField(n_bins=8) X_mtf = mtf.fit_transform(X) # Plot the 50 Gramian angular", "X_mtf = mtf.fit_transform(X) # Plot the 50 Gramian angular fields fig = plt.figure(figsize=(10,", "pyts.datasets import load_gunpoint # Load the GunPoint dataset X, _, _, _ =", "share_all=True, cbar_mode='single') for i, ax in enumerate(grid): im = ax.imshow(X_mtf[i], cmap='rainbow', origin='lower', vmin=0.,", "It is implemented as :class:`pyts.image.MarkovTransitionField`. In this example, we consider the training samples", "consider the training samples of the `GunPoint dataset <http://timeseriesclassification.com/description.php?Dataset=GunPoint>`_, consisting of 50 univariate", "time series mtf = MarkovTransitionField(n_bins=8) X_mtf = mtf.fit_transform(X) # Plot the 50 Gramian", "of transition probabilities for a discretized time series. Different strategies can be used", "field is an image obtained from a time series, representing a field of", "The Markov transition field of each time series is independently computed and the", "is an image obtained from a time series, representing a field of transition", "<http://timeseriesclassification.com/description.php?Dataset=GunPoint>`_, consisting of 50 univariate time series of length 150. The Markov transition", "i, ax in enumerate(grid): im = ax.imshow(X_mtf[i], cmap='rainbow', origin='lower', vmin=0., vmax=1.) grid[0].get_yaxis().set_ticks([]) grid[0].get_xaxis().set_ticks([])", "used to bin time series. It is implemented as :class:`pyts.image.MarkovTransitionField`. In this example,", "mtf = MarkovTransitionField(n_bins=8) X_mtf = mtf.fit_transform(X) # Plot the 50 Gramian angular fields", "an image obtained from a time series, representing a field of transition probabilities", "Author: <NAME> <<EMAIL>> # License: BSD-3-Clause import matplotlib.pyplot as plt from mpl_toolkits.axes_grid1 import", "image obtained from a time series, representing a field of transition probabilities for", "fields ==================================== A Markov transition field is an image obtained from a time", "time series. Different strategies can be used to bin time series. It is", "is implemented as :class:`pyts.image.MarkovTransitionField`. In this example, we consider the training samples of", "dataset <http://timeseriesclassification.com/description.php?Dataset=GunPoint>`_, consisting of 50 univariate time series of length 150. The Markov", "<<EMAIL>> # License: BSD-3-Clause import matplotlib.pyplot as plt from mpl_toolkits.axes_grid1 import ImageGrid from", "cbar_mode='single') for i, ax in enumerate(grid): im = ax.imshow(X_mtf[i], cmap='rainbow', origin='lower', vmin=0., vmax=1.)", "axes_pad=0.1, share_all=True, cbar_mode='single') for i, ax in enumerate(grid): im = ax.imshow(X_mtf[i], cmap='rainbow', origin='lower',", "in enumerate(grid): im = ax.imshow(X_mtf[i], cmap='rainbow', origin='lower', vmin=0., vmax=1.) 
grid[0].get_yaxis().set_ticks([]) grid[0].get_xaxis().set_ticks([]) plt.colorbar(im, cax=grid.cbar_axes[0])", "for all the time series mtf = MarkovTransitionField(n_bins=8) X_mtf = mtf.fit_transform(X) # Plot", "implemented as :class:`pyts.image.MarkovTransitionField`. In this example, we consider the training samples of the", "= plt.figure(figsize=(10, 5)) grid = ImageGrid(fig, 111, nrows_ncols=(5, 10), axes_pad=0.1, share_all=True, cbar_mode='single') for", "the training samples of the `GunPoint dataset <http://timeseriesclassification.com/description.php?Dataset=GunPoint>`_, consisting of 50 univariate time", "series, representing a field of transition probabilities for a discretized time series. Different", "enumerate(grid): im = ax.imshow(X_mtf[i], cmap='rainbow', origin='lower', vmin=0., vmax=1.) grid[0].get_yaxis().set_ticks([]) grid[0].get_xaxis().set_ticks([]) plt.colorbar(im, cax=grid.cbar_axes[0]) ax.cax.toggle_label(True)", "# Plot the 50 Gramian angular fields fig = plt.figure(figsize=(10, 5)) grid =", "the recurrence plots for all the time series mtf = MarkovTransitionField(n_bins=8) X_mtf =", "from pyts.datasets import load_gunpoint # Load the GunPoint dataset X, _, _, _", "to bin time series. It is implemented as :class:`pyts.image.MarkovTransitionField`. In this example, we", "im = ax.imshow(X_mtf[i], cmap='rainbow', origin='lower', vmin=0., vmax=1.) grid[0].get_yaxis().set_ticks([]) grid[0].get_xaxis().set_ticks([]) plt.colorbar(im, cax=grid.cbar_axes[0]) ax.cax.toggle_label(True) fig.suptitle(\"Markov", "be used to bin time series. It is implemented as :class:`pyts.image.MarkovTransitionField`. In this", "from pyts.image import MarkovTransitionField from pyts.datasets import load_gunpoint # Load the GunPoint dataset", "each time series is independently computed and the 50 Markov transition fields are", "discretized time series. Different strategies can be used to bin time series. It", "grid[0].get_yaxis().set_ticks([]) grid[0].get_xaxis().set_ticks([]) plt.colorbar(im, cax=grid.cbar_axes[0]) ax.cax.toggle_label(True) fig.suptitle(\"Markov transition fields for the 50 time series", "the 50 Gramian angular fields fig = plt.figure(figsize=(10, 5)) grid = ImageGrid(fig, 111,", "of each time series is independently computed and the 50 Markov transition fields", "pyts.image import MarkovTransitionField from pyts.datasets import load_gunpoint # Load the GunPoint dataset X,", ":class:`pyts.image.MarkovTransitionField`. In this example, we consider the training samples of the `GunPoint dataset", "series. It is implemented as :class:`pyts.image.MarkovTransitionField`. In this example, we consider the training", "111, nrows_ncols=(5, 10), axes_pad=0.1, share_all=True, cbar_mode='single') for i, ax in enumerate(grid): im =", "`GunPoint dataset <http://timeseriesclassification.com/description.php?Dataset=GunPoint>`_, consisting of 50 univariate time series of length 150. The", "transition probabilities for a discretized time series. Different strategies can be used to", "ax.imshow(X_mtf[i], cmap='rainbow', origin='lower', vmin=0., vmax=1.) grid[0].get_yaxis().set_ticks([]) grid[0].get_xaxis().set_ticks([]) plt.colorbar(im, cax=grid.cbar_axes[0]) ax.cax.toggle_label(True) fig.suptitle(\"Markov transition fields", "fields for the 50 time series in the \" \"'GunPoint' dataset\", y=0.92) plt.show()", "are plotted. 
\"\"\" # noqa:E501 # Author: <NAME> <<EMAIL>> # License: BSD-3-Clause import", "Markov transition field is an image obtained from a time series, representing a", "origin='lower', vmin=0., vmax=1.) grid[0].get_yaxis().set_ticks([]) grid[0].get_xaxis().set_ticks([]) plt.colorbar(im, cax=grid.cbar_axes[0]) ax.cax.toggle_label(True) fig.suptitle(\"Markov transition fields for the", "univariate time series of length 150. The Markov transition field of each time", "noqa:E501 # Author: <NAME> <<EMAIL>> # License: BSD-3-Clause import matplotlib.pyplot as plt from", "plotted. \"\"\" # noqa:E501 # Author: <NAME> <<EMAIL>> # License: BSD-3-Clause import matplotlib.pyplot", "fields fig = plt.figure(figsize=(10, 5)) grid = ImageGrid(fig, 111, nrows_ncols=(5, 10), axes_pad=0.1, share_all=True,", "grid = ImageGrid(fig, 111, nrows_ncols=(5, 10), axes_pad=0.1, share_all=True, cbar_mode='single') for i, ax in", "A Markov transition field is an image obtained from a time series, representing", "<NAME> <<EMAIL>> # License: BSD-3-Clause import matplotlib.pyplot as plt from mpl_toolkits.axes_grid1 import ImageGrid", "time series of length 150. The Markov transition field of each time series", "import matplotlib.pyplot as plt from mpl_toolkits.axes_grid1 import ImageGrid from pyts.image import MarkovTransitionField from", "for a discretized time series. Different strategies can be used to bin time", "samples of the `GunPoint dataset <http://timeseriesclassification.com/description.php?Dataset=GunPoint>`_, consisting of 50 univariate time series of", "Plot the 50 Gramian angular fields fig = plt.figure(figsize=(10, 5)) grid = ImageGrid(fig,", "representing a field of transition probabilities for a discretized time series. Different strategies", "load_gunpoint # Load the GunPoint dataset X, _, _, _ = load_gunpoint(return_X_y=True) #", "the time series mtf = MarkovTransitionField(n_bins=8) X_mtf = mtf.fit_transform(X) # Plot the 50", "set of Markov transition fields ==================================== A Markov transition field is an image", "ax.cax.toggle_label(True) fig.suptitle(\"Markov transition fields for the 50 time series in the \" \"'GunPoint'", "nrows_ncols=(5, 10), axes_pad=0.1, share_all=True, cbar_mode='single') for i, ax in enumerate(grid): im = ax.imshow(X_mtf[i],", "all the time series mtf = MarkovTransitionField(n_bins=8) X_mtf = mtf.fit_transform(X) # Plot the", "plots for all the time series mtf = MarkovTransitionField(n_bins=8) X_mtf = mtf.fit_transform(X) #", "_ = load_gunpoint(return_X_y=True) # Get the recurrence plots for all the time series", "field of each time series is independently computed and the 50 Markov transition", "as :class:`pyts.image.MarkovTransitionField`. In this example, we consider the training samples of the `GunPoint", "150. The Markov transition field of each time series is independently computed and", "MarkovTransitionField(n_bins=8) X_mtf = mtf.fit_transform(X) # Plot the 50 Gramian angular fields fig =", "50 Markov transition fields are plotted. \"\"\" # noqa:E501 # Author: <NAME> <<EMAIL>>", "License: BSD-3-Clause import matplotlib.pyplot as plt from mpl_toolkits.axes_grid1 import ImageGrid from pyts.image import", "plt.figure(figsize=(10, 5)) grid = ImageGrid(fig, 111, nrows_ncols=(5, 10), axes_pad=0.1, share_all=True, cbar_mode='single') for i,", "10), axes_pad=0.1, share_all=True, cbar_mode='single') for i, ax in enumerate(grid): im = ax.imshow(X_mtf[i], cmap='rainbow',", "and the 50 Markov transition fields are plotted. 
\"\"\" # noqa:E501 # Author:", "import MarkovTransitionField from pyts.datasets import load_gunpoint # Load the GunPoint dataset X, _,", "Markov transition fields ==================================== A Markov transition field is an image obtained from", "a time series, representing a field of transition probabilities for a discretized time", "5)) grid = ImageGrid(fig, 111, nrows_ncols=(5, 10), axes_pad=0.1, share_all=True, cbar_mode='single') for i, ax", "# License: BSD-3-Clause import matplotlib.pyplot as plt from mpl_toolkits.axes_grid1 import ImageGrid from pyts.image", "fig.suptitle(\"Markov transition fields for the 50 time series in the \" \"'GunPoint' dataset\",", "= load_gunpoint(return_X_y=True) # Get the recurrence plots for all the time series mtf", "a discretized time series. Different strategies can be used to bin time series.", "MarkovTransitionField from pyts.datasets import load_gunpoint # Load the GunPoint dataset X, _, _,", "the GunPoint dataset X, _, _, _ = load_gunpoint(return_X_y=True) # Get the recurrence", "length 150. The Markov transition field of each time series is independently computed", "training samples of the `GunPoint dataset <http://timeseriesclassification.com/description.php?Dataset=GunPoint>`_, consisting of 50 univariate time series", "matplotlib.pyplot as plt from mpl_toolkits.axes_grid1 import ImageGrid from pyts.image import MarkovTransitionField from pyts.datasets", "BSD-3-Clause import matplotlib.pyplot as plt from mpl_toolkits.axes_grid1 import ImageGrid from pyts.image import MarkovTransitionField", "In this example, we consider the training samples of the `GunPoint dataset <http://timeseriesclassification.com/description.php?Dataset=GunPoint>`_,", "vmax=1.) grid[0].get_yaxis().set_ticks([]) grid[0].get_xaxis().set_ticks([]) plt.colorbar(im, cax=grid.cbar_axes[0]) ax.cax.toggle_label(True) fig.suptitle(\"Markov transition fields for the 50 time", "is independently computed and the 50 Markov transition fields are plotted. \"\"\" #", "a field of transition probabilities for a discretized time series. Different strategies can", "import load_gunpoint # Load the GunPoint dataset X, _, _, _ = load_gunpoint(return_X_y=True)", "ImageGrid from pyts.image import MarkovTransitionField from pyts.datasets import load_gunpoint # Load the GunPoint", "we consider the training samples of the `GunPoint dataset <http://timeseriesclassification.com/description.php?Dataset=GunPoint>`_, consisting of 50", "# noqa:E501 # Author: <NAME> <<EMAIL>> # License: BSD-3-Clause import matplotlib.pyplot as plt", "the `GunPoint dataset <http://timeseriesclassification.com/description.php?Dataset=GunPoint>`_, consisting of 50 univariate time series of length 150.", "# Get the recurrence plots for all the time series mtf = MarkovTransitionField(n_bins=8)", "fig = plt.figure(figsize=(10, 5)) grid = ImageGrid(fig, 111, nrows_ncols=(5, 10), axes_pad=0.1, share_all=True, cbar_mode='single')", "= ImageGrid(fig, 111, nrows_ncols=(5, 10), axes_pad=0.1, share_all=True, cbar_mode='single') for i, ax in enumerate(grid):", "cax=grid.cbar_axes[0]) ax.cax.toggle_label(True) fig.suptitle(\"Markov transition fields for the 50 time series in the \"", "fields are plotted. \"\"\" # noqa:E501 # Author: <NAME> <<EMAIL>> # License: BSD-3-Clause", "transition fields are plotted. \"\"\" # noqa:E501 # Author: <NAME> <<EMAIL>> # License:", "computed and the 50 Markov transition fields are plotted. 
\"\"\" # noqa:E501 #", "plt from mpl_toolkits.axes_grid1 import ImageGrid from pyts.image import MarkovTransitionField from pyts.datasets import load_gunpoint", "Gramian angular fields fig = plt.figure(figsize=(10, 5)) grid = ImageGrid(fig, 111, nrows_ncols=(5, 10),", "series is independently computed and the 50 Markov transition fields are plotted. \"\"\"", "mtf.fit_transform(X) # Plot the 50 Gramian angular fields fig = plt.figure(figsize=(10, 5)) grid", "independently computed and the 50 Markov transition fields are plotted. \"\"\" # noqa:E501", "_, _ = load_gunpoint(return_X_y=True) # Get the recurrence plots for all the time", "time series. It is implemented as :class:`pyts.image.MarkovTransitionField`. In this example, we consider the", "the 50 Markov transition fields are plotted. \"\"\" # noqa:E501 # Author: <NAME>", "= MarkovTransitionField(n_bins=8) X_mtf = mtf.fit_transform(X) # Plot the 50 Gramian angular fields fig", "Data set of Markov transition fields ==================================== A Markov transition field is an", "for i, ax in enumerate(grid): im = ax.imshow(X_mtf[i], cmap='rainbow', origin='lower', vmin=0., vmax=1.) grid[0].get_yaxis().set_ticks([])", "from mpl_toolkits.axes_grid1 import ImageGrid from pyts.image import MarkovTransitionField from pyts.datasets import load_gunpoint #", "series. Different strategies can be used to bin time series. It is implemented", "Markov transition fields are plotted. \"\"\" # noqa:E501 # Author: <NAME> <<EMAIL>> #", "_, _, _ = load_gunpoint(return_X_y=True) # Get the recurrence plots for all the", "obtained from a time series, representing a field of transition probabilities for a", "recurrence plots for all the time series mtf = MarkovTransitionField(n_bins=8) X_mtf = mtf.fit_transform(X)", "==================================== A Markov transition field is an image obtained from a time series,", "series of length 150. The Markov transition field of each time series is", "bin time series. It is implemented as :class:`pyts.image.MarkovTransitionField`. In this example, we consider", "example, we consider the training samples of the `GunPoint dataset <http://timeseriesclassification.com/description.php?Dataset=GunPoint>`_, consisting of", "cmap='rainbow', origin='lower', vmin=0., vmax=1.) grid[0].get_yaxis().set_ticks([]) grid[0].get_xaxis().set_ticks([]) plt.colorbar(im, cax=grid.cbar_axes[0]) ax.cax.toggle_label(True) fig.suptitle(\"Markov transition fields for", "GunPoint dataset X, _, _, _ = load_gunpoint(return_X_y=True) # Get the recurrence plots", "# Author: <NAME> <<EMAIL>> # License: BSD-3-Clause import matplotlib.pyplot as plt from mpl_toolkits.axes_grid1", "of length 150. The Markov transition field of each time series is independently", "as plt from mpl_toolkits.axes_grid1 import ImageGrid from pyts.image import MarkovTransitionField from pyts.datasets import", "Different strategies can be used to bin time series. It is implemented as", "mpl_toolkits.axes_grid1 import ImageGrid from pyts.image import MarkovTransitionField from pyts.datasets import load_gunpoint # Load", "Load the GunPoint dataset X, _, _, _ = load_gunpoint(return_X_y=True) # Get the", "plt.colorbar(im, cax=grid.cbar_axes[0]) ax.cax.toggle_label(True) fig.suptitle(\"Markov transition fields for the 50 time series in the", "ImageGrid(fig, 111, nrows_ncols=(5, 10), axes_pad=0.1, share_all=True, cbar_mode='single') for i, ax in enumerate(grid): im", "time series, representing a field of transition probabilities for a discretized time series." ]
[ "def find_match(self, known_faces, person_names, face): matches = self.get_face_matches(known_faces, face) # get a list", "python3.5 import os import dlib import numpy as np import cv2 import time", "cap.read() draw_frame = frame_read.copy() gray = cv2.cvtColor(frame_read, cv2.COLOR_BGR2GRAY) overlay = frame_read.copy() cv2.rectangle(overlay, (0,", "= rn.find_match(face_encodings, person_names, face_encodings_in_image[0]) if match == \"Not Found\": cv2.putText(draw_frame, \"Unknow\", (x+5, y-15),", "(x + w, y + h), (0, 0, 255), 2) else: cv2.putText(draw_frame, match,", "ymin = int(round(y - (h / 2))) ymax = int(round(y + (h /", "filter_detections: x1, y1, w1, h1 = detection[2][0],\\ detection[2][1],\\ detection[2][2],\\ detection[2][3] xmin, ymin, xmax,", "| re.MULTILINE) if match: result = match.group(1) else: result = None try: if", "(0, 0, 255), 2) cv2.rectangle(draw_frame, (x, y), (x + w, y + h),", "= int(xmax*sx) ymax = int(ymax*sy) pt1 = (xmin, ymin) pt2 = (xmax, ymax)", "y), (x + w, y + h), (0, 0, 255), 2) else: for", "- face, axis=1) def find_match(self, known_faces, person_names, face): matches = self.get_face_matches(known_faces, face) #", "# initialize video input cap = cv2.VideoCapture(1) cap.set(cv2.CAP_PROP_FRAME_WIDTH, 640) cap.set(cv2.CAP_PROP_FRAME_HEIGHT, 360) face_encodings, person_names", "if self.altNames is None: try: with open(self.metaPath) as metaFH: metaContents = metaFH.read() import", "minSize = (50, 50), flags = cv2.CASCADE_SCALE_IMAGE) n_persons = len(face_rects) if len(face_rects) >", "cap = cv2.VideoCapture(1) cap.set(cv2.CAP_PROP_FRAME_WIDTH, 640) cap.set(cv2.CAP_PROP_FRAME_HEIGHT, 360) face_encodings, person_names = rn.load_face_encodings() faceClassifier =", "rn = YOLO_NN('.') # initialize video input cap = cv2.VideoCapture(1) cap.set(cv2.CAP_PROP_FRAME_WIDTH, 640) cap.set(cv2.CAP_PROP_FRAME_HEIGHT,", "in face_rects: face = cropped[y:y + h, x:x + w] face_encodings_in_image = rn.get_face_encodings(face)", "`q` key was pressed, break from the loop if key == ord(\"q\"): break", "= 1 if self.metaMain is None: self.metaMain = darknet.load_meta(self.metaPath.encode(\"ascii\")) if self.altNames is None:", "pt1, pt2, (0, 255, 0), 1) cv2.putText(img, detection[0].decode() + \" [\" + str(round(detection[1]", "# get a list of True/False min_index = matches.argmin() min_value = matches[min_index] if", "for face_bounds in bounds] try: h = [np.array(self.face_recognition_model.compute_face_descriptor(face, face_pose, 1)) for face_pose in", "y), (x + w, y + h), (0, 0, 255), 2) else: cv2.putText(draw_frame,", "cv2.rectangle(img, (x,y), (x_plus_w,y_plus_h), (0, 0, 255), 2) #cv2.putText(img, label, (x-10,y-10), cv2.FONT_HERSHEY_SIMPLEX, 0.5, color,", "confidence, x, y, x_plus_w, y_plus_h): cv2.rectangle(img, (x,y), (x_plus_w,y_plus_h), (0, 0, 255), 2) #cv2.putText(img,", "n_persons = 0 for detection in detections: if detection[0] == b'person': # It", "5), cv2.FONT_HERSHEY_SIMPLEX, 0.5, (0, 0, 255), 2) cv2.putText(draw_frame, \"InteliCam Users: \" + str(n_users)", "+ str(n_persons), (5, 20), cv2.FONT_HERSHEY_SIMPLEX, 0.5, [255, 255, 255], 1) cv2.imshow(\"Frame\", draw_frame) key", "ymax def cvDrawBoxes(self, detections, img): for detection in detections: x, y, w, h", "draw bounding box on the detected object with class name def draw_bounding_box(self,img, class_id,", "(0, 0), (640, 35), (0, 0, 0), -1) alpha = 0.8 draw_frame =", "if not os.path.exists(self.configPath): raise ValueError(\"Invalid config path `\" + os.path.abspath(self.configPath)+\"`\") if not 
os.path.exists(self.weightPath):", "h1 = detection[2][0],\\ detection[2][1],\\ detection[2][2],\\ detection[2][3] xmin, ymin, xmax, ymax = rn.convertBack( float(x1),", "0.5, [0, 255, 0], 2) return img def get_face_encodings(self, face): bounds = self.face_detector(face,", "255), 2) #cv2.putText(img, label, (x-10,y-10), cv2.FONT_HERSHEY_SIMPLEX, 0.5, color, 2) if __name__ == \"__main__\":", "face_encodings, person_names def detect(self, frame_read): prev_time = time.time() frame_resized = cv2.resize(frame_read, (darknet.network_width(rn.netMain), darknet.network_height(rn.netMain)),", "for (x, y, w, h) in face_rects: face = draw_frame[y:y + h, x:x", "> 0: for (x, y, w, h) in face_rects: face = cropped[y:y +", "int(round(y - (h / 2))) ymax = int(round(y + (h / 2))) return", "if min_value < 0.58: return person_names[min_index]+\" ({0:.2f})\".format(min_value) if min_value < 0.65: return person_names[min_index]+\"?\"+\"", "print(\"self.metaPath: \" + self.metaPath) self.netMain = None self.metaMain = None self.altNames = None", "self.netMain is None: self.netMain = darknet.load_net_custom(self.configPath.encode( \"ascii\"), self.weightPath.encode(\"ascii\"), 0, 1) # batch size", "matches = self.get_face_matches(known_faces, face) # get a list of True/False min_index = matches.argmin()", "self.weightPath.encode(\"ascii\"), 0, 1) # batch size = 1 if self.metaMain is None: self.metaMain", "import numpy as np import cv2 import time import darknet from ctypes import", "dlib face_rects = faceClassifier.detectMultiScale( # Detect faces with dlib gray, scaleFactor = 1.1,", "#x += xmin #y += ymin if (face_encodings_in_image): match = rn.find_match(face_encodings, person_names, face_encodings_in_image[0])", "pass # Create an image we reuse for each detect self.darknet_image = darknet.make_image(darknet.network_width(self.netMain),", "float(w1), float(h1)) sx = 640.0/416.0 sy = 360.0/416.0 xmin = int(xmin*sx) ymin =", "cv2.cvtColor(face, cv2.COLOR_BGR2RGB) faces_bounds = self.face_detector(face, 1) if len(faces_bounds) != 1: print(\"Expected one and", "*= *(.*)$\", metaContents, re.IGNORECASE | re.MULTILINE) if match: result = match.group(1) else: result", "frame_read.copy() gray = cv2.cvtColor(frame_read, cv2.COLOR_BGR2GRAY) overlay = frame_read.copy() cv2.rectangle(overlay, (0, 0), (640, 35),", "\"Unknow\", (x+5, y-15), cv2.FONT_HERSHEY_SIMPLEX, 0.5, (0, 0, 255), 2) cv2.rectangle(draw_frame, (x, y), (x", "= cv2.CASCADE_SCALE_IMAGE) n_persons += 1 if len(face_rects) > 0: for (x, y, w,", "255), 2) else: cv2.rectangle(draw_frame, pt1, pt2, (0, 0, 255), 2) cv2.putText(draw_frame, \"Unknow\", (pt1[0],", "gray = cv2.cvtColor(frame_read, cv2.COLOR_BGR2GRAY) overlay = frame_read.copy() cv2.rectangle(overlay, (0, 0), (640, 35), (0,", "overlay = frame_read.copy() cv2.rectangle(overlay, (0, 0), (640, 35), (0, 0, 0), -1) alpha", "with dlib face_rects = faceClassifier.detectMultiScale( # Detect faces with dlib gray, scaleFactor =", "person_names, face): matches = self.get_face_matches(known_faces, face) # get a list of True/False min_index", "YOLO_NN: def __init__(self, yoloDataFolder): self.configPath = yoloDataFolder + \"/cfg/yolov3-tiny.cfg\" self.weightPath = yoloDataFolder +", "a list of True/False min_index = matches.argmin() min_value = matches[min_index] if min_value <", "(pt1[0], pt1[1] - 5), cv2.FONT_HERSHEY_SIMPLEX, 0.5, [0, 255, 0], 2) return img def", "#dlib.hit_enter_to_continue() return face_encodings, person_names def detect(self, frame_read): prev_time = time.time() frame_resized = 
cv2.resize(frame_read,", "= (xmin, ymin) pt2 = (xmax, ymax) cropped = gray[ymin:ymax, xmin:xmax] face_rects =", "= cv2.waitKey(3) & 0xFF # if the `q` key was pressed, break from", "detection[2][0],\\ detection[2][1],\\ detection[2][2],\\ detection[2][3] xmin, ymin, xmax, ymax = self.convertBack( float(x), float(y), float(w),", "+ str(round(detection[1] * 100, 2)) + \"]\", (pt1[0], pt1[1] - 5), cv2.FONT_HERSHEY_SIMPLEX, 0.5,", "get_face_matches(self, known_faces, face): return np.linalg.norm(known_faces - face, axis=1) def find_match(self, known_faces, person_names, face):", "cv2.imread(path_to_image) face = cv2.cvtColor(face, cv2.COLOR_BGR2RGB) faces_bounds = self.face_detector(face, 1) if len(faces_bounds) != 1:", "draw_bounding_box(self,img, class_id, confidence, x, y, x_plus_w, y_plus_h): cv2.rectangle(img, (x,y), (x_plus_w,y_plus_h), (0, 0, 255),", "img): for detection in detections: x, y, w, h = detection[2][0],\\ detection[2][1],\\ detection[2][2],\\", "if self.netMain is None: self.netMain = darknet.load_net_custom(self.configPath.encode( \"ascii\"), self.weightPath.encode(\"ascii\"), 0, 1) # batch", "({0:.2f})\".format(min_value) if min_value < 0.58: return person_names[min_index]+\" ({0:.2f})\".format(min_value) if min_value < 0.65: return", "xmax, ymax def cvDrawBoxes(self, detections, img): for detection in detections: x, y, w,", "= dlib.image_window() for path_to_image in full_paths_to_images: print(\"Loading user: \" + path_to_image) #face =", "+ w, y + h), (0, 0, 255), 2) else: for detection in", "255, 255], 1) cv2.imshow(\"Frame\", draw_frame) key = cv2.waitKey(3) & 0xFF # if the", "draw_frame) key = cv2.waitKey(3) & 0xFF # if the `q` key was pressed,", "w, y + h), (0, 255, 0), 2) n_users += 1 else: cv2.putText(draw_frame,", "y, x_plus_w, y_plus_h): cv2.rectangle(img, (x,y), (x_plus_w,y_plus_h), (0, 0, 255), 2) #cv2.putText(img, label, (x-10,y-10),", "None try: if os.path.exists(result): with open(result) as namesFH: namesList = namesFH.read().strip().split(\"\\n\") self.altNames =", "with dlib gray, scaleFactor = 1.1, minNeighbors = 5, minSize = (50, 50),", "+ (h / 2))) return xmin, ymin, xmax, ymax def cvDrawBoxes(self, detections, img):", "y), (x + w, y + h), (0, 255, 0), 2) n_users +=", "\" + path_to_image + \" - it has \" + str(len(faces_bounds))) exit() face_bounds", "net.getLayerNames() output_layers = [layer_names[i[0] - 1] for i in net.getUnconnectedOutLayers()] return output_layers #", "= int(ymin*sy) xmax = int(xmax*sx) ymax = int(ymax*sy) pt1 = (xmin, ymin) pt2", "cv2.rectangle(overlay, (0, 0), (640, 35), (0, 0, 0), -1) alpha = 0.8 draw_frame", "detection[2][3] xmin, ymin, xmax, ymax = self.convertBack( float(x), float(y), float(w), float(h)) pt1 =", "h, x:x + w] face_encodings_in_image = rn.get_face_encodings(face) if (face_encodings_in_image): match = rn.find_match(face_encodings, person_names,", "person_names[min_index]+\" ({0:.2f})\".format(min_value) if min_value < 0.65: return person_names[min_index]+\"?\"+\" ({0:.2f})\".format(min_value) return 'Not Found' def", "#rn.recognize_faces_in_video(face_encodings, person_names) while True: ret, frame_read = cap.read() draw_frame = frame_read.copy() gray =", "0.8 draw_frame = cv2.addWeighted(overlay, alpha, draw_frame, 1 - alpha, 0) # Yolo Detection", "if the `q` key was pressed, break from the loop if key ==", "layer names # in the architecture def get_output_layers(self,net): layer_names = net.getLayerNames() output_layers =", "names # in the architecture def get_output_layers(self,net): layer_names = 
net.getLayerNames() output_layers = [layer_names[i[0]", "# in the architecture def get_output_layers(self,net): layer_names = net.getLayerNames() output_layers = [layer_names[i[0] -", "self.metaPath) self.netMain = None self.metaMain = None self.altNames = None if not os.path.exists(self.configPath):", "the detected object with class name def draw_bounding_box(self,img, class_id, confidence, x, y, x_plus_w,", "2) cv2.rectangle(draw_frame, (x, y), (x + w, y + h), (0, 0, 255),", "metaContents = metaFH.read() import re match = re.search(\"names *= *(.*)$\", metaContents, re.IGNORECASE |", "pt1 = (xmin, ymin) pt2 = (xmax, ymax) cropped = gray[ymin:ymax, xmin:xmax] face_rects", "= namesFH.read().strip().split(\"\\n\") self.altNames = [x.strip() for x in namesList] except TypeError: pass except", "detect(self, frame_read): prev_time = time.time() frame_resized = cv2.resize(frame_read, (darknet.network_width(rn.netMain), darknet.network_height(rn.netMain)), interpolation=cv2.INTER_LINEAR) frame_rgb =", "return output_layers # function to draw bounding box on the detected object with", "output layer names # in the architecture def get_output_layers(self,net): layer_names = net.getLayerNames() output_layers", "\" | \"+ \\ \"Persons: \" + str(n_persons), (5, 20), cv2.FONT_HERSHEY_SIMPLEX, 0.5, [255,", "key = cv2.waitKey(3) & 0xFF # if the `q` key was pressed, break", "size = 1 if self.metaMain is None: self.metaMain = darknet.load_meta(self.metaPath.encode(\"ascii\")) if self.altNames is", "0.58: return person_names[min_index]+\" ({0:.2f})\".format(min_value) if min_value < 0.65: return person_names[min_index]+\"?\"+\" ({0:.2f})\".format(min_value) return 'Not", "not os.path.exists(self.metaPath): raise ValueError(\"Invalid data file path `\" + os.path.abspath(self.metaPath)+\"`\") if self.netMain is", "(0, 0, 255), 2) cv2.putText(draw_frame, \"Unknow\", (pt1[0], pt1[1] - 5), cv2.FONT_HERSHEY_SIMPLEX, 0.5, (0,", "os.path.exists(self.configPath): raise ValueError(\"Invalid config path `\" + os.path.abspath(self.configPath)+\"`\") if not os.path.exists(self.weightPath): raise ValueError(\"Invalid", "self.altNames = [x.strip() for x in namesList] except TypeError: pass except Exception: pass", "xmax, ymax = self.convertBack( float(x), float(y), float(w), float(h)) pt1 = (xmin, ymin) pt2", "w, y + h), (0, 0, 255), 2) else: for detection in filter_detections:", "sy = 360.0/416.0 xmin = int(xmin*sx) ymin = int(ymin*sy) xmax = int(xmax*sx) ymax", "in bounds] try: h = [np.array(self.face_recognition_model.compute_face_descriptor(face, face_pose, 1)) for face_pose in faces_landmarks] except:", "0, 1) # batch size = 1 if self.metaMain is None: self.metaMain =", "1 if len(face_rects) > 0: for (x, y, w, h) in face_rects: face", "rn.get_face_encodings(face) #x += xmin #y += ymin if (face_encodings_in_image): match = rn.find_match(face_encodings, person_names,", "metaContents, re.IGNORECASE | re.MULTILINE) if match: result = match.group(1) else: result = None", "= detection[2][0],\\ detection[2][1],\\ detection[2][2],\\ detection[2][3] xmin, ymin, xmax, ymax = rn.convertBack( float(x1), float(y1),", "w, h = detection[2][0],\\ detection[2][1],\\ detection[2][2],\\ detection[2][3] xmin, ymin, xmax, ymax = self.convertBack(", "file path `\" + os.path.abspath(self.metaPath)+\"`\") if self.netMain is None: self.netMain = darknet.load_net_custom(self.configPath.encode( \"ascii\"),", "io.imread(path_to_image) face = cv2.imread(path_to_image) face = cv2.cvtColor(face, cv2.COLOR_BGR2RGB) faces_bounds = 
self.face_detector(face, 1) if", "= dlib.face_recognition_model_v1(self.data_dir + '/dlib/dlib_face_recognition_resnet_model_v1.dat') def convertBack(self, x, y, w, h): xmin = int(round(x", "= time.time() frame_resized = cv2.resize(frame_read, (darknet.network_width(rn.netMain), darknet.network_height(rn.netMain)), interpolation=cv2.INTER_LINEAR) frame_rgb = cv2.cvtColor(frame_resized, cv2.COLOR_BGR2RGB) darknet.copy_image_from_bytes(self.darknet_image,", "self.darknet_image, thresh=0.25) #print(1/(time.time()-prev_time)) return detections # function to get the output layer names", "h) in face_rects: face = cropped[y:y + h, x:x + w] face_encodings_in_image =", "os.path.expanduser(yoloDataFolder+'/face_data') self.faces_folder_path = self.data_dir + '/users/' self.face_detector = dlib.get_frontal_face_detector() self.shape_predictor = dlib.shape_predictor(self.data_dir +", "1)) for face_pose in faces_landmarks] except: return [] return h def get_face_matches(self, known_faces,", "win.add_overlay(face_landmarks) face_encodings.append(face_encoding) #print(face_encoding) #dlib.hit_enter_to_continue() return face_encodings, person_names def detect(self, frame_read): prev_time = time.time()", "def load_face_encodings(self): image_filenames = filter(lambda x: x.endswith('.jpg'), os.listdir(self.faces_folder_path)) image_filenames = sorted(image_filenames) person_names =", "int(xmin*sx) ymin = int(ymin*sy) xmax = int(xmax*sx) ymax = int(ymax*sy) pt1 = (xmin,", "ymin) pt2 = (xmax, ymax) cropped = gray[ymin:ymax, xmin:xmax] face_rects = faceClassifier.detectMultiScale( #", "255, 0), 2) cv2.rectangle(draw_frame, (x, y), (x + w, y + h), (0,", "the architecture def get_output_layers(self,net): layer_names = net.getLayerNames() output_layers = [layer_names[i[0] - 1] for", "None if not os.path.exists(self.configPath): raise ValueError(\"Invalid config path `\" + os.path.abspath(self.configPath)+\"`\") if not", "[x[:-4] for x in image_filenames] full_paths_to_images = [self.faces_folder_path + x for x in", "layer_names = net.getLayerNames() output_layers = [layer_names[i[0] - 1] for i in net.getUnconnectedOutLayers()] return", "video input cap = cv2.VideoCapture(1) cap.set(cv2.CAP_PROP_FRAME_WIDTH, 640) cap.set(cv2.CAP_PROP_FRAME_HEIGHT, 360) face_encodings, person_names = rn.load_face_encodings()", "255, 0), 2) n_users += 1 else: cv2.putText(draw_frame, \"Unknow\", (x+5, y-15), cv2.FONT_HERSHEY_SIMPLEX, 0.5,", "\" + self.metaPath) self.netMain = None self.metaMain = None self.altNames = None if", "2) n_users += 1 else: cv2.putText(draw_frame, \"Unknow\", (x+5, y-15), cv2.FONT_HERSHEY_SIMPLEX, 0.5, (0, 0,", "faces_landmarks = [self.shape_predictor(face, face_bounds) for face_bounds in bounds] try: h = [np.array(self.face_recognition_model.compute_face_descriptor(face, face_pose,", "int(round(x + (w / 2))) ymin = int(round(y - (h / 2))) ymax", "raise ValueError(\"Invalid weight path `\" + os.path.abspath(self.weightPath)+\"`\") if not os.path.exists(self.metaPath): raise ValueError(\"Invalid data", "dlib.face_recognition_model_v1(self.data_dir + '/dlib/dlib_face_recognition_resnet_model_v1.dat') def convertBack(self, x, y, w, h): xmin = int(round(x -", "cv2 import time import darknet from ctypes import * import math import random", "+ self.weightPath) print(\"self.metaPath: \" + self.metaPath) self.netMain = None self.metaMain = None self.altNames", "x.endswith('.jpg'), os.listdir(self.faces_folder_path)) image_filenames = sorted(image_filenames) person_names = [x[:-4] for x in image_filenames] 
full_paths_to_images", "self.face_detector(face, 1) if len(faces_bounds) != 1: print(\"Expected one and only one face per", "for (x, y, w, h) in face_rects: face = cropped[y:y + h, x:x", "self.weightPath = yoloDataFolder + \"/yolov3-tiny.weights\" self.metaPath = yoloDataFolder + \"/cfg/coco.data\" print(\"self.configPath: \" +", "2) cv2.putText(draw_frame, \"Unknow\", (pt1[0], pt1[1] - 5), cv2.FONT_HERSHEY_SIMPLEX, 0.5, (0, 0, 255), 2)", "#!/usr/bin/env python3.5 import os import dlib import numpy as np import cv2 import", "= len(face_rects) if len(face_rects) > 0: # Case find any face for (x,", "self.metaMain = None self.altNames = None if not os.path.exists(self.configPath): raise ValueError(\"Invalid config path", "min_value < 0.58: return person_names[min_index]+\" ({0:.2f})\".format(min_value) if min_value < 0.65: return person_names[min_index]+\"?\"+\" ({0:.2f})\".format(min_value)", "full_paths_to_images = [self.faces_folder_path + x for x in image_filenames] face_encodings = [] win", "minSize = (50, 50), flags = cv2.CASCADE_SCALE_IMAGE) n_persons += 1 if len(face_rects) >", "True/False min_index = matches.argmin() min_value = matches[min_index] if min_value < 0.55: return person_names[min_index]+\"!", "if self.metaMain is None: self.metaMain = darknet.load_meta(self.metaPath.encode(\"ascii\")) if self.altNames is None: try: with", "YOLO_NN('.') # initialize video input cap = cv2.VideoCapture(1) cap.set(cv2.CAP_PROP_FRAME_WIDTH, 640) cap.set(cv2.CAP_PROP_FRAME_HEIGHT, 360) face_encodings,", "rn.convertBack( float(x1), float(y1), float(w1), float(h1)) sx = 640.0/416.0 sy = 360.0/416.0 xmin =", "x, y, w, h): xmin = int(round(x - (w / 2))) xmax =", "is None: try: with open(self.metaPath) as metaFH: metaContents = metaFH.read() import re match", "ymax = int(round(y + (h / 2))) return xmin, ymin, xmax, ymax def", "0), 1) cv2.putText(img, detection[0].decode() + \" [\" + str(round(detection[1] * 100, 2)) +", "if not os.path.exists(self.metaPath): raise ValueError(\"Invalid data file path `\" + os.path.abspath(self.metaPath)+\"`\") if self.netMain", "detections # function to get the output layer names # in the architecture", "int(xmax*sx) ymax = int(ymax*sy) pt1 = (xmin, ymin) pt2 = (xmax, ymax) cropped", "+ w, y + h), (0, 0, 255), 2) else: cv2.rectangle(draw_frame, pt1, pt2,", "+ path_to_image) #face = io.imread(path_to_image) face = cv2.imread(path_to_image) face = cv2.cvtColor(face, cv2.COLOR_BGR2RGB) faces_bounds", "0.5, color, 2) if __name__ == \"__main__\": # Start Yolo Setup rn =", "cv2.COLOR_BGR2GRAY) overlay = frame_read.copy() cv2.rectangle(overlay, (0, 0), (640, 35), (0, 0, 0), -1)", "+ self.metaPath) self.netMain = None self.metaMain = None self.altNames = None if not", "*(.*)$\", metaContents, re.IGNORECASE | re.MULTILINE) if match: result = match.group(1) else: result =", "user: \" + path_to_image) #face = io.imread(path_to_image) face = cv2.imread(path_to_image) face = cv2.cvtColor(face,", "detections: if detection[0] == b'person': # It is a person filter_detections.append(detection) if len(filter_detections)", "else: cv2.putText(draw_frame, \"Unknow\", (x+5, y-15), cv2.FONT_HERSHEY_SIMPLEX, 0.5, (0, 0, 255), 2) cv2.rectangle(draw_frame, (x,", "= rn.get_face_encodings(face) #x += xmin #y += ymin if (face_encodings_in_image): match = rn.find_match(face_encodings,", "+ str(len(faces_bounds))) exit() face_bounds = faces_bounds[0] face_landmarks = self.shape_predictor(face, face_bounds) face_encoding = np.array(self.face_recognition_model.compute_face_descriptor(face,", "x in 
image_filenames] face_encodings = [] win = dlib.image_window() for path_to_image in full_paths_to_images:", "= darknet.detect_image(self.netMain, self.metaMain, self.darknet_image, thresh=0.25) #print(1/(time.time()-prev_time)) return detections # function to get the", "0, 255), 2) else: cv2.rectangle(draw_frame, pt1, pt2, (0, 0, 255), 2) cv2.putText(draw_frame, \"Unknow\",", "= self.data_dir + '/users/' self.face_detector = dlib.get_frontal_face_detector() self.shape_predictor = dlib.shape_predictor(self.data_dir + '/dlib/shape_predictor_68_face_landmarks.dat') self.face_recognition_model", "0, 255), 2) else: cv2.putText(draw_frame, match, (x+5, y-15), cv2.FONT_HERSHEY_SIMPLEX, 0.5, (0, 255, 0),", "[255, 255, 255], 1) cv2.imshow(\"Frame\", draw_frame) key = cv2.waitKey(3) & 0xFF # if", "2) if __name__ == \"__main__\": # Start Yolo Setup rn = YOLO_NN('.') #", "TypeError: pass except Exception: pass # Create an image we reuse for each", "- 5), cv2.FONT_HERSHEY_SIMPLEX, 0.5, (0, 0, 255), 2) cv2.putText(draw_frame, \"InteliCam Users: \" +", "5), cv2.FONT_HERSHEY_SIMPLEX, 0.5, [0, 255, 0], 2) return img def get_face_encodings(self, face): bounds", "= 5, minSize = (50, 50), flags = cv2.CASCADE_SCALE_IMAGE) n_persons = len(face_rects) if", "0: # Case find any face for (x, y, w, h) in face_rects:", "x, y, w, h = detection[2][0],\\ detection[2][1],\\ detection[2][2],\\ detection[2][3] xmin, ymin, xmax, ymax", "and only one face per image: \" + path_to_image + \" - it", "rn.find_match(face_encodings, person_names, face_encodings_in_image[0]) if match == \"Not Found\": cv2.putText(draw_frame, \"Unknow\", (x+5, y-15), cv2.FONT_HERSHEY_SIMPLEX,", "#y += ymin if (face_encodings_in_image): match = rn.find_match(face_encodings, person_names, face_encodings_in_image[0]) if match ==", "return np.linalg.norm(known_faces - face, axis=1) def find_match(self, known_faces, person_names, face): matches = self.get_face_matches(known_faces,", "1: print(\"Expected one and only one face per image: \" + path_to_image +", "else: result = None try: if os.path.exists(result): with open(result) as namesFH: namesList =", "n_persons = len(face_rects) if len(face_rects) > 0: # Case find any face for", "gray, scaleFactor = 1.1, minNeighbors = 5, minSize = (50, 50), flags =", "detection[2][0],\\ detection[2][1],\\ detection[2][2],\\ detection[2][3] xmin, ymin, xmax, ymax = rn.convertBack( float(x1), float(y1), float(w1),", "= int(xmin*sx) ymin = int(ymin*sy) xmax = int(xmax*sx) ymax = int(ymax*sy) pt1 =", "i in net.getUnconnectedOutLayers()] return output_layers # function to draw bounding box on the", "__init__(self, yoloDataFolder): self.configPath = yoloDataFolder + \"/cfg/yolov3-tiny.cfg\" self.weightPath = yoloDataFolder + \"/yolov3-tiny.weights\" self.metaPath", "person_names def detect(self, frame_read): prev_time = time.time() frame_resized = cv2.resize(frame_read, (darknet.network_width(rn.netMain), darknet.network_height(rn.netMain)), interpolation=cv2.INTER_LINEAR)", "cvDrawBoxes(self, detections, img): for detection in detections: x, y, w, h = detection[2][0],\\", "+ \" | \"+ \\ \"Persons: \" + str(n_persons), (5, 20), cv2.FONT_HERSHEY_SIMPLEX, 0.5,", "ymax) cropped = gray[ymin:ymax, xmin:xmax] face_rects = faceClassifier.detectMultiScale( # Detect faces with dlib", "y + h), (0, 0, 255), 2) else: for detection in filter_detections: x1,", "win.add_overlay(face_bounds) win.add_overlay(face_landmarks) face_encodings.append(face_encoding) #print(face_encoding) #dlib.hit_enter_to_continue() return face_encodings, 
person_names def detect(self, frame_read): prev_time =", "w, y + h), (0, 0, 255), 2) else: cv2.rectangle(draw_frame, pt1, pt2, (0,", "cv2.FONT_HERSHEY_SIMPLEX, 0.5, [255, 255, 255], 1) cv2.imshow(\"Frame\", draw_frame) key = cv2.waitKey(3) & 0xFF", "\\ \"Persons: \" + str(n_persons), (5, 20), cv2.FONT_HERSHEY_SIMPLEX, 0.5, [255, 255, 255], 1)", "It is a person filter_detections.append(detection) if len(filter_detections) == 0: # Case Yolo didn't", "cap.set(cv2.CAP_PROP_FRAME_HEIGHT, 360) face_encodings, person_names = rn.load_face_encodings() faceClassifier = cv2.CascadeClassifier(rn.data_dir + '/dlib/haarcascade_frontalface_default.xml') #rn.recognize_faces_in_video(face_encodings, person_names)", "get_output_layers(self,net): layer_names = net.getLayerNames() output_layers = [layer_names[i[0] - 1] for i in net.getUnconnectedOutLayers()]", "list of True/False min_index = matches.argmin() min_value = matches[min_index] if min_value < 0.55:", "0) # Yolo Detection detections = rn.detect(frame_read.copy()) filter_detections = [] n_users = 0", "pt2, (0, 0, 255), 2) cv2.putText(draw_frame, \"Unknow\", (pt1[0], pt1[1] - 5), cv2.FONT_HERSHEY_SIMPLEX, 0.5,", "the `q` key was pressed, break from the loop if key == ord(\"q\"):", "print(\"Loading user: \" + path_to_image) #face = io.imread(path_to_image) face = cv2.imread(path_to_image) face =", "is None: self.netMain = darknet.load_net_custom(self.configPath.encode( \"ascii\"), self.weightPath.encode(\"ascii\"), 0, 1) # batch size =", "__name__ == \"__main__\": # Start Yolo Setup rn = YOLO_NN('.') # initialize video", "frame_read.copy() cv2.rectangle(overlay, (0, 0), (640, 35), (0, 0, 0), -1) alpha = 0.8", "full_paths_to_images: print(\"Loading user: \" + path_to_image) #face = io.imread(path_to_image) face = cv2.imread(path_to_image) face", "frame_rgb.tobytes()) detections = darknet.detect_image(self.netMain, self.metaMain, self.darknet_image, thresh=0.25) #print(1/(time.time()-prev_time)) return detections # function to", "+ (w / 2))) ymin = int(round(y - (h / 2))) ymax =", "'/dlib/shape_predictor_68_face_landmarks.dat') self.face_recognition_model = dlib.face_recognition_model_v1(self.data_dir + '/dlib/dlib_face_recognition_resnet_model_v1.dat') def convertBack(self, x, y, w, h): xmin", "({0:.2f})\".format(min_value) if min_value < 0.65: return person_names[min_index]+\"?\"+\" ({0:.2f})\".format(min_value) return 'Not Found' def load_face_encodings(self):", "ymin) pt2 = (xmax, ymax) cv2.rectangle(img, pt1, pt2, (0, 255, 0), 1) cv2.putText(img,", "pt1, pt2, (0, 0, 255), 2) cv2.putText(draw_frame, \"Unknow\", (pt1[0], pt1[1] - 5), cv2.FONT_HERSHEY_SIMPLEX,", "= io.imread(path_to_image) face = cv2.imread(path_to_image) face = cv2.cvtColor(face, cv2.COLOR_BGR2RGB) faces_bounds = self.face_detector(face, 1)", "pass except Exception: pass # Create an image we reuse for each detect", "* 100, 2)) + \"]\", (pt1[0], pt1[1] - 5), cv2.FONT_HERSHEY_SIMPLEX, 0.5, [0, 255,", "None: self.metaMain = darknet.load_meta(self.metaPath.encode(\"ascii\")) if self.altNames is None: try: with open(self.metaPath) as metaFH:", "= dlib.shape_predictor(self.data_dir + '/dlib/shape_predictor_68_face_landmarks.dat') self.face_recognition_model = dlib.face_recognition_model_v1(self.data_dir + '/dlib/dlib_face_recognition_resnet_model_v1.dat') def convertBack(self, x, y,", "Found\": cv2.putText(draw_frame, \"Unknow\", (x+5, y-15), cv2.FONT_HERSHEY_SIMPLEX, 0.5, (0, 0, 255), 2) cv2.rectangle(draw_frame, (x,", "ValueError(\"Invalid data file path `\" + 
        self.netMain = darknet.load_net_custom(
            self.configPath.encode("ascii"),
            self.weightPath.encode("ascii"), 0, 1)  # batch size = 1
        self.metaMain = darknet.load_meta(self.metaPath.encode("ascii"))
        # Parse the "names = ..." entry of coco.data so detections can be
        # reported with readable class names.
        try:
            with open(self.metaPath) as metaFH:
                metaContents = metaFH.read()
                match = re.search("names *= *(.*)$", metaContents,
                                  re.IGNORECASE | re.MULTILINE)
                result = match.group(1) if match else None
                try:
                    if os.path.exists(result):
                        with open(result) as namesFH:
                            namesList = namesFH.read().strip().split("\n")
                            self.altNames = [x.strip() for x in namesList]
                except TypeError:
                    pass
        except Exception:
            pass
        # Create an image buffer we reuse for each detect() call.
        self.darknet_image = darknet.make_image(darknet.network_width(self.netMain),
                                                darknet.network_height(self.netMain), 3)
        self.data_dir = os.path.expanduser(yoloDataFolder + '/face_data')
        self.faces_folder_path = self.data_dir + '/users/'
        self.face_detector = dlib.get_frontal_face_detector()
        self.shape_predictor = dlib.shape_predictor(
            self.data_dir + '/dlib/shape_predictor_68_face_landmarks.dat')
        self.face_recognition_model = dlib.face_recognition_model_v1(
            self.data_dir + '/dlib/dlib_face_recognition_resnet_model_v1.dat')
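    # The two .dat files above are the stock dlib models (the 68-point landmark
    # predictor and the ResNet face-descriptor network); if they are missing,
    # both are available bz2-compressed from http://dlib.net/files/.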

    def convertBack(self, x, y, w, h):
        # YOLO reports boxes as centre/width/height; convert to corner points.
        xmin = int(round(x - (w / 2)))
        xmax = int(round(x + (w / 2)))
        ymin = int(round(y - (h / 2)))
        ymax = int(round(y + (h / 2)))
        return xmin, ymin, xmax, ymax

    def cvDrawBoxes(self, detections, img):
        for detection in detections:
            x, y, w, h = detection[2][0], detection[2][1], detection[2][2], detection[2][3]
            xmin, ymin, xmax, ymax = self.convertBack(float(x), float(y), float(w), float(h))
            pt1 = (xmin, ymin)
            pt2 = (xmax, ymax)
            cv2.rectangle(img, pt1, pt2, (0, 255, 0), 1)
            cv2.putText(img,
                        detection[0].decode() + " [" + str(round(detection[1] * 100, 2)) + "]",
                        (pt1[0], pt1[1] - 5), cv2.FONT_HERSHEY_SIMPLEX, 0.5, [0, 255, 0], 2)
        return img
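    # Worked example for convertBack (illustrative numbers, not from the
    # script): a box centred at (208, 208) with width 100 and height 50 maps
    # to corners (158, 183) and (258, 233):
    #   convertBack(208, 208, 100, 50) == (158, 183, 258, 233)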

    def get_face_encodings(self, face):
        bounds = self.face_detector(face, 1)
        faces_landmarks = [self.shape_predictor(face, face_bounds) for face_bounds in bounds]
        try:
            # One 128-dimensional descriptor per detected face.
            return [np.array(self.face_recognition_model.compute_face_descriptor(face, face_pose, 1))
                    for face_pose in faces_landmarks]
        except Exception:
            return []

    def get_face_matches(self, known_faces, face):
        # Euclidean distance between the query descriptor and each known one.
        return np.linalg.norm(known_faces - face, axis=1)

    def find_match(self, known_faces, person_names, face):
        matches = self.get_face_matches(known_faces, face)  # one distance per known face
        min_index = matches.argmin()
        min_value = matches[min_index]
        # Grade the best match by distance: "!" = strong, plain = good,
        # "?" = weak; anything above 0.65 is treated as no match.
        if min_value < 0.55:
            return person_names[min_index] + "! ({0:.2f})".format(min_value)
        if min_value < 0.58:
            return person_names[min_index] + " ({0:.2f})".format(min_value)
        if min_value < 0.65:
            return person_names[min_index] + "? ({0:.2f})".format(min_value)
        return 'Not Found'
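    # For context: dlib's documentation for dlib_face_recognition_resnet_model_v1
    # suggests that two descriptors usually belong to the same person when their
    # Euclidean distance is below roughly 0.6; the 0.55/0.58/0.65 bands above
    # are this script's own stricter and looser grades around that figure.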

    def load_face_encodings(self):
        # Every .jpg in the users folder enrolls one person; the file name
        # (minus the extension) becomes that person's display name.
        image_filenames = sorted(x for x in os.listdir(self.faces_folder_path) if x.endswith('.jpg'))
        person_names = [x[:-4] for x in image_filenames]
        full_paths_to_images = [self.faces_folder_path + x for x in image_filenames]
        face_encodings = []
        win = dlib.image_window()
        for path_to_image in full_paths_to_images:
            print("Loading user: " + path_to_image)
            face = cv2.imread(path_to_image)
            face = cv2.cvtColor(face, cv2.COLOR_BGR2RGB)
            faces_bounds = self.face_detector(face, 1)
            if len(faces_bounds) != 1:
                print("Expected one and only one face per image: " + path_to_image
                      + " - it has " + str(len(faces_bounds)))
                exit()
            face_bounds = faces_bounds[0]
            face_landmarks = self.shape_predictor(face, face_bounds)
            face_encoding = np.array(
                self.face_recognition_model.compute_face_descriptor(face, face_landmarks, 1))
            # Show each enrolled face with its detection box and landmarks.
            win.clear_overlay()
            win.set_image(face)
            win.add_overlay(face_bounds)
            win.add_overlay(face_landmarks)
            face_encodings.append(face_encoding)
        return face_encodings, person_names
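    # Enrolling a new user therefore only requires dropping a single-face photo
    # into face_data/users/ — e.g. a hypothetical face_data/users/alice.jpg
    # would be loaded on startup and labelled "alice".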

    def detect(self, frame_read):
        prev_time = time.time()
        # Resize to the network input size and convert BGR (OpenCV) -> RGB.
        frame_resized = cv2.resize(frame_read,
                                   (darknet.network_width(self.netMain),
                                    darknet.network_height(self.netMain)),
                                   interpolation=cv2.INTER_LINEAR)
        frame_rgb = cv2.cvtColor(frame_resized, cv2.COLOR_BGR2RGB)
        darknet.copy_image_from_bytes(self.darknet_image, frame_rgb.tobytes())
        detections = darknet.detect_image(self.netMain, self.metaMain,
                                          self.darknet_image, thresh=0.25)
        # print(1 / (time.time() - prev_time))  # uncomment to log FPS
        return detections

    # function to get the output layer names in the architecture
    def get_output_layers(self, net):
        layer_names = net.getLayerNames()
        output_layers = [layer_names[i[0] - 1] for i in net.getUnconnectedOutLayers()]
        return output_layers

    # function to draw a bounding box on the detected object with class name
    def draw_bounding_box(self, img, class_id, confidence, x, y, x_plus_w, y_plus_h):
        cv2.rectangle(img, (x, y), (x_plus_w, y_plus_h), (0, 0, 255), 2)
        # cv2.putText(img, label, (x-10, y-10), cv2.FONT_HERSHEY_SIMPLEX, 0.5, color, 2)
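    # Portability note: the `i[0] - 1` indexing assumes an OpenCV build where
    # getUnconnectedOutLayers() returns Nx1 arrays; newer releases (4.5.4+)
    # return a flat array, in which case the expression becomes
    # `layer_names[i - 1]`.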


if __name__ == "__main__":
    # Start Yolo setup
    rn = YOLO_NN('.')
    # Initialize video input
    cap = cv2.VideoCapture(1)
    cap.set(cv2.CAP_PROP_FRAME_WIDTH, 640)
    cap.set(cv2.CAP_PROP_FRAME_HEIGHT, 360)
    face_encodings, person_names = rn.load_face_encodings()
    faceClassifier = cv2.CascadeClassifier(rn.data_dir + '/dlib/haarcascade_frontalface_default.xml')
    # rn.recognize_faces_in_video(face_encodings, person_names)
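    # Note: cv2.VideoCapture(1) above opens the *second* camera on the machine;
    # use index 0 when only one webcam is attached.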
    while True:
        ret, frame_read = cap.read()
        draw_frame = frame_read.copy()
        gray = cv2.cvtColor(frame_read, cv2.COLOR_BGR2GRAY)
        # Darken a 35-pixel strip at the top of the frame for the status text.
        overlay = frame_read.copy()
        cv2.rectangle(overlay, (0, 0), (640, 35), (0, 0, 0), -1)
        alpha = 0.8
        draw_frame = cv2.addWeighted(overlay, alpha, draw_frame, 1 - alpha, 0)
        # YOLO detection: keep only the "person" class.
        detections = rn.detect(frame_read.copy())
        filter_detections = [d for d in detections if d[0] == b'person']
        n_users = 0
        n_persons = 0
        if len(filter_detections) == 0:
            # YOLO found no person; fall back to the Haar face cascade on the
            # whole frame.
            face_rects = faceClassifier.detectMultiScale(
                gray,
                scaleFactor=1.1,
                minNeighbors=5,
                minSize=(50, 50),
                flags=cv2.CASCADE_SCALE_IMAGE)
            n_persons = len(face_rects)
            for (x, y, w, h) in face_rects:
                face = draw_frame[y:y + h, x:x + w]
                face_encodings_in_image = rn.get_face_encodings(face)
                if face_encodings_in_image:
                    match = rn.find_match(face_encodings, person_names, face_encodings_in_image[0])
                    if match == "Not Found":
                        cv2.putText(draw_frame, "Unknown", (x + 5, y - 15),
                                    cv2.FONT_HERSHEY_SIMPLEX, 0.5, (0, 0, 255), 2)
                        cv2.rectangle(draw_frame, (x, y), (x + w, y + h), (0, 0, 255), 2)
                    else:
                        cv2.putText(draw_frame, match, (x + 5, y - 15),
                                    cv2.FONT_HERSHEY_SIMPLEX, 0.5, (0, 255, 0), 2)
                        cv2.rectangle(draw_frame, (x, y), (x + w, y + h), (0, 255, 0), 2)
                        n_users += 1
                else:
                    cv2.putText(draw_frame, "Unknown", (x + 5, y - 15),
                                cv2.FONT_HERSHEY_SIMPLEX, 0.5, (0, 0, 255), 2)
                    cv2.rectangle(draw_frame, (x, y), (x + w, y + h), (0, 0, 255), 2)
        else:
            for detection in filter_detections:
                x1, y1, w1, h1 = detection[2][0], detection[2][1], detection[2][2], detection[2][3]
                xmin, ymin, xmax, ymax = rn.convertBack(float(x1), float(y1), float(w1), float(h1))
                # Rescale from the 416x416 network space back to the 640x360 frame.
                sx = 640.0 / 416.0
                sy = 360.0 / 416.0
                xmin = int(xmin * sx)
                ymin = int(ymin * sy)
                xmax = int(xmax * sx)
                ymax = int(ymax * sy)
                pt1 = (xmin, ymin)
                pt2 = (xmax, ymax)
                cropped = gray[ymin:ymax, xmin:xmax]
                # Look for faces with the Haar cascade, then try to recognise them.
                face_rects = faceClassifier.detectMultiScale(
                    gray,
                    scaleFactor=1.1,
                    minNeighbors=5,
                    minSize=(50, 50),
                    flags=cv2.CASCADE_SCALE_IMAGE)
                n_persons += 1
                if len(face_rects) > 0:
                    for (x, y, w, h) in face_rects:
                        face = cropped[y:y + h, x:x + w]
                        face_encodings_in_image = rn.get_face_encodings(face)
                        # x += xmin
                        # y += ymin
                        if face_encodings_in_image:
                            match = rn.find_match(face_encodings, person_names,
                                                  face_encodings_in_image[0])
                            if match == "Not Found":
                                cv2.putText(draw_frame, "Unknown", (x + 5, y - 15),
                                            cv2.FONT_HERSHEY_SIMPLEX, 0.5, (0, 0, 255), 2)
                                cv2.rectangle(draw_frame, (x, y), (x + w, y + h), (0, 0, 255), 2)
                            else:
                                cv2.putText(draw_frame, match, (x + 5, y - 15),
                                            cv2.FONT_HERSHEY_SIMPLEX, 0.5, (0, 255, 0), 2)
                                cv2.rectangle(draw_frame, (x, y), (x + w, y + h), (0, 255, 0), 2)
                                n_users += 1
                else:
                    cv2.rectangle(draw_frame, pt1, pt2, (0, 0, 255), 2)
                    cv2.putText(draw_frame, "Unknown", (pt1[0], pt1[1] - 5),
                                cv2.FONT_HERSHEY_SIMPLEX, 0.5, (0, 0, 255), 2)
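                # Note: face_rects are computed on the full-frame `gray`, but
                # the crop above indexes into `cropped` and the boxes are drawn
                # with crop-relative coordinates. Restoring frame coordinates
                # would be e.g.
                #   x, y = x + xmin, y + ymin
                # as the commented-out offsets hint.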
        # Status bar: recognised users vs. persons seen in this frame.
        cv2.putText(draw_frame,
                    "InteliCam Users: " + str(n_users) + " | Persons: " + str(n_persons),
                    (5, 20), cv2.FONT_HERSHEY_SIMPLEX, 0.5, [255, 255, 255], 1)
        cv2.imshow("Frame", draw_frame)
        key = cv2.waitKey(3) & 0xFF
        # If the `q` key was pressed, break from the loop.
        if key == ord("q"):
            break
not os.path.exists(self.configPath): raise", "scaleFactor = 1.1, minNeighbors = 5, minSize = (50, 50), flags = cv2.CASCADE_SCALE_IMAGE)", "+ h), (0, 0, 255), 2) else: cv2.rectangle(draw_frame, pt1, pt2, (0, 0, 255),", "face_rects: face = draw_frame[y:y + h, x:x + w] face_encodings_in_image = rn.get_face_encodings(face) if", "match, (x+5, y-15), cv2.FONT_HERSHEY_SIMPLEX, 0.5, (0, 255, 0), 2) cv2.rectangle(draw_frame, (x, y), (x", "2) cv2.putText(draw_frame, \"InteliCam Users: \" + str(n_users) + \" | \"+ \\ \"Persons:", "else: for detection in filter_detections: x1, y1, w1, h1 = detection[2][0],\\ detection[2][1],\\ detection[2][2],\\", "Detect faces with dlib gray, scaleFactor = 1.1, minNeighbors = 5, minSize =", "= int(round(y - (h / 2))) ymax = int(round(y + (h / 2)))", "yoloDataFolder + \"/cfg/coco.data\" print(\"self.configPath: \" + self.configPath) print(\"self.weightPath: \" + self.weightPath) print(\"self.metaPath: \"", "self.faces_folder_path = self.data_dir + '/users/' self.face_detector = dlib.get_frontal_face_detector() self.shape_predictor = dlib.shape_predictor(self.data_dir + '/dlib/shape_predictor_68_face_landmarks.dat')", "(x, y, w, h) in face_rects: face = cropped[y:y + h, x:x +", "/ 2))) xmax = int(round(x + (w / 2))) ymin = int(round(y -", "= rn.convertBack( float(x1), float(y1), float(w1), float(h1)) sx = 640.0/416.0 sy = 360.0/416.0 xmin", "if len(faces_bounds) != 1: print(\"Expected one and only one face per image: \"", "function to get the output layer names # in the architecture def get_output_layers(self,net):", "known_faces, face): return np.linalg.norm(known_faces - face, axis=1) def find_match(self, known_faces, person_names, face): matches", "alpha = 0.8 draw_frame = cv2.addWeighted(overlay, alpha, draw_frame, 1 - alpha, 0) #", "we reuse for each detect self.darknet_image = darknet.make_image(darknet.network_width(self.netMain), darknet.network_height(self.netMain),3) self.data_dir = os.path.expanduser(yoloDataFolder+'/face_data') self.faces_folder_path", "minNeighbors = 5, minSize = (50, 50), flags = cv2.CASCADE_SCALE_IMAGE) n_persons = len(face_rects)", "cv2.rectangle(draw_frame, (x, y), (x + w, y + h), (0, 0, 255), 2)", "cv2.VideoCapture(1) cap.set(cv2.CAP_PROP_FRAME_WIDTH, 640) cap.set(cv2.CAP_PROP_FRAME_HEIGHT, 360) face_encodings, person_names = rn.load_face_encodings() faceClassifier = cv2.CascadeClassifier(rn.data_dir +", "yoloDataFolder + \"/cfg/yolov3-tiny.cfg\" self.weightPath = yoloDataFolder + \"/yolov3-tiny.weights\" self.metaPath = yoloDataFolder + \"/cfg/coco.data\"", "= [x.strip() for x in namesList] except TypeError: pass except Exception: pass #", "= matches.argmin() min_value = matches[min_index] if min_value < 0.55: return person_names[min_index]+\"! ({0:.2f})\".format(min_value) if", "1) if len(faces_bounds) != 1: print(\"Expected one and only one face per image:", "match = rn.find_match(face_encodings, person_names, face_encodings_in_image[0]) if match == \"Not Found\": cv2.putText(draw_frame, \"Unknow\", (x+5,", "face = cv2.cvtColor(face, cv2.COLOR_BGR2RGB) faces_bounds = self.face_detector(face, 1) if len(faces_bounds) != 1: print(\"Expected", "(x + w, y + h), (0, 255, 0), 2) n_users += 1", "min_value < 0.55: return person_names[min_index]+\"! 
({0:.2f})\".format(min_value) if min_value < 0.58: return person_names[min_index]+\" ({0:.2f})\".format(min_value)", "= 5, minSize = (50, 50), flags = cv2.CASCADE_SCALE_IMAGE) n_persons += 1 if", "weight path `\" + os.path.abspath(self.weightPath)+\"`\") if not os.path.exists(self.metaPath): raise ValueError(\"Invalid data file path", "find any face for (x, y, w, h) in face_rects: face = draw_frame[y:y", "= rn.load_face_encodings() faceClassifier = cv2.CascadeClassifier(rn.data_dir + '/dlib/haarcascade_frontalface_default.xml') #rn.recognize_faces_in_video(face_encodings, person_names) while True: ret, frame_read", "os.path.exists(result): with open(result) as namesFH: namesList = namesFH.read().strip().split(\"\\n\") self.altNames = [x.strip() for x", "get the output layer names # in the architecture def get_output_layers(self,net): layer_names =", "face_pose in faces_landmarks] except: return [] return h def get_face_matches(self, known_faces, face): return", "> 0: # Case find any face for (x, y, w, h) in", "re match = re.search(\"names *= *(.*)$\", metaContents, re.IGNORECASE | re.MULTILINE) if match: result", "darknet.copy_image_from_bytes(self.darknet_image, frame_rgb.tobytes()) detections = darknet.detect_image(self.netMain, self.metaMain, self.darknet_image, thresh=0.25) #print(1/(time.time()-prev_time)) return detections # function", "[layer_names[i[0] - 1] for i in net.getUnconnectedOutLayers()] return output_layers # function to draw", "# Case Yolo didn't detected any person, try with dlib face_rects = faceClassifier.detectMultiScale(", "+ os.path.abspath(self.metaPath)+\"`\") if self.netMain is None: self.netMain = darknet.load_net_custom(self.configPath.encode( \"ascii\"), self.weightPath.encode(\"ascii\"), 0, 1)", "frame_read): prev_time = time.time() frame_resized = cv2.resize(frame_read, (darknet.network_width(rn.netMain), darknet.network_height(rn.netMain)), interpolation=cv2.INTER_LINEAR) frame_rgb = cv2.cvtColor(frame_resized,", "len(face_rects) if len(face_rects) > 0: # Case find any face for (x, y,", "return [] return h def get_face_matches(self, known_faces, face): return np.linalg.norm(known_faces - face, axis=1)", "detected object with class name def draw_bounding_box(self,img, class_id, confidence, x, y, x_plus_w, y_plus_h):", "- 5), cv2.FONT_HERSHEY_SIMPLEX, 0.5, [0, 255, 0], 2) return img def get_face_encodings(self, face):", "255, 0], 2) return img def get_face_encodings(self, face): bounds = self.face_detector(face, 1) faces_landmarks", "path_to_image) #face = io.imread(path_to_image) face = cv2.imread(path_to_image) face = cv2.cvtColor(face, cv2.COLOR_BGR2RGB) faces_bounds =", "\"/cfg/coco.data\" print(\"self.configPath: \" + self.configPath) print(\"self.weightPath: \" + self.weightPath) print(\"self.metaPath: \" + self.metaPath)", "ymax = int(ymax*sy) pt1 = (xmin, ymin) pt2 = (xmax, ymax) cropped =", "if not os.path.exists(self.weightPath): raise ValueError(\"Invalid weight path `\" + os.path.abspath(self.weightPath)+\"`\") if not os.path.exists(self.metaPath):", "[self.faces_folder_path + x for x in image_filenames] face_encodings = [] win = dlib.image_window()", "detections, img): for detection in detections: x, y, w, h = detection[2][0],\\ detection[2][1],\\", "(50, 50), flags = cv2.CASCADE_SCALE_IMAGE) n_persons += 1 if len(face_rects) > 0: for", "y, w, h) in face_rects: face = draw_frame[y:y + h, x:x + w]", "faces_bounds[0] face_landmarks = self.shape_predictor(face, face_bounds) face_encoding = 
np.array(self.face_recognition_model.compute_face_descriptor(face, face_landmarks, 1)) win.clear_overlay() win.set_image(face) win.add_overlay(face_bounds)", "any face for (x, y, w, h) in face_rects: face = draw_frame[y:y +", "os.path.exists(self.metaPath): raise ValueError(\"Invalid data file path `\" + os.path.abspath(self.metaPath)+\"`\") if self.netMain is None:", "re.search(\"names *= *(.*)$\", metaContents, re.IGNORECASE | re.MULTILINE) if match: result = match.group(1) else:", "\"__main__\": # Start Yolo Setup rn = YOLO_NN('.') # initialize video input cap", "float(y1), float(w1), float(h1)) sx = 640.0/416.0 sy = 360.0/416.0 xmin = int(xmin*sx) ymin", "y + h), (0, 0, 255), 2) else: cv2.rectangle(draw_frame, pt1, pt2, (0, 0,", "1) faces_landmarks = [self.shape_predictor(face, face_bounds) for face_bounds in bounds] try: h = [np.array(self.face_recognition_model.compute_face_descriptor(face,", "\" + str(n_persons), (5, 20), cv2.FONT_HERSHEY_SIMPLEX, 0.5, [255, 255, 255], 1) cv2.imshow(\"Frame\", draw_frame)", "len(faces_bounds) != 1: print(\"Expected one and only one face per image: \" +", "self.metaMain = darknet.load_meta(self.metaPath.encode(\"ascii\")) if self.altNames is None: try: with open(self.metaPath) as metaFH: metaContents", "os.path.exists(self.weightPath): raise ValueError(\"Invalid weight path `\" + os.path.abspath(self.weightPath)+\"`\") if not os.path.exists(self.metaPath): raise ValueError(\"Invalid", "get a list of True/False min_index = matches.argmin() min_value = matches[min_index] if min_value", "= int(round(x + (w / 2))) ymin = int(round(y - (h / 2)))", "len(face_rects) > 0: # Case find any face for (x, y, w, h)", "w, y + h), (0, 0, 255), 2) else: cv2.putText(draw_frame, match, (x+5, y-15),", "flags = cv2.CASCADE_SCALE_IMAGE) n_persons = len(face_rects) if len(face_rects) > 0: # Case find", "bounds = self.face_detector(face, 1) faces_landmarks = [self.shape_predictor(face, face_bounds) for face_bounds in bounds] try:", "1 - alpha, 0) # Yolo Detection detections = rn.detect(frame_read.copy()) filter_detections = []", "[0, 255, 0], 2) return img def get_face_encodings(self, face): bounds = self.face_detector(face, 1)", "path_to_image + \" - it has \" + str(len(faces_bounds))) exit() face_bounds = faces_bounds[0]", "yoloDataFolder): self.configPath = yoloDataFolder + \"/cfg/yolov3-tiny.cfg\" self.weightPath = yoloDataFolder + \"/yolov3-tiny.weights\" self.metaPath =", "filter_detections.append(detection) if len(filter_detections) == 0: # Case Yolo didn't detected any person, try", "float(x1), float(y1), float(w1), float(h1)) sx = 640.0/416.0 sy = 360.0/416.0 xmin = int(xmin*sx)", "255, 0), 1) cv2.putText(img, detection[0].decode() + \" [\" + str(round(detection[1] * 100, 2))", "(0, 0, 255), 2) else: cv2.rectangle(draw_frame, pt1, pt2, (0, 0, 255), 2) cv2.putText(draw_frame,", "50), flags = cv2.CASCADE_SCALE_IMAGE) n_persons = len(face_rects) if len(face_rects) > 0: # Case", "sorted(image_filenames) person_names = [x[:-4] for x in image_filenames] full_paths_to_images = [self.faces_folder_path + x", "5, minSize = (50, 50), flags = cv2.CASCADE_SCALE_IMAGE) n_persons = len(face_rects) if len(face_rects)", "+ h, x:x + w] face_encodings_in_image = rn.get_face_encodings(face) if (face_encodings_in_image): match = rn.find_match(face_encodings,", "2))) ymax = int(round(y + (h / 2))) return xmin, ymin, xmax, ymax", "label, (x-10,y-10), cv2.FONT_HERSHEY_SIMPLEX, 0.5, color, 2) if __name__ == \"__main__\": # Start Yolo", "(x-10,y-10), cv2.FONT_HERSHEY_SIMPLEX, 0.5, color, 2) 
if __name__ == \"__main__\": # Start Yolo Setup", "ret, frame_read = cap.read() draw_frame = frame_read.copy() gray = cv2.cvtColor(frame_read, cv2.COLOR_BGR2GRAY) overlay =", "if len(filter_detections) == 0: # Case Yolo didn't detected any person, try with", "in namesList] except TypeError: pass except Exception: pass # Create an image we", "self.altNames = None if not os.path.exists(self.configPath): raise ValueError(\"Invalid config path `\" + os.path.abspath(self.configPath)+\"`\")", "Create an image we reuse for each detect self.darknet_image = darknet.make_image(darknet.network_width(self.netMain), darknet.network_height(self.netMain),3) self.data_dir", "cv2.rectangle(draw_frame, (x, y), (x + w, y + h), (0, 255, 0), 2)", "= 640.0/416.0 sy = 360.0/416.0 xmin = int(xmin*sx) ymin = int(ymin*sy) xmax =", "min_index = matches.argmin() min_value = matches[min_index] if min_value < 0.55: return person_names[min_index]+\"! ({0:.2f})\".format(min_value)", "return h def get_face_matches(self, known_faces, face): return np.linalg.norm(known_faces - face, axis=1) def find_match(self,", "os import dlib import numpy as np import cv2 import time import darknet", "255), 2) cv2.rectangle(draw_frame, (x, y), (x + w, y + h), (0, 0,", "for i in net.getUnconnectedOutLayers()] return output_layers # function to draw bounding box on", "for detection in filter_detections: x1, y1, w1, h1 = detection[2][0],\\ detection[2][1],\\ detection[2][2],\\ detection[2][3]", "flags = cv2.CASCADE_SCALE_IMAGE) n_persons += 1 if len(face_rects) > 0: for (x, y,", "metaFH.read() import re match = re.search(\"names *= *(.*)$\", metaContents, re.IGNORECASE | re.MULTILINE) if", "return person_names[min_index]+\"?\"+\" ({0:.2f})\".format(min_value) return 'Not Found' def load_face_encodings(self): image_filenames = filter(lambda x: x.endswith('.jpg'),", "faces_bounds = self.face_detector(face, 1) if len(faces_bounds) != 1: print(\"Expected one and only one", "face = cv2.imread(path_to_image) face = cv2.cvtColor(face, cv2.COLOR_BGR2RGB) faces_bounds = self.face_detector(face, 1) if len(faces_bounds)", "import random class YOLO_NN: def __init__(self, yoloDataFolder): self.configPath = yoloDataFolder + \"/cfg/yolov3-tiny.cfg\" self.weightPath", "0.5, (0, 0, 255), 2) cv2.putText(draw_frame, \"InteliCam Users: \" + str(n_users) + \"", "float(h)) pt1 = (xmin, ymin) pt2 = (xmax, ymax) cv2.rectangle(img, pt1, pt2, (0,", "open(result) as namesFH: namesList = namesFH.read().strip().split(\"\\n\") self.altNames = [x.strip() for x in namesList]", "face_encodings, person_names = rn.load_face_encodings() faceClassifier = cv2.CascadeClassifier(rn.data_dir + '/dlib/haarcascade_frontalface_default.xml') #rn.recognize_faces_in_video(face_encodings, person_names) while True:", "didn't detected any person, try with dlib face_rects = faceClassifier.detectMultiScale( # Detect faces", "cv2.CASCADE_SCALE_IMAGE) n_persons += 1 if len(face_rects) > 0: for (x, y, w, h)", "class YOLO_NN: def __init__(self, yoloDataFolder): self.configPath = yoloDataFolder + \"/cfg/yolov3-tiny.cfg\" self.weightPath = yoloDataFolder", "\"]\", (pt1[0], pt1[1] - 5), cv2.FONT_HERSHEY_SIMPLEX, 0.5, [0, 255, 0], 2) return img", "+ os.path.abspath(self.configPath)+\"`\") if not os.path.exists(self.weightPath): raise ValueError(\"Invalid weight path `\" + os.path.abspath(self.weightPath)+\"`\") if", "x in namesList] except TypeError: pass except Exception: pass # Create an image", "cv2.putText(img, detection[0].decode() + \" [\" + str(round(detection[1] * 100, 2)) + \"]\", 
(pt1[0],", "= None self.altNames = None if not os.path.exists(self.configPath): raise ValueError(\"Invalid config path `\"", "= yoloDataFolder + \"/cfg/yolov3-tiny.cfg\" self.weightPath = yoloDataFolder + \"/yolov3-tiny.weights\" self.metaPath = yoloDataFolder +", "= frame_read.copy() cv2.rectangle(overlay, (0, 0), (640, 35), (0, 0, 0), -1) alpha =", "each detect self.darknet_image = darknet.make_image(darknet.network_width(self.netMain), darknet.network_height(self.netMain),3) self.data_dir = os.path.expanduser(yoloDataFolder+'/face_data') self.faces_folder_path = self.data_dir +", "= detection[2][0],\\ detection[2][1],\\ detection[2][2],\\ detection[2][3] xmin, ymin, xmax, ymax = self.convertBack( float(x), float(y),", "+ w, y + h), (0, 255, 0), 2) n_users += 1 else:", "self.data_dir + '/users/' self.face_detector = dlib.get_frontal_face_detector() self.shape_predictor = dlib.shape_predictor(self.data_dir + '/dlib/shape_predictor_68_face_landmarks.dat') self.face_recognition_model =", "person_names = [x[:-4] for x in image_filenames] full_paths_to_images = [self.faces_folder_path + x for", "= self.get_face_matches(known_faces, face) # get a list of True/False min_index = matches.argmin() min_value", "2))) return xmin, ymin, xmax, ymax def cvDrawBoxes(self, detections, img): for detection in", "pt2 = (xmax, ymax) cv2.rectangle(img, pt1, pt2, (0, 255, 0), 1) cv2.putText(img, detection[0].decode()", "0), 2) n_users += 1 else: cv2.putText(draw_frame, \"Unknow\", (x+5, y-15), cv2.FONT_HERSHEY_SIMPLEX, 0.5, (0,", "self.shape_predictor(face, face_bounds) face_encoding = np.array(self.face_recognition_model.compute_face_descriptor(face, face_landmarks, 1)) win.clear_overlay() win.set_image(face) win.add_overlay(face_bounds) win.add_overlay(face_landmarks) face_encodings.append(face_encoding) #print(face_encoding)", "initialize video input cap = cv2.VideoCapture(1) cap.set(cv2.CAP_PROP_FRAME_WIDTH, 640) cap.set(cv2.CAP_PROP_FRAME_HEIGHT, 360) face_encodings, person_names =", "person filter_detections.append(detection) if len(filter_detections) == 0: # Case Yolo didn't detected any person,", "path `\" + os.path.abspath(self.metaPath)+\"`\") if self.netMain is None: self.netMain = darknet.load_net_custom(self.configPath.encode( \"ascii\"), self.weightPath.encode(\"ascii\"),", "convertBack(self, x, y, w, h): xmin = int(round(x - (w / 2))) xmax", "= dlib.get_frontal_face_detector() self.shape_predictor = dlib.shape_predictor(self.data_dir + '/dlib/shape_predictor_68_face_landmarks.dat') self.face_recognition_model = dlib.face_recognition_model_v1(self.data_dir + '/dlib/dlib_face_recognition_resnet_model_v1.dat') def", "cv2.putText(draw_frame, \"Unknow\", (x+5, y-15), cv2.FONT_HERSHEY_SIMPLEX, 0.5, (0, 0, 255), 2) cv2.rectangle(draw_frame, (x, y),", "os.path.abspath(self.weightPath)+\"`\") if not os.path.exists(self.metaPath): raise ValueError(\"Invalid data file path `\" + os.path.abspath(self.metaPath)+\"`\") if", "w] face_encodings_in_image = rn.get_face_encodings(face) if (face_encodings_in_image): match = rn.find_match(face_encodings, person_names, face_encodings_in_image[0]) if match", "gray[ymin:ymax, xmin:xmax] face_rects = faceClassifier.detectMultiScale( # Detect faces with dlib gray, scaleFactor =", "w, h) in face_rects: face = cropped[y:y + h, x:x + w] face_encodings_in_image", "draw_frame[y:y + h, x:x + w] face_encodings_in_image = rn.get_face_encodings(face) if (face_encodings_in_image): match =", "faces_landmarks] except: return [] return h def get_face_matches(self, known_faces, face): 
return np.linalg.norm(known_faces -", "image_filenames] face_encodings = [] win = dlib.image_window() for path_to_image in full_paths_to_images: print(\"Loading user:", "(x + w, y + h), (0, 0, 255), 2) else: for detection", "0: # Case Yolo didn't detected any person, try with dlib face_rects =", "yoloDataFolder + \"/yolov3-tiny.weights\" self.metaPath = yoloDataFolder + \"/cfg/coco.data\" print(\"self.configPath: \" + self.configPath) print(\"self.weightPath:", "matches.argmin() min_value = matches[min_index] if min_value < 0.55: return person_names[min_index]+\"! ({0:.2f})\".format(min_value) if min_value", "match: result = match.group(1) else: result = None try: if os.path.exists(result): with open(result)", "= int(round(y + (h / 2))) return xmin, ymin, xmax, ymax def cvDrawBoxes(self,", "#print(1/(time.time()-prev_time)) return detections # function to get the output layer names # in", "= cv2.cvtColor(frame_read, cv2.COLOR_BGR2GRAY) overlay = frame_read.copy() cv2.rectangle(overlay, (0, 0), (640, 35), (0, 0,", "None self.metaMain = None self.altNames = None if not os.path.exists(self.configPath): raise ValueError(\"Invalid config", "face_encodings = [] win = dlib.image_window() for path_to_image in full_paths_to_images: print(\"Loading user: \"", "+ os.path.abspath(self.weightPath)+\"`\") if not os.path.exists(self.metaPath): raise ValueError(\"Invalid data file path `\" + os.path.abspath(self.metaPath)+\"`\")", "= [layer_names[i[0] - 1] for i in net.getUnconnectedOutLayers()] return output_layers # function to", "0, 255), 2) cv2.putText(draw_frame, \"InteliCam Users: \" + str(n_users) + \" | \"+", "detections = rn.detect(frame_read.copy()) filter_detections = [] n_users = 0 n_persons = 0 for", "xmax = int(xmax*sx) ymax = int(ymax*sy) pt1 = (xmin, ymin) pt2 = (xmax,", "255), 2) cv2.putText(draw_frame, \"Unknow\", (pt1[0], pt1[1] - 5), cv2.FONT_HERSHEY_SIMPLEX, 0.5, (0, 0, 255),", "min_value = matches[min_index] if min_value < 0.55: return person_names[min_index]+\"! 
({0:.2f})\".format(min_value) if min_value <", "(w / 2))) ymin = int(round(y - (h / 2))) ymax = int(round(y", "+= xmin #y += ymin if (face_encodings_in_image): match = rn.find_match(face_encodings, person_names, face_encodings_in_image[0]) if", "one face per image: \" + path_to_image + \" - it has \"", "face) # get a list of True/False min_index = matches.argmin() min_value = matches[min_index]", "/ 2))) return xmin, ymin, xmax, ymax def cvDrawBoxes(self, detections, img): for detection", "xmin = int(round(x - (w / 2))) xmax = int(round(x + (w /", "(xmin, ymin) pt2 = (xmax, ymax) cv2.rectangle(img, pt1, pt2, (0, 255, 0), 1)", "1) cv2.putText(img, detection[0].decode() + \" [\" + str(round(detection[1] * 100, 2)) + \"]\",", "- 1] for i in net.getUnconnectedOutLayers()] return output_layers # function to draw bounding", "in image_filenames] full_paths_to_images = [self.faces_folder_path + x for x in image_filenames] face_encodings =", "interpolation=cv2.INTER_LINEAR) frame_rgb = cv2.cvtColor(frame_resized, cv2.COLOR_BGR2RGB) darknet.copy_image_from_bytes(self.darknet_image, frame_rgb.tobytes()) detections = darknet.detect_image(self.netMain, self.metaMain, self.darknet_image, thresh=0.25)", "\"/yolov3-tiny.weights\" self.metaPath = yoloDataFolder + \"/cfg/coco.data\" print(\"self.configPath: \" + self.configPath) print(\"self.weightPath: \" +", "2) else: for detection in filter_detections: x1, y1, w1, h1 = detection[2][0],\\ detection[2][1],\\", "time.time() frame_resized = cv2.resize(frame_read, (darknet.network_width(rn.netMain), darknet.network_height(rn.netMain)), interpolation=cv2.INTER_LINEAR) frame_rgb = cv2.cvtColor(frame_resized, cv2.COLOR_BGR2RGB) darknet.copy_image_from_bytes(self.darknet_image, frame_rgb.tobytes())", "face_encoding = np.array(self.face_recognition_model.compute_face_descriptor(face, face_landmarks, 1)) win.clear_overlay() win.set_image(face) win.add_overlay(face_bounds) win.add_overlay(face_landmarks) face_encodings.append(face_encoding) #print(face_encoding) #dlib.hit_enter_to_continue() return", "cv2.cvtColor(frame_resized, cv2.COLOR_BGR2RGB) darknet.copy_image_from_bytes(self.darknet_image, frame_rgb.tobytes()) detections = darknet.detect_image(self.netMain, self.metaMain, self.darknet_image, thresh=0.25) #print(1/(time.time()-prev_time)) return detections", "filter_detections = [] n_users = 0 n_persons = 0 for detection in detections:", "metaFH: metaContents = metaFH.read() import re match = re.search(\"names *= *(.*)$\", metaContents, re.IGNORECASE", "= cv2.VideoCapture(1) cap.set(cv2.CAP_PROP_FRAME_WIDTH, 640) cap.set(cv2.CAP_PROP_FRAME_HEIGHT, 360) face_encodings, person_names = rn.load_face_encodings() faceClassifier = cv2.CascadeClassifier(rn.data_dir", "x: x.endswith('.jpg'), os.listdir(self.faces_folder_path)) image_filenames = sorted(image_filenames) person_names = [x[:-4] for x in image_filenames]", "= 0 for detection in detections: if detection[0] == b'person': # It is", "Detection detections = rn.detect(frame_read.copy()) filter_detections = [] n_users = 0 n_persons = 0", "y), (x + w, y + h), (0, 0, 255), 2) else: cv2.rectangle(draw_frame,", "+ '/dlib/haarcascade_frontalface_default.xml') #rn.recognize_faces_in_video(face_encodings, person_names) while True: ret, frame_read = cap.read() draw_frame = frame_read.copy()", "+= 1 else: cv2.putText(draw_frame, \"Unknow\", (x+5, y-15), cv2.FONT_HERSHEY_SIMPLEX, 0.5, (0, 0, 255), 2)", "person_names[min_index]+\"?\"+\" ({0:.2f})\".format(min_value) return 'Not Found' def load_face_encodings(self): 
image_filenames = filter(lambda x: x.endswith('.jpg'), os.listdir(self.faces_folder_path))", "else: cv2.putText(draw_frame, match, (x+5, y-15), cv2.FONT_HERSHEY_SIMPLEX, 0.5, (0, 255, 0), 2) cv2.rectangle(draw_frame, (x,", "(x, y), (x + w, y + h), (0, 255, 0), 2) n_users", "float(y), float(w), float(h)) pt1 = (xmin, ymin) pt2 = (xmax, ymax) cv2.rectangle(img, pt1,", "result = match.group(1) else: result = None try: if os.path.exists(result): with open(result) as", "+ \" [\" + str(round(detection[1] * 100, 2)) + \"]\", (pt1[0], pt1[1] -", "- (w / 2))) xmax = int(round(x + (w / 2))) ymin =", "!= 1: print(\"Expected one and only one face per image: \" + path_to_image", "#print(face_encoding) #dlib.hit_enter_to_continue() return face_encodings, person_names def detect(self, frame_read): prev_time = time.time() frame_resized =", "win.set_image(face) win.add_overlay(face_bounds) win.add_overlay(face_landmarks) face_encodings.append(face_encoding) #print(face_encoding) #dlib.hit_enter_to_continue() return face_encodings, person_names def detect(self, frame_read): prev_time", "ctypes import * import math import random class YOLO_NN: def __init__(self, yoloDataFolder): self.configPath", "None: try: with open(self.metaPath) as metaFH: metaContents = metaFH.read() import re match =", "str(n_users) + \" | \"+ \\ \"Persons: \" + str(n_persons), (5, 20), cv2.FONT_HERSHEY_SIMPLEX,", "in detections: x, y, w, h = detection[2][0],\\ detection[2][1],\\ detection[2][2],\\ detection[2][3] xmin, ymin,", "+ '/dlib/shape_predictor_68_face_landmarks.dat') self.face_recognition_model = dlib.face_recognition_model_v1(self.data_dir + '/dlib/dlib_face_recognition_resnet_model_v1.dat') def convertBack(self, x, y, w, h):", "= darknet.make_image(darknet.network_width(self.netMain), darknet.network_height(self.netMain),3) self.data_dir = os.path.expanduser(yoloDataFolder+'/face_data') self.faces_folder_path = self.data_dir + '/users/' self.face_detector =", "find_match(self, known_faces, person_names, face): matches = self.get_face_matches(known_faces, face) # get a list of", "= yoloDataFolder + \"/yolov3-tiny.weights\" self.metaPath = yoloDataFolder + \"/cfg/coco.data\" print(\"self.configPath: \" + self.configPath)", "os.path.abspath(self.metaPath)+\"`\") if self.netMain is None: self.netMain = darknet.load_net_custom(self.configPath.encode( \"ascii\"), self.weightPath.encode(\"ascii\"), 0, 1) #", "= [x[:-4] for x in image_filenames] full_paths_to_images = [self.faces_folder_path + x for x", "+ self.configPath) print(\"self.weightPath: \" + self.weightPath) print(\"self.metaPath: \" + self.metaPath) self.netMain = None", "of True/False min_index = matches.argmin() min_value = matches[min_index] if min_value < 0.55: return", "face = draw_frame[y:y + h, x:x + w] face_encodings_in_image = rn.get_face_encodings(face) if (face_encodings_in_image):", "= 360.0/416.0 xmin = int(xmin*sx) ymin = int(ymin*sy) xmax = int(xmax*sx) ymax =", "= match.group(1) else: result = None try: if os.path.exists(result): with open(result) as namesFH:", "1)) win.clear_overlay() win.set_image(face) win.add_overlay(face_bounds) win.add_overlay(face_landmarks) face_encodings.append(face_encoding) #print(face_encoding) #dlib.hit_enter_to_continue() return face_encodings, person_names def detect(self,", "# if the `q` key was pressed, break from the loop if key", "= re.search(\"names *= *(.*)$\", metaContents, re.IGNORECASE | re.MULTILINE) if match: result = match.group(1)", "person_names, face_encodings_in_image[0]) if match == \"Not Found\": 
cv2.putText(draw_frame, \"Unknow\", (x+5, y-15), cv2.FONT_HERSHEY_SIMPLEX, 0.5,", "0, 0), -1) alpha = 0.8 draw_frame = cv2.addWeighted(overlay, alpha, draw_frame, 1 -", "detect self.darknet_image = darknet.make_image(darknet.network_width(self.netMain), darknet.network_height(self.netMain),3) self.data_dir = os.path.expanduser(yoloDataFolder+'/face_data') self.faces_folder_path = self.data_dir + '/users/'", "darknet.load_net_custom(self.configPath.encode( \"ascii\"), self.weightPath.encode(\"ascii\"), 0, 1) # batch size = 1 if self.metaMain is", "int(round(x - (w / 2))) xmax = int(round(x + (w / 2))) ymin", "ymax = self.convertBack( float(x), float(y), float(w), float(h)) pt1 = (xmin, ymin) pt2 =", "2)) + \"]\", (pt1[0], pt1[1] - 5), cv2.FONT_HERSHEY_SIMPLEX, 0.5, [0, 255, 0], 2)", "faceClassifier = cv2.CascadeClassifier(rn.data_dir + '/dlib/haarcascade_frontalface_default.xml') #rn.recognize_faces_in_video(face_encodings, person_names) while True: ret, frame_read = cap.read()", "dlib.image_window() for path_to_image in full_paths_to_images: print(\"Loading user: \" + path_to_image) #face = io.imread(path_to_image)", "0), (640, 35), (0, 0, 0), -1) alpha = 0.8 draw_frame = cv2.addWeighted(overlay,", "if len(face_rects) > 0: # Case find any face for (x, y, w,", "({0:.2f})\".format(min_value) return 'Not Found' def load_face_encodings(self): image_filenames = filter(lambda x: x.endswith('.jpg'), os.listdir(self.faces_folder_path)) image_filenames", "- it has \" + str(len(faces_bounds))) exit() face_bounds = faces_bounds[0] face_landmarks = self.shape_predictor(face,", "(0, 255, 0), 1) cv2.putText(img, detection[0].decode() + \" [\" + str(round(detection[1] * 100,", "return face_encodings, person_names def detect(self, frame_read): prev_time = time.time() frame_resized = cv2.resize(frame_read, (darknet.network_width(rn.netMain),", "w, h): xmin = int(round(x - (w / 2))) xmax = int(round(x +", "/ 2))) ymin = int(round(y - (h / 2))) ymax = int(round(y +", "for each detect self.darknet_image = darknet.make_image(darknet.network_width(self.netMain), darknet.network_height(self.netMain),3) self.data_dir = os.path.expanduser(yoloDataFolder+'/face_data') self.faces_folder_path = self.data_dir", "cv2.putText(draw_frame, \"Unknow\", (pt1[0], pt1[1] - 5), cv2.FONT_HERSHEY_SIMPLEX, 0.5, (0, 0, 255), 2) cv2.putText(draw_frame,", "from ctypes import * import math import random class YOLO_NN: def __init__(self, yoloDataFolder):", "(h / 2))) return xmin, ymin, xmax, ymax def cvDrawBoxes(self, detections, img): for", "with open(result) as namesFH: namesList = namesFH.read().strip().split(\"\\n\") self.altNames = [x.strip() for x in", "0: for (x, y, w, h) in face_rects: face = cropped[y:y + h,", "key was pressed, break from the loop if key == ord(\"q\"): break cv2.destroyAllWindows()", "= self.convertBack( float(x), float(y), float(w), float(h)) pt1 = (xmin, ymin) pt2 = (xmax,", "face_encodings_in_image = rn.get_face_encodings(face) if (face_encodings_in_image): match = rn.find_match(face_encodings, person_names, face_encodings_in_image[0]) if match ==", "self.face_recognition_model = dlib.face_recognition_model_v1(self.data_dir + '/dlib/dlib_face_recognition_resnet_model_v1.dat') def convertBack(self, x, y, w, h): xmin =" ]
[ "the correct choice questionText = Text(Point(500, 225), f\"rgb({colors[correctChoice][0]}, {colors[correctChoice][1]}, {colors[correctChoice][2]})\") questionText.setFill('white') questionText.setSize(25) questionText.setStyle('bold')", "global colors global correctChoice colors = [] for i in range(4): rand_r =", "of the player score = 0 scoreText = Text(Point(500, 155), f\"SCORE: {score}\") scoreText.setFill('white')", "the mouse mousePos = win.getMouse() mouseX = mousePos.getX() mouseY = mousePos.getY() # check", "''' main game ''' gameover = False # create a rectangle that fills", "correct otherwise incorrect for i in range(4): currentSquare = squares[i] currentX1 = currentSquare.getP1().getX()", "square, if it did display correct otherwise incorrect for i in range(4): currentSquare", "def randomise_answers(): global colors global correctChoice colors = [] for i in range(4):", "square_clicked = wait_for_click() if square_clicked == correctChoice: score += 1 scoreText.setText(f\"SCORE: {score}\") randomise_answers()", "mouseY < currentY2: return i ''' main game ''' gameover = False #", "create_squares(225, 325, 100, 50) questionText.setText(f\"rgb({colors[correctChoice][0]}, {colors[correctChoice][1]}, {colors[correctChoice][2]})\") else: bgRect.setFill(color_rgb(colors[square_clicked][0], colors[square_clicked][1], colors[square_clicked][2])) bgRect.draw(win) resultText.setText(\"TOO", "currentSquare = squares[i] currentX1 = currentSquare.getP1().getX() currentY1 = currentSquare.getP1().getY() currentX2 = currentSquare.getP2().getX() currentY2", "of the mouse mousePos = win.getMouse() mouseX = mousePos.getX() mouseY = mousePos.getY() #", "spaced across the page def create_squares(x, y, sideLen, spacing): global squares squares =", "create_squares(225, 325, 100, 50) def wait_for_click(): while True: # get the click position", "colors[i][2])) # draw the square in the window square.draw(win) squares.append(square) create_squares(225, 325, 100,", "255) rand_g = random.randint(0, 255) rand_b = random.randint(0, 255) colors.append([rand_r, rand_g, rand_b]) correctChoice", "all the required python libaries: graphics and random from graphics import * import", "350) scoreText.undraw() scoreText.draw(win) gameover = True # wait for click to close window", "if it did display correct otherwise incorrect for i in range(4): currentSquare =", "currentX1 = currentSquare.getP1().getX() currentY1 = currentSquare.getP1().getY() currentX2 = currentSquare.getP2().getX() currentY2 = currentSquare.getP2().getY() if", "= int # generate random colors and questions def randomise_answers(): global colors global", "# create the graphics window and set background colour win = GraphWin(\"Colour Guessing", "of the correct choice questionText = Text(Point(500, 225), f\"rgb({colors[correctChoice][0]}, {colors[correctChoice][1]}, {colors[correctChoice][2]})\") questionText.setFill('white') questionText.setSize(25)", "if square_clicked == correctChoice: score += 1 scoreText.setText(f\"SCORE: {score}\") randomise_answers() create_squares(225, 325, 100,", "scoreText.setSize(24) scoreText.anchor = Point(500, 350) scoreText.undraw() scoreText.draw(win) gameover = True # wait for", "[] # create 4 squares of random colour evenly spaced across the page", "i ''' main game ''' gameover = False # create a rectangle that", "rand_g = random.randint(0, 255) rand_b = random.randint(0, 255) colors.append([rand_r, rand_g, rand_b]) correctChoice =", "graphics import * import random # create the graphics window and set background", 
"Rectangle(Point(0, 0), Point(1000, 135)) titleBg.setOutline('steelblue') titleBg.setFill('steelblue') titleBg.draw(win) title = Text(Point(500, 67.5),\"RGB Colour Guessing", "based on the current 'i' value square = Rectangle(Point(x+i*sideLen+i*spacing, y), Point(x+(i+1)*sideLen+i*spacing, y+sideLen)) #", "(correct/incorrect) resultText = Text(Point(500, 125),\"\") resultText.setSize(128) resultText.setFill('white') # create a Text box that", "that will display the rgb of the correct choice questionText = Text(Point(500, 225),", "title for your game titleBg = Rectangle(Point(0, 0), Point(1000, 135)) titleBg.setOutline('steelblue') titleBg.setFill('steelblue') titleBg.draw(win)", "{colors[correctChoice][1]}, {colors[correctChoice][2]})\") else: bgRect.setFill(color_rgb(colors[square_clicked][0], colors[square_clicked][1], colors[square_clicked][2])) bgRect.draw(win) resultText.setText(\"TOO BAD\") resultText.draw(win) scoreText.setSize(24) scoreText.anchor =", "currentSquare.getP1().getY() currentX2 = currentSquare.getP2().getX() currentY2 = currentSquare.getP2().getY() if mouseX > currentX1 and mouseX", "= [] # create 4 squares of random colour evenly spaced across the", "3) randomise_answers() squares = [] # create 4 squares of random colour evenly", "it did display correct otherwise incorrect for i in range(4): currentSquare = squares[i]", "= Text(Point(500, 155), f\"SCORE: {score}\") scoreText.setFill('white') scoreText.setSize(12) scoreText.draw(win) while gameover == False: square_clicked", "False # create a rectangle that fills the whole screen bgRect = Rectangle(Point(0,", "questionText.setStyle('bold') questionText.draw(win) # create a Text box that will display the score of", "def wait_for_click(): while True: # get the click position of the mouse mousePos", "window square.draw(win) squares.append(square) create_squares(225, 325, 100, 50) def wait_for_click(): while True: # get", "squares squares = [] for i in range(4): # create a square (Rectangle)", "the fill of the square to the random values of r,g and b", "and random from graphics import * import random # create the graphics window", "4 squares of random colour evenly spaced across the page def create_squares(x, y,", "# generate random colors and questions def randomise_answers(): global colors global correctChoice colors", "BAD\") resultText.draw(win) scoreText.setSize(24) scoreText.anchor = Point(500, 350) scoreText.undraw() scoreText.draw(win) gameover = True #", "[] for i in range(4): # create a square (Rectangle) that is positioned", "positioned based on the current 'i' value square = Rectangle(Point(x+i*sideLen+i*spacing, y), Point(x+(i+1)*sideLen+i*spacing, y+sideLen))", "and set background colour win = GraphWin(\"Colour Guessing Game\", 1000, 500) win.setBackground('#232323') #", "= Text(Point(500, 125),\"\") resultText.setSize(128) resultText.setFill('white') # create a Text box that will display", "f\"SCORE: {score}\") scoreText.setFill('white') scoreText.setSize(12) scoreText.draw(win) while gameover == False: square_clicked = wait_for_click() if", "for i in range(4): # create a square (Rectangle) that is positioned based", "currentX2 and mouseY > currentY1 and mouseY < currentY2: return i ''' main", "a Text box that will display the rgb of the correct choice questionText", "= mousePos.getY() # check if the mouse clicked on the correct square, if", "mousePos.getY() # check if the mouse clicked on the correct square, if it", "page def create_squares(x, y, sideLen, spacing): global squares squares = [] for i", "game 
titleBg = Rectangle(Point(0, 0), Point(1000, 135)) titleBg.setOutline('steelblue') titleBg.setFill('steelblue') titleBg.draw(win) title = Text(Point(500,", "the current 'i' value square = Rectangle(Point(x+i*sideLen+i*spacing, y), Point(x+(i+1)*sideLen+i*spacing, y+sideLen)) # set the", "the mouse clicked on the correct square, if it did display correct otherwise", "== correctChoice: score += 1 scoreText.setText(f\"SCORE: {score}\") randomise_answers() create_squares(225, 325, 100, 50) questionText.setText(f\"rgb({colors[correctChoice][0]},", "currentX2 = currentSquare.getP2().getX() currentY2 = currentSquare.getP2().getY() if mouseX > currentX1 and mouseX <", "= 0 scoreText = Text(Point(500, 155), f\"SCORE: {score}\") scoreText.setFill('white') scoreText.setSize(12) scoreText.draw(win) while gameover", "that is positioned based on the current 'i' value square = Rectangle(Point(x+i*sideLen+i*spacing, y),", "random.randint(0, 255) colors.append([rand_r, rand_g, rand_b]) correctChoice = random.randint(0, 3) randomise_answers() squares = []", "draw the square in the window square.draw(win) squares.append(square) create_squares(225, 325, 100, 50) def", "scoreText.setSize(12) scoreText.draw(win) while gameover == False: square_clicked = wait_for_click() if square_clicked == correctChoice:", "colors[i][1], colors[i][2])) # draw the square in the window square.draw(win) squares.append(square) create_squares(225, 325,", "currentSquare.getP1().getX() currentY1 = currentSquare.getP1().getY() currentX2 = currentSquare.getP2().getX() currentY2 = currentSquare.getP2().getY() if mouseX >", "the results of the guess (correct/incorrect) resultText = Text(Point(500, 125),\"\") resultText.setSize(128) resultText.setFill('white') #", "mouseX > currentX1 and mouseX < currentX2 and mouseY > currentY1 and mouseY", "'i' value square = Rectangle(Point(x+i*sideLen+i*spacing, y), Point(x+(i+1)*sideLen+i*spacing, y+sideLen)) # set the fill of", "= currentSquare.getP1().getY() currentX2 = currentSquare.getP2().getX() currentY2 = currentSquare.getP2().getY() if mouseX > currentX1 and", "range(4): rand_r = random.randint(0, 255) rand_g = random.randint(0, 255) rand_b = random.randint(0, 255)", "500) win.setBackground('#232323') # create a title for your game titleBg = Rectangle(Point(0, 0),", "the page def create_squares(x, y, sideLen, spacing): global squares squares = [] for", "# import all the required python libaries: graphics and random from graphics import", "y), Point(x+(i+1)*sideLen+i*spacing, y+sideLen)) # set the fill of the square to the random", "1 scoreText.setText(f\"SCORE: {score}\") randomise_answers() create_squares(225, 325, 100, 50) questionText.setText(f\"rgb({colors[correctChoice][0]}, {colors[correctChoice][1]}, {colors[correctChoice][2]})\") else: bgRect.setFill(color_rgb(colors[square_clicked][0],", "Text(Point(500, 225), f\"rgb({colors[correctChoice][0]}, {colors[correctChoice][1]}, {colors[correctChoice][2]})\") questionText.setFill('white') questionText.setSize(25) questionText.setStyle('bold') questionText.draw(win) # create a Text", "range(4): # create a square (Rectangle) that is positioned based on the current", "and questions def randomise_answers(): global colors global correctChoice colors = [] for i", "Game\", 1000, 500) win.setBackground('#232323') # create a title for your game titleBg =", "choice questionText = Text(Point(500, 225), f\"rgb({colors[correctChoice][0]}, {colors[correctChoice][1]}, {colors[correctChoice][2]})\") questionText.setFill('white') questionText.setSize(25) 
questionText.setStyle('bold') questionText.draw(win) #", "currentX1 and mouseX < currentX2 and mouseY > currentY1 and mouseY < currentY2:", "# create a title for your game titleBg = Rectangle(Point(0, 0), Point(1000, 135))", "else: bgRect.setFill(color_rgb(colors[square_clicked][0], colors[square_clicked][1], colors[square_clicked][2])) bgRect.draw(win) resultText.setText(\"TOO BAD\") resultText.draw(win) scoreText.setSize(24) scoreText.anchor = Point(500, 350)", "100, 50) def wait_for_click(): while True: # get the click position of the", "Text box that will display the score of the player score = 0", "random colors and questions def randomise_answers(): global colors global correctChoice colors = []", "= random.randint(0, 255) colors.append([rand_r, rand_g, rand_b]) correctChoice = random.randint(0, 3) randomise_answers() squares =", "the click position of the mouse mousePos = win.getMouse() mouseX = mousePos.getX() mouseY", "Guessing Game\", 1000, 500) win.setBackground('#232323') # create a title for your game titleBg", "otherwise incorrect for i in range(4): currentSquare = squares[i] currentX1 = currentSquare.getP1().getX() currentY1", "the score of the player score = 0 scoreText = Text(Point(500, 155), f\"SCORE:", "a rectangle that fills the whole screen bgRect = Rectangle(Point(0, 0), Point(1000, 500))", "325, 100, 50) questionText.setText(f\"rgb({colors[correctChoice][0]}, {colors[correctChoice][1]}, {colors[correctChoice][2]})\") else: bgRect.setFill(color_rgb(colors[square_clicked][0], colors[square_clicked][1], colors[square_clicked][2])) bgRect.draw(win) resultText.setText(\"TOO BAD\")", "titleBg.setOutline('steelblue') titleBg.setFill('steelblue') titleBg.draw(win) title = Text(Point(500, 67.5),\"RGB Colour Guessing Game\") title.setTextColor('white') title.setSize(48) title.setFace('times", "the rgb of the correct choice questionText = Text(Point(500, 225), f\"rgb({colors[correctChoice][0]}, {colors[correctChoice][1]}, {colors[correctChoice][2]})\")", "that will display the results of the guess (correct/incorrect) resultText = Text(Point(500, 125),\"\")", "squares.append(square) create_squares(225, 325, 100, 50) def wait_for_click(): while True: # get the click", "f\"rgb({colors[correctChoice][0]}, {colors[correctChoice][1]}, {colors[correctChoice][2]})\") questionText.setFill('white') questionText.setSize(25) questionText.setStyle('bold') questionText.draw(win) # create a Text box that", "random # create the graphics window and set background colour win = GraphWin(\"Colour", "325, 100, 50) def wait_for_click(): while True: # get the click position of", "of the square to the random values of r,g and b square.setFill(color_rgb(colors[i][0], colors[i][1],", "True: # get the click position of the mouse mousePos = win.getMouse() mouseX", "create a Text box that will display the results of the guess (correct/incorrect)", "colors global correctChoice colors = [] for i in range(4): rand_r = random.randint(0,", "import * import random # create the graphics window and set background colour", "values of r,g and b square.setFill(color_rgb(colors[i][0], colors[i][1], colors[i][2])) # draw the square in", "= Text(Point(500, 225), f\"rgb({colors[correctChoice][0]}, {colors[correctChoice][1]}, {colors[correctChoice][2]})\") questionText.setFill('white') questionText.setSize(25) questionText.setStyle('bold') questionText.draw(win) # create a", "required python libaries: graphics and random from graphics import * import random #", "mouseX < currentX2 and mouseY > currentY1 and mouseY < 
currentY2: return i", "the whole screen bgRect = Rectangle(Point(0, 0), Point(1000, 500)) # create a Text", "# create a Text box that will display the results of the guess", "questionText.setText(f\"rgb({colors[correctChoice][0]}, {colors[correctChoice][1]}, {colors[correctChoice][2]})\") else: bgRect.setFill(color_rgb(colors[square_clicked][0], colors[square_clicked][1], colors[square_clicked][2])) bgRect.draw(win) resultText.setText(\"TOO BAD\") resultText.draw(win) scoreText.setSize(24) scoreText.anchor", "box that will display the results of the guess (correct/incorrect) resultText = Text(Point(500,", "import all the required python libaries: graphics and random from graphics import *", "rgb of the correct choice questionText = Text(Point(500, 225), f\"rgb({colors[correctChoice][0]}, {colors[correctChoice][1]}, {colors[correctChoice][2]})\") questionText.setFill('white')", "bgRect.draw(win) resultText.setText(\"TOO BAD\") resultText.draw(win) scoreText.setSize(24) scoreText.anchor = Point(500, 350) scoreText.undraw() scoreText.draw(win) gameover =", "fill of the square to the random values of r,g and b square.setFill(color_rgb(colors[i][0],", "Point(x+(i+1)*sideLen+i*spacing, y+sideLen)) # set the fill of the square to the random values", "= False # create a rectangle that fills the whole screen bgRect =", "= Text(Point(500, 67.5),\"RGB Colour Guessing Game\") title.setTextColor('white') title.setSize(48) title.setFace('times roman') title.draw(win) colors =", "graphics window and set background colour win = GraphWin(\"Colour Guessing Game\", 1000, 500)", "i in range(4): currentSquare = squares[i] currentX1 = currentSquare.getP1().getX() currentY1 = currentSquare.getP1().getY() currentX2", "python libaries: graphics and random from graphics import * import random # create", "i in range(4): rand_r = random.randint(0, 255) rand_g = random.randint(0, 255) rand_b =", "square.setFill(color_rgb(colors[i][0], colors[i][1], colors[i][2])) # draw the square in the window square.draw(win) squares.append(square) create_squares(225,", "clicked on the correct square, if it did display correct otherwise incorrect for", "create a title for your game titleBg = Rectangle(Point(0, 0), Point(1000, 135)) titleBg.setOutline('steelblue')", "random from graphics import * import random # create the graphics window and", "whole screen bgRect = Rectangle(Point(0, 0), Point(1000, 500)) # create a Text box", "scoreText.draw(win) while gameover == False: square_clicked = wait_for_click() if square_clicked == correctChoice: score", "= random.randint(0, 255) rand_g = random.randint(0, 255) rand_b = random.randint(0, 255) colors.append([rand_r, rand_g,", "# create a Text box that will display the score of the player", "correctChoice = random.randint(0, 3) randomise_answers() squares = [] # create 4 squares of", "is positioned based on the current 'i' value square = Rectangle(Point(x+i*sideLen+i*spacing, y), Point(x+(i+1)*sideLen+i*spacing,", "main game ''' gameover = False # create a rectangle that fills the", "and mouseX < currentX2 and mouseY > currentY1 and mouseY < currentY2: return", "Text box that will display the results of the guess (correct/incorrect) resultText =", "screen bgRect = Rectangle(Point(0, 0), Point(1000, 500)) # create a Text box that", "scoreText.setText(f\"SCORE: {score}\") randomise_answers() create_squares(225, 325, 100, 50) questionText.setText(f\"rgb({colors[correctChoice][0]}, {colors[correctChoice][1]}, {colors[correctChoice][2]})\") else: 
bgRect.setFill(color_rgb(colors[square_clicked][0], colors[square_clicked][1],", "rectangle that fills the whole screen bgRect = Rectangle(Point(0, 0), Point(1000, 500)) #", "create the graphics window and set background colour win = GraphWin(\"Colour Guessing Game\",", "game ''' gameover = False # create a rectangle that fills the whole", "global squares squares = [] for i in range(4): # create a square", "= [] correctChoice = int # generate random colors and questions def randomise_answers():", "in range(4): currentSquare = squares[i] currentX1 = currentSquare.getP1().getX() currentY1 = currentSquare.getP1().getY() currentX2 =", "questionText.setFill('white') questionText.setSize(25) questionText.setStyle('bold') questionText.draw(win) # create a Text box that will display the", "0 scoreText = Text(Point(500, 155), f\"SCORE: {score}\") scoreText.setFill('white') scoreText.setSize(12) scoreText.draw(win) while gameover ==", "bgRect.setFill(color_rgb(colors[square_clicked][0], colors[square_clicked][1], colors[square_clicked][2])) bgRect.draw(win) resultText.setText(\"TOO BAD\") resultText.draw(win) scoreText.setSize(24) scoreText.anchor = Point(500, 350) scoreText.undraw()", "squares = [] for i in range(4): # create a square (Rectangle) that", "gameover == False: square_clicked = wait_for_click() if square_clicked == correctChoice: score += 1", "correct square, if it did display correct otherwise incorrect for i in range(4):", "display the results of the guess (correct/incorrect) resultText = Text(Point(500, 125),\"\") resultText.setSize(128) resultText.setFill('white')", "square = Rectangle(Point(x+i*sideLen+i*spacing, y), Point(x+(i+1)*sideLen+i*spacing, y+sideLen)) # set the fill of the square", "title = Text(Point(500, 67.5),\"RGB Colour Guessing Game\") title.setTextColor('white') title.setSize(48) title.setFace('times roman') title.draw(win) colors", "# create a rectangle that fills the whole screen bgRect = Rectangle(Point(0, 0),", "225), f\"rgb({colors[correctChoice][0]}, {colors[correctChoice][1]}, {colors[correctChoice][2]})\") questionText.setFill('white') questionText.setSize(25) questionText.setStyle('bold') questionText.draw(win) # create a Text box", "Guessing Game\") title.setTextColor('white') title.setSize(48) title.setFace('times roman') title.draw(win) colors = [] correctChoice = int", "wait_for_click(): while True: # get the click position of the mouse mousePos =", "graphics and random from graphics import * import random # create the graphics", "across the page def create_squares(x, y, sideLen, spacing): global squares squares = []", "50) def wait_for_click(): while True: # get the click position of the mouse", "for your game titleBg = Rectangle(Point(0, 0), Point(1000, 135)) titleBg.setOutline('steelblue') titleBg.setFill('steelblue') titleBg.draw(win) title", "= squares[i] currentX1 = currentSquare.getP1().getX() currentY1 = currentSquare.getP1().getY() currentX2 = currentSquare.getP2().getX() currentY2 =", "* import random # create the graphics window and set background colour win", "# check if the mouse clicked on the correct square, if it did", "randomise_answers(): global colors global correctChoice colors = [] for i in range(4): rand_r", "spacing): global squares squares = [] for i in range(4): # create a", "mouseY > currentY1 and mouseY < currentY2: return i ''' main game '''", "squares[i] currentX1 = currentSquare.getP1().getX() currentY1 = currentSquare.getP1().getY() currentX2 = currentSquare.getP2().getX() currentY2 = currentSquare.getP2().getY()", "box 
that will display the rgb of the correct choice questionText = Text(Point(500,", "0), Point(1000, 135)) titleBg.setOutline('steelblue') titleBg.setFill('steelblue') titleBg.draw(win) title = Text(Point(500, 67.5),\"RGB Colour Guessing Game\")", "= win.getMouse() mouseX = mousePos.getX() mouseY = mousePos.getY() # check if the mouse", "title.setFace('times roman') title.draw(win) colors = [] correctChoice = int # generate random colors", "create 4 squares of random colour evenly spaced across the page def create_squares(x,", "the random values of r,g and b square.setFill(color_rgb(colors[i][0], colors[i][1], colors[i][2])) # draw the", "Colour Guessing Game\") title.setTextColor('white') title.setSize(48) title.setFace('times roman') title.draw(win) colors = [] correctChoice =", "randomise_answers() create_squares(225, 325, 100, 50) questionText.setText(f\"rgb({colors[correctChoice][0]}, {colors[correctChoice][1]}, {colors[correctChoice][2]})\") else: bgRect.setFill(color_rgb(colors[square_clicked][0], colors[square_clicked][1], colors[square_clicked][2])) bgRect.draw(win)", "player score = 0 scoreText = Text(Point(500, 155), f\"SCORE: {score}\") scoreText.setFill('white') scoreText.setSize(12) scoreText.draw(win)", "wait_for_click() if square_clicked == correctChoice: score += 1 scoreText.setText(f\"SCORE: {score}\") randomise_answers() create_squares(225, 325,", "resultText.setText(\"TOO BAD\") resultText.draw(win) scoreText.setSize(24) scoreText.anchor = Point(500, 350) scoreText.undraw() scoreText.draw(win) gameover = True", "rand_r = random.randint(0, 255) rand_g = random.randint(0, 255) rand_b = random.randint(0, 255) colors.append([rand_r,", "resultText.draw(win) scoreText.setSize(24) scoreText.anchor = Point(500, 350) scoreText.undraw() scoreText.draw(win) gameover = True # wait", "currentY1 and mouseY < currentY2: return i ''' main game ''' gameover =", "questionText.setSize(25) questionText.setStyle('bold') questionText.draw(win) # create a Text box that will display the score", "square to the random values of r,g and b square.setFill(color_rgb(colors[i][0], colors[i][1], colors[i][2])) #", "of the guess (correct/incorrect) resultText = Text(Point(500, 125),\"\") resultText.setSize(128) resultText.setFill('white') # create a", "correctChoice: score += 1 scoreText.setText(f\"SCORE: {score}\") randomise_answers() create_squares(225, 325, 100, 50) questionText.setText(f\"rgb({colors[correctChoice][0]}, {colors[correctChoice][1]},", "range(4): currentSquare = squares[i] currentX1 = currentSquare.getP1().getX() currentY1 = currentSquare.getP1().getY() currentX2 = currentSquare.getP2().getX()", "randomise_answers() squares = [] # create 4 squares of random colour evenly spaced", "win = GraphWin(\"Colour Guessing Game\", 1000, 500) win.setBackground('#232323') # create a title for", "fills the whole screen bgRect = Rectangle(Point(0, 0), Point(1000, 500)) # create a", "that will display the score of the player score = 0 scoreText =", "correctChoice = int # generate random colors and questions def randomise_answers(): global colors", "resultText.setSize(128) resultText.setFill('white') # create a Text box that will display the rgb of", "255) rand_b = random.randint(0, 255) colors.append([rand_r, rand_g, rand_b]) correctChoice = random.randint(0, 3) randomise_answers()", "random.randint(0, 255) rand_b = random.randint(0, 255) colors.append([rand_r, rand_g, rand_b]) correctChoice = random.randint(0, 3)", "< currentX2 and mouseY > currentY1 and mouseY < currentY2: return i '''", "create a 
rectangle that fills the whole screen bgRect = Rectangle(Point(0, 0), Point(1000,", "= Point(500, 350) scoreText.undraw() scoreText.draw(win) gameover = True # wait for click to", "def create_squares(x, y, sideLen, spacing): global squares squares = [] for i in", "mouse clicked on the correct square, if it did display correct otherwise incorrect", "a Text box that will display the results of the guess (correct/incorrect) resultText", "set background colour win = GraphWin(\"Colour Guessing Game\", 1000, 500) win.setBackground('#232323') # create", "correctChoice colors = [] for i in range(4): rand_r = random.randint(0, 255) rand_g", "sideLen, spacing): global squares squares = [] for i in range(4): # create", "current 'i' value square = Rectangle(Point(x+i*sideLen+i*spacing, y), Point(x+(i+1)*sideLen+i*spacing, y+sideLen)) # set the fill", "Rectangle(Point(x+i*sideLen+i*spacing, y), Point(x+(i+1)*sideLen+i*spacing, y+sideLen)) # set the fill of the square to the", "resultText = Text(Point(500, 125),\"\") resultText.setSize(128) resultText.setFill('white') # create a Text box that will", "colors and questions def randomise_answers(): global colors global correctChoice colors = [] for", "rand_b]) correctChoice = random.randint(0, 3) randomise_answers() squares = [] # create 4 squares", "colors = [] correctChoice = int # generate random colors and questions def", "square (Rectangle) that is positioned based on the current 'i' value square =", "mouseX = mousePos.getX() mouseY = mousePos.getY() # check if the mouse clicked on", "will display the results of the guess (correct/incorrect) resultText = Text(Point(500, 125),\"\") resultText.setSize(128)", "of random colour evenly spaced across the page def create_squares(x, y, sideLen, spacing):", "> currentY1 and mouseY < currentY2: return i ''' main game ''' gameover", "window and set background colour win = GraphWin(\"Colour Guessing Game\", 1000, 500) win.setBackground('#232323')", "in range(4): rand_r = random.randint(0, 255) rand_g = random.randint(0, 255) rand_b = random.randint(0,", "your game titleBg = Rectangle(Point(0, 0), Point(1000, 135)) titleBg.setOutline('steelblue') titleBg.setFill('steelblue') titleBg.draw(win) title =", "Rectangle(Point(0, 0), Point(1000, 500)) # create a Text box that will display the", "= [] for i in range(4): rand_r = random.randint(0, 255) rand_g = random.randint(0,", "= Rectangle(Point(0, 0), Point(1000, 500)) # create a Text box that will display", "the window square.draw(win) squares.append(square) create_squares(225, 325, 100, 50) def wait_for_click(): while True: #", "questionText = Text(Point(500, 225), f\"rgb({colors[correctChoice][0]}, {colors[correctChoice][1]}, {colors[correctChoice][2]})\") questionText.setFill('white') questionText.setSize(25) questionText.setStyle('bold') questionText.draw(win) # create", "the square to the random values of r,g and b square.setFill(color_rgb(colors[i][0], colors[i][1], colors[i][2]))", "rand_g, rand_b]) correctChoice = random.randint(0, 3) randomise_answers() squares = [] # create 4", "= Rectangle(Point(x+i*sideLen+i*spacing, y), Point(x+(i+1)*sideLen+i*spacing, y+sideLen)) # set the fill of the square to", "score += 1 scoreText.setText(f\"SCORE: {score}\") randomise_answers() create_squares(225, 325, 100, 50) questionText.setText(f\"rgb({colors[correctChoice][0]}, {colors[correctChoice][1]}, {colors[correctChoice][2]})\")", "b square.setFill(color_rgb(colors[i][0], colors[i][1], colors[i][2])) # draw the square in the window square.draw(win) 
squares.append(square)", "evenly spaced across the page def create_squares(x, y, sideLen, spacing): global squares squares", "50) questionText.setText(f\"rgb({colors[correctChoice][0]}, {colors[correctChoice][1]}, {colors[correctChoice][2]})\") else: bgRect.setFill(color_rgb(colors[square_clicked][0], colors[square_clicked][1], colors[square_clicked][2])) bgRect.draw(win) resultText.setText(\"TOO BAD\") resultText.draw(win) scoreText.setSize(24)", "= currentSquare.getP2().getY() if mouseX > currentX1 and mouseX < currentX2 and mouseY >", "correct choice questionText = Text(Point(500, 225), f\"rgb({colors[correctChoice][0]}, {colors[correctChoice][1]}, {colors[correctChoice][2]})\") questionText.setFill('white') questionText.setSize(25) questionText.setStyle('bold') questionText.draw(win)", "currentY2 = currentSquare.getP2().getY() if mouseX > currentX1 and mouseX < currentX2 and mouseY", "currentSquare.getP2().getX() currentY2 = currentSquare.getP2().getY() if mouseX > currentX1 and mouseX < currentX2 and", "GraphWin(\"Colour Guessing Game\", 1000, 500) win.setBackground('#232323') # create a title for your game", "a Text box that will display the score of the player score =", "= random.randint(0, 3) randomise_answers() squares = [] # create 4 squares of random", "random.randint(0, 255) rand_g = random.randint(0, 255) rand_b = random.randint(0, 255) colors.append([rand_r, rand_g, rand_b])", "== False: square_clicked = wait_for_click() if square_clicked == correctChoice: score += 1 scoreText.setText(f\"SCORE:", "in range(4): # create a square (Rectangle) that is positioned based on the", "= currentSquare.getP1().getX() currentY1 = currentSquare.getP1().getY() currentX2 = currentSquare.getP2().getX() currentY2 = currentSquare.getP2().getY() if mouseX", "colors[square_clicked][1], colors[square_clicked][2])) bgRect.draw(win) resultText.setText(\"TOO BAD\") resultText.draw(win) scoreText.setSize(24) scoreText.anchor = Point(500, 350) scoreText.undraw() scoreText.draw(win)", "(Rectangle) that is positioned based on the current 'i' value square = Rectangle(Point(x+i*sideLen+i*spacing,", "gameover = False # create a rectangle that fills the whole screen bgRect", "display the rgb of the correct choice questionText = Text(Point(500, 225), f\"rgb({colors[correctChoice][0]}, {colors[correctChoice][1]},", "# set the fill of the square to the random values of r,g", "= Rectangle(Point(0, 0), Point(1000, 135)) titleBg.setOutline('steelblue') titleBg.setFill('steelblue') titleBg.draw(win) title = Text(Point(500, 67.5),\"RGB Colour", "135)) titleBg.setOutline('steelblue') titleBg.setFill('steelblue') titleBg.draw(win) title = Text(Point(500, 67.5),\"RGB Colour Guessing Game\") title.setTextColor('white') title.setSize(48)", "win.getMouse() mouseX = mousePos.getX() mouseY = mousePos.getY() # check if the mouse clicked", "currentSquare.getP2().getY() if mouseX > currentX1 and mouseX < currentX2 and mouseY > currentY1", "create a square (Rectangle) that is positioned based on the current 'i' value", "set the fill of the square to the random values of r,g and", "# draw the square in the window square.draw(win) squares.append(square) create_squares(225, 325, 100, 50)", "for i in range(4): currentSquare = squares[i] currentX1 = currentSquare.getP1().getX() currentY1 = currentSquare.getP1().getY()", "display the score of the player score = 0 scoreText = Text(Point(500, 155),", "colour win = GraphWin(\"Colour Guessing Game\", 1000, 500) win.setBackground('#232323') # create a title", "a square (Rectangle) that is 
positioned based on the current 'i' value square", "colors = [] for i in range(4): rand_r = random.randint(0, 255) rand_g =", "= [] for i in range(4): # create a square (Rectangle) that is", "guess (correct/incorrect) resultText = Text(Point(500, 125),\"\") resultText.setSize(128) resultText.setFill('white') # create a Text box", "{score}\") randomise_answers() create_squares(225, 325, 100, 50) questionText.setText(f\"rgb({colors[correctChoice][0]}, {colors[correctChoice][1]}, {colors[correctChoice][2]})\") else: bgRect.setFill(color_rgb(colors[square_clicked][0], colors[square_clicked][1], colors[square_clicked][2]))", "score of the player score = 0 scoreText = Text(Point(500, 155), f\"SCORE: {score}\")", "titleBg = Rectangle(Point(0, 0), Point(1000, 135)) titleBg.setOutline('steelblue') titleBg.setFill('steelblue') titleBg.draw(win) title = Text(Point(500, 67.5),\"RGB", "global correctChoice colors = [] for i in range(4): rand_r = random.randint(0, 255)", "to the random values of r,g and b square.setFill(color_rgb(colors[i][0], colors[i][1], colors[i][2])) # draw", "create a Text box that will display the rgb of the correct choice", "False: square_clicked = wait_for_click() if square_clicked == correctChoice: score += 1 scoreText.setText(f\"SCORE: {score}\")", "125),\"\") resultText.setSize(128) resultText.setFill('white') # create a Text box that will display the rgb", "= wait_for_click() if square_clicked == correctChoice: score += 1 scoreText.setText(f\"SCORE: {score}\") randomise_answers() create_squares(225,", "titleBg.draw(win) title = Text(Point(500, 67.5),\"RGB Colour Guessing Game\") title.setTextColor('white') title.setSize(48) title.setFace('times roman') title.draw(win)", "Game\") title.setTextColor('white') title.setSize(48) title.setFace('times roman') title.draw(win) colors = [] correctChoice = int #", "scoreText.anchor = Point(500, 350) scoreText.undraw() scoreText.draw(win) gameover = True # wait for click", "score = 0 scoreText = Text(Point(500, 155), f\"SCORE: {score}\") scoreText.setFill('white') scoreText.setSize(12) scoreText.draw(win) while", "500)) # create a Text box that will display the results of the", "that fills the whole screen bgRect = Rectangle(Point(0, 0), Point(1000, 500)) # create", "and mouseY > currentY1 and mouseY < currentY2: return i ''' main game", "{score}\") scoreText.setFill('white') scoreText.setSize(12) scoreText.draw(win) while gameover == False: square_clicked = wait_for_click() if square_clicked", "< currentY2: return i ''' main game ''' gameover = False # create", "random.randint(0, 3) randomise_answers() squares = [] # create 4 squares of random colour", "check if the mouse clicked on the correct square, if it did display", "title.setTextColor('white') title.setSize(48) title.setFace('times roman') title.draw(win) colors = [] correctChoice = int # generate", "r,g and b square.setFill(color_rgb(colors[i][0], colors[i][1], colors[i][2])) # draw the square in the window", "background colour win = GraphWin(\"Colour Guessing Game\", 1000, 500) win.setBackground('#232323') # create a", "title.setSize(48) title.setFace('times roman') title.draw(win) colors = [] correctChoice = int # generate random", "libaries: graphics and random from graphics import * import random # create the", "will display the rgb of the correct choice questionText = Text(Point(500, 225), f\"rgb({colors[correctChoice][0]},", "roman') title.draw(win) colors = [] correctChoice = int # generate random colors and", "random colour evenly spaced across the page def 
create_squares(x, y, sideLen, spacing): global", "the correct square, if it did display correct otherwise incorrect for i in", "display correct otherwise incorrect for i in range(4): currentSquare = squares[i] currentX1 =", "return i ''' main game ''' gameover = False # create a rectangle", "{colors[correctChoice][2]})\") else: bgRect.setFill(color_rgb(colors[square_clicked][0], colors[square_clicked][1], colors[square_clicked][2])) bgRect.draw(win) resultText.setText(\"TOO BAD\") resultText.draw(win) scoreText.setSize(24) scoreText.anchor = Point(500,", "[] for i in range(4): rand_r = random.randint(0, 255) rand_g = random.randint(0, 255)", "i in range(4): # create a square (Rectangle) that is positioned based on", "67.5),\"RGB Colour Guessing Game\") title.setTextColor('white') title.setSize(48) title.setFace('times roman') title.draw(win) colors = [] correctChoice", "255) colors.append([rand_r, rand_g, rand_b]) correctChoice = random.randint(0, 3) randomise_answers() squares = [] #", "= random.randint(0, 255) rand_b = random.randint(0, 255) colors.append([rand_r, rand_g, rand_b]) correctChoice = random.randint(0,", "{colors[correctChoice][2]})\") questionText.setFill('white') questionText.setSize(25) questionText.setStyle('bold') questionText.draw(win) # create a Text box that will display", "y+sideLen)) # set the fill of the square to the random values of", "results of the guess (correct/incorrect) resultText = Text(Point(500, 125),\"\") resultText.setSize(128) resultText.setFill('white') # create", "titleBg.setFill('steelblue') titleBg.draw(win) title = Text(Point(500, 67.5),\"RGB Colour Guessing Game\") title.setTextColor('white') title.setSize(48) title.setFace('times roman')", "on the correct square, if it did display correct otherwise incorrect for i", "in the window square.draw(win) squares.append(square) create_squares(225, 325, 100, 50) def wait_for_click(): while True:", "mouseY = mousePos.getY() # check if the mouse clicked on the correct square,", "of r,g and b square.setFill(color_rgb(colors[i][0], colors[i][1], colors[i][2])) # draw the square in the", "square in the window square.draw(win) squares.append(square) create_squares(225, 325, 100, 50) def wait_for_click(): while", "0), Point(1000, 500)) # create a Text box that will display the results", "int # generate random colors and questions def randomise_answers(): global colors global correctChoice", "on the current 'i' value square = Rectangle(Point(x+i*sideLen+i*spacing, y), Point(x+(i+1)*sideLen+i*spacing, y+sideLen)) # set", "mousePos.getX() mouseY = mousePos.getY() # check if the mouse clicked on the correct", "the guess (correct/incorrect) resultText = Text(Point(500, 125),\"\") resultText.setSize(128) resultText.setFill('white') # create a Text", "rand_b = random.randint(0, 255) colors.append([rand_r, rand_g, rand_b]) correctChoice = random.randint(0, 3) randomise_answers() squares", "box that will display the score of the player score = 0 scoreText", "will display the score of the player score = 0 scoreText = Text(Point(500,", "the square in the window square.draw(win) squares.append(square) create_squares(225, 325, 100, 50) def wait_for_click():", "position of the mouse mousePos = win.getMouse() mouseX = mousePos.getX() mouseY = mousePos.getY()", "mouse mousePos = win.getMouse() mouseX = mousePos.getX() mouseY = mousePos.getY() # check if", "# create a Text box that will display the rgb of the correct", "win.setBackground('#232323') # create a title for your game titleBg = Rectangle(Point(0, 0), Point(1000,", 
"the graphics window and set background colour win = GraphWin(\"Colour Guessing Game\", 1000,", "Text box that will display the rgb of the correct choice questionText =", "y, sideLen, spacing): global squares squares = [] for i in range(4): #", "currentY2: return i ''' main game ''' gameover = False # create a", "the required python libaries: graphics and random from graphics import * import random", "create_squares(x, y, sideLen, spacing): global squares squares = [] for i in range(4):", "resultText.setFill('white') # create a Text box that will display the rgb of the", "import random # create the graphics window and set background colour win =", "Text(Point(500, 155), f\"SCORE: {score}\") scoreText.setFill('white') scoreText.setSize(12) scoreText.draw(win) while gameover == False: square_clicked =", "colors.append([rand_r, rand_g, rand_b]) correctChoice = random.randint(0, 3) randomise_answers() squares = [] # create", "scoreText.setFill('white') scoreText.setSize(12) scoreText.draw(win) while gameover == False: square_clicked = wait_for_click() if square_clicked ==", "+= 1 scoreText.setText(f\"SCORE: {score}\") randomise_answers() create_squares(225, 325, 100, 50) questionText.setText(f\"rgb({colors[correctChoice][0]}, {colors[correctChoice][1]}, {colors[correctChoice][2]})\") else:", "Point(500, 350) scoreText.undraw() scoreText.draw(win) gameover = True # wait for click to close", "generate random colors and questions def randomise_answers(): global colors global correctChoice colors =", "currentY1 = currentSquare.getP1().getY() currentX2 = currentSquare.getP2().getX() currentY2 = currentSquare.getP2().getY() if mouseX > currentX1", "random values of r,g and b square.setFill(color_rgb(colors[i][0], colors[i][1], colors[i][2])) # draw the square", "and b square.setFill(color_rgb(colors[i][0], colors[i][1], colors[i][2])) # draw the square in the window square.draw(win)", "bgRect = Rectangle(Point(0, 0), Point(1000, 500)) # create a Text box that will", "= mousePos.getX() mouseY = mousePos.getY() # check if the mouse clicked on the", "value square = Rectangle(Point(x+i*sideLen+i*spacing, y), Point(x+(i+1)*sideLen+i*spacing, y+sideLen)) # set the fill of the", "{colors[correctChoice][1]}, {colors[correctChoice][2]})\") questionText.setFill('white') questionText.setSize(25) questionText.setStyle('bold') questionText.draw(win) # create a Text box that will", "while gameover == False: square_clicked = wait_for_click() if square_clicked == correctChoice: score +=", "colour evenly spaced across the page def create_squares(x, y, sideLen, spacing): global squares", "if mouseX > currentX1 and mouseX < currentX2 and mouseY > currentY1 and", "scoreText = Text(Point(500, 155), f\"SCORE: {score}\") scoreText.setFill('white') scoreText.setSize(12) scoreText.draw(win) while gameover == False:", "while True: # get the click position of the mouse mousePos = win.getMouse()", "and mouseY < currentY2: return i ''' main game ''' gameover = False", "155), f\"SCORE: {score}\") scoreText.setFill('white') scoreText.setSize(12) scoreText.draw(win) while gameover == False: square_clicked = wait_for_click()", "# create 4 squares of random colour evenly spaced across the page def", "Point(1000, 500)) # create a Text box that will display the results of", "if the mouse clicked on the correct square, if it did display correct", "create a Text box that will display the score of the player score", "square_clicked == correctChoice: score += 1 scoreText.setText(f\"SCORE: {score}\") randomise_answers() create_squares(225, 
325, 100, 50)", "# get the click position of the mouse mousePos = win.getMouse() mouseX =", "get the click position of the mouse mousePos = win.getMouse() mouseX = mousePos.getX()", "# create a square (Rectangle) that is positioned based on the current 'i'", "a title for your game titleBg = Rectangle(Point(0, 0), Point(1000, 135)) titleBg.setOutline('steelblue') titleBg.setFill('steelblue')", "Point(1000, 135)) titleBg.setOutline('steelblue') titleBg.setFill('steelblue') titleBg.draw(win) title = Text(Point(500, 67.5),\"RGB Colour Guessing Game\") title.setTextColor('white')", "[] correctChoice = int # generate random colors and questions def randomise_answers(): global", "squares of random colour evenly spaced across the page def create_squares(x, y, sideLen,", "colors[square_clicked][2])) bgRect.draw(win) resultText.setText(\"TOO BAD\") resultText.draw(win) scoreText.setSize(24) scoreText.anchor = Point(500, 350) scoreText.undraw() scoreText.draw(win) gameover", "questionText.draw(win) # create a Text box that will display the score of the", "100, 50) questionText.setText(f\"rgb({colors[correctChoice][0]}, {colors[correctChoice][1]}, {colors[correctChoice][2]})\") else: bgRect.setFill(color_rgb(colors[square_clicked][0], colors[square_clicked][1], colors[square_clicked][2])) bgRect.draw(win) resultText.setText(\"TOO BAD\") resultText.draw(win)", "for i in range(4): rand_r = random.randint(0, 255) rand_g = random.randint(0, 255) rand_b", "Text(Point(500, 125),\"\") resultText.setSize(128) resultText.setFill('white') # create a Text box that will display the", "title.draw(win) colors = [] correctChoice = int # generate random colors and questions", "= GraphWin(\"Colour Guessing Game\", 1000, 500) win.setBackground('#232323') # create a title for your", "1000, 500) win.setBackground('#232323') # create a title for your game titleBg = Rectangle(Point(0,", "= currentSquare.getP2().getX() currentY2 = currentSquare.getP2().getY() if mouseX > currentX1 and mouseX < currentX2", "did display correct otherwise incorrect for i in range(4): currentSquare = squares[i] currentX1", "square.draw(win) squares.append(square) create_squares(225, 325, 100, 50) def wait_for_click(): while True: # get the", "mousePos = win.getMouse() mouseX = mousePos.getX() mouseY = mousePos.getY() # check if the", "incorrect for i in range(4): currentSquare = squares[i] currentX1 = currentSquare.getP1().getX() currentY1 =", "scoreText.undraw() scoreText.draw(win) gameover = True # wait for click to close window win.getMouse()", "questions def randomise_answers(): global colors global correctChoice colors = [] for i in", "from graphics import * import random # create the graphics window and set", "squares = [] # create 4 squares of random colour evenly spaced across", "''' gameover = False # create a rectangle that fills the whole screen", "Text(Point(500, 67.5),\"RGB Colour Guessing Game\") title.setTextColor('white') title.setSize(48) title.setFace('times roman') title.draw(win) colors = []", "click position of the mouse mousePos = win.getMouse() mouseX = mousePos.getX() mouseY =", "the player score = 0 scoreText = Text(Point(500, 155), f\"SCORE: {score}\") scoreText.setFill('white') scoreText.setSize(12)", "> currentX1 and mouseX < currentX2 and mouseY > currentY1 and mouseY <" ]
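# A minimal sketch (not part of the original game): the hit-test inside
# wait_for_click() compares the click against each square's corner points;
# the same check, pulled out as a reusable helper:
def point_in_rect(px, py, x1, y1, x2, y2):
    # True when (px, py) falls strictly inside the box spanned by the corners
    return x1 < px < x2 and y1 < py < y2

# e.g. for the first square from create_squares(225, 325, 100, 50):
# point_in_rect(250, 375, 225, 325, 325, 425)  ->  True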
[ "LevelBuilder from sprites import * def render(name,bg): lb = LevelBuilder.LevelBuilder(name+\".plist\",background=bg) lb.addObject(Beam.BeamSprite(x=217, y=83,width=167,height=36,angle='90' ,restitution=0.2,static='false',friction=0.5,density=20", "<gh_stars>0 import LevelBuilder from sprites import * def render(name,bg): lb = LevelBuilder.LevelBuilder(name+\".plist\",background=bg) lb.addObject(Beam.BeamSprite(x=217,", "import LevelBuilder from sprites import * def render(name,bg): lb = LevelBuilder.LevelBuilder(name+\".plist\",background=bg) lb.addObject(Beam.BeamSprite(x=217, y=83,width=167,height=36,angle='90'", "lb.addObject(Beam.BeamSprite(x=217, y=83,width=167,height=36,angle='90' ,restitution=0.2,static='false',friction=0.5,density=20 )) lb.addObject(Wizard.WizardSprite(x=409,y=68)) lb.addObject(Enemy.EnemySprite(x=217, y=238,width=136,height=136,angle='0',restitution=0.2,static='false',friction=0.5,density=20 )) lb.addObject(Hero.HeroSprite(x=22, y=21,width=32,height=32)) lb.addObject(Star.StarSprite(x=217, y=238,width=32,height=32)) lb.addObject(Contacts.Contact(body1='Hero',body2=':hat_top',event_name='onReleaseStar'))", ",restitution=0.2,static='false',friction=0.5,density=20 )) lb.addObject(Wizard.WizardSprite(x=409,y=68)) lb.addObject(Enemy.EnemySprite(x=217, y=238,width=136,height=136,angle='0',restitution=0.2,static='false',friction=0.5,density=20 )) lb.addObject(Hero.HeroSprite(x=22, y=21,width=32,height=32)) lb.addObject(Star.StarSprite(x=217, y=238,width=32,height=32)) lb.addObject(Contacts.Contact(body1='Hero',body2=':hat_top',event_name='onReleaseStar')) lb.addObject(Joints.RevoluteJoint(body1='Enemy',body2='Star',motor_speed='1',enable_motor='true',torque='1000',lower_angle='12',upper_angle='50',userData='star_joint',enable_limit='false',collide_connected='false')) lb.render()", "import * def render(name,bg): lb = LevelBuilder.LevelBuilder(name+\".plist\",background=bg) lb.addObject(Beam.BeamSprite(x=217, y=83,width=167,height=36,angle='90' ,restitution=0.2,static='false',friction=0.5,density=20 )) lb.addObject(Wizard.WizardSprite(x=409,y=68)) lb.addObject(Enemy.EnemySprite(x=217,", "sprites import * def render(name,bg): lb = LevelBuilder.LevelBuilder(name+\".plist\",background=bg) lb.addObject(Beam.BeamSprite(x=217, y=83,width=167,height=36,angle='90' ,restitution=0.2,static='false',friction=0.5,density=20 )) lb.addObject(Wizard.WizardSprite(x=409,y=68))", "LevelBuilder.LevelBuilder(name+\".plist\",background=bg) lb.addObject(Beam.BeamSprite(x=217, y=83,width=167,height=36,angle='90' ,restitution=0.2,static='false',friction=0.5,density=20 )) lb.addObject(Wizard.WizardSprite(x=409,y=68)) lb.addObject(Enemy.EnemySprite(x=217, y=238,width=136,height=136,angle='0',restitution=0.2,static='false',friction=0.5,density=20 )) lb.addObject(Hero.HeroSprite(x=22, y=21,width=32,height=32)) lb.addObject(Star.StarSprite(x=217, y=238,width=32,height=32))", "y=83,width=167,height=36,angle='90' ,restitution=0.2,static='false',friction=0.5,density=20 )) lb.addObject(Wizard.WizardSprite(x=409,y=68)) lb.addObject(Enemy.EnemySprite(x=217, y=238,width=136,height=136,angle='0',restitution=0.2,static='false',friction=0.5,density=20 )) lb.addObject(Hero.HeroSprite(x=22, y=21,width=32,height=32)) lb.addObject(Star.StarSprite(x=217, y=238,width=32,height=32)) lb.addObject(Contacts.Contact(body1='Hero',body2=':hat_top',event_name='onReleaseStar')) 
lb.addObject(Joints.RevoluteJoint(body1='Enemy',body2='Star',motor_speed='1',enable_motor='true',torque='1000',lower_angle='12',upper_angle='50',userData='star_joint',enable_limit='false',collide_connected='false'))", "render(name,bg): lb = LevelBuilder.LevelBuilder(name+\".plist\",background=bg) lb.addObject(Beam.BeamSprite(x=217, y=83,width=167,height=36,angle='90' ,restitution=0.2,static='false',friction=0.5,density=20 )) lb.addObject(Wizard.WizardSprite(x=409,y=68)) lb.addObject(Enemy.EnemySprite(x=217, y=238,width=136,height=136,angle='0',restitution=0.2,static='false',friction=0.5,density=20 )) lb.addObject(Hero.HeroSprite(x=22,", "def render(name,bg): lb = LevelBuilder.LevelBuilder(name+\".plist\",background=bg) lb.addObject(Beam.BeamSprite(x=217, y=83,width=167,height=36,angle='90' ,restitution=0.2,static='false',friction=0.5,density=20 )) lb.addObject(Wizard.WizardSprite(x=409,y=68)) lb.addObject(Enemy.EnemySprite(x=217, y=238,width=136,height=136,angle='0',restitution=0.2,static='false',friction=0.5,density=20 ))", "= LevelBuilder.LevelBuilder(name+\".plist\",background=bg) lb.addObject(Beam.BeamSprite(x=217, y=83,width=167,height=36,angle='90' ,restitution=0.2,static='false',friction=0.5,density=20 )) lb.addObject(Wizard.WizardSprite(x=409,y=68)) lb.addObject(Enemy.EnemySprite(x=217, y=238,width=136,height=136,angle='0',restitution=0.2,static='false',friction=0.5,density=20 )) lb.addObject(Hero.HeroSprite(x=22, y=21,width=32,height=32)) lb.addObject(Star.StarSprite(x=217,", "lb = LevelBuilder.LevelBuilder(name+\".plist\",background=bg) lb.addObject(Beam.BeamSprite(x=217, y=83,width=167,height=36,angle='90' ,restitution=0.2,static='false',friction=0.5,density=20 )) lb.addObject(Wizard.WizardSprite(x=409,y=68)) lb.addObject(Enemy.EnemySprite(x=217, y=238,width=136,height=136,angle='0',restitution=0.2,static='false',friction=0.5,density=20 )) lb.addObject(Hero.HeroSprite(x=22, y=21,width=32,height=32))", "from sprites import * def render(name,bg): lb = LevelBuilder.LevelBuilder(name+\".plist\",background=bg) lb.addObject(Beam.BeamSprite(x=217, y=83,width=167,height=36,angle='90' ,restitution=0.2,static='false',friction=0.5,density=20 ))", "* def render(name,bg): lb = LevelBuilder.LevelBuilder(name+\".plist\",background=bg) lb.addObject(Beam.BeamSprite(x=217, y=83,width=167,height=36,angle='90' ,restitution=0.2,static='false',friction=0.5,density=20 )) lb.addObject(Wizard.WizardSprite(x=409,y=68)) lb.addObject(Enemy.EnemySprite(x=217, y=238,width=136,height=136,angle='0',restitution=0.2,static='false',friction=0.5,density=20" ]
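# A minimal sketch of how this module might be driven; "level_01" and
# "bg.png" are hypothetical placeholders, not values from this repo.
if __name__ == '__main__':
    render("level_01", "bg.png")  # would write level_01.plist via LevelBuilder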
[ "#!/usr/bin/python3 import nvidia_smi import json mydict = nvidia_smi.JsonDeviceQuery() # Example print JSON print(json.dumps(mydict,", "import nvidia_smi import json mydict = nvidia_smi.JsonDeviceQuery() # Example print JSON print(json.dumps(mydict, indent=2))" ]
[ "updater = new_updater() self.assertEqual(updater.from_dict({'be never': self.ADD_5}), 5) self.assertFalse(updater.save_ini) def test_5_0_str(self): add = json.dumps({'be", "self.assertEqual(updater.from_dict({'be never': self.ADD_5}), 5) self.assertFalse(updater.save_ini) def test_5_0_str(self): add = json.dumps({'be never': self.ADD_5}) updater", "= {'ip_server': '', 'ip': 1, 'two__': '2', 'three__': 3, 'four__': '4'} TXT_4 =", "def test_5_5_dict(self): updater = new_updater() self.assertEqual(updater.from_dict({'settings': self.ADD_5}), 5) self.assertFalse(updater.save_ini) def test_prov(self): updater =", "self.assertFalse(updater.save_ini) def test_5_4_str(self): add = json.dumps(self.ADD_5) updater = new_updater() self.assertEqual(updater.from_json(add), 4) self.assertTrue(updater.save_ini) def", "self.assertEqual(updater.from_json(add), 4) self.assertTrue(updater.save_ini) def test_5_5_dict(self): updater = new_updater() self.assertEqual(updater.from_dict({'settings': self.ADD_5}), 5) self.assertFalse(updater.save_ini) def", "updater = new_updater() self.assertEqual(updater.from_dict(CFG()), 0) self.assertEqual(updater._updated_count, 0) self.assertEqual(updater._updated_count, updater._change_count) self.assertFalse(updater.save_ini) def test_5(self): updater", "= json.dumps(self.ADD_5) updater = new_updater() self.assertEqual(updater.from_json(add), 4) self.assertTrue(updater.save_ini) def test_5_5_dict(self): updater = new_updater()", "= new_updater() self.assertEqual(updater.from_dict({'settings': self.ADD_5}), 5) self.assertFalse(updater.save_ini) def test_prov(self): updater = new_updater() self.assertEqual(updater.from_json(self.TXT_4), 4)", "self.assertEqual(updater._updated_count, updater._change_count) self.assertFalse(updater.save_ini) def test_5(self): updater = new_updater() self.assertEqual(updater.from_dict({'be never': self.ADD_5}), 5) self.assertFalse(updater.save_ini)", "test_prov_proxy(self): txt_5 = self.TXT_4[:-1] + ',\"proxy\":{\"enable\": \"1\"}}' updater = new_updater() self.assertEqual(updater.from_json(txt_5), 5) self.assertTrue(updater.save_ini)", "5) self.assertFalse(updater.save_ini) def test_prov(self): updater = new_updater() self.assertEqual(updater.from_json(self.TXT_4), 4) self.assertTrue(updater.save_ini) self.assertEqual(updater.from_dict(CFG()), 2) self.assertFalse(updater.save_ini)", "CFG(): return run.get_cfg() class ConfigUpdater(unittest.TestCase): ADD_5 = {'ip_server': '', 'ip': 1, 'two__': '2',", "'{\"PROVIDERTTS\":\"NoYandex\",\"APIKEYTTS\":\"y_key\",\"PROVIDERSTT\":\"NoGoogle\",\"APIKEYSTT\":\"g_key\",' \\ '\"ALARMKWACTIVATED\":\"1\",\"ALARMTTS\":\"1\",\"ALARMSTT\":\"1\",\"newer__\":{\"fdfd\":\"777\"}}' def test_self(self): updater = new_updater() self.assertEqual(updater.from_dict(CFG()), 0) self.assertEqual(updater._updated_count, 0) self.assertEqual(updater._updated_count,", "<gh_stars>10-100 import json import unittest import run from lib.tools import config_updater def dummy(*_,", "self.assertFalse(updater.save_ini) def test_prov(self): updater = new_updater() self.assertEqual(updater.from_json(self.TXT_4), 4) self.assertTrue(updater.save_ini) self.assertEqual(updater.from_dict(CFG()), 2) self.assertFalse(updater.save_ini) self.assertEqual(updater.from_dict(CFG()),", "self.assertEqual(updater.from_json(self.TXT_4), 4) self.assertTrue(updater.save_ini) self.assertEqual(updater.from_dict(CFG()), 2) self.assertFalse(updater.save_ini) self.assertEqual(updater.from_dict(CFG()), 0) self.assertFalse(updater.save_ini) def 
test_prov_proxy(self): txt_5 =", "self.ADD_5}), 5) self.assertFalse(updater.save_ini) def test_5_0_str(self): add = json.dumps({'be never': self.ADD_5}) updater = new_updater()", "add = json.dumps(self.ADD_5) updater = new_updater() self.assertEqual(updater.from_json(add), 4) self.assertTrue(updater.save_ini) def test_5_5_dict(self): updater =", "dummy(*_, **__): pass def new_updater(): return config_updater.ConfigUpdater(CFG(), dummy) def CFG(): return run.get_cfg() class", "updater = new_updater() self.assertEqual(updater.from_dict({'settings': self.ADD_5}), 5) self.assertFalse(updater.save_ini) def test_prov(self): updater = new_updater() self.assertEqual(updater.from_json(self.TXT_4),", "def test_prov_proxy(self): txt_5 = self.TXT_4[:-1] + ',\"proxy\":{\"enable\": \"1\"}}' updater = new_updater() self.assertEqual(updater.from_json(txt_5), 5)", "config_updater def dummy(*_, **__): pass def new_updater(): return config_updater.ConfigUpdater(CFG(), dummy) def CFG(): return", "'4'} TXT_4 = '{\"PROVIDERTTS\":\"NoYandex\",\"APIKEYTTS\":\"y_key\",\"PROVIDERSTT\":\"NoGoogle\",\"APIKEYSTT\":\"g_key\",' \\ '\"ALARMKWACTIVATED\":\"1\",\"ALARMTTS\":\"1\",\"ALARMSTT\":\"1\",\"newer__\":{\"fdfd\":\"777\"}}' def test_self(self): updater = new_updater() self.assertEqual(updater.from_dict(CFG()), 0)", "pass def new_updater(): return config_updater.ConfigUpdater(CFG(), dummy) def CFG(): return run.get_cfg() class ConfigUpdater(unittest.TestCase): ADD_5", "dummy) def CFG(): return run.get_cfg() class ConfigUpdater(unittest.TestCase): ADD_5 = {'ip_server': '', 'ip': 1,", "self.assertEqual(updater.from_dict(CFG()), 0) self.assertEqual(updater._updated_count, 0) self.assertEqual(updater._updated_count, updater._change_count) self.assertFalse(updater.save_ini) def test_5(self): updater = new_updater() self.assertEqual(updater.from_dict({'be", "updater = new_updater() self.assertEqual(updater.from_json(add), 4) self.assertTrue(updater.save_ini) def test_5_5_dict(self): updater = new_updater() self.assertEqual(updater.from_dict({'settings': self.ADD_5}),", "test_prov(self): updater = new_updater() self.assertEqual(updater.from_json(self.TXT_4), 4) self.assertTrue(updater.save_ini) self.assertEqual(updater.from_dict(CFG()), 2) self.assertFalse(updater.save_ini) self.assertEqual(updater.from_dict(CFG()), 0) self.assertFalse(updater.save_ini)", "import config_updater def dummy(*_, **__): pass def new_updater(): return config_updater.ConfigUpdater(CFG(), dummy) def CFG():", "return run.get_cfg() class ConfigUpdater(unittest.TestCase): ADD_5 = {'ip_server': '', 'ip': 1, 'two__': '2', 'three__':", "new_updater() self.assertEqual(updater.from_json(self.TXT_4), 4) self.assertTrue(updater.save_ini) self.assertEqual(updater.from_dict(CFG()), 2) self.assertFalse(updater.save_ini) self.assertEqual(updater.from_dict(CFG()), 0) self.assertFalse(updater.save_ini) def test_prov_proxy(self): txt_5", "def CFG(): return run.get_cfg() class ConfigUpdater(unittest.TestCase): ADD_5 = {'ip_server': '', 'ip': 1, 'two__':", "test_5_0_str(self): add = json.dumps({'be never': self.ADD_5}) updater = new_updater() self.assertEqual(updater.from_json(add), 0) self.assertFalse(updater.save_ini) def", "return config_updater.ConfigUpdater(CFG(), dummy) def CFG(): return run.get_cfg() class ConfigUpdater(unittest.TestCase): ADD_5 = {'ip_server': '',", "TXT_4 = '{\"PROVIDERTTS\":\"NoYandex\",\"APIKEYTTS\":\"y_key\",\"PROVIDERSTT\":\"NoGoogle\",\"APIKEYSTT\":\"g_key\",' \\ 
'\"ALARMKWACTIVATED\":\"1\",\"ALARMTTS\":\"1\",\"ALARMSTT\":\"1\",\"newer__\":{\"fdfd\":\"777\"}}' def test_self(self): updater = new_updater() self.assertEqual(updater.from_dict(CFG()), 0) self.assertEqual(updater._updated_count,", "updater = new_updater() self.assertEqual(updater.from_json(self.TXT_4), 4) self.assertTrue(updater.save_ini) self.assertEqual(updater.from_dict(CFG()), 2) self.assertFalse(updater.save_ini) self.assertEqual(updater.from_dict(CFG()), 0) self.assertFalse(updater.save_ini) def", "config_updater.ConfigUpdater(CFG(), dummy) def CFG(): return run.get_cfg() class ConfigUpdater(unittest.TestCase): ADD_5 = {'ip_server': '', 'ip':", "new_updater() self.assertEqual(updater.from_dict({'be never': self.ADD_5}), 5) self.assertFalse(updater.save_ini) def test_5_0_str(self): add = json.dumps({'be never': self.ADD_5})", "run.get_cfg() class ConfigUpdater(unittest.TestCase): ADD_5 = {'ip_server': '', 'ip': 1, 'two__': '2', 'three__': 3,", "3, 'four__': '4'} TXT_4 = '{\"PROVIDERTTS\":\"NoYandex\",\"APIKEYTTS\":\"y_key\",\"PROVIDERSTT\":\"NoGoogle\",\"APIKEYSTT\":\"g_key\",' \\ '\"ALARMKWACTIVATED\":\"1\",\"ALARMTTS\":\"1\",\"ALARMSTT\":\"1\",\"newer__\":{\"fdfd\":\"777\"}}' def test_self(self): updater = new_updater()", "= new_updater() self.assertEqual(updater.from_json(self.TXT_4), 4) self.assertTrue(updater.save_ini) self.assertEqual(updater.from_dict(CFG()), 2) self.assertFalse(updater.save_ini) self.assertEqual(updater.from_dict(CFG()), 0) self.assertFalse(updater.save_ini) def test_prov_proxy(self):", "new_updater() self.assertEqual(updater.from_dict(CFG()), 0) self.assertEqual(updater._updated_count, 0) self.assertEqual(updater._updated_count, updater._change_count) self.assertFalse(updater.save_ini) def test_5(self): updater = new_updater()", "def dummy(*_, **__): pass def new_updater(): return config_updater.ConfigUpdater(CFG(), dummy) def CFG(): return run.get_cfg()", "lib.tools import config_updater def dummy(*_, **__): pass def new_updater(): return config_updater.ConfigUpdater(CFG(), dummy) def", "import json import unittest import run from lib.tools import config_updater def dummy(*_, **__):", "1, 'two__': '2', 'three__': 3, 'four__': '4'} TXT_4 = '{\"PROVIDERTTS\":\"NoYandex\",\"APIKEYTTS\":\"y_key\",\"PROVIDERSTT\":\"NoGoogle\",\"APIKEYSTT\":\"g_key\",' \\ '\"ALARMKWACTIVATED\":\"1\",\"ALARMTTS\":\"1\",\"ALARMSTT\":\"1\",\"newer__\":{\"fdfd\":\"777\"}}' def", "self.assertEqual(updater._updated_count, 0) self.assertEqual(updater._updated_count, updater._change_count) self.assertFalse(updater.save_ini) def test_5(self): updater = new_updater() self.assertEqual(updater.from_dict({'be never': self.ADD_5}),", "'', 'ip': 1, 'two__': '2', 'three__': 3, 'four__': '4'} TXT_4 = '{\"PROVIDERTTS\":\"NoYandex\",\"APIKEYTTS\":\"y_key\",\"PROVIDERSTT\":\"NoGoogle\",\"APIKEYSTT\":\"g_key\",' \\", "self.assertFalse(updater.save_ini) self.assertEqual(updater.from_dict(CFG()), 0) self.assertFalse(updater.save_ini) def test_prov_proxy(self): txt_5 = self.TXT_4[:-1] + ',\"proxy\":{\"enable\": \"1\"}}' updater", "test_self(self): updater = new_updater() self.assertEqual(updater.from_dict(CFG()), 0) self.assertEqual(updater._updated_count, 0) self.assertEqual(updater._updated_count, updater._change_count) self.assertFalse(updater.save_ini) def test_5(self):", "'2', 'three__': 3, 'four__': '4'} TXT_4 = '{\"PROVIDERTTS\":\"NoYandex\",\"APIKEYTTS\":\"y_key\",\"PROVIDERSTT\":\"NoGoogle\",\"APIKEYSTT\":\"g_key\",' \\ 
'\"ALARMKWACTIVATED\":\"1\",\"ALARMTTS\":\"1\",\"ALARMSTT\":\"1\",\"newer__\":{\"fdfd\":\"777\"}}' def test_self(self): updater", "test_5(self): updater = new_updater() self.assertEqual(updater.from_dict({'be never': self.ADD_5}), 5) self.assertFalse(updater.save_ini) def test_5_0_str(self): add =", "'\"ALARMKWACTIVATED\":\"1\",\"ALARMTTS\":\"1\",\"ALARMSTT\":\"1\",\"newer__\":{\"fdfd\":\"777\"}}' def test_self(self): updater = new_updater() self.assertEqual(updater.from_dict(CFG()), 0) self.assertEqual(updater._updated_count, 0) self.assertEqual(updater._updated_count, updater._change_count) self.assertFalse(updater.save_ini)", "add = json.dumps({'be never': self.ADD_5}) updater = new_updater() self.assertEqual(updater.from_json(add), 0) self.assertFalse(updater.save_ini) def test_5_4_str(self):", "run from lib.tools import config_updater def dummy(*_, **__): pass def new_updater(): return config_updater.ConfigUpdater(CFG(),", "self.assertEqual(updater.from_dict({'settings': self.ADD_5}), 5) self.assertFalse(updater.save_ini) def test_prov(self): updater = new_updater() self.assertEqual(updater.from_json(self.TXT_4), 4) self.assertTrue(updater.save_ini) self.assertEqual(updater.from_dict(CFG()),", "self.assertEqual(updater.from_json(add), 0) self.assertFalse(updater.save_ini) def test_5_4_str(self): add = json.dumps(self.ADD_5) updater = new_updater() self.assertEqual(updater.from_json(add), 4)", "= new_updater() self.assertEqual(updater.from_dict({'be never': self.ADD_5}), 5) self.assertFalse(updater.save_ini) def test_5_0_str(self): add = json.dumps({'be never':", "self.assertTrue(updater.save_ini) def test_5_5_dict(self): updater = new_updater() self.assertEqual(updater.from_dict({'settings': self.ADD_5}), 5) self.assertFalse(updater.save_ini) def test_prov(self): updater", "0) self.assertFalse(updater.save_ini) def test_5_4_str(self): add = json.dumps(self.ADD_5) updater = new_updater() self.assertEqual(updater.from_json(add), 4) self.assertTrue(updater.save_ini)", "'three__': 3, 'four__': '4'} TXT_4 = '{\"PROVIDERTTS\":\"NoYandex\",\"APIKEYTTS\":\"y_key\",\"PROVIDERSTT\":\"NoGoogle\",\"APIKEYSTT\":\"g_key\",' \\ '\"ALARMKWACTIVATED\":\"1\",\"ALARMTTS\":\"1\",\"ALARMSTT\":\"1\",\"newer__\":{\"fdfd\":\"777\"}}' def test_self(self): updater =", "\\ '\"ALARMKWACTIVATED\":\"1\",\"ALARMTTS\":\"1\",\"ALARMSTT\":\"1\",\"newer__\":{\"fdfd\":\"777\"}}' def test_self(self): updater = new_updater() self.assertEqual(updater.from_dict(CFG()), 0) self.assertEqual(updater._updated_count, 0) self.assertEqual(updater._updated_count, updater._change_count)", "def new_updater(): return config_updater.ConfigUpdater(CFG(), dummy) def CFG(): return run.get_cfg() class ConfigUpdater(unittest.TestCase): ADD_5 =", "= new_updater() self.assertEqual(updater.from_json(add), 4) self.assertTrue(updater.save_ini) def test_5_5_dict(self): updater = new_updater() self.assertEqual(updater.from_dict({'settings': self.ADD_5}), 5)", "self.ADD_5}), 5) self.assertFalse(updater.save_ini) def test_prov(self): updater = new_updater() self.assertEqual(updater.from_json(self.TXT_4), 4) self.assertTrue(updater.save_ini) self.assertEqual(updater.from_dict(CFG()), 2)", "self.assertEqual(updater.from_dict(CFG()), 2) self.assertFalse(updater.save_ini) self.assertEqual(updater.from_dict(CFG()), 0) self.assertFalse(updater.save_ini) def test_prov_proxy(self): txt_5 = self.TXT_4[:-1] + ',\"proxy\":{\"enable\":", "self.ADD_5}) updater = new_updater() self.assertEqual(updater.from_json(add), 0) self.assertFalse(updater.save_ini) def 
test_5_4_str(self): add = json.dumps(self.ADD_5) updater", "new_updater(): return config_updater.ConfigUpdater(CFG(), dummy) def CFG(): return run.get_cfg() class ConfigUpdater(unittest.TestCase): ADD_5 = {'ip_server':", "def test_5_4_str(self): add = json.dumps(self.ADD_5) updater = new_updater() self.assertEqual(updater.from_json(add), 4) self.assertTrue(updater.save_ini) def test_5_5_dict(self):", "',\"proxy\":{\"enable\": \"1\"}}' updater = new_updater() self.assertEqual(updater.from_json(txt_5), 5) self.assertTrue(updater.save_ini) self.assertEqual(updater.from_dict(CFG()), 3) self.assertFalse(updater.save_ini) self.assertEqual(updater.from_dict(CFG()), 0)", "self.assertFalse(updater.save_ini) def test_5(self): updater = new_updater() self.assertEqual(updater.from_dict({'be never': self.ADD_5}), 5) self.assertFalse(updater.save_ini) def test_5_0_str(self):", "= new_updater() self.assertEqual(updater.from_dict(CFG()), 0) self.assertEqual(updater._updated_count, 0) self.assertEqual(updater._updated_count, updater._change_count) self.assertFalse(updater.save_ini) def test_5(self): updater =", "import run from lib.tools import config_updater def dummy(*_, **__): pass def new_updater(): return", "unittest import run from lib.tools import config_updater def dummy(*_, **__): pass def new_updater():", "5) self.assertFalse(updater.save_ini) def test_5_0_str(self): add = json.dumps({'be never': self.ADD_5}) updater = new_updater() self.assertEqual(updater.from_json(add),", "{'ip_server': '', 'ip': 1, 'two__': '2', 'three__': 3, 'four__': '4'} TXT_4 = '{\"PROVIDERTTS\":\"NoYandex\",\"APIKEYTTS\":\"y_key\",\"PROVIDERSTT\":\"NoGoogle\",\"APIKEYSTT\":\"g_key\",'", "new_updater() self.assertEqual(updater.from_json(add), 4) self.assertTrue(updater.save_ini) def test_5_5_dict(self): updater = new_updater() self.assertEqual(updater.from_dict({'settings': self.ADD_5}), 5) self.assertFalse(updater.save_ini)", "def test_prov(self): updater = new_updater() self.assertEqual(updater.from_json(self.TXT_4), 4) self.assertTrue(updater.save_ini) self.assertEqual(updater.from_dict(CFG()), 2) self.assertFalse(updater.save_ini) self.assertEqual(updater.from_dict(CFG()), 0)", "2) self.assertFalse(updater.save_ini) self.assertEqual(updater.from_dict(CFG()), 0) self.assertFalse(updater.save_ini) def test_prov_proxy(self): txt_5 = self.TXT_4[:-1] + ',\"proxy\":{\"enable\": \"1\"}}'", "json import unittest import run from lib.tools import config_updater def dummy(*_, **__): pass", "self.assertFalse(updater.save_ini) def test_5_0_str(self): add = json.dumps({'be never': self.ADD_5}) updater = new_updater() self.assertEqual(updater.from_json(add), 0)", "4) self.assertTrue(updater.save_ini) self.assertEqual(updater.from_dict(CFG()), 2) self.assertFalse(updater.save_ini) self.assertEqual(updater.from_dict(CFG()), 0) self.assertFalse(updater.save_ini) def test_prov_proxy(self): txt_5 = self.TXT_4[:-1]", "test_5_5_dict(self): updater = new_updater() self.assertEqual(updater.from_dict({'settings': self.ADD_5}), 5) self.assertFalse(updater.save_ini) def test_prov(self): updater = new_updater()", "0) self.assertEqual(updater._updated_count, updater._change_count) self.assertFalse(updater.save_ini) def test_5(self): updater = new_updater() self.assertEqual(updater.from_dict({'be never': self.ADD_5}), 5)", "def test_5(self): updater = new_updater() self.assertEqual(updater.from_dict({'be never': self.ADD_5}), 5) self.assertFalse(updater.save_ini) def test_5_0_str(self): add", "never': self.ADD_5}) updater = new_updater() 
self.assertEqual(updater.from_json(add), 0) self.assertFalse(updater.save_ini) def test_5_4_str(self): add = json.dumps(self.ADD_5)", "\"1\"}}' updater = new_updater() self.assertEqual(updater.from_json(txt_5), 5) self.assertTrue(updater.save_ini) self.assertEqual(updater.from_dict(CFG()), 3) self.assertFalse(updater.save_ini) self.assertEqual(updater.from_dict(CFG()), 0) self.assertFalse(updater.save_ini)", "self.TXT_4[:-1] + ',\"proxy\":{\"enable\": \"1\"}}' updater = new_updater() self.assertEqual(updater.from_json(txt_5), 5) self.assertTrue(updater.save_ini) self.assertEqual(updater.from_dict(CFG()), 3) self.assertFalse(updater.save_ini)", "'four__': '4'} TXT_4 = '{\"PROVIDERTTS\":\"NoYandex\",\"APIKEYTTS\":\"y_key\",\"PROVIDERSTT\":\"NoGoogle\",\"APIKEYSTT\":\"g_key\",' \\ '\"ALARMKWACTIVATED\":\"1\",\"ALARMTTS\":\"1\",\"ALARMSTT\":\"1\",\"newer__\":{\"fdfd\":\"777\"}}' def test_self(self): updater = new_updater() self.assertEqual(updater.from_dict(CFG()),", "self.assertEqual(updater.from_dict(CFG()), 0) self.assertFalse(updater.save_ini) def test_prov_proxy(self): txt_5 = self.TXT_4[:-1] + ',\"proxy\":{\"enable\": \"1\"}}' updater =", "json.dumps(self.ADD_5) updater = new_updater() self.assertEqual(updater.from_json(add), 4) self.assertTrue(updater.save_ini) def test_5_5_dict(self): updater = new_updater() self.assertEqual(updater.from_dict({'settings':", "new_updater() self.assertEqual(updater.from_json(add), 0) self.assertFalse(updater.save_ini) def test_5_4_str(self): add = json.dumps(self.ADD_5) updater = new_updater() self.assertEqual(updater.from_json(add),", "0) self.assertEqual(updater._updated_count, 0) self.assertEqual(updater._updated_count, updater._change_count) self.assertFalse(updater.save_ini) def test_5(self): updater = new_updater() self.assertEqual(updater.from_dict({'be never':", "'two__': '2', 'three__': 3, 'four__': '4'} TXT_4 = '{\"PROVIDERTTS\":\"NoYandex\",\"APIKEYTTS\":\"y_key\",\"PROVIDERSTT\":\"NoGoogle\",\"APIKEYSTT\":\"g_key\",' \\ '\"ALARMKWACTIVATED\":\"1\",\"ALARMTTS\":\"1\",\"ALARMSTT\":\"1\",\"newer__\":{\"fdfd\":\"777\"}}' def test_self(self):", "updater._change_count) self.assertFalse(updater.save_ini) def test_5(self): updater = new_updater() self.assertEqual(updater.from_dict({'be never': self.ADD_5}), 5) self.assertFalse(updater.save_ini) def", "**__): pass def new_updater(): return config_updater.ConfigUpdater(CFG(), dummy) def CFG(): return run.get_cfg() class ConfigUpdater(unittest.TestCase):", "class ConfigUpdater(unittest.TestCase): ADD_5 = {'ip_server': '', 'ip': 1, 'two__': '2', 'three__': 3, 'four__':", "def test_self(self): updater = new_updater() self.assertEqual(updater.from_dict(CFG()), 0) self.assertEqual(updater._updated_count, 0) self.assertEqual(updater._updated_count, updater._change_count) self.assertFalse(updater.save_ini) def", "from lib.tools import config_updater def dummy(*_, **__): pass def new_updater(): return config_updater.ConfigUpdater(CFG(), dummy)", "self.assertTrue(updater.save_ini) self.assertEqual(updater.from_dict(CFG()), 2) self.assertFalse(updater.save_ini) self.assertEqual(updater.from_dict(CFG()), 0) self.assertFalse(updater.save_ini) def test_prov_proxy(self): txt_5 = self.TXT_4[:-1] +", "self.assertFalse(updater.save_ini) def test_prov_proxy(self): txt_5 = self.TXT_4[:-1] + ',\"proxy\":{\"enable\": \"1\"}}' updater = new_updater() self.assertEqual(updater.from_json(txt_5),", "updater = new_updater() self.assertEqual(updater.from_json(add), 0) self.assertFalse(updater.save_ini) def test_5_4_str(self): add 
= json.dumps(self.ADD_5) updater =", "0) self.assertFalse(updater.save_ini) def test_prov_proxy(self): txt_5 = self.TXT_4[:-1] + ',\"proxy\":{\"enable\": \"1\"}}' updater = new_updater()", "test_5_4_str(self): add = json.dumps(self.ADD_5) updater = new_updater() self.assertEqual(updater.from_json(add), 4) self.assertTrue(updater.save_ini) def test_5_5_dict(self): updater", "4) self.assertTrue(updater.save_ini) def test_5_5_dict(self): updater = new_updater() self.assertEqual(updater.from_dict({'settings': self.ADD_5}), 5) self.assertFalse(updater.save_ini) def test_prov(self):", "ADD_5 = {'ip_server': '', 'ip': 1, 'two__': '2', 'three__': 3, 'four__': '4'} TXT_4", "never': self.ADD_5}), 5) self.assertFalse(updater.save_ini) def test_5_0_str(self): add = json.dumps({'be never': self.ADD_5}) updater =", "'ip': 1, 'two__': '2', 'three__': 3, 'four__': '4'} TXT_4 = '{\"PROVIDERTTS\":\"NoYandex\",\"APIKEYTTS\":\"y_key\",\"PROVIDERSTT\":\"NoGoogle\",\"APIKEYSTT\":\"g_key\",' \\ '\"ALARMKWACTIVATED\":\"1\",\"ALARMTTS\":\"1\",\"ALARMSTT\":\"1\",\"newer__\":{\"fdfd\":\"777\"}}'", "+ ',\"proxy\":{\"enable\": \"1\"}}' updater = new_updater() self.assertEqual(updater.from_json(txt_5), 5) self.assertTrue(updater.save_ini) self.assertEqual(updater.from_dict(CFG()), 3) self.assertFalse(updater.save_ini) self.assertEqual(updater.from_dict(CFG()),", "= '{\"PROVIDERTTS\":\"NoYandex\",\"APIKEYTTS\":\"y_key\",\"PROVIDERSTT\":\"NoGoogle\",\"APIKEYSTT\":\"g_key\",' \\ '\"ALARMKWACTIVATED\":\"1\",\"ALARMTTS\":\"1\",\"ALARMSTT\":\"1\",\"newer__\":{\"fdfd\":\"777\"}}' def test_self(self): updater = new_updater() self.assertEqual(updater.from_dict(CFG()), 0) self.assertEqual(updater._updated_count, 0)", "= self.TXT_4[:-1] + ',\"proxy\":{\"enable\": \"1\"}}' updater = new_updater() self.assertEqual(updater.from_json(txt_5), 5) self.assertTrue(updater.save_ini) self.assertEqual(updater.from_dict(CFG()), 3)", "txt_5 = self.TXT_4[:-1] + ',\"proxy\":{\"enable\": \"1\"}}' updater = new_updater() self.assertEqual(updater.from_json(txt_5), 5) self.assertTrue(updater.save_ini) self.assertEqual(updater.from_dict(CFG()),", "def test_5_0_str(self): add = json.dumps({'be never': self.ADD_5}) updater = new_updater() self.assertEqual(updater.from_json(add), 0) self.assertFalse(updater.save_ini)", "new_updater() self.assertEqual(updater.from_dict({'settings': self.ADD_5}), 5) self.assertFalse(updater.save_ini) def test_prov(self): updater = new_updater() self.assertEqual(updater.from_json(self.TXT_4), 4) self.assertTrue(updater.save_ini)", "import unittest import run from lib.tools import config_updater def dummy(*_, **__): pass def", "= new_updater() self.assertEqual(updater.from_json(add), 0) self.assertFalse(updater.save_ini) def test_5_4_str(self): add = json.dumps(self.ADD_5) updater = new_updater()", "= json.dumps({'be never': self.ADD_5}) updater = new_updater() self.assertEqual(updater.from_json(add), 0) self.assertFalse(updater.save_ini) def test_5_4_str(self): add", "json.dumps({'be never': self.ADD_5}) updater = new_updater() self.assertEqual(updater.from_json(add), 0) self.assertFalse(updater.save_ini) def test_5_4_str(self): add =", "ConfigUpdater(unittest.TestCase): ADD_5 = {'ip_server': '', 'ip': 1, 'two__': '2', 'three__': 3, 'four__': '4'}" ]
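# Standard unittest entry point so the file can also be run directly;
# the project may rely on its own test runner instead.
if __name__ == '__main__':
    unittest.main()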
[ "import Organism class Dirt(Organism): # Default block that makes our world def __init__(self,", "if self.currentWorld.getWorldType() == 1: self.image = pygame.image.load(os.path.join('icons', 'dirthex.jpg')) else: self.image = pygame.image.load(os.path.join('icons', 'dirt.jpg'))", "self.image = pygame.image.load(os.path.join('icons', 'dirt.jpg')) self.image = pygame.transform.scale(self.image, (self.currentWorld.getIconWidth(), self.currentWorld.getIconHeight())) return self.image def getName(self):", "2021, <NAME>, All rights reserved. import os, pygame from Organism import Organism class", "import os, pygame from Organism import Organism class Dirt(Organism): # Default block that", "pygame from Organism import Organism class Dirt(Organism): # Default block that makes our", "world def __init__(self, _currentWorld, _positionX, _positionY): super(Dirt, self).__init__(0, 0, _currentWorld, _positionX, _positionY, False)", "2021 IT/CS # @ Copyright 2021, <NAME>, All rights reserved. import os, pygame", "0, _currentWorld, _positionX, _positionY, False) def getImage(self): if self.currentWorld.getWorldType() == 1: self.image =", "_positionX, _positionY): super(Dirt, self).__init__(0, 0, _currentWorld, _positionX, _positionY, False) def getImage(self): if self.currentWorld.getWorldType()", "_positionY, False) def getImage(self): if self.currentWorld.getWorldType() == 1: self.image = pygame.image.load(os.path.join('icons', 'dirthex.jpg')) else:", "# <NAME> s184407 2021 IT/CS # @ Copyright 2021, <NAME>, All rights reserved.", "pygame.image.load(os.path.join('icons', 'dirthex.jpg')) else: self.image = pygame.image.load(os.path.join('icons', 'dirt.jpg')) self.image = pygame.transform.scale(self.image, (self.currentWorld.getIconWidth(), self.currentWorld.getIconHeight())) return", "pygame.image.load(os.path.join('icons', 'dirt.jpg')) self.image = pygame.transform.scale(self.image, (self.currentWorld.getIconWidth(), self.currentWorld.getIconHeight())) return self.image def getName(self): return \"Dirt\"", "IT/CS # @ Copyright 2021, <NAME>, All rights reserved. import os, pygame from", "class Dirt(Organism): # Default block that makes our world def __init__(self, _currentWorld, _positionX,", "_currentWorld, _positionX, _positionY, False) def getImage(self): if self.currentWorld.getWorldType() == 1: self.image = pygame.image.load(os.path.join('icons',", "_positionX, _positionY, False) def getImage(self): if self.currentWorld.getWorldType() == 1: self.image = pygame.image.load(os.path.join('icons', 'dirthex.jpg'))", "_positionY): super(Dirt, self).__init__(0, 0, _currentWorld, _positionX, _positionY, False) def getImage(self): if self.currentWorld.getWorldType() ==", "s184407 2021 IT/CS # @ Copyright 2021, <NAME>, All rights reserved. import os,", "NR 2 # <NAME> s184407 2021 IT/CS # @ Copyright 2021, <NAME>, All", "_currentWorld, _positionX, _positionY): super(Dirt, self).__init__(0, 0, _currentWorld, _positionX, _positionY, False) def getImage(self): if", "'dirthex.jpg')) else: self.image = pygame.image.load(os.path.join('icons', 'dirt.jpg')) self.image = pygame.transform.scale(self.image, (self.currentWorld.getIconWidth(), self.currentWorld.getIconHeight())) return self.image", "# @ Copyright 2021, <NAME>, All rights reserved. 
import os, pygame from Organism", "__init__(self, _currentWorld, _positionX, _positionY): super(Dirt, self).__init__(0, 0, _currentWorld, _positionX, _positionY, False) def getImage(self):", "super(Dirt, self).__init__(0, 0, _currentWorld, _positionX, _positionY, False) def getImage(self): if self.currentWorld.getWorldType() == 1:", "def getImage(self): if self.currentWorld.getWorldType() == 1: self.image = pygame.image.load(os.path.join('icons', 'dirthex.jpg')) else: self.image =", "self).__init__(0, 0, _currentWorld, _positionX, _positionY, False) def getImage(self): if self.currentWorld.getWorldType() == 1: self.image", "Organism import Organism class Dirt(Organism): # Default block that makes our world def", "block that makes our world def __init__(self, _currentWorld, _positionX, _positionY): super(Dirt, self).__init__(0, 0,", "reserved. import os, pygame from Organism import Organism class Dirt(Organism): # Default block", "== 1: self.image = pygame.image.load(os.path.join('icons', 'dirthex.jpg')) else: self.image = pygame.image.load(os.path.join('icons', 'dirt.jpg')) self.image =", "<NAME> s184407 2021 IT/CS # @ Copyright 2021, <NAME>, All rights reserved. import", "makes our world def __init__(self, _currentWorld, _positionX, _positionY): super(Dirt, self).__init__(0, 0, _currentWorld, _positionX,", "Default block that makes our world def __init__(self, _currentWorld, _positionX, _positionY): super(Dirt, self).__init__(0,", "self.currentWorld.getWorldType() == 1: self.image = pygame.image.load(os.path.join('icons', 'dirthex.jpg')) else: self.image = pygame.image.load(os.path.join('icons', 'dirt.jpg')) self.image", "our world def __init__(self, _currentWorld, _positionX, _positionY): super(Dirt, self).__init__(0, 0, _currentWorld, _positionX, _positionY,", "WETI PROJECT NR 2 # <NAME> s184407 2021 IT/CS # @ Copyright 2021,", "1: self.image = pygame.image.load(os.path.join('icons', 'dirthex.jpg')) else: self.image = pygame.image.load(os.path.join('icons', 'dirt.jpg')) self.image = pygame.transform.scale(self.image,", "else: self.image = pygame.image.load(os.path.join('icons', 'dirt.jpg')) self.image = pygame.transform.scale(self.image, (self.currentWorld.getIconWidth(), self.currentWorld.getIconHeight())) return self.image def", "@ Copyright 2021, <NAME>, All rights reserved. import os, pygame from Organism import", "All rights reserved. import os, pygame from Organism import Organism class Dirt(Organism): #", "os, pygame from Organism import Organism class Dirt(Organism): # Default block that makes", "False) def getImage(self): if self.currentWorld.getWorldType() == 1: self.image = pygame.image.load(os.path.join('icons', 'dirthex.jpg')) else: self.image", "Dirt(Organism): # Default block that makes our world def __init__(self, _currentWorld, _positionX, _positionY):", "<NAME>, All rights reserved. 
import os, pygame from Organism import Organism class Dirt(Organism):", "= pygame.image.load(os.path.join('icons', 'dirt.jpg')) self.image = pygame.transform.scale(self.image, (self.currentWorld.getIconWidth(), self.currentWorld.getIconHeight())) return self.image def getName(self): return", "2 # <NAME> s184407 2021 IT/CS # @ Copyright 2021, <NAME>, All rights", "OOP PG WETI PROJECT NR 2 # <NAME> s184407 2021 IT/CS # @", "Organism class Dirt(Organism): # Default block that makes our world def __init__(self, _currentWorld,", "PROJECT NR 2 # <NAME> s184407 2021 IT/CS # @ Copyright 2021, <NAME>,", "from Organism import Organism class Dirt(Organism): # Default block that makes our world", "self.image = pygame.image.load(os.path.join('icons', 'dirthex.jpg')) else: self.image = pygame.image.load(os.path.join('icons', 'dirt.jpg')) self.image = pygame.transform.scale(self.image, (self.currentWorld.getIconWidth(),", "def __init__(self, _currentWorld, _positionX, _positionY): super(Dirt, self).__init__(0, 0, _currentWorld, _positionX, _positionY, False) def", "# OOP PG WETI PROJECT NR 2 # <NAME> s184407 2021 IT/CS #", "that makes our world def __init__(self, _currentWorld, _positionX, _positionY): super(Dirt, self).__init__(0, 0, _currentWorld,", "getImage(self): if self.currentWorld.getWorldType() == 1: self.image = pygame.image.load(os.path.join('icons', 'dirthex.jpg')) else: self.image = pygame.image.load(os.path.join('icons',", "<gh_stars>1-10 # OOP PG WETI PROJECT NR 2 # <NAME> s184407 2021 IT/CS", "= pygame.image.load(os.path.join('icons', 'dirthex.jpg')) else: self.image = pygame.image.load(os.path.join('icons', 'dirt.jpg')) self.image = pygame.transform.scale(self.image, (self.currentWorld.getIconWidth(), self.currentWorld.getIconHeight()))", "Copyright 2021, <NAME>, All rights reserved. import os, pygame from Organism import Organism", "# Default block that makes our world def __init__(self, _currentWorld, _positionX, _positionY): super(Dirt,", "rights reserved. import os, pygame from Organism import Organism class Dirt(Organism): # Default", "PG WETI PROJECT NR 2 # <NAME> s184407 2021 IT/CS # @ Copyright" ]
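# Usage sketch (added, not part of the original file): Dirt only needs a world
# object exposing getWorldType(), getIconWidth() and getIconHeight(), so a
# hypothetical stub is enough to exercise it. `StubWorld` and the 32x32 icon
# size are assumptions, and this presumes Organism's constructor stores the
# world as self.currentWorld, as getImage() implies; getImage() additionally
# needs the repo's icons/dirt.jpg to exist in the working directory.
class StubWorld:
    def getWorldType(self):
        return 0        # anything other than 1 selects the square 'dirt.jpg'

    def getIconWidth(self):
        return 32

    def getIconHeight(self):
        return 32


dirt = Dirt(StubWorld(), 0, 0)
print(dirt.getName())       # -> "Dirt"
icon = dirt.getImage()      # a pygame.Surface scaled to 32x32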
[ "distances. \"\"\" from numpy.random import randint from numpy import abs, int import pandas", "pd.Series, target: pd.Series, answers: pd.DataFrame, answer_scale=5, answer_source=None, answer_target=None): def _maybe_random(answer: int) -> int:", "or random - distances. \"\"\" from numpy.random import randint from numpy import abs,", "Dummy module for generating fixed - or random - distances. \"\"\" from numpy.random", "`answer_scale` \"\"\" return answer if answer is not None else randint(0, answer_scale) answer_source", "module for generating fixed - or random - distances. \"\"\" from numpy.random import", "int) -> int: \"\"\" Return either :param:`answer` or random number in `answer_scale` \"\"\"", "for generating fixed - or random - distances. \"\"\" from numpy.random import randint", "answer if answer is not None else randint(0, answer_scale) answer_source = _maybe_random(answer_source) answer_target", ":param:`answer` or random number in `answer_scale` \"\"\" return answer if answer is not", "answer is not None else randint(0, answer_scale) answer_source = _maybe_random(answer_source) answer_target = _maybe_random(answer_target)", "None else randint(0, answer_scale) answer_source = _maybe_random(answer_source) answer_target = _maybe_random(answer_target) return abs(answer_source -", "\"\"\" Dummy module for generating fixed - or random - distances. \"\"\" from", "pandas as pd def distance(source: pd.Series, target: pd.Series, answers: pd.DataFrame, answer_scale=5, answer_source=None, answer_target=None):", "-> int: \"\"\" Return either :param:`answer` or random number in `answer_scale` \"\"\" return", "int: \"\"\" Return either :param:`answer` or random number in `answer_scale` \"\"\" return answer", "or random number in `answer_scale` \"\"\" return answer if answer is not None", "numpy.random import randint from numpy import abs, int import pandas as pd def", "def _maybe_random(answer: int) -> int: \"\"\" Return either :param:`answer` or random number in", "distance(source: pd.Series, target: pd.Series, answers: pd.DataFrame, answer_scale=5, answer_source=None, answer_target=None): def _maybe_random(answer: int) ->", "int import pandas as pd def distance(source: pd.Series, target: pd.Series, answers: pd.DataFrame, answer_scale=5,", "import pandas as pd def distance(source: pd.Series, target: pd.Series, answers: pd.DataFrame, answer_scale=5, answer_source=None,", "either :param:`answer` or random number in `answer_scale` \"\"\" return answer if answer is", "import randint from numpy import abs, int import pandas as pd def distance(source:", "fixed - or random - distances. \"\"\" from numpy.random import randint from numpy", "else randint(0, answer_scale) answer_source = _maybe_random(answer_source) answer_target = _maybe_random(answer_target) return abs(answer_source - answer_target)", "pd def distance(source: pd.Series, target: pd.Series, answers: pd.DataFrame, answer_scale=5, answer_source=None, answer_target=None): def _maybe_random(answer:", "Return either :param:`answer` or random number in `answer_scale` \"\"\" return answer if answer", "abs, int import pandas as pd def distance(source: pd.Series, target: pd.Series, answers: pd.DataFrame,", "\"\"\" return answer if answer is not None else randint(0, answer_scale) answer_source =", "<reponame>Kyrmy/prototyyppi<filename>agora_analytica/analytics/dummy.py \"\"\" Dummy module for generating fixed - or random - distances. \"\"\"", "generating fixed - or random - distances. 
\"\"\" from numpy.random import randint from", "\"\"\" Return either :param:`answer` or random number in `answer_scale` \"\"\" return answer if", "_maybe_random(answer: int) -> int: \"\"\" Return either :param:`answer` or random number in `answer_scale`", "if answer is not None else randint(0, answer_scale) answer_source = _maybe_random(answer_source) answer_target =", "def distance(source: pd.Series, target: pd.Series, answers: pd.DataFrame, answer_scale=5, answer_source=None, answer_target=None): def _maybe_random(answer: int)", "answer_scale=5, answer_source=None, answer_target=None): def _maybe_random(answer: int) -> int: \"\"\" Return either :param:`answer` or", "pd.DataFrame, answer_scale=5, answer_source=None, answer_target=None): def _maybe_random(answer: int) -> int: \"\"\" Return either :param:`answer`", "randint from numpy import abs, int import pandas as pd def distance(source: pd.Series,", "return answer if answer is not None else randint(0, answer_scale) answer_source = _maybe_random(answer_source)", "random number in `answer_scale` \"\"\" return answer if answer is not None else", "from numpy import abs, int import pandas as pd def distance(source: pd.Series, target:", "is not None else randint(0, answer_scale) answer_source = _maybe_random(answer_source) answer_target = _maybe_random(answer_target) return", "as pd def distance(source: pd.Series, target: pd.Series, answers: pd.DataFrame, answer_scale=5, answer_source=None, answer_target=None): def", "- distances. \"\"\" from numpy.random import randint from numpy import abs, int import", "answer_source=None, answer_target=None): def _maybe_random(answer: int) -> int: \"\"\" Return either :param:`answer` or random", "- or random - distances. \"\"\" from numpy.random import randint from numpy import", "pd.Series, answers: pd.DataFrame, answer_scale=5, answer_source=None, answer_target=None): def _maybe_random(answer: int) -> int: \"\"\" Return", "from numpy.random import randint from numpy import abs, int import pandas as pd", "\"\"\" from numpy.random import randint from numpy import abs, int import pandas as", "answer_target=None): def _maybe_random(answer: int) -> int: \"\"\" Return either :param:`answer` or random number", "answers: pd.DataFrame, answer_scale=5, answer_source=None, answer_target=None): def _maybe_random(answer: int) -> int: \"\"\" Return either", "not None else randint(0, answer_scale) answer_source = _maybe_random(answer_source) answer_target = _maybe_random(answer_target) return abs(answer_source", "numpy import abs, int import pandas as pd def distance(source: pd.Series, target: pd.Series,", "number in `answer_scale` \"\"\" return answer if answer is not None else randint(0,", "import abs, int import pandas as pd def distance(source: pd.Series, target: pd.Series, answers:", "random - distances. \"\"\" from numpy.random import randint from numpy import abs, int", "in `answer_scale` \"\"\" return answer if answer is not None else randint(0, answer_scale)", "target: pd.Series, answers: pd.DataFrame, answer_scale=5, answer_source=None, answer_target=None): def _maybe_random(answer: int) -> int: \"\"\"" ]
[ "from path (filename or directory) if it doesn't already exist. Args: path (str):", "to_abs (bool): If True, convert the path to absolute path. If False, assume", "deal with relative paths. \" \"Use `to_abs` if you want to explicitly convert", "raise OSError(\"Path already exists but is not a directory! Path: {}\".format(directory)) else: #", "If True, then path is treated as a filename and its parent directory", "or directory) if it doesn't already exist. Args: path (str): Absolute path of", "absolute. Default False. Returns: None \"\"\" if to_abs: if not os.path.isabs(path): path =", "it doesn't already exist. Args: path (str): Absolute path of directory or filename.", "os.path.isabs(path): path = os.path.abspath(path) if not os.path.isabs(path): raise ValueError(\"Path must be an absolute", "= os.path.abspath(path) if not os.path.isabs(path): raise ValueError(\"Path must be an absolute path. This", "must be an absolute path. This method does not deal with relative paths.", "absolute path. This method does not deal with relative paths. \" \"Use `to_abs`", "doesn't already exist. Args: path (str): Absolute path of directory or filename. is_file", "to_abs: if not os.path.isabs(path): path = os.path.abspath(path) if not os.path.isabs(path): raise ValueError(\"Path must", "directory = path # If directory does not exist, then simple create it.", "Args: path (str): Absolute path of directory or filename. is_file (bool): Whether the", "is treated as a filename and its parent directory is created. If False,", "else: # Assert that the path is a directory if not os.path.isdir(directory): raise", "os.makedirs(directory) # elif the directory exists else: # Assert that the path is", "Default False. Returns: None \"\"\" if to_abs: if not os.path.isabs(path): path = os.path.abspath(path)", "path to absolute path. If False, assume that the path is absolute. Default", "\"Use `to_abs` if you want to explicitly convert path to absolute.\") if is_file:", "is created. to_abs (bool): If True, convert the path to absolute path. If", "then path is treated as a filename and its parent directory is created.", "True, convert the path to absolute path. If False, assume that the path", "(bool): Whether the path is a directory of filename. If True, then path", "to absolute path. If False, assume that the path is absolute. Default False.", "path (str): Absolute path of directory or filename. is_file (bool): Whether the path", "of directory or filename. is_file (bool): Whether the path is a directory of", "if not os.path.isabs(path): raise ValueError(\"Path must be an absolute path. This method does", "simple create it. if not os.path.exists(directory): os.makedirs(directory) # elif the directory exists else:", "if not os.path.isdir(directory): raise OSError(\"Path already exists but is not a directory! Path:", "False, assume that the path is absolute. Default False. Returns: None \"\"\" if", "not os.path.exists(directory): os.makedirs(directory) # elif the directory exists else: # Assert that the", "is_file: directory = os.path.dirname(path) else: directory = path # If directory does not", "created. to_abs (bool): If True, convert the path to absolute path. If False,", "an absolute path. This method does not deal with relative paths. \" \"Use", "= os.path.dirname(path) else: directory = path # If directory does not exist, then", "path = os.path.abspath(path) if not os.path.isabs(path): raise ValueError(\"Path must be an absolute path.", "ValueError(\"Path must be an absolute path. 
This method does not deal with relative", "path is treated as a filename and its parent directory is created. If", "directory and it is created. to_abs (bool): If True, convert the path to", "absolute path. If False, assume that the path is absolute. Default False. Returns:", "method does not deal with relative paths. \" \"Use `to_abs` if you want", "\" \"Use `to_abs` if you want to explicitly convert path to absolute.\") if", "path is treated as a directory and it is created. to_abs (bool): If", "if not os.path.exists(directory): os.makedirs(directory) # elif the directory exists else: # Assert that", "a directory of filename. If True, then path is treated as a filename", "with relative paths. \" \"Use `to_abs` if you want to explicitly convert path", "If True, convert the path to absolute path. If False, assume that the", "not deal with relative paths. \" \"Use `to_abs` if you want to explicitly", "relative paths. \" \"Use `to_abs` if you want to explicitly convert path to", "def makedirs(path, is_file, to_abs=False): \"\"\"Make directory from path (filename or directory) if it", "if it doesn't already exist. Args: path (str): Absolute path of directory or", "is_file, to_abs=False): \"\"\"Make directory from path (filename or directory) if it doesn't already", "the path is a directory of filename. If True, then path is treated", "(str): Absolute path of directory or filename. is_file (bool): Whether the path is", "filename and its parent directory is created. If False, then path is treated", "treated as a directory and it is created. to_abs (bool): If True, convert", "Absolute path of directory or filename. is_file (bool): Whether the path is a", "path is a directory of filename. If True, then path is treated as", "the directory exists else: # Assert that the path is a directory if", "create it. if not os.path.exists(directory): os.makedirs(directory) # elif the directory exists else: #", "# elif the directory exists else: # Assert that the path is a", "This method does not deal with relative paths. \" \"Use `to_abs` if you", "is a directory of filename. If True, then path is treated as a", "directory) if it doesn't already exist. Args: path (str): Absolute path of directory", "directory exists else: # Assert that the path is a directory if not", "does not deal with relative paths. \" \"Use `to_abs` if you want to", "file system utils.\"\"\" import os def makedirs(path, is_file, to_abs=False): \"\"\"Make directory from path", "if not os.path.isabs(path): path = os.path.abspath(path) if not os.path.isabs(path): raise ValueError(\"Path must be", "already exists but is not a directory! Path: {}\".format(directory)) else: # Alles in", "the path is absolute. Default False. Returns: None \"\"\" if to_abs: if not", "is not a directory! Path: {}\".format(directory)) else: # Alles in Ordnung pass return", "is a directory if not os.path.isdir(directory): raise OSError(\"Path already exists but is not", "os.path.isabs(path): raise ValueError(\"Path must be an absolute path. This method does not deal", "a directory and it is created. to_abs (bool): If True, convert the path", "Whether the path is a directory of filename. If True, then path is", "the path is a directory if not os.path.isdir(directory): raise OSError(\"Path already exists but", "<reponame>nitred/nr-common<gh_stars>0 \"\"\"Simple file system utils.\"\"\" import os def makedirs(path, is_file, to_abs=False): \"\"\"Make directory", "not exist, then simple create it. 
if not os.path.exists(directory): os.makedirs(directory) # elif the", "absolute.\") if is_file: directory = os.path.dirname(path) else: directory = path # If directory", "# If directory does not exist, then simple create it. if not os.path.exists(directory):", "a directory if not os.path.isdir(directory): raise OSError(\"Path already exists but is not a", "that the path is a directory if not os.path.isdir(directory): raise OSError(\"Path already exists", "its parent directory is created. If False, then path is treated as a", "else: directory = path # If directory does not exist, then simple create", "convert the path to absolute path. If False, assume that the path is", "directory if not os.path.isdir(directory): raise OSError(\"Path already exists but is not a directory!", "want to explicitly convert path to absolute.\") if is_file: directory = os.path.dirname(path) else:", "filename. If True, then path is treated as a filename and its parent", "that the path is absolute. Default False. Returns: None \"\"\" if to_abs: if", "import os def makedirs(path, is_file, to_abs=False): \"\"\"Make directory from path (filename or directory)", "to explicitly convert path to absolute.\") if is_file: directory = os.path.dirname(path) else: directory", "not os.path.isabs(path): raise ValueError(\"Path must be an absolute path. This method does not", "Returns: None \"\"\" if to_abs: if not os.path.isabs(path): path = os.path.abspath(path) if not", "does not exist, then simple create it. if not os.path.exists(directory): os.makedirs(directory) # elif", "path # If directory does not exist, then simple create it. if not", "you want to explicitly convert path to absolute.\") if is_file: directory = os.path.dirname(path)", "If False, then path is treated as a directory and it is created.", "is treated as a directory and it is created. to_abs (bool): If True,", "\"\"\"Simple file system utils.\"\"\" import os def makedirs(path, is_file, to_abs=False): \"\"\"Make directory from", "if to_abs: if not os.path.isabs(path): path = os.path.abspath(path) if not os.path.isabs(path): raise ValueError(\"Path", "be an absolute path. This method does not deal with relative paths. \"", "filename. is_file (bool): Whether the path is a directory of filename. If True,", "directory does not exist, then simple create it. if not os.path.exists(directory): os.makedirs(directory) #", "as a directory and it is created. to_abs (bool): If True, convert the", "it is created. to_abs (bool): If True, convert the path to absolute path.", "path to absolute.\") if is_file: directory = os.path.dirname(path) else: directory = path #", "and its parent directory is created. If False, then path is treated as", "= path # If directory does not exist, then simple create it. if", "and it is created. to_abs (bool): If True, convert the path to absolute", "explicitly convert path to absolute.\") if is_file: directory = os.path.dirname(path) else: directory =", "to_abs=False): \"\"\"Make directory from path (filename or directory) if it doesn't already exist.", "elif the directory exists else: # Assert that the path is a directory", "OSError(\"Path already exists but is not a directory! Path: {}\".format(directory)) else: # Alles", "path (filename or directory) if it doesn't already exist. Args: path (str): Absolute", "path. If False, assume that the path is absolute. Default False. Returns: None", "directory from path (filename or directory) if it doesn't already exist. Args: path", "(bool): If True, convert the path to absolute path. 
If False, assume that", "os.path.exists(directory): os.makedirs(directory) # elif the directory exists else: # Assert that the path", "If directory does not exist, then simple create it. if not os.path.exists(directory): os.makedirs(directory)", "False, then path is treated as a directory and it is created. to_abs", "path. This method does not deal with relative paths. \" \"Use `to_abs` if", "it. if not os.path.exists(directory): os.makedirs(directory) # elif the directory exists else: # Assert", "or filename. is_file (bool): Whether the path is a directory of filename. If", "\"\"\"Make directory from path (filename or directory) if it doesn't already exist. Args:", "parent directory is created. If False, then path is treated as a directory", "of filename. If True, then path is treated as a filename and its", "exists else: # Assert that the path is a directory if not os.path.isdir(directory):", "convert path to absolute.\") if is_file: directory = os.path.dirname(path) else: directory = path", "None \"\"\" if to_abs: if not os.path.isabs(path): path = os.path.abspath(path) if not os.path.isabs(path):", "already exist. Args: path (str): Absolute path of directory or filename. is_file (bool):", "False. Returns: None \"\"\" if to_abs: if not os.path.isabs(path): path = os.path.abspath(path) if", "os.path.dirname(path) else: directory = path # If directory does not exist, then simple", "then simple create it. if not os.path.exists(directory): os.makedirs(directory) # elif the directory exists", "paths. \" \"Use `to_abs` if you want to explicitly convert path to absolute.\")", "is absolute. Default False. Returns: None \"\"\" if to_abs: if not os.path.isabs(path): path", "Assert that the path is a directory if not os.path.isdir(directory): raise OSError(\"Path already", "directory = os.path.dirname(path) else: directory = path # If directory does not exist,", "as a filename and its parent directory is created. If False, then path", "if you want to explicitly convert path to absolute.\") if is_file: directory =", "raise ValueError(\"Path must be an absolute path. This method does not deal with", "# Assert that the path is a directory if not os.path.isdir(directory): raise OSError(\"Path", "path is a directory if not os.path.isdir(directory): raise OSError(\"Path already exists but is", "(filename or directory) if it doesn't already exist. Args: path (str): Absolute path", "is created. If False, then path is treated as a directory and it", "If False, assume that the path is absolute. Default False. Returns: None \"\"\"", "path of directory or filename. is_file (bool): Whether the path is a directory", "directory is created. If False, then path is treated as a directory and", "is_file (bool): Whether the path is a directory of filename. If True, then", "assume that the path is absolute. Default False. Returns: None \"\"\" if to_abs:", "not os.path.isabs(path): path = os.path.abspath(path) if not os.path.isabs(path): raise ValueError(\"Path must be an", "system utils.\"\"\" import os def makedirs(path, is_file, to_abs=False): \"\"\"Make directory from path (filename", "os.path.isdir(directory): raise OSError(\"Path already exists but is not a directory! Path: {}\".format(directory)) else:", "utils.\"\"\" import os def makedirs(path, is_file, to_abs=False): \"\"\"Make directory from path (filename or", "not os.path.isdir(directory): raise OSError(\"Path already exists but is not a directory! Path: {}\".format(directory))", "treated as a filename and its parent directory is created. 
If False, then", "created. If False, then path is treated as a directory and it is", "a filename and its parent directory is created. If False, then path is", "os def makedirs(path, is_file, to_abs=False): \"\"\"Make directory from path (filename or directory) if", "exists but is not a directory! Path: {}\".format(directory)) else: # Alles in Ordnung", "path is absolute. Default False. Returns: None \"\"\" if to_abs: if not os.path.isabs(path):", "True, then path is treated as a filename and its parent directory is", "directory of filename. If True, then path is treated as a filename and", "the path to absolute path. If False, assume that the path is absolute.", "to absolute.\") if is_file: directory = os.path.dirname(path) else: directory = path # If", "os.path.abspath(path) if not os.path.isabs(path): raise ValueError(\"Path must be an absolute path. This method", "`to_abs` if you want to explicitly convert path to absolute.\") if is_file: directory", "exist. Args: path (str): Absolute path of directory or filename. is_file (bool): Whether", "directory or filename. is_file (bool): Whether the path is a directory of filename.", "\"\"\" if to_abs: if not os.path.isabs(path): path = os.path.abspath(path) if not os.path.isabs(path): raise", "then path is treated as a directory and it is created. to_abs (bool):", "if is_file: directory = os.path.dirname(path) else: directory = path # If directory does", "makedirs(path, is_file, to_abs=False): \"\"\"Make directory from path (filename or directory) if it doesn't", "but is not a directory! Path: {}\".format(directory)) else: # Alles in Ordnung pass", "exist, then simple create it. if not os.path.exists(directory): os.makedirs(directory) # elif the directory" ]
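# Usage sketch (added; the paths below are illustrative). tempfile gives a
# scratch directory, so nothing permanent is created outside of it except the
# final relative-path example, which creates `relative/cache` under the
# current working directory.
import tempfile

base = tempfile.mkdtemp()

# is_file=True: only the parent directory of the file path is created.
makedirs(os.path.join(base, "logs", "run1", "out.txt"), is_file=True)
assert os.path.isdir(os.path.join(base, "logs", "run1"))

# is_file=False: the path itself is created as a directory.
makedirs(os.path.join(base, "data"), is_file=False)
assert os.path.isdir(os.path.join(base, "data"))

# A relative path raises ValueError unless to_abs=True converts it first.
makedirs("relative/cache", is_file=False, to_abs=True)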
[ "names.discard(\"rr\") #we can use \"remove\" also for elem in names: print(elem) #To remove", "if luke in cast: for tony in sorted(cast) : print (tony) cast =", "cast = set([\"tony\",\"preetha\",\"luke\"]) cast.add(\"tessa\") #to get to know the hash print(hash(\"tony\")) namelist =", "but in a list we can repeat items names.discard(\"rr\") #we can use \"remove\"", "#nameunion = names1.intersection(names2) nameunion = names1.difference(names2) for elem in nameunion: print(elem) if names2.issubset(names1):", "in a set so elemensts wont repeat but in a list we can", "print (td) if \"tony\" in d: print(\"YES\") td[\"tony\"] = 101 print(td[\"tony\"]) print(len(sd)) l", "#set is a container that stores a collection of unique values #Union ,", "<filename>tony-setanddict.py ''' #set is a container that stores a collection of unique values", "= {} #error setN = set() #empty set cast = {\"tony\",\"preetha\",\"luke\"} if luke", "elemensts wont repeat but in a list we can repeat items names.discard(\"rr\") #we", "= {} d = dict() d[\"tony\"] = 98 print (d) sd = {\"Tony\":98,\"preetha\":99,\"pre\":99,\"etha\":99,\"tha\":99}", "= names1.intersection(names2) nameunion = names1.difference(names2) for elem in nameunion: print(elem) if names2.issubset(names1): print(\"True\")", "=[] #empty list setN = {} #error setN = set() #empty set cast", "print(td[\"tony\"]) print(len(sd)) l = sd.values() print (l) sd.pop(\"preetha\") print(sd) print(sd.get(\"tony\",\"not found\")) print(sd.get(\"Tony\",\"not found\"))", "the value for the keys exist in the dict #item() # this will", "d[\"tony\"] = 98 print (d) sd = {\"Tony\":98,\"preetha\":99,\"pre\":99,\"etha\":99,\"tha\":99} print (sd) td = {}", "given example for elem in sd: print(sd[elem]) # print all the value for", "set cast = {\"tony\",\"preetha\",\"luke\"} if luke in cast: for tony in sorted(cast) :", "print(sd.get(\"Tony\",\"not found\")) #if key doesnt exist then print \"not found\" as per the", "set names.clear() namelist1 = [\"tt1\",\"nn2\",\"jj3\",\"rr4\",\"uu5\"] namelist2 = [\"tt1\",\"nn\",\"jj3\",\"rr4\",\"uu5\"] names1 = set(namelist1) names2 =", "keys exist in the dict #item() # this will return both key and", "everything from the set names.clear() namelist1 = [\"tt1\",\"nn2\",\"jj3\",\"rr4\",\"uu5\"] namelist2 = [\"tt1\",\"nn\",\"jj3\",\"rr4\",\"uu5\"] names1 =", "be anything d = dict() d = {} d = dict() d[\"tony\"] =", "for elem in nameunion: print(elem) if names2.issubset(names1): print(\"True\") else: print(\"False\") ''' #Dictionary or", "cast.add(\"tessa\") #to get to know the hash print(hash(\"tony\")) namelist = [\"tt\",\"nn\",\"jj\",\"rr\",\"uu\"] names =", "names = set(namelist) #copy list and create a new set names.add(\"nn\") #You cant", "and value can be anything d = dict() d = {} d =", "remove elements from intersection of a and b list =[] #empty list setN", "list we can repeat items names.discard(\"rr\") #we can use \"remove\" also for elem", "example for elem in sd: print(sd[elem]) # print all the value for the", "all the value for the keys exist in the dict #item() # this", "print (tony) cast = set([\"tony\",\"preetha\",\"luke\"]) cast.add(\"tessa\") #to get to know the hash print(hash(\"tony\"))", "of unique values #Union , intersection , substraction for sets #Substruction means remove", "value can be anything d = dict() d = {} d = dict()", "is unique and value can be anything d = dict() d = {}", "a new set names.add(\"nn\") #You cant have same elements in a set so", "print(sd[elem]) # print all the value for the keys exist in 
the dict", "is a container that stores a collection of unique values #Union , intersection", "set so elemensts wont repeat but in a list we can repeat items", "same elements in a set so elemensts wont repeat but in a list", "= [\"tt\",\"nn\",\"jj\",\"rr\",\"uu\"] names = set(namelist) #copy list and create a new set names.add(\"nn\")", "names.clear() namelist1 = [\"tt1\",\"nn2\",\"jj3\",\"rr4\",\"uu5\"] namelist2 = [\"tt1\",\"nn\",\"jj3\",\"rr4\",\"uu5\"] names1 = set(namelist1) names2 = set(namelist2)", "map ; key is unique and value can be anything d = dict()", "set(namelist1) names2 = set(namelist2) #nameunion = names1.union(names2) #nameunion = names1.intersection(names2) nameunion = names1.difference(names2)", "print (d) sd = {\"Tony\":98,\"preetha\":99,\"pre\":99,\"etha\":99,\"tha\":99} print (sd) td = {} td[\"tony\"] = 98", "d: print(\"YES\") td[\"tony\"] = 101 print(td[\"tony\"]) print(len(sd)) l = sd.values() print (l) sd.pop(\"preetha\")", "this will return both key and value ; it will return it as", "for item in sd.items(): print(item[0], \" \",item[1]) for (u,v) in sd.items(): print(u, \"", "# print all the value for the keys exist in the dict #item()", "set([\"tony\",\"preetha\",\"luke\"]) cast.add(\"tessa\") #to get to know the hash print(hash(\"tony\")) namelist = [\"tt\",\"nn\",\"jj\",\"rr\",\"uu\"] names", "= names1.difference(names2) for elem in nameunion: print(elem) if names2.issubset(names1): print(\"True\") else: print(\"False\") '''", "unique and value can be anything d = dict() d = {} d", "know the hash print(hash(\"tony\")) namelist = [\"tt\",\"nn\",\"jj\",\"rr\",\"uu\"] names = set(namelist) #copy list and", "and value ; it will return it as tuple for item in sd.items():", "d = dict() d[\"tony\"] = 98 print (d) sd = {\"Tony\":98,\"preetha\":99,\"pre\":99,\"etha\":99,\"tha\":99} print (sd)", "item in sd.items(): print(item[0], \" \",item[1]) for (u,v) in sd.items(): print(u, \" \",v)", "98 print (d) sd = {\"Tony\":98,\"preetha\":99,\"pre\":99,\"etha\":99,\"tha\":99} print (sd) td = {} td[\"tony\"] =", "(td) if \"tony\" in d: print(\"YES\") td[\"tony\"] = 101 print(td[\"tony\"]) print(len(sd)) l =", "= set(namelist2) #nameunion = names1.union(names2) #nameunion = names1.intersection(names2) nameunion = names1.difference(names2) for elem", "print (sd) td = {} td[\"tony\"] = 98 print (td) if \"tony\" in", "print(\"False\") ''' #Dictionary or map ; key is unique and value can be", "setN = {} #error setN = set() #empty set cast = {\"tony\",\"preetha\",\"luke\"} if", "also for elem in names: print(elem) #To remove everything from the set names.clear()", "in names: print(elem) #To remove everything from the set names.clear() namelist1 = [\"tt1\",\"nn2\",\"jj3\",\"rr4\",\"uu5\"]", "l = sd.values() print (l) sd.pop(\"preetha\") print(sd) print(sd.get(\"tony\",\"not found\")) print(sd.get(\"Tony\",\"not found\")) #if key", "wont repeat but in a list we can repeat items names.discard(\"rr\") #we can", "{} td[\"tony\"] = 98 print (td) if \"tony\" in d: print(\"YES\") td[\"tony\"] =", "print(sd.get(\"tony\",\"not found\")) print(sd.get(\"Tony\",\"not found\")) #if key doesnt exist then print \"not found\" as", "setN = set() #empty set cast = {\"tony\",\"preetha\",\"luke\"} if luke in cast: for", "in cast: for tony in sorted(cast) : print (tony) cast = set([\"tony\",\"preetha\",\"luke\"]) cast.add(\"tessa\")", "#To remove everything from the set names.clear() namelist1 = [\"tt1\",\"nn2\",\"jj3\",\"rr4\",\"uu5\"] namelist2 = [\"tt1\",\"nn\",\"jj3\",\"rr4\",\"uu5\"]", "from the 
set names.clear() namelist1 = [\"tt1\",\"nn2\",\"jj3\",\"rr4\",\"uu5\"] namelist2 = [\"tt1\",\"nn\",\"jj3\",\"rr4\",\"uu5\"] names1 = set(namelist1)", "dict() d = {} d = dict() d[\"tony\"] = 98 print (d) sd", "(l) sd.pop(\"preetha\") print(sd) print(sd.get(\"tony\",\"not found\")) print(sd.get(\"Tony\",\"not found\")) #if key doesnt exist then print", "= [\"tt1\",\"nn\",\"jj3\",\"rr4\",\"uu5\"] names1 = set(namelist1) names2 = set(namelist2) #nameunion = names1.union(names2) #nameunion =", "as tuple for item in sd.items(): print(item[0], \" \",item[1]) for (u,v) in sd.items():", "can use \"remove\" also for elem in names: print(elem) #To remove everything from", "doesnt exist then print \"not found\" as per the given example for elem", "= set(namelist) #copy list and create a new set names.add(\"nn\") #You cant have", "have same elements in a set so elemensts wont repeat but in a", "value for the keys exist in the dict #item() # this will return", "; it will return it as tuple for item in sd.items(): print(item[0], \"", "the set names.clear() namelist1 = [\"tt1\",\"nn2\",\"jj3\",\"rr4\",\"uu5\"] namelist2 = [\"tt1\",\"nn\",\"jj3\",\"rr4\",\"uu5\"] names1 = set(namelist1) names2", "else: print(\"False\") ''' #Dictionary or map ; key is unique and value can", "[\"tt1\",\"nn\",\"jj3\",\"rr4\",\"uu5\"] names1 = set(namelist1) names2 = set(namelist2) #nameunion = names1.union(names2) #nameunion = names1.intersection(names2)", "set names.add(\"nn\") #You cant have same elements in a set so elemensts wont", "or map ; key is unique and value can be anything d =", "both key and value ; it will return it as tuple for item", "to know the hash print(hash(\"tony\")) namelist = [\"tt\",\"nn\",\"jj\",\"rr\",\"uu\"] names = set(namelist) #copy list", "= names1.union(names2) #nameunion = names1.intersection(names2) nameunion = names1.difference(names2) for elem in nameunion: print(elem)", "101 print(td[\"tony\"]) print(len(sd)) l = sd.values() print (l) sd.pop(\"preetha\") print(sd) print(sd.get(\"tony\",\"not found\")) print(sd.get(\"Tony\",\"not", "= set() #empty set cast = {\"tony\",\"preetha\",\"luke\"} if luke in cast: for tony", "for tony in sorted(cast) : print (tony) cast = set([\"tony\",\"preetha\",\"luke\"]) cast.add(\"tessa\") #to get", "in d: print(\"YES\") td[\"tony\"] = 101 print(td[\"tony\"]) print(len(sd)) l = sd.values() print (l)", "#empty set cast = {\"tony\",\"preetha\",\"luke\"} if luke in cast: for tony in sorted(cast)", "will return both key and value ; it will return it as tuple", "dict() d[\"tony\"] = 98 print (d) sd = {\"Tony\":98,\"preetha\":99,\"pre\":99,\"etha\":99,\"tha\":99} print (sd) td =", "namelist1 = [\"tt1\",\"nn2\",\"jj3\",\"rr4\",\"uu5\"] namelist2 = [\"tt1\",\"nn\",\"jj3\",\"rr4\",\"uu5\"] names1 = set(namelist1) names2 = set(namelist2) #nameunion", "#Substruction means remove elements from intersection of a and b list =[] #empty", "names1.difference(names2) for elem in nameunion: print(elem) if names2.issubset(names1): print(\"True\") else: print(\"False\") ''' #Dictionary", "of a and b list =[] #empty list setN = {} #error setN", "(tony) cast = set([\"tony\",\"preetha\",\"luke\"]) cast.add(\"tessa\") #to get to know the hash print(hash(\"tony\")) namelist", "repeat items names.discard(\"rr\") #we can use \"remove\" also for elem in names: print(elem)", "unique values #Union , intersection , substraction for sets #Substruction means remove elements", "''' #set is a container that stores a collection of unique values #Union", "intersection of a and b list =[] #empty 
list setN = {} #error", "dict #item() # this will return both key and value ; it will", "# this will return both key and value ; it will return it", "print(elem) #To remove everything from the set names.clear() namelist1 = [\"tt1\",\"nn2\",\"jj3\",\"rr4\",\"uu5\"] namelist2 =", "= {\"tony\",\"preetha\",\"luke\"} if luke in cast: for tony in sorted(cast) : print (tony)", "{\"tony\",\"preetha\",\"luke\"} if luke in cast: for tony in sorted(cast) : print (tony) cast", "the given example for elem in sd: print(sd[elem]) # print all the value", "key doesnt exist then print \"not found\" as per the given example for", "#item() # this will return both key and value ; it will return", "and b list =[] #empty list setN = {} #error setN = set()", "values #Union , intersection , substraction for sets #Substruction means remove elements from", "if names2.issubset(names1): print(\"True\") else: print(\"False\") ''' #Dictionary or map ; key is unique", "print(\"True\") else: print(\"False\") ''' #Dictionary or map ; key is unique and value", "d = {} d = dict() d[\"tony\"] = 98 print (d) sd =", "#You cant have same elements in a set so elemensts wont repeat but", "td[\"tony\"] = 98 print (td) if \"tony\" in d: print(\"YES\") td[\"tony\"] = 101", "per the given example for elem in sd: print(sd[elem]) # print all the", "for elem in sd: print(sd[elem]) # print all the value for the keys", "= set(namelist1) names2 = set(namelist2) #nameunion = names1.union(names2) #nameunion = names1.intersection(names2) nameunion =", "#Union , intersection , substraction for sets #Substruction means remove elements from intersection", "hash print(hash(\"tony\")) namelist = [\"tt\",\"nn\",\"jj\",\"rr\",\"uu\"] names = set(namelist) #copy list and create a", "return both key and value ; it will return it as tuple for", "means remove elements from intersection of a and b list =[] #empty list", "in the dict #item() # this will return both key and value ;", "set(namelist2) #nameunion = names1.union(names2) #nameunion = names1.intersection(names2) nameunion = names1.difference(names2) for elem in", "= {\"Tony\":98,\"preetha\":99,\"pre\":99,\"etha\":99,\"tha\":99} print (sd) td = {} td[\"tony\"] = 98 print (td) if", "and create a new set names.add(\"nn\") #You cant have same elements in a", "from intersection of a and b list =[] #empty list setN = {}", "= sd.values() print (l) sd.pop(\"preetha\") print(sd) print(sd.get(\"tony\",\"not found\")) print(sd.get(\"Tony\",\"not found\")) #if key doesnt", "create a new set names.add(\"nn\") #You cant have same elements in a set", "''' #Dictionary or map ; key is unique and value can be anything", "elements in a set so elemensts wont repeat but in a list we", "#nameunion = names1.union(names2) #nameunion = names1.intersection(names2) nameunion = names1.difference(names2) for elem in nameunion:", "exist then print \"not found\" as per the given example for elem in", "will return it as tuple for item in sd.items(): print(item[0], \" \",item[1]) for", "names1 = set(namelist1) names2 = set(namelist2) #nameunion = names1.union(names2) #nameunion = names1.intersection(names2) nameunion", ": print (tony) cast = set([\"tony\",\"preetha\",\"luke\"]) cast.add(\"tessa\") #to get to know the hash", "as per the given example for elem in sd: print(sd[elem]) # print all", "a and b list =[] #empty list setN = {} #error setN =", "for the keys exist in the dict #item() # this will return both", "container that stores a collection of unique values #Union , intersection , substraction", "list =[] #empty list 
setN = {} #error setN = set() #empty set", "set(namelist) #copy list and create a new set names.add(\"nn\") #You cant have same", "tuple for item in sd.items(): print(item[0], \" \",item[1]) for (u,v) in sd.items(): print(u,", "found\")) print(sd.get(\"Tony\",\"not found\")) #if key doesnt exist then print \"not found\" as per", "[\"tt\",\"nn\",\"jj\",\"rr\",\"uu\"] names = set(namelist) #copy list and create a new set names.add(\"nn\") #You", "we can repeat items names.discard(\"rr\") #we can use \"remove\" also for elem in", "if \"tony\" in d: print(\"YES\") td[\"tony\"] = 101 print(td[\"tony\"]) print(len(sd)) l = sd.values()", "the keys exist in the dict #item() # this will return both key", "98 print (td) if \"tony\" in d: print(\"YES\") td[\"tony\"] = 101 print(td[\"tony\"]) print(len(sd))", "names1.union(names2) #nameunion = names1.intersection(names2) nameunion = names1.difference(names2) for elem in nameunion: print(elem) if", "elem in names: print(elem) #To remove everything from the set names.clear() namelist1 =", ", substraction for sets #Substruction means remove elements from intersection of a and", "then print \"not found\" as per the given example for elem in sd:", "= [\"tt1\",\"nn2\",\"jj3\",\"rr4\",\"uu5\"] namelist2 = [\"tt1\",\"nn\",\"jj3\",\"rr4\",\"uu5\"] names1 = set(namelist1) names2 = set(namelist2) #nameunion =", "print(\"YES\") td[\"tony\"] = 101 print(td[\"tony\"]) print(len(sd)) l = sd.values() print (l) sd.pop(\"preetha\") print(sd)", "= set([\"tony\",\"preetha\",\"luke\"]) cast.add(\"tessa\") #to get to know the hash print(hash(\"tony\")) namelist = [\"tt\",\"nn\",\"jj\",\"rr\",\"uu\"]", "tony in sorted(cast) : print (tony) cast = set([\"tony\",\"preetha\",\"luke\"]) cast.add(\"tessa\") #to get to", "names2 = set(namelist2) #nameunion = names1.union(names2) #nameunion = names1.intersection(names2) nameunion = names1.difference(names2) for", ", intersection , substraction for sets #Substruction means remove elements from intersection of", "b list =[] #empty list setN = {} #error setN = set() #empty", "#to get to know the hash print(hash(\"tony\")) namelist = [\"tt\",\"nn\",\"jj\",\"rr\",\"uu\"] names = set(namelist)", "new set names.add(\"nn\") #You cant have same elements in a set so elemensts", "elements from intersection of a and b list =[] #empty list setN =", "names: print(elem) #To remove everything from the set names.clear() namelist1 = [\"tt1\",\"nn2\",\"jj3\",\"rr4\",\"uu5\"] namelist2", "td[\"tony\"] = 101 print(td[\"tony\"]) print(len(sd)) l = sd.values() print (l) sd.pop(\"preetha\") print(sd) print(sd.get(\"tony\",\"not", "sd = {\"Tony\":98,\"preetha\":99,\"pre\":99,\"etha\":99,\"tha\":99} print (sd) td = {} td[\"tony\"] = 98 print (td)", "[\"tt1\",\"nn2\",\"jj3\",\"rr4\",\"uu5\"] namelist2 = [\"tt1\",\"nn\",\"jj3\",\"rr4\",\"uu5\"] names1 = set(namelist1) names2 = set(namelist2) #nameunion = names1.union(names2)", "can repeat items names.discard(\"rr\") #we can use \"remove\" also for elem in names:", "in sd: print(sd[elem]) # print all the value for the keys exist in", "key is unique and value can be anything d = dict() d =", "print(len(sd)) l = sd.values() print (l) sd.pop(\"preetha\") print(sd) print(sd.get(\"tony\",\"not found\")) print(sd.get(\"Tony\",\"not found\")) #if", "for elem in names: print(elem) #To remove everything from the set names.clear() namelist1", "it will return it as tuple for item in sd.items(): print(item[0], \" \",item[1])", "sets #Substruction means remove elements from intersection of a and b list =[]", "intersection 
, substraction for sets #Substruction means remove elements from intersection of a", "remove everything from the set names.clear() namelist1 = [\"tt1\",\"nn2\",\"jj3\",\"rr4\",\"uu5\"] namelist2 = [\"tt1\",\"nn\",\"jj3\",\"rr4\",\"uu5\"] names1", "#copy list and create a new set names.add(\"nn\") #You cant have same elements", "in nameunion: print(elem) if names2.issubset(names1): print(\"True\") else: print(\"False\") ''' #Dictionary or map ;", "a list we can repeat items names.discard(\"rr\") #we can use \"remove\" also for", "\"remove\" also for elem in names: print(elem) #To remove everything from the set", "{} d = dict() d[\"tony\"] = 98 print (d) sd = {\"Tony\":98,\"preetha\":99,\"pre\":99,\"etha\":99,\"tha\":99} print", "namelist2 = [\"tt1\",\"nn\",\"jj3\",\"rr4\",\"uu5\"] names1 = set(namelist1) names2 = set(namelist2) #nameunion = names1.union(names2) #nameunion", "can be anything d = dict() d = {} d = dict() d[\"tony\"]", "elem in nameunion: print(elem) if names2.issubset(names1): print(\"True\") else: print(\"False\") ''' #Dictionary or map", "so elemensts wont repeat but in a list we can repeat items names.discard(\"rr\")", "= {} td[\"tony\"] = 98 print (td) if \"tony\" in d: print(\"YES\") td[\"tony\"]", "#error setN = set() #empty set cast = {\"tony\",\"preetha\",\"luke\"} if luke in cast:", "set() #empty set cast = {\"tony\",\"preetha\",\"luke\"} if luke in cast: for tony in", "in sorted(cast) : print (tony) cast = set([\"tony\",\"preetha\",\"luke\"]) cast.add(\"tessa\") #to get to know", "d = dict() d = {} d = dict() d[\"tony\"] = 98 print", "collection of unique values #Union , intersection , substraction for sets #Substruction means", "in a list we can repeat items names.discard(\"rr\") #we can use \"remove\" also", "= 101 print(td[\"tony\"]) print(len(sd)) l = sd.values() print (l) sd.pop(\"preetha\") print(sd) print(sd.get(\"tony\",\"not found\"))", "exist in the dict #item() # this will return both key and value", "list setN = {} #error setN = set() #empty set cast = {\"tony\",\"preetha\",\"luke\"}", "print all the value for the keys exist in the dict #item() #", "; key is unique and value can be anything d = dict() d", "value ; it will return it as tuple for item in sd.items(): print(item[0],", "list and create a new set names.add(\"nn\") #You cant have same elements in", "{\"Tony\":98,\"preetha\":99,\"pre\":99,\"etha\":99,\"tha\":99} print (sd) td = {} td[\"tony\"] = 98 print (td) if \"tony\"", "that stores a collection of unique values #Union , intersection , substraction for", "cast = {\"tony\",\"preetha\",\"luke\"} if luke in cast: for tony in sorted(cast) : print", "(sd) td = {} td[\"tony\"] = 98 print (td) if \"tony\" in d:", "print(elem) if names2.issubset(names1): print(\"True\") else: print(\"False\") ''' #Dictionary or map ; key is", "#Dictionary or map ; key is unique and value can be anything d", "\"tony\" in d: print(\"YES\") td[\"tony\"] = 101 print(td[\"tony\"]) print(len(sd)) l = sd.values() print", "print (l) sd.pop(\"preetha\") print(sd) print(sd.get(\"tony\",\"not found\")) print(sd.get(\"Tony\",\"not found\")) #if key doesnt exist then", "sorted(cast) : print (tony) cast = set([\"tony\",\"preetha\",\"luke\"]) cast.add(\"tessa\") #to get to know the", "found\")) #if key doesnt exist then print \"not found\" as per the given", "luke in cast: for tony in sorted(cast) : print (tony) cast = set([\"tony\",\"preetha\",\"luke\"])", "#if key doesnt exist then print \"not found\" as per the given example", "get to know the hash print(hash(\"tony\")) namelist 
= [\"tt\",\"nn\",\"jj\",\"rr\",\"uu\"] names = set(namelist) #copy", "elem in sd: print(sd[elem]) # print all the value for the keys exist", "nameunion: print(elem) if names2.issubset(names1): print(\"True\") else: print(\"False\") ''' #Dictionary or map ; key", "nameunion = names1.difference(names2) for elem in nameunion: print(elem) if names2.issubset(names1): print(\"True\") else: print(\"False\")", "a container that stores a collection of unique values #Union , intersection ,", "return it as tuple for item in sd.items(): print(item[0], \" \",item[1]) for (u,v)", "for sets #Substruction means remove elements from intersection of a and b list", "{} #error setN = set() #empty set cast = {\"tony\",\"preetha\",\"luke\"} if luke in", "td = {} td[\"tony\"] = 98 print (td) if \"tony\" in d: print(\"YES\")", "= dict() d = {} d = dict() d[\"tony\"] = 98 print (d)", "= dict() d[\"tony\"] = 98 print (d) sd = {\"Tony\":98,\"preetha\":99,\"pre\":99,\"etha\":99,\"tha\":99} print (sd) td", "print(sd) print(sd.get(\"tony\",\"not found\")) print(sd.get(\"Tony\",\"not found\")) #if key doesnt exist then print \"not found\"", "a set so elemensts wont repeat but in a list we can repeat", "it as tuple for item in sd.items(): print(item[0], \" \",item[1]) for (u,v) in", "cast: for tony in sorted(cast) : print (tony) cast = set([\"tony\",\"preetha\",\"luke\"]) cast.add(\"tessa\") #to", "names.add(\"nn\") #You cant have same elements in a set so elemensts wont repeat", "key and value ; it will return it as tuple for item in", "namelist = [\"tt\",\"nn\",\"jj\",\"rr\",\"uu\"] names = set(namelist) #copy list and create a new set", "stores a collection of unique values #Union , intersection , substraction for sets", "substraction for sets #Substruction means remove elements from intersection of a and b", "#we can use \"remove\" also for elem in names: print(elem) #To remove everything", "= 98 print (td) if \"tony\" in d: print(\"YES\") td[\"tony\"] = 101 print(td[\"tony\"])", "= 98 print (d) sd = {\"Tony\":98,\"preetha\":99,\"pre\":99,\"etha\":99,\"tha\":99} print (sd) td = {} td[\"tony\"]", "sd.values() print (l) sd.pop(\"preetha\") print(sd) print(sd.get(\"tony\",\"not found\")) print(sd.get(\"Tony\",\"not found\")) #if key doesnt exist", "names1.intersection(names2) nameunion = names1.difference(names2) for elem in nameunion: print(elem) if names2.issubset(names1): print(\"True\") else:", "repeat but in a list we can repeat items names.discard(\"rr\") #we can use", "\"not found\" as per the given example for elem in sd: print(sd[elem]) #", "a collection of unique values #Union , intersection , substraction for sets #Substruction", "the dict #item() # this will return both key and value ; it", "(d) sd = {\"Tony\":98,\"preetha\":99,\"pre\":99,\"etha\":99,\"tha\":99} print (sd) td = {} td[\"tony\"] = 98 print", "sd: print(sd[elem]) # print all the value for the keys exist in the", "anything d = dict() d = {} d = dict() d[\"tony\"] = 98", "items names.discard(\"rr\") #we can use \"remove\" also for elem in names: print(elem) #To", "#empty list setN = {} #error setN = set() #empty set cast =", "the hash print(hash(\"tony\")) namelist = [\"tt\",\"nn\",\"jj\",\"rr\",\"uu\"] names = set(namelist) #copy list and create", "use \"remove\" also for elem in names: print(elem) #To remove everything from the", "names2.issubset(names1): print(\"True\") else: print(\"False\") ''' #Dictionary or map ; key is unique and", "sd.pop(\"preetha\") print(sd) print(sd.get(\"tony\",\"not found\")) print(sd.get(\"Tony\",\"not found\")) 
#if key doesnt exist then print \"not", "print \"not found\" as per the given example for elem in sd: print(sd[elem])", "found\" as per the given example for elem in sd: print(sd[elem]) # print", "print(hash(\"tony\")) namelist = [\"tt\",\"nn\",\"jj\",\"rr\",\"uu\"] names = set(namelist) #copy list and create a new", "cant have same elements in a set so elemensts wont repeat but in" ]
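#Added side note (sketch): the same set algebra as in the commented-out block
#above is also available via operators, which can read more compactly than
#the named methods.
a = {"tt1", "nn2", "jj3"}
b = {"tt1", "nn", "jj3"}
print(a | b)    #union
print(a & b)    #intersection
print(a - b)    #difference -> {'nn2'}
print(b <= a)   #issubset -> False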
[ "in range(self.n_layers): encode_block = self.layers[i] x = encode_block(x) #block1 = F.dropout(self.bn1(F.elu(self.e1(input))), p=self.drop_prob) #encoded_representation", "hidden_size, encoded_size, n_layers=3, drop_prob=0.5): super(Decoder, self).__init__() self.n_layers = n_layers self.drop_prob = drop_prob self.second_last_layer_size", "weight_decay=1e-5) # Specify loss function criterion = torch.nn.MSELoss(reduce=True) # Cycle through epochs for", "for phase in phases: print(f'Epoch {epoch + 1}/{epochs}') if phase == 'train': loss", "n_layers, drop_prob, phase) test_plot_losses.append(loss) print(loss) #plot_losses.append(loss) prefix = '' if phase == 'validation':", "clear the gradients in the optimizers if phase == 'train': encoder_optimizer.zero_grad() decoder_optimizer.zero_grad() #", "= 'val_' logs[prefix + 'log loss'] = loss liveloss.update(logs) #liveloss liveloss.draw() #liveloss return", "import pdb import seaborn as sns import matplotlib.pyplot as plt from livelossplot import", "to update the model weights encoder_optimizer.step() decoder_optimizer.step() # Return the loss value to", "= nn.Tanh() self.bn1 = nn.BatchNorm1d(self.second_last_layer_size) if (self.second_last_layer_size) > encoded_size: self.layers = nn.ModuleList([]) for", "torch import torch.nn as nn import numpy as np import pandas as pd", "of losses train_plot_losses = [] test_plot_losses = [] # Initialize Encoder Optimizer encoder_optimizer", "F.dropout(self.bn1(F.elu(self.e1(input))), p=self.drop_prob) #encoded_representation = torch.tanh(self.e2(block1)) encoded_representation = self.e2(x) return encoded_representation #Decoder: class LayerBlockDecode(nn.Module):", "= nn.Tanh() self.bn1 = nn.BatchNorm1d(hidden_size) if (hidden_size // (2**n_layers)) > encoded_size: self.layers =", "= drop_prob self.e1 = nn.Linear(input_shape, hidden_size) self.activation1 = nn.Tanh() self.bn1 = nn.BatchNorm1d(hidden_size) if", "#plot_losses.append(loss) prefix = '' if phase == 'validation': prefix = 'val_' logs[prefix +", "encoder_optimizer.step() decoder_optimizer.step() # Return the loss value to track training progress return loss.item()", "loss value to track training progress return loss.item() # Training Loop def trainIters(encoder,", "in range(self.n_layers): decode_block = self.layers[i] x = decode_block(x) #block = F.dropout(self.bn(F.elu(self.d(input))), p=self.drop_prob) #reconstruction", "pd from tensorboardX import SummaryWriter import torch.nn.functional as F import pdb import seaborn", "self.activation = nn.Tanh() self.bn = nn.BatchNorm1d(hidden_size_2) self.dropout = nn.Dropout(p) def forward(self, x): x", "else: encoder.eval() decoder.eval() # clear the gradients in the optimizers if phase ==", "drop_prob, phase) test_plot_losses.append(loss) print(loss) #plot_losses.append(loss) prefix = '' if phase == 'validation': prefix", "(2**n_layers) self.d1 = nn.Linear(encoded_size, self.second_last_layer_size) self.activation1 = nn.Tanh() self.bn1 = nn.BatchNorm1d(self.second_last_layer_size) if (self.second_last_layer_size)", "encoded_size, n_layers, drop_prob, print_every_n_batches=100, learning_rate=0.01, phases=[\"train\", \"validation\"],): # Live Loss liveloss = PlotLosses()", "== 'train': encoder.train() decoder.train() else: encoder.eval() decoder.eval() # clear the gradients in the", "= decode_block(x) #block = F.dropout(self.bn(F.elu(self.d(input))), p=self.drop_prob) #reconstruction = torch.tanh(self.d4(block)) reconstruction = self.d2(x) return", "'train': encoder.train() decoder.train() else: 
encoder.eval() decoder.eval() # clear the gradients in the optimizers", "data_tensor, data_tensor_valid, epochs, hidden_size, encoded_size, n_layers, drop_prob, print_every_n_batches=100, learning_rate=0.01, phases=[\"train\", \"validation\"],): # Live", "encoder, decoder, encoder_optimizer, decoder_optimizer, criterion, hidden_size, encoded_size, n_layers, drop_prob, phase) test_plot_losses.append(loss) print(loss) #plot_losses.append(loss)", "= self.activation(x) x = self.bn(x) x = self.dropout(x) return (x) class Decoder(nn.Module): def", "= nn.ModuleList([]) for i in range(self.n_layers): self.layers.append(LayerBlockDecode(hidden_size//(2**(n_layers-i)), hidden_size//(2**(n_layers-i-1)), self.drop_prob)) else: self.n_layers = 0", "encoded_size, n_layers=3, drop_prob=0.5): super(Encoder, self).__init__() self.n_layers = n_layers self.drop_prob = drop_prob self.e1 =", "plt from livelossplot import PlotLosses #Encoder class LayerBlockEncode(nn.Module): def __init__(self, hidden_size_1, hidden_size_2, p):", "def forward(self, input): x = self.e1(input) x = self.activation1(x) x = self.bn1(x) for", "import pandas as pd from tensorboardX import SummaryWriter import torch.nn.functional as F import", "encoder(input_tensor) reconstruction = decoder(encoded_representation) # Compute the loss loss = criterion(reconstruction, target_tensor) if", "Initialize Encoder Optimizer encoder_optimizer = torch.optim.Adam(encoder.parameters(), lr=learning_rate, weight_decay=1e-5) # Initialize Decoder Optimizer decoder_optimizer", "x = self.activation(x) x = self.bn(x) x = self.dropout(x) return (x) class Encoder(nn.Module):", "Compute the loss loss = criterion(reconstruction, target_tensor) if phase == 'train': # Compute", "phase == 'validation': prefix = 'val_' logs[prefix + 'log loss'] = loss liveloss.update(logs)", "nn.Linear((hidden_size//(2**n_layers)), encoded_size) def forward(self, input): x = self.e1(input) x = self.activation1(x) x =", "n_layers self.drop_prob = drop_prob self.second_last_layer_size = hidden_size // (2**n_layers) self.d1 = nn.Linear(encoded_size, self.second_last_layer_size)", "input_shape, hidden_size, encoded_size, n_layers=3, drop_prob=0.5): super(Encoder, self).__init__() self.n_layers = n_layers self.drop_prob = drop_prob", "through encoded_representation = encoder(input_tensor) reconstruction = decoder(encoded_representation) # Compute the loss loss =", "PlotLosses #Encoder class LayerBlockEncode(nn.Module): def __init__(self, hidden_size_1, hidden_size_2, p): super().__init__() self.layer = nn.Linear(hidden_size_1,", "= torch.tanh(self.d4(block)) reconstruction = self.d2(x) return reconstruction # Training AutoEncoders Function def train_ae(input_tensor,", "the loss value to track training progress return loss.item() # Training Loop def", "nn.BatchNorm1d(hidden_size) if (hidden_size // (2**n_layers)) > encoded_size: self.layers = nn.ModuleList([]) for i in", "logs[prefix + 'log loss'] = loss liveloss.update(logs) #liveloss liveloss.draw() #liveloss return train_plot_losses, test_plot_losses", "n_layers=3, drop_prob=0.5): super(Encoder, self).__init__() self.n_layers = n_layers self.drop_prob = drop_prob self.e1 = nn.Linear(input_shape,", "import seaborn as sns import matplotlib.pyplot as plt from livelossplot import PlotLosses #Encoder", "= nn.Linear(hidden_size_1, hidden_size_2) self.activation = nn.Tanh() self.bn = nn.BatchNorm1d(hidden_size_2) self.dropout = nn.Dropout(p) def", "Compute the gradients loss.backward() # Step the optimizers to update the model weights", 
"test_plot_losses = [] # Initialize Encoder Optimizer encoder_optimizer = torch.optim.Adam(encoder.parameters(), lr=learning_rate, weight_decay=1e-5) #", "(x) class Encoder(nn.Module): def __init__(self, input_shape, hidden_size, encoded_size, n_layers=3, drop_prob=0.5): super(Encoder, self).__init__() self.n_layers", "self.d2(x) return reconstruction # Training AutoEncoders Function def train_ae(input_tensor, target_tensor, encoder, decoder, encoder_optimizer,", "hidden_size, encoded_size, n_layers, drop_prob, print_every_n_batches=100, learning_rate=0.01, phases=[\"train\", \"validation\"],): # Live Loss liveloss =", "class LayerBlockDecode(nn.Module): def __init__(self, hidden_size_1, hidden_size_2, p): super().__init__() self.layer = nn.Linear(hidden_size_1, hidden_size_2) self.activation", "__init__(self, hidden_size_1, hidden_size_2, p): super().__init__() self.layer = nn.Linear(hidden_size_1, hidden_size_2) self.activation = nn.Tanh() self.bn", "hidden_size//(2**(i+1)), self.drop_prob)) else: self.n_layers = 0 self.e2 = nn.Linear((hidden_size//(2**n_layers)), encoded_size) def forward(self, input):", "return (x) class Decoder(nn.Module): def __init__(self, output_shape, hidden_size, encoded_size, n_layers=3, drop_prob=0.5): super(Decoder, self).__init__()", "hidden_size, encoded_size, n_layers, drop_prob, phase) test_plot_losses.append(loss) print(loss) #plot_losses.append(loss) prefix = '' if phase", "from tensorboardX import SummaryWriter import torch.nn.functional as F import pdb import seaborn as", "range(n_layers): self.layers.append(LayerBlockEncode(hidden_size//(2**i), hidden_size//(2**(i+1)), self.drop_prob)) else: self.n_layers = 0 self.e2 = nn.Linear((hidden_size//(2**n_layers)), encoded_size) def", "torch.tanh(self.d4(block)) reconstruction = self.d2(x) return reconstruction # Training AutoEncoders Function def train_ae(input_tensor, target_tensor,", "'validation': prefix = 'val_' logs[prefix + 'log loss'] = loss liveloss.update(logs) #liveloss liveloss.draw()", "{epoch + 1}/{epochs}') if phase == 'train': loss = train_ae(data_tensor, data_tensor, encoder, decoder,", "to track training progress return loss.item() # Training Loop def trainIters(encoder, decoder, data_tensor,", "gradients loss.backward() # Step the optimizers to update the model weights encoder_optimizer.step() decoder_optimizer.step()", "import matplotlib.pyplot as plt from livelossplot import PlotLosses #Encoder class LayerBlockEncode(nn.Module): def __init__(self,", "p=self.drop_prob) #encoded_representation = torch.tanh(self.e2(block1)) encoded_representation = self.e2(x) return encoded_representation #Decoder: class LayerBlockDecode(nn.Module): def", "self.layers.append(LayerBlockDecode(hidden_size//(2**(n_layers-i)), hidden_size//(2**(n_layers-i-1)), self.drop_prob)) else: self.n_layers = 0 self.d2 = nn.Linear(hidden_size, output_shape) def forward(self,", "train_ae(data_tensor_valid, data_tensor_valid, encoder, decoder, encoder_optimizer, decoder_optimizer, criterion, hidden_size, encoded_size, n_layers, drop_prob, phase) test_plot_losses.append(loss)", "= decoder(encoded_representation) # Compute the loss loss = criterion(reconstruction, target_tensor) if phase ==", "encoded_representation = self.e2(x) return encoded_representation #Decoder: class LayerBlockDecode(nn.Module): def __init__(self, hidden_size_1, hidden_size_2, p):", "drop_prob, print_every_n_batches=100, learning_rate=0.01, phases=[\"train\", \"validation\"],): # Live Loss liveloss = PlotLosses() # keep", "0 self.e2 = 
nn.Linear((hidden_size//(2**n_layers)), encoded_size) def forward(self, input): x = self.e1(input) x =", "// (2**n_layers)) > encoded_size: self.layers = nn.ModuleList([]) for i in range(n_layers): self.layers.append(LayerBlockEncode(hidden_size//(2**i), hidden_size//(2**(i+1)),", "= nn.Linear(hidden_size, output_shape) def forward(self, input): x = self.d1(input) x = self.activation1(x) x", "(x) class Decoder(nn.Module): def __init__(self, output_shape, hidden_size, encoded_size, n_layers=3, drop_prob=0.5): super(Decoder, self).__init__() self.n_layers", "= hidden_size // (2**n_layers) self.d1 = nn.Linear(encoded_size, self.second_last_layer_size) self.activation1 = nn.Tanh() self.bn1 =", "x = self.bn1(x) for i in range(self.n_layers): encode_block = self.layers[i] x = encode_block(x)", "in range(epochs): logs = {} for phase in phases: print(f'Epoch {epoch + 1}/{epochs}')", "decoder_optimizer, criterion, hidden_size, encoded_size, n_layers, drop_prob, phase) test_plot_losses.append(loss) print(loss) #plot_losses.append(loss) prefix = ''", "self.bn1(x) for i in range(self.n_layers): encode_block = self.layers[i] x = encode_block(x) #block1 =", "as sns import matplotlib.pyplot as plt from livelossplot import PlotLosses #Encoder class LayerBlockEncode(nn.Module):", "import numpy as np import pandas as pd from tensorboardX import SummaryWriter import", "drop_prob self.e1 = nn.Linear(input_shape, hidden_size) self.activation1 = nn.Tanh() self.bn1 = nn.BatchNorm1d(hidden_size) if (hidden_size", "Training Loop def trainIters(encoder, decoder, data_tensor, data_tensor_valid, epochs, hidden_size, encoded_size, n_layers, drop_prob, print_every_n_batches=100,", "output_shape) def forward(self, input): x = self.d1(input) x = self.activation1(x) x = self.bn1(x)", "data_tensor_valid, epochs, hidden_size, encoded_size, n_layers, drop_prob, print_every_n_batches=100, learning_rate=0.01, phases=[\"train\", \"validation\"],): # Live Loss", "n_layers=3, drop_prob=0.5): super(Decoder, self).__init__() self.n_layers = n_layers self.drop_prob = drop_prob self.second_last_layer_size = hidden_size", "nn.Linear(hidden_size, output_shape) def forward(self, input): x = self.d1(input) x = self.activation1(x) x =", "hidden_size, encoded_size, n_layers=3, drop_prob=0.5): super(Encoder, self).__init__() self.n_layers = n_layers self.drop_prob = drop_prob self.e1", "super(Decoder, self).__init__() self.n_layers = n_layers self.drop_prob = drop_prob self.second_last_layer_size = hidden_size // (2**n_layers)", "range(self.n_layers): self.layers.append(LayerBlockDecode(hidden_size//(2**(n_layers-i)), hidden_size//(2**(n_layers-i-1)), self.drop_prob)) else: self.n_layers = 0 self.d2 = nn.Linear(hidden_size, output_shape) def", "range(self.n_layers): decode_block = self.layers[i] x = decode_block(x) #block = F.dropout(self.bn(F.elu(self.d(input))), p=self.drop_prob) #reconstruction =", "x = encode_block(x) #block1 = F.dropout(self.bn1(F.elu(self.e1(input))), p=self.drop_prob) #encoded_representation = torch.tanh(self.e2(block1)) encoded_representation = self.e2(x)", "self.bn(x) x = self.dropout(x) return (x) class Decoder(nn.Module): def __init__(self, output_shape, hidden_size, encoded_size,", "def forward(self, x): x = self.layer(x) x = self.activation(x) x = self.bn(x) x", "decoder.eval() # clear the gradients in the optimizers if phase == 'train': encoder_optimizer.zero_grad()", "torch.optim.Adam(decoder.parameters(), lr=learning_rate, weight_decay=1e-5) # Specify loss function criterion = torch.nn.MSELoss(reduce=True) # 
Cycle through", "drop_prob=0.5): super(Encoder, self).__init__() self.n_layers = n_layers self.drop_prob = drop_prob self.e1 = nn.Linear(input_shape, hidden_size)", "= PlotLosses() # keep track of losses train_plot_losses = [] test_plot_losses = []", "== 'train': encoder_optimizer.zero_grad() decoder_optimizer.zero_grad() # Forward pass through encoded_representation = encoder(input_tensor) reconstruction =", "phase) test_plot_losses.append(loss) print(loss) #plot_losses.append(loss) prefix = '' if phase == 'validation': prefix =", "decoder_optimizer.step() # Return the loss value to track training progress return loss.item() #", "= self.layer(x) x = self.activation(x) x = self.bn(x) x = self.dropout(x) return (x)", "target_tensor, encoder, decoder, encoder_optimizer, decoder_optimizer, criterion, hidden_size, encoded_size, n_layers, drop_prob, phase): if phase", "hidden_size, encoded_size, n_layers, drop_prob, phase): if phase == 'train': encoder.train() decoder.train() else: encoder.eval()", "= self.dropout(x) return (x) class Encoder(nn.Module): def __init__(self, input_shape, hidden_size, encoded_size, n_layers=3, drop_prob=0.5):", "function criterion = torch.nn.MSELoss(reduce=True) # Cycle through epochs for epoch in range(epochs): logs", "super(Encoder, self).__init__() self.n_layers = n_layers self.drop_prob = drop_prob self.e1 = nn.Linear(input_shape, hidden_size) self.activation1", "= self.activation1(x) x = self.bn1(x) for i in range(self.n_layers): decode_block = self.layers[i] x", "#encoded_representation = torch.tanh(self.e2(block1)) encoded_representation = self.e2(x) return encoded_representation #Decoder: class LayerBlockDecode(nn.Module): def __init__(self,", "encoder_optimizer = torch.optim.Adam(encoder.parameters(), lr=learning_rate, weight_decay=1e-5) # Initialize Decoder Optimizer decoder_optimizer = torch.optim.Adam(decoder.parameters(), lr=learning_rate,", "self.activation(x) x = self.bn(x) x = self.dropout(x) return (x) class Decoder(nn.Module): def __init__(self,", "Training AutoEncoders Function def train_ae(input_tensor, target_tensor, encoder, decoder, encoder_optimizer, decoder_optimizer, criterion, hidden_size, encoded_size,", "LayerBlockDecode(nn.Module): def __init__(self, hidden_size_1, hidden_size_2, p): super().__init__() self.layer = nn.Linear(hidden_size_1, hidden_size_2) self.activation =", "matplotlib.pyplot as plt from livelossplot import PlotLosses #Encoder class LayerBlockEncode(nn.Module): def __init__(self, hidden_size_1,", "hidden_size // (2**n_layers) self.d1 = nn.Linear(encoded_size, self.second_last_layer_size) self.activation1 = nn.Tanh() self.bn1 = nn.BatchNorm1d(self.second_last_layer_size)", "'' if phase == 'validation': prefix = 'val_' logs[prefix + 'log loss'] =", "encoder, decoder, encoder_optimizer, decoder_optimizer, criterion, hidden_size, encoded_size, n_layers, drop_prob, phase): if phase ==", "epochs, hidden_size, encoded_size, n_layers, drop_prob, print_every_n_batches=100, learning_rate=0.01, phases=[\"train\", \"validation\"],): # Live Loss liveloss", "for epoch in range(epochs): logs = {} for phase in phases: print(f'Epoch {epoch", "i in range(n_layers): self.layers.append(LayerBlockEncode(hidden_size//(2**i), hidden_size//(2**(i+1)), self.drop_prob)) else: self.n_layers = 0 self.e2 = nn.Linear((hidden_size//(2**n_layers)),", "self.second_last_layer_size = hidden_size // (2**n_layers) self.d1 = nn.Linear(encoded_size, self.second_last_layer_size) self.activation1 = nn.Tanh() self.bn1", "criterion = 
torch.nn.MSELoss(reduce=True) # Cycle through epochs for epoch in range(epochs): logs =", "as np import pandas as pd from tensorboardX import SummaryWriter import torch.nn.functional as", "n_layers, drop_prob, phase): if phase == 'train': encoder.train() decoder.train() else: encoder.eval() decoder.eval() #", "= F.dropout(self.bn(F.elu(self.d(input))), p=self.drop_prob) #reconstruction = torch.tanh(self.d4(block)) reconstruction = self.d2(x) return reconstruction # Training", "range(self.n_layers): encode_block = self.layers[i] x = encode_block(x) #block1 = F.dropout(self.bn1(F.elu(self.e1(input))), p=self.drop_prob) #encoded_representation =", "logs = {} for phase in phases: print(f'Epoch {epoch + 1}/{epochs}') if phase", "torch.nn as nn import numpy as np import pandas as pd from tensorboardX", "if phase == 'train': # Compute the gradients loss.backward() # Step the optimizers", "torch.nn.MSELoss(reduce=True) # Cycle through epochs for epoch in range(epochs): logs = {} for", "weight_decay=1e-5) # Initialize Decoder Optimizer decoder_optimizer = torch.optim.Adam(decoder.parameters(), lr=learning_rate, weight_decay=1e-5) # Specify loss", "pandas as pd from tensorboardX import SummaryWriter import torch.nn.functional as F import pdb", "# Compute the loss loss = criterion(reconstruction, target_tensor) if phase == 'train': #", "#Encoder class LayerBlockEncode(nn.Module): def __init__(self, hidden_size_1, hidden_size_2, p): super().__init__() self.layer = nn.Linear(hidden_size_1, hidden_size_2)", "self).__init__() self.n_layers = n_layers self.drop_prob = drop_prob self.second_last_layer_size = hidden_size // (2**n_layers) self.d1", "Step the optimizers to update the model weights encoder_optimizer.step() decoder_optimizer.step() # Return the", "phase) train_plot_losses.append(loss) else: loss = train_ae(data_tensor_valid, data_tensor_valid, encoder, decoder, encoder_optimizer, decoder_optimizer, criterion, hidden_size,", "> encoded_size: self.layers = nn.ModuleList([]) for i in range(self.n_layers): self.layers.append(LayerBlockDecode(hidden_size//(2**(n_layers-i)), hidden_size//(2**(n_layers-i-1)), self.drop_prob)) else:", "<gh_stars>1-10 # Libraries import torch import torch.nn as nn import numpy as np", "output_shape, hidden_size, encoded_size, n_layers=3, drop_prob=0.5): super(Decoder, self).__init__() self.n_layers = n_layers self.drop_prob = drop_prob", "self.drop_prob)) else: self.n_layers = 0 self.e2 = nn.Linear((hidden_size//(2**n_layers)), encoded_size) def forward(self, input): x", "encoded_representation = encoder(input_tensor) reconstruction = decoder(encoded_representation) # Compute the loss loss = criterion(reconstruction,", "p=self.drop_prob) #reconstruction = torch.tanh(self.d4(block)) reconstruction = self.d2(x) return reconstruction # Training AutoEncoders Function", "Initialize Decoder Optimizer decoder_optimizer = torch.optim.Adam(decoder.parameters(), lr=learning_rate, weight_decay=1e-5) # Specify loss function criterion", "loss.item() # Training Loop def trainIters(encoder, decoder, data_tensor, data_tensor_valid, epochs, hidden_size, encoded_size, n_layers,", "x = self.activation1(x) x = self.bn1(x) for i in range(self.n_layers): decode_block = self.layers[i]", "forward(self, x): x = self.layer(x) x = self.activation(x) x = self.bn(x) x =", "self).__init__() self.n_layers = n_layers self.drop_prob = drop_prob self.e1 = nn.Linear(input_shape, hidden_size) self.activation1 =", "'train': # Compute the gradients loss.backward() # Step the optimizers to update the", 
"self.bn = nn.BatchNorm1d(hidden_size_2) self.dropout = nn.Dropout(p) def forward(self, x): x = self.layer(x) x", "import torch.nn as nn import numpy as np import pandas as pd from", "x = self.bn(x) x = self.dropout(x) return (x) class Decoder(nn.Module): def __init__(self, output_shape,", "x = self.layer(x) x = self.activation(x) x = self.bn(x) x = self.dropout(x) return", "# Return the loss value to track training progress return loss.item() # Training", "n_layers self.drop_prob = drop_prob self.e1 = nn.Linear(input_shape, hidden_size) self.activation1 = nn.Tanh() self.bn1 =", "= [] # Initialize Encoder Optimizer encoder_optimizer = torch.optim.Adam(encoder.parameters(), lr=learning_rate, weight_decay=1e-5) # Initialize", "losses train_plot_losses = [] test_plot_losses = [] # Initialize Encoder Optimizer encoder_optimizer =", "x = self.activation(x) x = self.bn(x) x = self.dropout(x) return (x) class Decoder(nn.Module):", "= nn.Linear(encoded_size, self.second_last_layer_size) self.activation1 = nn.Tanh() self.bn1 = nn.BatchNorm1d(self.second_last_layer_size) if (self.second_last_layer_size) > encoded_size:", "__init__(self, input_shape, hidden_size, encoded_size, n_layers=3, drop_prob=0.5): super(Encoder, self).__init__() self.n_layers = n_layers self.drop_prob =", "= self.layers[i] x = encode_block(x) #block1 = F.dropout(self.bn1(F.elu(self.e1(input))), p=self.drop_prob) #encoded_representation = torch.tanh(self.e2(block1)) encoded_representation", "range(epochs): logs = {} for phase in phases: print(f'Epoch {epoch + 1}/{epochs}') if", "= n_layers self.drop_prob = drop_prob self.e1 = nn.Linear(input_shape, hidden_size) self.activation1 = nn.Tanh() self.bn1", "nn.Tanh() self.bn1 = nn.BatchNorm1d(hidden_size) if (hidden_size // (2**n_layers)) > encoded_size: self.layers = nn.ModuleList([])", "through epochs for epoch in range(epochs): logs = {} for phase in phases:", "x = self.activation1(x) x = self.bn1(x) for i in range(self.n_layers): encode_block = self.layers[i]", "self.activation1(x) x = self.bn1(x) for i in range(self.n_layers): decode_block = self.layers[i] x =", "__init__(self, output_shape, hidden_size, encoded_size, n_layers=3, drop_prob=0.5): super(Decoder, self).__init__() self.n_layers = n_layers self.drop_prob =", "= n_layers self.drop_prob = drop_prob self.second_last_layer_size = hidden_size // (2**n_layers) self.d1 = nn.Linear(encoded_size,", "else: self.n_layers = 0 self.d2 = nn.Linear(hidden_size, output_shape) def forward(self, input): x =", "self.layers = nn.ModuleList([]) for i in range(self.n_layers): self.layers.append(LayerBlockDecode(hidden_size//(2**(n_layers-i)), hidden_size//(2**(n_layers-i-1)), self.drop_prob)) else: self.n_layers =", "epochs for epoch in range(epochs): logs = {} for phase in phases: print(f'Epoch", "Specify loss function criterion = torch.nn.MSELoss(reduce=True) # Cycle through epochs for epoch in", "self.layers[i] x = decode_block(x) #block = F.dropout(self.bn(F.elu(self.d(input))), p=self.drop_prob) #reconstruction = torch.tanh(self.d4(block)) reconstruction =", "criterion, hidden_size, encoded_size, n_layers, drop_prob, phase): if phase == 'train': encoder.train() decoder.train() else:", "hidden_size_1, hidden_size_2, p): super().__init__() self.layer = nn.Linear(hidden_size_1, hidden_size_2) self.activation = nn.Tanh() self.bn =", "keep track of losses train_plot_losses = [] test_plot_losses = [] # Initialize Encoder", "encode_block(x) #block1 = F.dropout(self.bn1(F.elu(self.e1(input))), p=self.drop_prob) #encoded_representation = 
torch.tanh(self.e2(block1)) encoded_representation = self.e2(x) return encoded_representation", "in the optimizers if phase == 'train': encoder_optimizer.zero_grad() decoder_optimizer.zero_grad() # Forward pass through", "self.bn1(x) for i in range(self.n_layers): decode_block = self.layers[i] x = decode_block(x) #block =", "torch.nn.functional as F import pdb import seaborn as sns import matplotlib.pyplot as plt", "seaborn as sns import matplotlib.pyplot as plt from livelossplot import PlotLosses #Encoder class", "track of losses train_plot_losses = [] test_plot_losses = [] # Initialize Encoder Optimizer", "if (self.second_last_layer_size) > encoded_size: self.layers = nn.ModuleList([]) for i in range(self.n_layers): self.layers.append(LayerBlockDecode(hidden_size//(2**(n_layers-i)), hidden_size//(2**(n_layers-i-1)),", "phases: print(f'Epoch {epoch + 1}/{epochs}') if phase == 'train': loss = train_ae(data_tensor, data_tensor,", "def __init__(self, hidden_size_1, hidden_size_2, p): super().__init__() self.layer = nn.Linear(hidden_size_1, hidden_size_2) self.activation = nn.Tanh()", "self.drop_prob)) else: self.n_layers = 0 self.d2 = nn.Linear(hidden_size, output_shape) def forward(self, input): x", "# Training Loop def trainIters(encoder, decoder, data_tensor, data_tensor_valid, epochs, hidden_size, encoded_size, n_layers, drop_prob,", "for i in range(self.n_layers): encode_block = self.layers[i] x = encode_block(x) #block1 = F.dropout(self.bn1(F.elu(self.e1(input))),", "encoded_size, n_layers, drop_prob, phase) train_plot_losses.append(loss) else: loss = train_ae(data_tensor_valid, data_tensor_valid, encoder, decoder, encoder_optimizer,", "self.layers[i] x = encode_block(x) #block1 = F.dropout(self.bn1(F.elu(self.e1(input))), p=self.drop_prob) #encoded_representation = torch.tanh(self.e2(block1)) encoded_representation =", "encoder_optimizer.zero_grad() decoder_optimizer.zero_grad() # Forward pass through encoded_representation = encoder(input_tensor) reconstruction = decoder(encoded_representation) #", "x = self.bn1(x) for i in range(self.n_layers): decode_block = self.layers[i] x = decode_block(x)", "Cycle through epochs for epoch in range(epochs): logs = {} for phase in", "> encoded_size: self.layers = nn.ModuleList([]) for i in range(n_layers): self.layers.append(LayerBlockEncode(hidden_size//(2**i), hidden_size//(2**(i+1)), self.drop_prob)) else:", "F import pdb import seaborn as sns import matplotlib.pyplot as plt from livelossplot", "nn.Tanh() self.bn1 = nn.BatchNorm1d(self.second_last_layer_size) if (self.second_last_layer_size) > encoded_size: self.layers = nn.ModuleList([]) for i", "= self.bn1(x) for i in range(self.n_layers): encode_block = self.layers[i] x = encode_block(x) #block1", "decode_block(x) #block = F.dropout(self.bn(F.elu(self.d(input))), p=self.drop_prob) #reconstruction = torch.tanh(self.d4(block)) reconstruction = self.d2(x) return reconstruction", "liveloss = PlotLosses() # keep track of losses train_plot_losses = [] test_plot_losses =", "loss = criterion(reconstruction, target_tensor) if phase == 'train': # Compute the gradients loss.backward()", "the loss loss = criterion(reconstruction, target_tensor) if phase == 'train': # Compute the", "drop_prob=0.5): super(Decoder, self).__init__() self.n_layers = n_layers self.drop_prob = drop_prob self.second_last_layer_size = hidden_size //", "encoded_size) def forward(self, input): x = self.e1(input) x = self.activation1(x) x = self.bn1(x)", "value to track training progress return loss.item() # Training Loop def 
trainIters(encoder, decoder,", "as plt from livelossplot import PlotLosses #Encoder class LayerBlockEncode(nn.Module): def __init__(self, hidden_size_1, hidden_size_2,", "nn.Linear(input_shape, hidden_size) self.activation1 = nn.Tanh() self.bn1 = nn.BatchNorm1d(hidden_size) if (hidden_size // (2**n_layers)) >", "= nn.Dropout(p) def forward(self, x): x = self.layer(x) x = self.activation(x) x =", "decoder(encoded_representation) # Compute the loss loss = criterion(reconstruction, target_tensor) if phase == 'train':", "forward(self, input): x = self.e1(input) x = self.activation1(x) x = self.bn1(x) for i", "(self.second_last_layer_size) > encoded_size: self.layers = nn.ModuleList([]) for i in range(self.n_layers): self.layers.append(LayerBlockDecode(hidden_size//(2**(n_layers-i)), hidden_size//(2**(n_layers-i-1)), self.drop_prob))", "phase in phases: print(f'Epoch {epoch + 1}/{epochs}') if phase == 'train': loss =", "import torch.nn.functional as F import pdb import seaborn as sns import matplotlib.pyplot as", "return encoded_representation #Decoder: class LayerBlockDecode(nn.Module): def __init__(self, hidden_size_1, hidden_size_2, p): super().__init__() self.layer =", "the optimizers if phase == 'train': encoder_optimizer.zero_grad() decoder_optimizer.zero_grad() # Forward pass through encoded_representation", "nn.Dropout(p) def forward(self, x): x = self.layer(x) x = self.activation(x) x = self.bn(x)", "x): x = self.layer(x) x = self.activation(x) x = self.bn(x) x = self.dropout(x)", "reconstruction = decoder(encoded_representation) # Compute the loss loss = criterion(reconstruction, target_tensor) if phase", "if phase == 'train': encoder_optimizer.zero_grad() decoder_optimizer.zero_grad() # Forward pass through encoded_representation = encoder(input_tensor)", "= self.e2(x) return encoded_representation #Decoder: class LayerBlockDecode(nn.Module): def __init__(self, hidden_size_1, hidden_size_2, p): super().__init__()", "# clear the gradients in the optimizers if phase == 'train': encoder_optimizer.zero_grad() decoder_optimizer.zero_grad()", "lr=learning_rate, weight_decay=1e-5) # Initialize Decoder Optimizer decoder_optimizer = torch.optim.Adam(decoder.parameters(), lr=learning_rate, weight_decay=1e-5) # Specify", "# Cycle through epochs for epoch in range(epochs): logs = {} for phase", "optimizers to update the model weights encoder_optimizer.step() decoder_optimizer.step() # Return the loss value", "self.layers = nn.ModuleList([]) for i in range(n_layers): self.layers.append(LayerBlockEncode(hidden_size//(2**i), hidden_size//(2**(i+1)), self.drop_prob)) else: self.n_layers =", "self.activation1(x) x = self.bn1(x) for i in range(self.n_layers): encode_block = self.layers[i] x =", "optimizers if phase == 'train': encoder_optimizer.zero_grad() decoder_optimizer.zero_grad() # Forward pass through encoded_representation =", "encoded_size, n_layers, drop_prob, phase): if phase == 'train': encoder.train() decoder.train() else: encoder.eval() decoder.eval()", "loss = train_ae(data_tensor, data_tensor, encoder, decoder, encoder_optimizer, decoder_optimizer, criterion, hidden_size, encoded_size, n_layers, drop_prob,", "update the model weights encoder_optimizer.step() decoder_optimizer.step() # Return the loss value to track", "self.drop_prob = drop_prob self.e1 = nn.Linear(input_shape, hidden_size) self.activation1 = nn.Tanh() self.bn1 = nn.BatchNorm1d(hidden_size)", "Decoder Optimizer decoder_optimizer = torch.optim.Adam(decoder.parameters(), lr=learning_rate, weight_decay=1e-5) # Specify loss 
function criterion =", "self.n_layers = n_layers self.drop_prob = drop_prob self.second_last_layer_size = hidden_size // (2**n_layers) self.d1 =", "drop_prob self.second_last_layer_size = hidden_size // (2**n_layers) self.d1 = nn.Linear(encoded_size, self.second_last_layer_size) self.activation1 = nn.Tanh()", "= drop_prob self.second_last_layer_size = hidden_size // (2**n_layers) self.d1 = nn.Linear(encoded_size, self.second_last_layer_size) self.activation1 =", "(hidden_size // (2**n_layers)) > encoded_size: self.layers = nn.ModuleList([]) for i in range(n_layers): self.layers.append(LayerBlockEncode(hidden_size//(2**i),", "if phase == 'train': loss = train_ae(data_tensor, data_tensor, encoder, decoder, encoder_optimizer, decoder_optimizer, criterion,", "= F.dropout(self.bn1(F.elu(self.e1(input))), p=self.drop_prob) #encoded_representation = torch.tanh(self.e2(block1)) encoded_representation = self.e2(x) return encoded_representation #Decoder: class", "decoder_optimizer, criterion, hidden_size, encoded_size, n_layers, drop_prob, phase) train_plot_losses.append(loss) else: loss = train_ae(data_tensor_valid, data_tensor_valid,", "nn.Linear(encoded_size, self.second_last_layer_size) self.activation1 = nn.Tanh() self.bn1 = nn.BatchNorm1d(self.second_last_layer_size) if (self.second_last_layer_size) > encoded_size: self.layers", "(2**n_layers)) > encoded_size: self.layers = nn.ModuleList([]) for i in range(n_layers): self.layers.append(LayerBlockEncode(hidden_size//(2**i), hidden_size//(2**(i+1)), self.drop_prob))", "train_ae(input_tensor, target_tensor, encoder, decoder, encoder_optimizer, decoder_optimizer, criterion, hidden_size, encoded_size, n_layers, drop_prob, phase): if", "super().__init__() self.layer = nn.Linear(hidden_size_1, hidden_size_2) self.activation = nn.Tanh() self.bn = nn.BatchNorm1d(hidden_size_2) self.dropout =", "{} for phase in phases: print(f'Epoch {epoch + 1}/{epochs}') if phase == 'train':", "decoder, encoder_optimizer, decoder_optimizer, criterion, hidden_size, encoded_size, n_layers, drop_prob, phase): if phase == 'train':", "hidden_size//(2**(n_layers-i-1)), self.drop_prob)) else: self.n_layers = 0 self.d2 = nn.Linear(hidden_size, output_shape) def forward(self, input):", "nn.Tanh() self.bn = nn.BatchNorm1d(hidden_size_2) self.dropout = nn.Dropout(p) def forward(self, x): x = self.layer(x)", "# Training AutoEncoders Function def train_ae(input_tensor, target_tensor, encoder, decoder, encoder_optimizer, decoder_optimizer, criterion, hidden_size,", "Encoder Optimizer encoder_optimizer = torch.optim.Adam(encoder.parameters(), lr=learning_rate, weight_decay=1e-5) # Initialize Decoder Optimizer decoder_optimizer =", "self.d1 = nn.Linear(encoded_size, self.second_last_layer_size) self.activation1 = nn.Tanh() self.bn1 = nn.BatchNorm1d(self.second_last_layer_size) if (self.second_last_layer_size) >", "pass through encoded_representation = encoder(input_tensor) reconstruction = decoder(encoded_representation) # Compute the loss loss", "else: loss = train_ae(data_tensor_valid, data_tensor_valid, encoder, decoder, encoder_optimizer, decoder_optimizer, criterion, hidden_size, encoded_size, n_layers,", "decoder_optimizer = torch.optim.Adam(decoder.parameters(), lr=learning_rate, weight_decay=1e-5) # Specify loss function criterion = torch.nn.MSELoss(reduce=True) #", "== 'train': # Compute the gradients loss.backward() # Step the optimizers to update", "decoder, data_tensor, data_tensor_valid, epochs, hidden_size, encoded_size, n_layers, drop_prob, print_every_n_batches=100, 
learning_rate=0.01, phases=[\"train\", \"validation\"],): #", "= self.layers[i] x = decode_block(x) #block = F.dropout(self.bn(F.elu(self.d(input))), p=self.drop_prob) #reconstruction = torch.tanh(self.d4(block)) reconstruction", "for i in range(self.n_layers): decode_block = self.layers[i] x = decode_block(x) #block = F.dropout(self.bn(F.elu(self.d(input))),", "phases=[\"train\", \"validation\"],): # Live Loss liveloss = PlotLosses() # keep track of losses", "nn.ModuleList([]) for i in range(n_layers): self.layers.append(LayerBlockEncode(hidden_size//(2**i), hidden_size//(2**(i+1)), self.drop_prob)) else: self.n_layers = 0 self.e2", "Live Loss liveloss = PlotLosses() # keep track of losses train_plot_losses = []", "in range(self.n_layers): self.layers.append(LayerBlockDecode(hidden_size//(2**(n_layers-i)), hidden_size//(2**(n_layers-i-1)), self.drop_prob)) else: self.n_layers = 0 self.d2 = nn.Linear(hidden_size, output_shape)", "Libraries import torch import torch.nn as nn import numpy as np import pandas", "forward(self, input): x = self.d1(input) x = self.activation1(x) x = self.bn1(x) for i", "np import pandas as pd from tensorboardX import SummaryWriter import torch.nn.functional as F", "x = decode_block(x) #block = F.dropout(self.bn(F.elu(self.d(input))), p=self.drop_prob) #reconstruction = torch.tanh(self.d4(block)) reconstruction = self.d2(x)", "decoder.train() else: encoder.eval() decoder.eval() # clear the gradients in the optimizers if phase", "Optimizer decoder_optimizer = torch.optim.Adam(decoder.parameters(), lr=learning_rate, weight_decay=1e-5) # Specify loss function criterion = torch.nn.MSELoss(reduce=True)", "encoder_optimizer, decoder_optimizer, criterion, hidden_size, encoded_size, n_layers, drop_prob, phase) train_plot_losses.append(loss) else: loss = train_ae(data_tensor_valid,", "self.e2(x) return encoded_representation #Decoder: class LayerBlockDecode(nn.Module): def __init__(self, hidden_size_1, hidden_size_2, p): super().__init__() self.layer", "import PlotLosses #Encoder class LayerBlockEncode(nn.Module): def __init__(self, hidden_size_1, hidden_size_2, p): super().__init__() self.layer =", "x = self.e1(input) x = self.activation1(x) x = self.bn1(x) for i in range(self.n_layers):", "def train_ae(input_tensor, target_tensor, encoder, decoder, encoder_optimizer, decoder_optimizer, criterion, hidden_size, encoded_size, n_layers, drop_prob, phase):", "encoded_size, n_layers=3, drop_prob=0.5): super(Decoder, self).__init__() self.n_layers = n_layers self.drop_prob = drop_prob self.second_last_layer_size =", "= self.d1(input) x = self.activation1(x) x = self.bn1(x) for i in range(self.n_layers): decode_block", "Encoder(nn.Module): def __init__(self, input_shape, hidden_size, encoded_size, n_layers=3, drop_prob=0.5): super(Encoder, self).__init__() self.n_layers = n_layers", "if phase == 'validation': prefix = 'val_' logs[prefix + 'log loss'] = loss", "self.n_layers = 0 self.e2 = nn.Linear((hidden_size//(2**n_layers)), encoded_size) def forward(self, input): x = self.e1(input)", "= nn.BatchNorm1d(hidden_size) if (hidden_size // (2**n_layers)) > encoded_size: self.layers = nn.ModuleList([]) for i", "Function def train_ae(input_tensor, target_tensor, encoder, decoder, encoder_optimizer, decoder_optimizer, criterion, hidden_size, encoded_size, n_layers, drop_prob,", "learning_rate=0.01, phases=[\"train\", \"validation\"],): # Live Loss liveloss = PlotLosses() # keep track of", "# Initialize Decoder Optimizer decoder_optimizer = torch.optim.Adam(decoder.parameters(), 
lr=learning_rate, weight_decay=1e-5) # Specify loss function", "encoder_optimizer, decoder_optimizer, criterion, hidden_size, encoded_size, n_layers, drop_prob, phase): if phase == 'train': encoder.train()", "= self.bn(x) x = self.dropout(x) return (x) class Encoder(nn.Module): def __init__(self, input_shape, hidden_size,", "if (hidden_size // (2**n_layers)) > encoded_size: self.layers = nn.ModuleList([]) for i in range(n_layers):", "# Specify loss function criterion = torch.nn.MSELoss(reduce=True) # Cycle through epochs for epoch", "self.bn(x) x = self.dropout(x) return (x) class Encoder(nn.Module): def __init__(self, input_shape, hidden_size, encoded_size,", "phase == 'train': encoder_optimizer.zero_grad() decoder_optimizer.zero_grad() # Forward pass through encoded_representation = encoder(input_tensor) reconstruction", "pdb import seaborn as sns import matplotlib.pyplot as plt from livelossplot import PlotLosses", "= {} for phase in phases: print(f'Epoch {epoch + 1}/{epochs}') if phase ==", "for i in range(n_layers): self.layers.append(LayerBlockEncode(hidden_size//(2**i), hidden_size//(2**(i+1)), self.drop_prob)) else: self.n_layers = 0 self.e2 =", "#block = F.dropout(self.bn(F.elu(self.d(input))), p=self.drop_prob) #reconstruction = torch.tanh(self.d4(block)) reconstruction = self.d2(x) return reconstruction #", "tensorboardX import SummaryWriter import torch.nn.functional as F import pdb import seaborn as sns", "n_layers, drop_prob, phase) train_plot_losses.append(loss) else: loss = train_ae(data_tensor_valid, data_tensor_valid, encoder, decoder, encoder_optimizer, decoder_optimizer,", "numpy as np import pandas as pd from tensorboardX import SummaryWriter import torch.nn.functional", "== 'train': loss = train_ae(data_tensor, data_tensor, encoder, decoder, encoder_optimizer, decoder_optimizer, criterion, hidden_size, encoded_size,", "= self.activation(x) x = self.bn(x) x = self.dropout(x) return (x) class Encoder(nn.Module): def", "in range(n_layers): self.layers.append(LayerBlockEncode(hidden_size//(2**i), hidden_size//(2**(i+1)), self.drop_prob)) else: self.n_layers = 0 self.e2 = nn.Linear((hidden_size//(2**n_layers)), encoded_size)", "0 self.d2 = nn.Linear(hidden_size, output_shape) def forward(self, input): x = self.d1(input) x =", "data_tensor, encoder, decoder, encoder_optimizer, decoder_optimizer, criterion, hidden_size, encoded_size, n_layers, drop_prob, phase) train_plot_losses.append(loss) else:", "x = self.bn(x) x = self.dropout(x) return (x) class Encoder(nn.Module): def __init__(self, input_shape,", "prefix = 'val_' logs[prefix + 'log loss'] = loss liveloss.update(logs) #liveloss liveloss.draw() #liveloss", "n_layers, drop_prob, print_every_n_batches=100, learning_rate=0.01, phases=[\"train\", \"validation\"],): # Live Loss liveloss = PlotLosses() #", "torch.optim.Adam(encoder.parameters(), lr=learning_rate, weight_decay=1e-5) # Initialize Decoder Optimizer decoder_optimizer = torch.optim.Adam(decoder.parameters(), lr=learning_rate, weight_decay=1e-5) #", "= self.bn1(x) for i in range(self.n_layers): decode_block = self.layers[i] x = decode_block(x) #block", "# Forward pass through encoded_representation = encoder(input_tensor) reconstruction = decoder(encoded_representation) # Compute the", "self.e1(input) x = self.activation1(x) x = self.bn1(x) for i in range(self.n_layers): encode_block =", "trainIters(encoder, decoder, data_tensor, data_tensor_valid, epochs, hidden_size, encoded_size, n_layers, drop_prob, print_every_n_batches=100, learning_rate=0.01, 
phases=[\"train\", \"validation\"],):", "'val_' logs[prefix + 'log loss'] = loss liveloss.update(logs) #liveloss liveloss.draw() #liveloss return train_plot_losses,", "as F import pdb import seaborn as sns import matplotlib.pyplot as plt from", "self.d2 = nn.Linear(hidden_size, output_shape) def forward(self, input): x = self.d1(input) x = self.activation1(x)", "hidden_size_2) self.activation = nn.Tanh() self.bn = nn.BatchNorm1d(hidden_size_2) self.dropout = nn.Dropout(p) def forward(self, x):", "= 0 self.e2 = nn.Linear((hidden_size//(2**n_layers)), encoded_size) def forward(self, input): x = self.e1(input) x", "loss.backward() # Step the optimizers to update the model weights encoder_optimizer.step() decoder_optimizer.step() #", "self.activation1 = nn.Tanh() self.bn1 = nn.BatchNorm1d(hidden_size) if (hidden_size // (2**n_layers)) > encoded_size: self.layers", "x = self.d1(input) x = self.activation1(x) x = self.bn1(x) for i in range(self.n_layers):", "self.d1(input) x = self.activation1(x) x = self.bn1(x) for i in range(self.n_layers): decode_block =", "Loop def trainIters(encoder, decoder, data_tensor, data_tensor_valid, epochs, hidden_size, encoded_size, n_layers, drop_prob, print_every_n_batches=100, learning_rate=0.01,", "progress return loss.item() # Training Loop def trainIters(encoder, decoder, data_tensor, data_tensor_valid, epochs, hidden_size,", "self.dropout(x) return (x) class Decoder(nn.Module): def __init__(self, output_shape, hidden_size, encoded_size, n_layers=3, drop_prob=0.5): super(Decoder,", "def __init__(self, input_shape, hidden_size, encoded_size, n_layers=3, drop_prob=0.5): super(Encoder, self).__init__() self.n_layers = n_layers self.drop_prob", "def trainIters(encoder, decoder, data_tensor, data_tensor_valid, epochs, hidden_size, encoded_size, n_layers, drop_prob, print_every_n_batches=100, learning_rate=0.01, phases=[\"train\",", "encoded_size: self.layers = nn.ModuleList([]) for i in range(self.n_layers): self.layers.append(LayerBlockDecode(hidden_size//(2**(n_layers-i)), hidden_size//(2**(n_layers-i-1)), self.drop_prob)) else: self.n_layers", "the model weights encoder_optimizer.step() decoder_optimizer.step() # Return the loss value to track training", "train_plot_losses = [] test_plot_losses = [] # Initialize Encoder Optimizer encoder_optimizer = torch.optim.Adam(encoder.parameters(),", "x = self.dropout(x) return (x) class Decoder(nn.Module): def __init__(self, output_shape, hidden_size, encoded_size, n_layers=3,", "encoded_size, n_layers, drop_prob, phase) test_plot_losses.append(loss) print(loss) #plot_losses.append(loss) prefix = '' if phase ==", "Return the loss value to track training progress return loss.item() # Training Loop", "= encoder(input_tensor) reconstruction = decoder(encoded_representation) # Compute the loss loss = criterion(reconstruction, target_tensor)", "= torch.optim.Adam(encoder.parameters(), lr=learning_rate, weight_decay=1e-5) # Initialize Decoder Optimizer decoder_optimizer = torch.optim.Adam(decoder.parameters(), lr=learning_rate, weight_decay=1e-5)", "self.bn1 = nn.BatchNorm1d(self.second_last_layer_size) if (self.second_last_layer_size) > encoded_size: self.layers = nn.ModuleList([]) for i in", "encoded_representation #Decoder: class LayerBlockDecode(nn.Module): def __init__(self, hidden_size_1, hidden_size_2, p): super().__init__() self.layer = nn.Linear(hidden_size_1,", "input): x = self.d1(input) x = self.activation1(x) x = self.bn1(x) for i in", "#Decoder: class LayerBlockDecode(nn.Module): def __init__(self, hidden_size_1, 
hidden_size_2, p): super().__init__() self.layer = nn.Linear(hidden_size_1, hidden_size_2)", "livelossplot import PlotLosses #Encoder class LayerBlockEncode(nn.Module): def __init__(self, hidden_size_1, hidden_size_2, p): super().__init__() self.layer", "import SummaryWriter import torch.nn.functional as F import pdb import seaborn as sns import", "== 'validation': prefix = 'val_' logs[prefix + 'log loss'] = loss liveloss.update(logs) #liveloss", "LayerBlockEncode(nn.Module): def __init__(self, hidden_size_1, hidden_size_2, p): super().__init__() self.layer = nn.Linear(hidden_size_1, hidden_size_2) self.activation =", "Optimizer encoder_optimizer = torch.optim.Adam(encoder.parameters(), lr=learning_rate, weight_decay=1e-5) # Initialize Decoder Optimizer decoder_optimizer = torch.optim.Adam(decoder.parameters(),", "= torch.nn.MSELoss(reduce=True) # Cycle through epochs for epoch in range(epochs): logs = {}", "= [] test_plot_losses = [] # Initialize Encoder Optimizer encoder_optimizer = torch.optim.Adam(encoder.parameters(), lr=learning_rate,", "loss function criterion = torch.nn.MSELoss(reduce=True) # Cycle through epochs for epoch in range(epochs):", "phase == 'train': encoder.train() decoder.train() else: encoder.eval() decoder.eval() # clear the gradients in", "prefix = '' if phase == 'validation': prefix = 'val_' logs[prefix + 'log", "'train': loss = train_ae(data_tensor, data_tensor, encoder, decoder, encoder_optimizer, decoder_optimizer, criterion, hidden_size, encoded_size, n_layers,", "as pd from tensorboardX import SummaryWriter import torch.nn.functional as F import pdb import", "self.activation(x) x = self.bn(x) x = self.dropout(x) return (x) class Encoder(nn.Module): def __init__(self,", "AutoEncoders Function def train_ae(input_tensor, target_tensor, encoder, decoder, encoder_optimizer, decoder_optimizer, criterion, hidden_size, encoded_size, n_layers,", "model weights encoder_optimizer.step() decoder_optimizer.step() # Return the loss value to track training progress", "self.e1 = nn.Linear(input_shape, hidden_size) self.activation1 = nn.Tanh() self.bn1 = nn.BatchNorm1d(hidden_size) if (hidden_size //", "= nn.Linear(input_shape, hidden_size) self.activation1 = nn.Tanh() self.bn1 = nn.BatchNorm1d(hidden_size) if (hidden_size // (2**n_layers))", "encoder_optimizer, decoder_optimizer, criterion, hidden_size, encoded_size, n_layers, drop_prob, phase) test_plot_losses.append(loss) print(loss) #plot_losses.append(loss) prefix =", "decoder_optimizer.zero_grad() # Forward pass through encoded_representation = encoder(input_tensor) reconstruction = decoder(encoded_representation) # Compute", "phase): if phase == 'train': encoder.train() decoder.train() else: encoder.eval() decoder.eval() # clear the", "decoder, encoder_optimizer, decoder_optimizer, criterion, hidden_size, encoded_size, n_layers, drop_prob, phase) test_plot_losses.append(loss) print(loss) #plot_losses.append(loss) prefix", "+ 1}/{epochs}') if phase == 'train': loss = train_ae(data_tensor, data_tensor, encoder, decoder, encoder_optimizer,", "nn import numpy as np import pandas as pd from tensorboardX import SummaryWriter", "# Compute the gradients loss.backward() # Step the optimizers to update the model", "hidden_size_2, p): super().__init__() self.layer = nn.Linear(hidden_size_1, hidden_size_2) self.activation = nn.Tanh() self.bn = nn.BatchNorm1d(hidden_size_2)", "#reconstruction = torch.tanh(self.d4(block)) reconstruction = self.d2(x) return reconstruction # Training AutoEncoders Function def", "decoder_optimizer, 
criterion, hidden_size, encoded_size, n_layers, drop_prob, phase): if phase == 'train': encoder.train() decoder.train()", "return reconstruction # Training AutoEncoders Function def train_ae(input_tensor, target_tensor, encoder, decoder, encoder_optimizer, decoder_optimizer,", "track training progress return loss.item() # Training Loop def trainIters(encoder, decoder, data_tensor, data_tensor_valid,", "= criterion(reconstruction, target_tensor) if phase == 'train': # Compute the gradients loss.backward() #", "self.layer = nn.Linear(hidden_size_1, hidden_size_2) self.activation = nn.Tanh() self.bn = nn.BatchNorm1d(hidden_size_2) self.dropout = nn.Dropout(p)", "self.layer(x) x = self.activation(x) x = self.bn(x) x = self.dropout(x) return (x) class", "# keep track of losses train_plot_losses = [] test_plot_losses = [] # Initialize", "'train': encoder_optimizer.zero_grad() decoder_optimizer.zero_grad() # Forward pass through encoded_representation = encoder(input_tensor) reconstruction = decoder(encoded_representation)", "train_ae(data_tensor, data_tensor, encoder, decoder, encoder_optimizer, decoder_optimizer, criterion, hidden_size, encoded_size, n_layers, drop_prob, phase) train_plot_losses.append(loss)", "# Libraries import torch import torch.nn as nn import numpy as np import", "= torch.optim.Adam(decoder.parameters(), lr=learning_rate, weight_decay=1e-5) # Specify loss function criterion = torch.nn.MSELoss(reduce=True) # Cycle", "= train_ae(data_tensor, data_tensor, encoder, decoder, encoder_optimizer, decoder_optimizer, criterion, hidden_size, encoded_size, n_layers, drop_prob, phase)", "the gradients loss.backward() # Step the optimizers to update the model weights encoder_optimizer.step()", "[] # Initialize Encoder Optimizer encoder_optimizer = torch.optim.Adam(encoder.parameters(), lr=learning_rate, weight_decay=1e-5) # Initialize Decoder", "target_tensor) if phase == 'train': # Compute the gradients loss.backward() # Step the", "decode_block = self.layers[i] x = decode_block(x) #block = F.dropout(self.bn(F.elu(self.d(input))), p=self.drop_prob) #reconstruction = torch.tanh(self.d4(block))", "= self.bn(x) x = self.dropout(x) return (x) class Decoder(nn.Module): def __init__(self, output_shape, hidden_size,", "nn.Linear(hidden_size_1, hidden_size_2) self.activation = nn.Tanh() self.bn = nn.BatchNorm1d(hidden_size_2) self.dropout = nn.Dropout(p) def forward(self,", "in phases: print(f'Epoch {epoch + 1}/{epochs}') if phase == 'train': loss = train_ae(data_tensor,", "else: self.n_layers = 0 self.e2 = nn.Linear((hidden_size//(2**n_layers)), encoded_size) def forward(self, input): x =", "criterion(reconstruction, target_tensor) if phase == 'train': # Compute the gradients loss.backward() # Step", "phase == 'train': loss = train_ae(data_tensor, data_tensor, encoder, decoder, encoder_optimizer, decoder_optimizer, criterion, hidden_size,", "reconstruction # Training AutoEncoders Function def train_ae(input_tensor, target_tensor, encoder, decoder, encoder_optimizer, decoder_optimizer, criterion,", "= nn.Linear((hidden_size//(2**n_layers)), encoded_size) def forward(self, input): x = self.e1(input) x = self.activation1(x) x", "drop_prob, phase): if phase == 'train': encoder.train() decoder.train() else: encoder.eval() decoder.eval() # clear", "self.n_layers = n_layers self.drop_prob = drop_prob self.e1 = nn.Linear(input_shape, hidden_size) self.activation1 = nn.Tanh()", "gradients in the optimizers if phase == 'train': encoder_optimizer.zero_grad() decoder_optimizer.zero_grad() # Forward 
pass", "class Encoder(nn.Module): def __init__(self, input_shape, hidden_size, encoded_size, n_layers=3, drop_prob=0.5): super(Encoder, self).__init__() self.n_layers =", "= nn.Tanh() self.bn = nn.BatchNorm1d(hidden_size_2) self.dropout = nn.Dropout(p) def forward(self, x): x =", "def forward(self, input): x = self.d1(input) x = self.activation1(x) x = self.bn1(x) for", "print(loss) #plot_losses.append(loss) prefix = '' if phase == 'validation': prefix = 'val_' logs[prefix", "= nn.ModuleList([]) for i in range(n_layers): self.layers.append(LayerBlockEncode(hidden_size//(2**i), hidden_size//(2**(i+1)), self.drop_prob)) else: self.n_layers = 0", "phase == 'train': # Compute the gradients loss.backward() # Step the optimizers to", "hidden_size, encoded_size, n_layers, drop_prob, phase) train_plot_losses.append(loss) else: loss = train_ae(data_tensor_valid, data_tensor_valid, encoder, decoder,", "encode_block = self.layers[i] x = encode_block(x) #block1 = F.dropout(self.bn1(F.elu(self.e1(input))), p=self.drop_prob) #encoded_representation = torch.tanh(self.e2(block1))", "= nn.BatchNorm1d(hidden_size_2) self.dropout = nn.Dropout(p) def forward(self, x): x = self.layer(x) x =", "self.dropout(x) return (x) class Encoder(nn.Module): def __init__(self, input_shape, hidden_size, encoded_size, n_layers=3, drop_prob=0.5): super(Encoder,", "x = self.dropout(x) return (x) class Encoder(nn.Module): def __init__(self, input_shape, hidden_size, encoded_size, n_layers=3,", "loss loss = criterion(reconstruction, target_tensor) if phase == 'train': # Compute the gradients", "# Initialize Encoder Optimizer encoder_optimizer = torch.optim.Adam(encoder.parameters(), lr=learning_rate, weight_decay=1e-5) # Initialize Decoder Optimizer", "PlotLosses() # keep track of losses train_plot_losses = [] test_plot_losses = [] #", "= self.d2(x) return reconstruction # Training AutoEncoders Function def train_ae(input_tensor, target_tensor, encoder, decoder,", "[] test_plot_losses = [] # Initialize Encoder Optimizer encoder_optimizer = torch.optim.Adam(encoder.parameters(), lr=learning_rate, weight_decay=1e-5)", "Forward pass through encoded_representation = encoder(input_tensor) reconstruction = decoder(encoded_representation) # Compute the loss", "from livelossplot import PlotLosses #Encoder class LayerBlockEncode(nn.Module): def __init__(self, hidden_size_1, hidden_size_2, p): super().__init__()", "// (2**n_layers) self.d1 = nn.Linear(encoded_size, self.second_last_layer_size) self.activation1 = nn.Tanh() self.bn1 = nn.BatchNorm1d(self.second_last_layer_size) if", "= self.dropout(x) return (x) class Decoder(nn.Module): def __init__(self, output_shape, hidden_size, encoded_size, n_layers=3, drop_prob=0.5):", "= nn.BatchNorm1d(self.second_last_layer_size) if (self.second_last_layer_size) > encoded_size: self.layers = nn.ModuleList([]) for i in range(self.n_layers):", "1}/{epochs}') if phase == 'train': loss = train_ae(data_tensor, data_tensor, encoder, decoder, encoder_optimizer, decoder_optimizer,", "nn.BatchNorm1d(hidden_size_2) self.dropout = nn.Dropout(p) def forward(self, x): x = self.layer(x) x = self.activation(x)", "= encode_block(x) #block1 = F.dropout(self.bn1(F.elu(self.e1(input))), p=self.drop_prob) #encoded_representation = torch.tanh(self.e2(block1)) encoded_representation = self.e2(x) return", "self.n_layers = 0 self.d2 = nn.Linear(hidden_size, output_shape) def forward(self, input): x = self.d1(input)", "encoder, decoder, encoder_optimizer, decoder_optimizer, criterion, hidden_size, encoded_size, n_layers, 
drop_prob, phase) train_plot_losses.append(loss) else: loss", "torch.tanh(self.e2(block1)) encoded_representation = self.e2(x) return encoded_representation #Decoder: class LayerBlockDecode(nn.Module): def __init__(self, hidden_size_1, hidden_size_2,", "\"validation\"],): # Live Loss liveloss = PlotLosses() # keep track of losses train_plot_losses", "epoch in range(epochs): logs = {} for phase in phases: print(f'Epoch {epoch +", "the gradients in the optimizers if phase == 'train': encoder_optimizer.zero_grad() decoder_optimizer.zero_grad() # Forward", "= train_ae(data_tensor_valid, data_tensor_valid, encoder, decoder, encoder_optimizer, decoder_optimizer, criterion, hidden_size, encoded_size, n_layers, drop_prob, phase)", "= self.e1(input) x = self.activation1(x) x = self.bn1(x) for i in range(self.n_layers): encode_block", "encoder.eval() decoder.eval() # clear the gradients in the optimizers if phase == 'train':", "#block1 = F.dropout(self.bn1(F.elu(self.e1(input))), p=self.drop_prob) #encoded_representation = torch.tanh(self.e2(block1)) encoded_representation = self.e2(x) return encoded_representation #Decoder:", "p): super().__init__() self.layer = nn.Linear(hidden_size_1, hidden_size_2) self.activation = nn.Tanh() self.bn = nn.BatchNorm1d(hidden_size_2) self.dropout", "# Live Loss liveloss = PlotLosses() # keep track of losses train_plot_losses =", "as nn import numpy as np import pandas as pd from tensorboardX import", "hidden_size) self.activation1 = nn.Tanh() self.bn1 = nn.BatchNorm1d(hidden_size) if (hidden_size // (2**n_layers)) > encoded_size:", "# Step the optimizers to update the model weights encoder_optimizer.step() decoder_optimizer.step() # Return", "decoder, encoder_optimizer, decoder_optimizer, criterion, hidden_size, encoded_size, n_layers, drop_prob, phase) train_plot_losses.append(loss) else: loss =", "i in range(self.n_layers): decode_block = self.layers[i] x = decode_block(x) #block = F.dropout(self.bn(F.elu(self.d(input))), p=self.drop_prob)", "print_every_n_batches=100, learning_rate=0.01, phases=[\"train\", \"validation\"],): # Live Loss liveloss = PlotLosses() # keep track", "self.drop_prob = drop_prob self.second_last_layer_size = hidden_size // (2**n_layers) self.d1 = nn.Linear(encoded_size, self.second_last_layer_size) self.activation1", "lr=learning_rate, weight_decay=1e-5) # Specify loss function criterion = torch.nn.MSELoss(reduce=True) # Cycle through epochs", "class LayerBlockEncode(nn.Module): def __init__(self, hidden_size_1, hidden_size_2, p): super().__init__() self.layer = nn.Linear(hidden_size_1, hidden_size_2) self.activation", "reconstruction = self.d2(x) return reconstruction # Training AutoEncoders Function def train_ae(input_tensor, target_tensor, encoder,", "encoded_size: self.layers = nn.ModuleList([]) for i in range(n_layers): self.layers.append(LayerBlockEncode(hidden_size//(2**i), hidden_size//(2**(i+1)), self.drop_prob)) else: self.n_layers", "if phase == 'train': encoder.train() decoder.train() else: encoder.eval() decoder.eval() # clear the gradients", "Loss liveloss = PlotLosses() # keep track of losses train_plot_losses = [] test_plot_losses", "print(f'Epoch {epoch + 1}/{epochs}') if phase == 'train': loss = train_ae(data_tensor, data_tensor, encoder,", "the optimizers to update the model weights encoder_optimizer.step() decoder_optimizer.step() # Return the loss", "drop_prob, phase) train_plot_losses.append(loss) else: loss = train_ae(data_tensor_valid, data_tensor_valid, encoder, decoder, encoder_optimizer, decoder_optimizer, 
criterion,", "self.dropout = nn.Dropout(p) def forward(self, x): x = self.layer(x) x = self.activation(x) x", "SummaryWriter import torch.nn.functional as F import pdb import seaborn as sns import matplotlib.pyplot", "self.activation1 = nn.Tanh() self.bn1 = nn.BatchNorm1d(self.second_last_layer_size) if (self.second_last_layer_size) > encoded_size: self.layers = nn.ModuleList([])", "self.e2 = nn.Linear((hidden_size//(2**n_layers)), encoded_size) def forward(self, input): x = self.e1(input) x = self.activation1(x)", "F.dropout(self.bn(F.elu(self.d(input))), p=self.drop_prob) #reconstruction = torch.tanh(self.d4(block)) reconstruction = self.d2(x) return reconstruction # Training AutoEncoders", "i in range(self.n_layers): encode_block = self.layers[i] x = encode_block(x) #block1 = F.dropout(self.bn1(F.elu(self.e1(input))), p=self.drop_prob)", "for i in range(self.n_layers): self.layers.append(LayerBlockDecode(hidden_size//(2**(n_layers-i)), hidden_size//(2**(n_layers-i-1)), self.drop_prob)) else: self.n_layers = 0 self.d2 =", "criterion, hidden_size, encoded_size, n_layers, drop_prob, phase) train_plot_losses.append(loss) else: loss = train_ae(data_tensor_valid, data_tensor_valid, encoder,", "data_tensor_valid, encoder, decoder, encoder_optimizer, decoder_optimizer, criterion, hidden_size, encoded_size, n_layers, drop_prob, phase) test_plot_losses.append(loss) print(loss)", "self.second_last_layer_size) self.activation1 = nn.Tanh() self.bn1 = nn.BatchNorm1d(self.second_last_layer_size) if (self.second_last_layer_size) > encoded_size: self.layers =", "Decoder(nn.Module): def __init__(self, output_shape, hidden_size, encoded_size, n_layers=3, drop_prob=0.5): super(Decoder, self).__init__() self.n_layers = n_layers", "= 0 self.d2 = nn.Linear(hidden_size, output_shape) def forward(self, input): x = self.d1(input) x", "criterion, hidden_size, encoded_size, n_layers, drop_prob, phase) test_plot_losses.append(loss) print(loss) #plot_losses.append(loss) prefix = '' if", "weights encoder_optimizer.step() decoder_optimizer.step() # Return the loss value to track training progress return", "self.layers.append(LayerBlockEncode(hidden_size//(2**i), hidden_size//(2**(i+1)), self.drop_prob)) else: self.n_layers = 0 self.e2 = nn.Linear((hidden_size//(2**n_layers)), encoded_size) def forward(self,", "def __init__(self, output_shape, hidden_size, encoded_size, n_layers=3, drop_prob=0.5): super(Decoder, self).__init__() self.n_layers = n_layers self.drop_prob", "i in range(self.n_layers): self.layers.append(LayerBlockDecode(hidden_size//(2**(n_layers-i)), hidden_size//(2**(n_layers-i-1)), self.drop_prob)) else: self.n_layers = 0 self.d2 = nn.Linear(hidden_size,", "class Decoder(nn.Module): def __init__(self, output_shape, hidden_size, encoded_size, n_layers=3, drop_prob=0.5): super(Decoder, self).__init__() self.n_layers =", "encoder.train() decoder.train() else: encoder.eval() decoder.eval() # clear the gradients in the optimizers if", "train_plot_losses.append(loss) else: loss = train_ae(data_tensor_valid, data_tensor_valid, encoder, decoder, encoder_optimizer, decoder_optimizer, criterion, hidden_size, encoded_size,", "test_plot_losses.append(loss) print(loss) #plot_losses.append(loss) prefix = '' if phase == 'validation': prefix = 'val_'", "sns import matplotlib.pyplot as plt from livelossplot import PlotLosses #Encoder class LayerBlockEncode(nn.Module): def", "= torch.tanh(self.e2(block1)) encoded_representation = self.e2(x) return encoded_representation #Decoder: class LayerBlockDecode(nn.Module): def 
__init__(self, hidden_size_1,", "import torch import torch.nn as nn import numpy as np import pandas as", "= self.activation1(x) x = self.bn1(x) for i in range(self.n_layers): encode_block = self.layers[i] x", "input): x = self.e1(input) x = self.activation1(x) x = self.bn1(x) for i in", "loss = train_ae(data_tensor_valid, data_tensor_valid, encoder, decoder, encoder_optimizer, decoder_optimizer, criterion, hidden_size, encoded_size, n_layers, drop_prob,", "= '' if phase == 'validation': prefix = 'val_' logs[prefix + 'log loss']", "self.bn1 = nn.BatchNorm1d(hidden_size) if (hidden_size // (2**n_layers)) > encoded_size: self.layers = nn.ModuleList([]) for", "return (x) class Encoder(nn.Module): def __init__(self, input_shape, hidden_size, encoded_size, n_layers=3, drop_prob=0.5): super(Encoder, self).__init__()", "training progress return loss.item() # Training Loop def trainIters(encoder, decoder, data_tensor, data_tensor_valid, epochs,", "return loss.item() # Training Loop def trainIters(encoder, decoder, data_tensor, data_tensor_valid, epochs, hidden_size, encoded_size,", "nn.ModuleList([]) for i in range(self.n_layers): self.layers.append(LayerBlockDecode(hidden_size//(2**(n_layers-i)), hidden_size//(2**(n_layers-i-1)), self.drop_prob)) else: self.n_layers = 0 self.d2", "nn.BatchNorm1d(self.second_last_layer_size) if (self.second_last_layer_size) > encoded_size: self.layers = nn.ModuleList([]) for i in range(self.n_layers): self.layers.append(LayerBlockDecode(hidden_size//(2**(n_layers-i))," ]
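
# --- Usage sketch ---
# A minimal smoke test of the classes above on random data. This block is
# illustrative only: the feature width, hidden sizes, and epoch count are
# assumptions, not values from the original file.
if __name__ == '__main__':
    torch.manual_seed(0)
    input_dim = 64                            # assumed feature dimension
    data = torch.randn(256, input_dim)        # fake training batch
    data_valid = torch.randn(64, input_dim)   # fake validation batch
    enc = Encoder(input_shape=input_dim, hidden_size=32, encoded_size=2)
    dec = Decoder(output_shape=input_dim, hidden_size=32, encoded_size=2)
    # With hidden_size=32 and n_layers=3 the widths run 64->32->16->8->4->2
    # on the way down and 2->4->8->16->32->64 on the way back up.
    train_losses, valid_losses = trainIters(enc, dec, data, data_valid,
                                            epochs=5, hidden_size=32,
                                            encoded_size=2, n_layers=3,
                                            drop_prob=0.5)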
[ "= r.pipeline() for device_id in device_list: p.hdel(redis_device_key, device_id) p.execute() def update_device(self, device_id, websocket_send_data):", "datetime.datetime.strptime(values[1], \"%Y-%m-%d %H:%M:%S.%f\") now_time = datetime.datetime.today() expire_time_delta = datetime.timedelta(seconds=device_expire_second) if now_time > update_time", "= user_pair[1].split('@') ''' device_id = user_pair[0] update_time = datetime.datetime.strptime(values[1], \"%Y-%m-%d %H:%M:%S.%f\") now_time =", "= datetime.datetime.strptime(values[1], \"%Y-%m-%d %H:%M:%S.%f\") now_time = datetime.datetime.today() expire_time_delta = datetime.timedelta(seconds=device_expire_second) if now_time >", "db=0) def connect(self): return redis.Redis(connection_pool=self.redis_pool) def get_device_datas(self): device_datas = [] r = self.connect()", "= datetime.timedelta(seconds=device_expire_second) if now_time > update_time + expire_time_delta: device_ids.append(device_id) else: remove_device_list.append(device_id) ''' device_datas.append(json.loads(values[0]))", "host='127.0.0.1', port=6379): self.redis_pool = redis.ConnectionPool(host=host, port=port, db=0) def connect(self): return redis.Redis(connection_pool=self.redis_pool) def get_device_datas(self):", "r = self.connect() insert_value = \"%s@%s\" % (json.dumps(websocket_send_data), datetime.datetime.now()) return r.hset(redis_device_key, device_id, insert_value)", "device_datas def remove_devices(self, device_list): r = self.connect() p = r.pipeline() for device_id in", "now_time = datetime.datetime.today() expire_time_delta = datetime.timedelta(seconds=device_expire_second) if now_time > update_time + expire_time_delta: device_ids.append(device_id)", "RedisProxy(object): def __init__(self, host='127.0.0.1', port=6379): self.redis_pool = redis.ConnectionPool(host=host, port=port, db=0) def connect(self): return", "device_datas.append(json.loads(values[0])) self.remove_devices(remove_device_list) return device_datas def remove_devices(self, device_list): r = self.connect() p = r.pipeline()", "redis_device_key = 'redis_device_key' device_expire_second = 60 class RedisProxy(object): def __init__(self, host='127.0.0.1', port=6379): self.redis_pool", "+ expire_time_delta: device_ids.append(device_id) else: remove_device_list.append(device_id) ''' device_datas.append(json.loads(values[0])) self.remove_devices(remove_device_list) return device_datas def remove_devices(self, device_list):", "result.items(): values = user_pair[1].split('@') ''' device_id = user_pair[0] update_time = datetime.datetime.strptime(values[1], \"%Y-%m-%d %H:%M:%S.%f\")", "port=port, db=0) def connect(self): return redis.Redis(connection_pool=self.redis_pool) def get_device_datas(self): device_datas = [] r =", "user_pair[0] update_time = datetime.datetime.strptime(values[1], \"%Y-%m-%d %H:%M:%S.%f\") now_time = datetime.datetime.today() expire_time_delta = datetime.timedelta(seconds=device_expire_second) if", "user_pair[1].split('@') ''' device_id = user_pair[0] update_time = datetime.datetime.strptime(values[1], \"%Y-%m-%d %H:%M:%S.%f\") now_time = datetime.datetime.today()", "r.hgetall(redis_device_key) remove_device_list = [] for user_pair in result.items(): values = user_pair[1].split('@') ''' device_id", "\"%Y-%m-%d %H:%M:%S.%f\") now_time = datetime.datetime.today() expire_time_delta = datetime.timedelta(seconds=device_expire_second) if now_time > update_time +", "json import redis redis_device_key = 'redis_device_key' device_expire_second = 60 class RedisProxy(object): def 
__init__(self,", "r.pipeline() for device_id in device_list: p.hdel(redis_device_key, device_id) p.execute() def update_device(self, device_id, websocket_send_data): r", "expire_time_delta = datetime.timedelta(seconds=device_expire_second) if now_time > update_time + expire_time_delta: device_ids.append(device_id) else: remove_device_list.append(device_id) '''", "update_time + expire_time_delta: device_ids.append(device_id) else: remove_device_list.append(device_id) ''' device_datas.append(json.loads(values[0])) self.remove_devices(remove_device_list) return device_datas def remove_devices(self,", "device_list: p.hdel(redis_device_key, device_id) p.execute() def update_device(self, device_id, websocket_send_data): r = self.connect() insert_value =", "in result.items(): values = user_pair[1].split('@') ''' device_id = user_pair[0] update_time = datetime.datetime.strptime(values[1], \"%Y-%m-%d", "for device_id in device_list: p.hdel(redis_device_key, device_id) p.execute() def update_device(self, device_id, websocket_send_data): r =", "values = user_pair[1].split('@') ''' device_id = user_pair[0] update_time = datetime.datetime.strptime(values[1], \"%Y-%m-%d %H:%M:%S.%f\") now_time", "def update_device(self, device_id, websocket_send_data): r = self.connect() insert_value = \"%s@%s\" % (json.dumps(websocket_send_data), datetime.datetime.now())", "device_datas = [] r = self.connect() result = r.hgetall(redis_device_key) remove_device_list = [] for", "= self.connect() p = r.pipeline() for device_id in device_list: p.hdel(redis_device_key, device_id) p.execute() def", "for user_pair in result.items(): values = user_pair[1].split('@') ''' device_id = user_pair[0] update_time =", "import redis redis_device_key = 'redis_device_key' device_expire_second = 60 class RedisProxy(object): def __init__(self, host='127.0.0.1',", "[] r = self.connect() result = r.hgetall(redis_device_key) remove_device_list = [] for user_pair in", "def get_device_datas(self): device_datas = [] r = self.connect() result = r.hgetall(redis_device_key) remove_device_list =", "update_device(self, device_id, websocket_send_data): r = self.connect() insert_value = \"%s@%s\" % (json.dumps(websocket_send_data), datetime.datetime.now()) return", "> update_time + expire_time_delta: device_ids.append(device_id) else: remove_device_list.append(device_id) ''' device_datas.append(json.loads(values[0])) self.remove_devices(remove_device_list) return device_datas def", "= r.hgetall(redis_device_key) remove_device_list = [] for user_pair in result.items(): values = user_pair[1].split('@') '''", "def __init__(self, host='127.0.0.1', port=6379): self.redis_pool = redis.ConnectionPool(host=host, port=port, db=0) def connect(self): return redis.Redis(connection_pool=self.redis_pool)", "= [] r = self.connect() result = r.hgetall(redis_device_key) remove_device_list = [] for user_pair", "''' device_datas.append(json.loads(values[0])) self.remove_devices(remove_device_list) return device_datas def remove_devices(self, device_list): r = self.connect() p =", "datetime.datetime.today() expire_time_delta = datetime.timedelta(seconds=device_expire_second) if now_time > update_time + expire_time_delta: device_ids.append(device_id) else: remove_device_list.append(device_id)", "self.remove_devices(remove_device_list) return device_datas def remove_devices(self, device_list): r = self.connect() p = r.pipeline() for", "connect(self): return redis.Redis(connection_pool=self.redis_pool) def get_device_datas(self): device_datas = [] r = self.connect() result 
=", "get_device_datas(self): device_datas = [] r = self.connect() result = r.hgetall(redis_device_key) remove_device_list = []", "return redis.Redis(connection_pool=self.redis_pool) def get_device_datas(self): device_datas = [] r = self.connect() result = r.hgetall(redis_device_key)", "update_time = datetime.datetime.strptime(values[1], \"%Y-%m-%d %H:%M:%S.%f\") now_time = datetime.datetime.today() expire_time_delta = datetime.timedelta(seconds=device_expire_second) if now_time", "result = r.hgetall(redis_device_key) remove_device_list = [] for user_pair in result.items(): values = user_pair[1].split('@')", "= self.connect() result = r.hgetall(redis_device_key) remove_device_list = [] for user_pair in result.items(): values", "device_id = user_pair[0] update_time = datetime.datetime.strptime(values[1], \"%Y-%m-%d %H:%M:%S.%f\") now_time = datetime.datetime.today() expire_time_delta =", "remove_device_list.append(device_id) ''' device_datas.append(json.loads(values[0])) self.remove_devices(remove_device_list) return device_datas def remove_devices(self, device_list): r = self.connect() p", "if now_time > update_time + expire_time_delta: device_ids.append(device_id) else: remove_device_list.append(device_id) ''' device_datas.append(json.loads(values[0])) self.remove_devices(remove_device_list) return", "remove_device_list = [] for user_pair in result.items(): values = user_pair[1].split('@') ''' device_id =", "'redis_device_key' device_expire_second = 60 class RedisProxy(object): def __init__(self, host='127.0.0.1', port=6379): self.redis_pool = redis.ConnectionPool(host=host,", "p = r.pipeline() for device_id in device_list: p.hdel(redis_device_key, device_id) p.execute() def update_device(self, device_id,", "60 class RedisProxy(object): def __init__(self, host='127.0.0.1', port=6379): self.redis_pool = redis.ConnectionPool(host=host, port=port, db=0) def", "else: remove_device_list.append(device_id) ''' device_datas.append(json.loads(values[0])) self.remove_devices(remove_device_list) return device_datas def remove_devices(self, device_list): r = self.connect()", "port=6379): self.redis_pool = redis.ConnectionPool(host=host, port=port, db=0) def connect(self): return redis.Redis(connection_pool=self.redis_pool) def get_device_datas(self): device_datas", "%H:%M:%S.%f\") now_time = datetime.datetime.today() expire_time_delta = datetime.timedelta(seconds=device_expire_second) if now_time > update_time + expire_time_delta:", "= 60 class RedisProxy(object): def __init__(self, host='127.0.0.1', port=6379): self.redis_pool = redis.ConnectionPool(host=host, port=port, db=0)", "p.execute() def update_device(self, device_id, websocket_send_data): r = self.connect() insert_value = \"%s@%s\" % (json.dumps(websocket_send_data),", "= datetime.datetime.today() expire_time_delta = datetime.timedelta(seconds=device_expire_second) if now_time > update_time + expire_time_delta: device_ids.append(device_id) else:", "redis.Redis(connection_pool=self.redis_pool) def get_device_datas(self): device_datas = [] r = self.connect() result = r.hgetall(redis_device_key) remove_device_list", "= user_pair[0] update_time = datetime.datetime.strptime(values[1], \"%Y-%m-%d %H:%M:%S.%f\") now_time = datetime.datetime.today() expire_time_delta = datetime.timedelta(seconds=device_expire_second)", "now_time > update_time + expire_time_delta: device_ids.append(device_id) else: remove_device_list.append(device_id) ''' device_datas.append(json.loads(values[0])) self.remove_devices(remove_device_list) return device_datas", "= 
redis.ConnectionPool(host=host, port=port, db=0) def connect(self): return redis.Redis(connection_pool=self.redis_pool) def get_device_datas(self): device_datas = []", "device_id) p.execute() def update_device(self, device_id, websocket_send_data): r = self.connect() insert_value = \"%s@%s\" %", "''' device_id = user_pair[0] update_time = datetime.datetime.strptime(values[1], \"%Y-%m-%d %H:%M:%S.%f\") now_time = datetime.datetime.today() expire_time_delta", "self.connect() p = r.pipeline() for device_id in device_list: p.hdel(redis_device_key, device_id) p.execute() def update_device(self,", "import json import redis redis_device_key = 'redis_device_key' device_expire_second = 60 class RedisProxy(object): def", "import datetime import json import redis redis_device_key = 'redis_device_key' device_expire_second = 60 class", "= 'redis_device_key' device_expire_second = 60 class RedisProxy(object): def __init__(self, host='127.0.0.1', port=6379): self.redis_pool =", "def connect(self): return redis.Redis(connection_pool=self.redis_pool) def get_device_datas(self): device_datas = [] r = self.connect() result", "r = self.connect() p = r.pipeline() for device_id in device_list: p.hdel(redis_device_key, device_id) p.execute()", "redis.ConnectionPool(host=host, port=port, db=0) def connect(self): return redis.Redis(connection_pool=self.redis_pool) def get_device_datas(self): device_datas = [] r", "p.hdel(redis_device_key, device_id) p.execute() def update_device(self, device_id, websocket_send_data): r = self.connect() insert_value = \"%s@%s\"", "expire_time_delta: device_ids.append(device_id) else: remove_device_list.append(device_id) ''' device_datas.append(json.loads(values[0])) self.remove_devices(remove_device_list) return device_datas def remove_devices(self, device_list): r", "self.connect() result = r.hgetall(redis_device_key) remove_device_list = [] for user_pair in result.items(): values =", "datetime.timedelta(seconds=device_expire_second) if now_time > update_time + expire_time_delta: device_ids.append(device_id) else: remove_device_list.append(device_id) ''' device_datas.append(json.loads(values[0])) self.remove_devices(remove_device_list)", "return device_datas def remove_devices(self, device_list): r = self.connect() p = r.pipeline() for device_id", "class RedisProxy(object): def __init__(self, host='127.0.0.1', port=6379): self.redis_pool = redis.ConnectionPool(host=host, port=port, db=0) def connect(self):", "device_id, websocket_send_data): r = self.connect() insert_value = \"%s@%s\" % (json.dumps(websocket_send_data), datetime.datetime.now()) return r.hset(redis_device_key,", "remove_devices(self, device_list): r = self.connect() p = r.pipeline() for device_id in device_list: p.hdel(redis_device_key,", "[] for user_pair in result.items(): values = user_pair[1].split('@') ''' device_id = user_pair[0] update_time", "device_expire_second = 60 class RedisProxy(object): def __init__(self, host='127.0.0.1', port=6379): self.redis_pool = redis.ConnectionPool(host=host, port=port,", "device_list): r = self.connect() p = r.pipeline() for device_id in device_list: p.hdel(redis_device_key, device_id)", "def remove_devices(self, device_list): r = self.connect() p = r.pipeline() for device_id in device_list:", "device_id in device_list: p.hdel(redis_device_key, device_id) p.execute() def update_device(self, device_id, websocket_send_data): r = self.connect()", "redis redis_device_key = 'redis_device_key' device_expire_second = 60 class RedisProxy(object): def __init__(self, host='127.0.0.1', 
port=6379):", "websocket_send_data): r = self.connect() insert_value = \"%s@%s\" % (json.dumps(websocket_send_data), datetime.datetime.now()) return r.hset(redis_device_key, device_id,", "self.redis_pool = redis.ConnectionPool(host=host, port=port, db=0) def connect(self): return redis.Redis(connection_pool=self.redis_pool) def get_device_datas(self): device_datas =", "= [] for user_pair in result.items(): values = user_pair[1].split('@') ''' device_id = user_pair[0]", "user_pair in result.items(): values = user_pair[1].split('@') ''' device_id = user_pair[0] update_time = datetime.datetime.strptime(values[1],", "__init__(self, host='127.0.0.1', port=6379): self.redis_pool = redis.ConnectionPool(host=host, port=port, db=0) def connect(self): return redis.Redis(connection_pool=self.redis_pool) def", "device_ids.append(device_id) else: remove_device_list.append(device_id) ''' device_datas.append(json.loads(values[0])) self.remove_devices(remove_device_list) return device_datas def remove_devices(self, device_list): r =", "in device_list: p.hdel(redis_device_key, device_id) p.execute() def update_device(self, device_id, websocket_send_data): r = self.connect() insert_value", "r = self.connect() result = r.hgetall(redis_device_key) remove_device_list = [] for user_pair in result.items():", "datetime import json import redis redis_device_key = 'redis_device_key' device_expire_second = 60 class RedisProxy(object):" ]
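A short usage sketch, assuming a Redis server is reachable on localhost; the device id and payload are invented:

proxy = RedisProxy()
proxy.update_device('device-42', {'status': 'online', 'battery': 87})
for data in proxy.get_device_datas():
    print(data)  # -> {'status': 'online', 'battery': 87}

One caveat of the storage format: because '@' separates the JSON payload from the timestamp, any payload whose serialized form itself contains '@' (an email address, say) would be truncated by the split; a separator that cannot occur in json.dumps output, such as a newline, would be safer.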
[ "def to(line: int, col: int) -> str: # * Move cursor to line,", "* Restore saved cursor postion t = to r = right l =", "left(dx: int) -> str: return \"\\033[{}D\".format(dx) @staticmethod def up(dy: int) -> str: return", ".u[p](lines) | .d[own](lines) | .save() | .restore()\"\"\" @staticmethod def to(line: int, col: int)", "col) @staticmethod def right(dx: int) -> str: return \"\\033[{}C\".format(dx) @staticmethod def left(dx: int)", "# * Move cursor to line, column return \"\\033[{};{}f\".format(line, col) @staticmethod def right(dx:", "str: return \"\\033[{}B\".format(dy) save: str = \"\\033[s\" # * Save cursor position restore:", "down(dy: int) -> str: return \"\\033[{}B\".format(dy) save: str = \"\\033[s\" # * Save", ".l[eft](columns) | .u[p](lines) | .d[own](lines) | .save() | .restore()\"\"\" @staticmethod def to(line: int,", "postion t = to r = right l = left u = up", "functions: .t[o](line, column) | .r[ight](columns) | .l[eft](columns) | .u[p](lines) | .d[own](lines) | .save()", "\"\"\"Class with collection of cursor movement functions: .t[o](line, column) | .r[ight](columns) | .l[eft](columns)", "str: # * Move cursor to line, column return \"\\033[{};{}f\".format(line, col) @staticmethod def", "Cursor: \"\"\"Class with collection of cursor movement functions: .t[o](line, column) | .r[ight](columns) |", "cursor to line, column return \"\\033[{};{}f\".format(line, col) @staticmethod def right(dx: int) -> str:", "to(line: int, col: int) -> str: # * Move cursor to line, column", "\"\\033[s\" # * Save cursor position restore: str = \"\\033[u\" # * Restore", "position restore: str = \"\\033[u\" # * Restore saved cursor postion t =", "@staticmethod def up(dy: int) -> str: return \"\\033[{}A\".format(dy) @staticmethod def down(dy: int) ->", "# * Restore saved cursor postion t = to r = right l", "to line, column return \"\\033[{};{}f\".format(line, col) @staticmethod def right(dx: int) -> str: return", "\"\\033[u\" # * Restore saved cursor postion t = to r = right", "return \"\\033[{}B\".format(dy) save: str = \"\\033[s\" # * Save cursor position restore: str", "cursor position restore: str = \"\\033[u\" # * Restore saved cursor postion t", "= \"\\033[u\" # * Restore saved cursor postion t = to r =", "line, column return \"\\033[{};{}f\".format(line, col) @staticmethod def right(dx: int) -> str: return \"\\033[{}C\".format(dx)", "-> str: return \"\\033[{}A\".format(dy) @staticmethod def down(dy: int) -> str: return \"\\033[{}B\".format(dy) save:", "str: return \"\\033[{}A\".format(dy) @staticmethod def down(dy: int) -> str: return \"\\033[{}B\".format(dy) save: str", ".save() | .restore()\"\"\" @staticmethod def to(line: int, col: int) -> str: # *", "\"\\033[{}C\".format(dx) @staticmethod def left(dx: int) -> str: return \"\\033[{}D\".format(dx) @staticmethod def up(dy: int)", "cursor postion t = to r = right l = left u =", "@staticmethod def left(dx: int) -> str: return \"\\033[{}D\".format(dx) @staticmethod def up(dy: int) ->", "str: return \"\\033[{}D\".format(dx) @staticmethod def up(dy: int) -> str: return \"\\033[{}A\".format(dy) @staticmethod def", "\"\\033[{}B\".format(dy) save: str = \"\\033[s\" # * Save cursor position restore: str =", "def up(dy: int) -> str: return \"\\033[{}A\".format(dy) @staticmethod def down(dy: int) -> str:", "\"\\033[{}D\".format(dx) @staticmethod def up(dy: int) -> str: return \"\\033[{}A\".format(dy) @staticmethod def down(dy: int)", ".restore()\"\"\" @staticmethod def to(line: int, col: int) -> str: # * Move cursor", "def right(dx: int) -> str: 
return \"\\033[{}C\".format(dx) @staticmethod def left(dx: int) -> str:", "to r = right l = left u = up d = down", "restore: str = \"\\033[u\" # * Restore saved cursor postion t = to", "of cursor movement functions: .t[o](line, column) | .r[ight](columns) | .l[eft](columns) | .u[p](lines) |", "save: str = \"\\033[s\" # * Save cursor position restore: str = \"\\033[u\"", "<reponame>zlj-zz/pyzgit class Cursor: \"\"\"Class with collection of cursor movement functions: .t[o](line, column) |", "column) | .r[ight](columns) | .l[eft](columns) | .u[p](lines) | .d[own](lines) | .save() | .restore()\"\"\"", "| .l[eft](columns) | .u[p](lines) | .d[own](lines) | .save() | .restore()\"\"\" @staticmethod def to(line:", "@staticmethod def down(dy: int) -> str: return \"\\033[{}B\".format(dy) save: str = \"\\033[s\" #", "-> str: return \"\\033[{}D\".format(dx) @staticmethod def up(dy: int) -> str: return \"\\033[{}A\".format(dy) @staticmethod", "return \"\\033[{}D\".format(dx) @staticmethod def up(dy: int) -> str: return \"\\033[{}A\".format(dy) @staticmethod def down(dy:", "int) -> str: return \"\\033[{}C\".format(dx) @staticmethod def left(dx: int) -> str: return \"\\033[{}D\".format(dx)", "@staticmethod def to(line: int, col: int) -> str: # * Move cursor to", "with collection of cursor movement functions: .t[o](line, column) | .r[ight](columns) | .l[eft](columns) |", "str: return \"\\033[{}C\".format(dx) @staticmethod def left(dx: int) -> str: return \"\\033[{}D\".format(dx) @staticmethod def", "saved cursor postion t = to r = right l = left u", "int) -> str: # * Move cursor to line, column return \"\\033[{};{}f\".format(line, col)", "-> str: return \"\\033[{}B\".format(dy) save: str = \"\\033[s\" # * Save cursor position", "Move cursor to line, column return \"\\033[{};{}f\".format(line, col) @staticmethod def right(dx: int) ->", "-> str: # * Move cursor to line, column return \"\\033[{};{}f\".format(line, col) @staticmethod", "int) -> str: return \"\\033[{}A\".format(dy) @staticmethod def down(dy: int) -> str: return \"\\033[{}B\".format(dy)", "str = \"\\033[s\" # * Save cursor position restore: str = \"\\033[u\" #", "column return \"\\033[{};{}f\".format(line, col) @staticmethod def right(dx: int) -> str: return \"\\033[{}C\".format(dx) @staticmethod", "| .u[p](lines) | .d[own](lines) | .save() | .restore()\"\"\" @staticmethod def to(line: int, col:", "| .save() | .restore()\"\"\" @staticmethod def to(line: int, col: int) -> str: #", "# * Save cursor position restore: str = \"\\033[u\" # * Restore saved", "t = to r = right l = left u = up d", "return \"\\033[{};{}f\".format(line, col) @staticmethod def right(dx: int) -> str: return \"\\033[{}C\".format(dx) @staticmethod def", "int, col: int) -> str: # * Move cursor to line, column return", "class Cursor: \"\"\"Class with collection of cursor movement functions: .t[o](line, column) | .r[ight](columns)", "= to r = right l = left u = up d =", "| .r[ight](columns) | .l[eft](columns) | .u[p](lines) | .d[own](lines) | .save() | .restore()\"\"\" @staticmethod", "movement functions: .t[o](line, column) | .r[ight](columns) | .l[eft](columns) | .u[p](lines) | .d[own](lines) |", "up(dy: int) -> str: return \"\\033[{}A\".format(dy) @staticmethod def down(dy: int) -> str: return", ".t[o](line, column) | .r[ight](columns) | .l[eft](columns) | .u[p](lines) | .d[own](lines) | .save() |", ".r[ight](columns) | .l[eft](columns) | .u[p](lines) | .d[own](lines) | .save() | .restore()\"\"\" @staticmethod def", "str = \"\\033[u\" # * Restore saved cursor postion t = to 
r", "Restore saved cursor postion t = to r = right l = left", "= \"\\033[s\" # * Save cursor position restore: str = \"\\033[u\" # *", "int) -> str: return \"\\033[{}B\".format(dy) save: str = \"\\033[s\" # * Save cursor", "col: int) -> str: # * Move cursor to line, column return \"\\033[{};{}f\".format(line,", "return \"\\033[{}A\".format(dy) @staticmethod def down(dy: int) -> str: return \"\\033[{}B\".format(dy) save: str =", "-> str: return \"\\033[{}C\".format(dx) @staticmethod def left(dx: int) -> str: return \"\\033[{}D\".format(dx) @staticmethod", "return \"\\033[{}C\".format(dx) @staticmethod def left(dx: int) -> str: return \"\\033[{}D\".format(dx) @staticmethod def up(dy:", "\"\\033[{}A\".format(dy) @staticmethod def down(dy: int) -> str: return \"\\033[{}B\".format(dy) save: str = \"\\033[s\"", "| .restore()\"\"\" @staticmethod def to(line: int, col: int) -> str: # * Move", "cursor movement functions: .t[o](line, column) | .r[ight](columns) | .l[eft](columns) | .u[p](lines) | .d[own](lines)", "right(dx: int) -> str: return \"\\033[{}C\".format(dx) @staticmethod def left(dx: int) -> str: return", "int) -> str: return \"\\033[{}D\".format(dx) @staticmethod def up(dy: int) -> str: return \"\\033[{}A\".format(dy)", "def down(dy: int) -> str: return \"\\033[{}B\".format(dy) save: str = \"\\033[s\" # *", ".d[own](lines) | .save() | .restore()\"\"\" @staticmethod def to(line: int, col: int) -> str:", "collection of cursor movement functions: .t[o](line, column) | .r[ight](columns) | .l[eft](columns) | .u[p](lines)", "\"\\033[{};{}f\".format(line, col) @staticmethod def right(dx: int) -> str: return \"\\033[{}C\".format(dx) @staticmethod def left(dx:", "* Move cursor to line, column return \"\\033[{};{}f\".format(line, col) @staticmethod def right(dx: int)", "| .d[own](lines) | .save() | .restore()\"\"\" @staticmethod def to(line: int, col: int) ->", "* Save cursor position restore: str = \"\\033[u\" # * Restore saved cursor", "@staticmethod def right(dx: int) -> str: return \"\\033[{}C\".format(dx) @staticmethod def left(dx: int) ->", "def left(dx: int) -> str: return \"\\033[{}D\".format(dx) @staticmethod def up(dy: int) -> str:", "Save cursor position restore: str = \"\\033[u\" # * Restore saved cursor postion" ]
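A quick demonstration in any ANSI-capable terminal, using the short aliases; the functions only build escape strings, so they must be written to stdout to take effect:

import sys

sys.stdout.write(Cursor.save)      # remember the current position
sys.stdout.write(Cursor.t(1, 1))   # jump to the top-left corner
sys.stdout.write("status: ok")
sys.stdout.write(Cursor.restore)   # return to where we started
sys.stdout.flush()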
[ "final_layer.append(image_layers[j][i]) break return np.array(final_layer) # Prep layers = load(image_dimensions) # First wanted_layer =", "np image_dimensions = (25, 6) def load(image_dims, path: str = \"input/08.txt\"): with open(path)", "stacked_layer = stack_layers(layers).reshape(image_dimensions[::-1]) final_image = list() for row in stacked_layer: r = \"\"", "if element == \"1\" else \" \" if element == \"0\" else \"", "# Prep layers = load(image_dimensions) # First wanted_layer = None minimum = None", "stack_layers(layers).reshape(image_dimensions[::-1]) final_image = list() for row in stacked_layer: r = \"\" for element", "element == \"1\" else \" \" if element == \"0\" else \" \"", "element == \"0\" else \" \" final_image.append(r) print(f\"[2]\") for r in final_image: print(r)", "n = number_of_values_in_layer(l, \"0\") if minimum is None or wanted_layer is None or", "in stacked_layer: r = \"\" for element in row: r += \"##\" if", "= None minimum = None for l in layers: n = number_of_values_in_layer(l, \"0\")", "l wanted_1 = number_of_values_in_layer(wanted_layer, \"1\") * number_of_values_in_layer(wanted_layer, \"2\") print(f\"[1]\\t{wanted_1}\") # Second stacked_layer =", "as np image_dimensions = (25, 6) def load(image_dims, path: str = \"input/08.txt\"): with", "list() for row in stacked_layer: r = \"\" for element in row: r", "for element in row: r += \"##\" if element == \"1\" else \"", "n < minimum: minimum = n wanted_layer = l wanted_1 = number_of_values_in_layer(wanted_layer, \"1\")", "= l wanted_1 = number_of_values_in_layer(wanted_layer, \"1\") * number_of_values_in_layer(wanted_layer, \"2\") print(f\"[1]\\t{wanted_1}\") # Second stacked_layer", "wanted_1 = number_of_values_in_layer(wanted_layer, \"1\") * number_of_values_in_layer(wanted_layer, \"2\") print(f\"[1]\\t{wanted_1}\") # Second stacked_layer = stack_layers(layers).reshape(image_dimensions[::-1])", "number_of_values_in_layer(l, \"0\") if minimum is None or wanted_layer is None or n <", "= \"input/08.txt\"): with open(path) as file: return np.array([c for c in file.read()]).reshape((-1, image_dims[0]", "image_layers[j][i] != \"2\": final_layer.append(image_layers[j][i]) break return np.array(final_layer) # Prep layers = load(image_dimensions) #", "wanted_layer = l wanted_1 = number_of_values_in_layer(wanted_layer, \"1\") * number_of_values_in_layer(wanted_layer, \"2\") print(f\"[1]\\t{wanted_1}\") # Second", "list() for i in range(len(image_layers[0])): for j in range(len(image_layers)): if image_layers[j][i] != \"2\":", "Prep layers = load(image_dimensions) # First wanted_layer = None minimum = None for", "minimum = None for l in layers: n = number_of_values_in_layer(l, \"0\") if minimum", "layers: n = number_of_values_in_layer(l, \"0\") if minimum is None or wanted_layer is None", "= list() for row in stacked_layer: r = \"\" for element in row:", "None or wanted_layer is None or n < minimum: minimum = n wanted_layer", "is None or wanted_layer is None or n < minimum: minimum = n", "in range(len(image_layers)): if image_layers[j][i] != \"2\": final_layer.append(image_layers[j][i]) break return np.array(final_layer) # Prep layers", "np.array(final_layer) # Prep layers = load(image_dimensions) # First wanted_layer = None minimum =", "minimum is None or wanted_layer is None or n < minimum: minimum =", "path: str = \"input/08.txt\"): with open(path) as file: return np.array([c for c in", "for i in range(len(image_layers[0])): for j in range(len(image_layers)): if image_layers[j][i] != \"2\": 
final_layer.append(image_layers[j][i])", "is None or n < minimum: minimum = n wanted_layer = l wanted_1", "element in row: r += \"##\" if element == \"1\" else \" \"", "value) def stack_layers(image_layers): final_layer = list() for i in range(len(image_layers[0])): for j in", "= number_of_values_in_layer(wanted_layer, \"1\") * number_of_values_in_layer(wanted_layer, \"2\") print(f\"[1]\\t{wanted_1}\") # Second stacked_layer = stack_layers(layers).reshape(image_dimensions[::-1]) final_image", "minimum = n wanted_layer = l wanted_1 = number_of_values_in_layer(wanted_layer, \"1\") * number_of_values_in_layer(wanted_layer, \"2\")", "or n < minimum: minimum = n wanted_layer = l wanted_1 = number_of_values_in_layer(wanted_layer,", "def number_of_values_in_layer(layer, value): return np.count_nonzero(layer == value) def stack_layers(image_layers): final_layer = list() for", "np.count_nonzero(layer == value) def stack_layers(image_layers): final_layer = list() for i in range(len(image_layers[0])): for", "\"1\" else \" \" if element == \"0\" else \" \" final_image.append(r) print(f\"[2]\")", "wanted_layer = None minimum = None for l in layers: n = number_of_values_in_layer(l,", "return np.array([c for c in file.read()]).reshape((-1, image_dims[0] * image_dims[1])) def number_of_values_in_layer(layer, value): return", "in layers: n = number_of_values_in_layer(l, \"0\") if minimum is None or wanted_layer is", "r += \"##\" if element == \"1\" else \" \" if element ==", "\"##\" if element == \"1\" else \" \" if element == \"0\" else", "\" if element == \"0\" else \" \" final_image.append(r) print(f\"[2]\") for r in", "j in range(len(image_layers)): if image_layers[j][i] != \"2\": final_layer.append(image_layers[j][i]) break return np.array(final_layer) # Prep", "np.array([c for c in file.read()]).reshape((-1, image_dims[0] * image_dims[1])) def number_of_values_in_layer(layer, value): return np.count_nonzero(layer", "break return np.array(final_layer) # Prep layers = load(image_dimensions) # First wanted_layer = None", "= stack_layers(layers).reshape(image_dimensions[::-1]) final_image = list() for row in stacked_layer: r = \"\" for", "\" \" if element == \"0\" else \" \" final_image.append(r) print(f\"[2]\") for r", "\"2\": final_layer.append(image_layers[j][i]) break return np.array(final_layer) # Prep layers = load(image_dimensions) # First wanted_layer", "else \" \" if element == \"0\" else \" \" final_image.append(r) print(f\"[2]\") for", "def stack_layers(image_layers): final_layer = list() for i in range(len(image_layers[0])): for j in range(len(image_layers)):", "def load(image_dims, path: str = \"input/08.txt\"): with open(path) as file: return np.array([c for", "stack_layers(image_layers): final_layer = list() for i in range(len(image_layers[0])): for j in range(len(image_layers)): if", "for l in layers: n = number_of_values_in_layer(l, \"0\") if minimum is None or", "= load(image_dimensions) # First wanted_layer = None minimum = None for l in", "for row in stacked_layer: r = \"\" for element in row: r +=", "open(path) as file: return np.array([c for c in file.read()]).reshape((-1, image_dims[0] * image_dims[1])) def", "for c in file.read()]).reshape((-1, image_dims[0] * image_dims[1])) def number_of_values_in_layer(layer, value): return np.count_nonzero(layer ==", "= \"\" for element in row: r += \"##\" if element == \"1\"", "in file.read()]).reshape((-1, image_dims[0] * image_dims[1])) def number_of_values_in_layer(layer, value): return np.count_nonzero(layer == value) def", 
"image_dims[1])) def number_of_values_in_layer(layer, value): return np.count_nonzero(layer == value) def stack_layers(image_layers): final_layer = list()", "number_of_values_in_layer(layer, value): return np.count_nonzero(layer == value) def stack_layers(image_layers): final_layer = list() for i", "final_layer = list() for i in range(len(image_layers[0])): for j in range(len(image_layers)): if image_layers[j][i]", "\"0\") if minimum is None or wanted_layer is None or n < minimum:", "\"input/08.txt\"): with open(path) as file: return np.array([c for c in file.read()]).reshape((-1, image_dims[0] *", "load(image_dims, path: str = \"input/08.txt\"): with open(path) as file: return np.array([c for c", "range(len(image_layers)): if image_layers[j][i] != \"2\": final_layer.append(image_layers[j][i]) break return np.array(final_layer) # Prep layers =", "= n wanted_layer = l wanted_1 = number_of_values_in_layer(wanted_layer, \"1\") * number_of_values_in_layer(wanted_layer, \"2\") print(f\"[1]\\t{wanted_1}\")", "n wanted_layer = l wanted_1 = number_of_values_in_layer(wanted_layer, \"1\") * number_of_values_in_layer(wanted_layer, \"2\") print(f\"[1]\\t{wanted_1}\") #", "+= \"##\" if element == \"1\" else \" \" if element == \"0\"", "str = \"input/08.txt\"): with open(path) as file: return np.array([c for c in file.read()]).reshape((-1,", "number_of_values_in_layer(wanted_layer, \"1\") * number_of_values_in_layer(wanted_layer, \"2\") print(f\"[1]\\t{wanted_1}\") # Second stacked_layer = stack_layers(layers).reshape(image_dimensions[::-1]) final_image =", "\"2\") print(f\"[1]\\t{wanted_1}\") # Second stacked_layer = stack_layers(layers).reshape(image_dimensions[::-1]) final_image = list() for row in", "file: return np.array([c for c in file.read()]).reshape((-1, image_dims[0] * image_dims[1])) def number_of_values_in_layer(layer, value):", "return np.count_nonzero(layer == value) def stack_layers(image_layers): final_layer = list() for i in range(len(image_layers[0])):", "(25, 6) def load(image_dims, path: str = \"input/08.txt\"): with open(path) as file: return", "or wanted_layer is None or n < minimum: minimum = n wanted_layer =", "== \"1\" else \" \" if element == \"0\" else \" \" final_image.append(r)", "< minimum: minimum = n wanted_layer = l wanted_1 = number_of_values_in_layer(wanted_layer, \"1\") *", "value): return np.count_nonzero(layer == value) def stack_layers(image_layers): final_layer = list() for i in", "load(image_dimensions) # First wanted_layer = None minimum = None for l in layers:", "row in stacked_layer: r = \"\" for element in row: r += \"##\"", "print(f\"[1]\\t{wanted_1}\") # Second stacked_layer = stack_layers(layers).reshape(image_dimensions[::-1]) final_image = list() for row in stacked_layer:", "in row: r += \"##\" if element == \"1\" else \" \" if", "final_image = list() for row in stacked_layer: r = \"\" for element in", "as file: return np.array([c for c in file.read()]).reshape((-1, image_dims[0] * image_dims[1])) def number_of_values_in_layer(layer,", "None minimum = None for l in layers: n = number_of_values_in_layer(l, \"0\") if", "= None for l in layers: n = number_of_values_in_layer(l, \"0\") if minimum is", "# Second stacked_layer = stack_layers(layers).reshape(image_dimensions[::-1]) final_image = list() for row in stacked_layer: r", "= list() for i in range(len(image_layers[0])): for j in range(len(image_layers)): if image_layers[j][i] !=", "* number_of_values_in_layer(wanted_layer, \"2\") print(f\"[1]\\t{wanted_1}\") # Second stacked_layer = 
stack_layers(layers).reshape(image_dimensions[::-1]) final_image = list() for", "= number_of_values_in_layer(l, \"0\") if minimum is None or wanted_layer is None or n", "if image_layers[j][i] != \"2\": final_layer.append(image_layers[j][i]) break return np.array(final_layer) # Prep layers = load(image_dimensions)", "= (25, 6) def load(image_dims, path: str = \"input/08.txt\"): with open(path) as file:", "minimum: minimum = n wanted_layer = l wanted_1 = number_of_values_in_layer(wanted_layer, \"1\") * number_of_values_in_layer(wanted_layer,", "stacked_layer: r = \"\" for element in row: r += \"##\" if element", "range(len(image_layers[0])): for j in range(len(image_layers)): if image_layers[j][i] != \"2\": final_layer.append(image_layers[j][i]) break return np.array(final_layer)", "import numpy as np image_dimensions = (25, 6) def load(image_dims, path: str =", "None for l in layers: n = number_of_values_in_layer(l, \"0\") if minimum is None", "First wanted_layer = None minimum = None for l in layers: n =", "Second stacked_layer = stack_layers(layers).reshape(image_dimensions[::-1]) final_image = list() for row in stacked_layer: r =", "in range(len(image_layers[0])): for j in range(len(image_layers)): if image_layers[j][i] != \"2\": final_layer.append(image_layers[j][i]) break return", "r = \"\" for element in row: r += \"##\" if element ==", "layers = load(image_dimensions) # First wanted_layer = None minimum = None for l", "# First wanted_layer = None minimum = None for l in layers: n", "if element == \"0\" else \" \" final_image.append(r) print(f\"[2]\") for r in final_image:", "wanted_layer is None or n < minimum: minimum = n wanted_layer = l", "return np.array(final_layer) # Prep layers = load(image_dimensions) # First wanted_layer = None minimum", "image_dims[0] * image_dims[1])) def number_of_values_in_layer(layer, value): return np.count_nonzero(layer == value) def stack_layers(image_layers): final_layer", "c in file.read()]).reshape((-1, image_dims[0] * image_dims[1])) def number_of_values_in_layer(layer, value): return np.count_nonzero(layer == value)", "for j in range(len(image_layers)): if image_layers[j][i] != \"2\": final_layer.append(image_layers[j][i]) break return np.array(final_layer) #", "None or n < minimum: minimum = n wanted_layer = l wanted_1 =", "* image_dims[1])) def number_of_values_in_layer(layer, value): return np.count_nonzero(layer == value) def stack_layers(image_layers): final_layer =", "\"\" for element in row: r += \"##\" if element == \"1\" else", "if minimum is None or wanted_layer is None or n < minimum: minimum", "6) def load(image_dims, path: str = \"input/08.txt\"): with open(path) as file: return np.array([c", "number_of_values_in_layer(wanted_layer, \"2\") print(f\"[1]\\t{wanted_1}\") # Second stacked_layer = stack_layers(layers).reshape(image_dimensions[::-1]) final_image = list() for row", "with open(path) as file: return np.array([c for c in file.read()]).reshape((-1, image_dims[0] * image_dims[1]))", "file.read()]).reshape((-1, image_dims[0] * image_dims[1])) def number_of_values_in_layer(layer, value): return np.count_nonzero(layer == value) def stack_layers(image_layers):", "numpy as np image_dimensions = (25, 6) def load(image_dims, path: str = \"input/08.txt\"):", "\"1\") * number_of_values_in_layer(wanted_layer, \"2\") print(f\"[1]\\t{wanted_1}\") # Second stacked_layer = stack_layers(layers).reshape(image_dimensions[::-1]) final_image = list()", "row: r += \"##\" if element == \"1\" else \" \" if element", "== value) def 
stack_layers(image_layers): final_layer = list() for i in range(len(image_layers[0])): for j", "l in layers: n = number_of_values_in_layer(l, \"0\") if minimum is None or wanted_layer", "image_dimensions = (25, 6) def load(image_dims, path: str = \"input/08.txt\"): with open(path) as", "i in range(len(image_layers[0])): for j in range(len(image_layers)): if image_layers[j][i] != \"2\": final_layer.append(image_layers[j][i]) break", "!= \"2\": final_layer.append(image_layers[j][i]) break return np.array(final_layer) # Prep layers = load(image_dimensions) # First" ]
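The stacking rule can be checked against the 2x2 example from the puzzle statement (pixel data 0222112222120000, i.e. layers 0222 / 1122 / 2212 / 0000), which should flatten to 0110; this reuses the same layer layout load() produces:

toy = np.array([c for c in "0222112222120000"]).reshape((-1, 4))
print("".join(stack_layers(toy)))  # -> 0110: first non-"2" wins per pixel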
[ "class College(models.Model): name = models.CharField( max_length=255, blank=False, verbose_name='College Name' ) abbv = models.CharField(", "models.CharField( max_length=255, blank=False, verbose_name='College Name' ) abbv = models.CharField( max_length=30, blank=False, verbose_name='Abbreviation' )", "models.CharField(max_length=20) class Meta: verbose_name_plural = 'Countries' def __str__(self): return self.name class State(models.Model): name", "country = models.ForeignKey(Country) def __str__(self): return self.name class College(models.Model): name = models.CharField( max_length=255,", "verbose_name='Abbreviation' ) state = models.ForeignKey(State) @staticmethod def autocomplete_search_fields(): return 'name', 'abbv' def __str__(self):", "models.ForeignKey(Country) def __str__(self): return self.name class College(models.Model): name = models.CharField( max_length=255, blank=False, verbose_name='College", "__str__(self): return self.name class State(models.Model): name = models.CharField(max_length=20) country = models.ForeignKey(Country) def __str__(self):", "blank=False, verbose_name='College Name' ) abbv = models.CharField( max_length=30, blank=False, verbose_name='Abbreviation' ) state =", ") state = models.ForeignKey(State) @staticmethod def autocomplete_search_fields(): return 'name', 'abbv' def __str__(self): return", "self.name class College(models.Model): name = models.CharField( max_length=255, blank=False, verbose_name='College Name' ) abbv =", "state = models.ForeignKey(State) @staticmethod def autocomplete_search_fields(): return 'name', 'abbv' def __str__(self): return self.name", "models.CharField( max_length=30, blank=False, verbose_name='Abbreviation' ) state = models.ForeignKey(State) @staticmethod def autocomplete_search_fields(): return 'name',", "from django.db import models class Country(models.Model): name = models.CharField(max_length=20) class Meta: verbose_name_plural =", "class State(models.Model): name = models.CharField(max_length=20) country = models.ForeignKey(Country) def __str__(self): return self.name class", "verbose_name='College Name' ) abbv = models.CharField( max_length=30, blank=False, verbose_name='Abbreviation' ) state = models.ForeignKey(State)", "django.db import models class Country(models.Model): name = models.CharField(max_length=20) class Meta: verbose_name_plural = 'Countries'", "Meta: verbose_name_plural = 'Countries' def __str__(self): return self.name class State(models.Model): name = models.CharField(max_length=20)", "'Countries' def __str__(self): return self.name class State(models.Model): name = models.CharField(max_length=20) country = models.ForeignKey(Country)", "= 'Countries' def __str__(self): return self.name class State(models.Model): name = models.CharField(max_length=20) country =", "def __str__(self): return self.name class College(models.Model): name = models.CharField( max_length=255, blank=False, verbose_name='College Name'", "return self.name class College(models.Model): name = models.CharField( max_length=255, blank=False, verbose_name='College Name' ) abbv", "name = models.CharField( max_length=255, blank=False, verbose_name='College Name' ) abbv = models.CharField( max_length=30, blank=False,", "verbose_name_plural = 'Countries' def __str__(self): return self.name class State(models.Model): name = models.CharField(max_length=20) country", "State(models.Model): name = models.CharField(max_length=20) country = models.ForeignKey(Country) def __str__(self): return self.name class College(models.Model):", 
"Country(models.Model): name = models.CharField(max_length=20) class Meta: verbose_name_plural = 'Countries' def __str__(self): return self.name", "self.name class State(models.Model): name = models.CharField(max_length=20) country = models.ForeignKey(Country) def __str__(self): return self.name", "College(models.Model): name = models.CharField( max_length=255, blank=False, verbose_name='College Name' ) abbv = models.CharField( max_length=30,", "name = models.CharField(max_length=20) class Meta: verbose_name_plural = 'Countries' def __str__(self): return self.name class", "max_length=255, blank=False, verbose_name='College Name' ) abbv = models.CharField( max_length=30, blank=False, verbose_name='Abbreviation' ) state", "import models class Country(models.Model): name = models.CharField(max_length=20) class Meta: verbose_name_plural = 'Countries' def", "name = models.CharField(max_length=20) country = models.ForeignKey(Country) def __str__(self): return self.name class College(models.Model): name", "def __str__(self): return self.name class State(models.Model): name = models.CharField(max_length=20) country = models.ForeignKey(Country) def", "class Country(models.Model): name = models.CharField(max_length=20) class Meta: verbose_name_plural = 'Countries' def __str__(self): return", "= models.CharField(max_length=20) class Meta: verbose_name_plural = 'Countries' def __str__(self): return self.name class State(models.Model):", "models class Country(models.Model): name = models.CharField(max_length=20) class Meta: verbose_name_plural = 'Countries' def __str__(self):", "return self.name class State(models.Model): name = models.CharField(max_length=20) country = models.ForeignKey(Country) def __str__(self): return", "= models.CharField(max_length=20) country = models.ForeignKey(Country) def __str__(self): return self.name class College(models.Model): name =", "<filename>Robotix/apps/miscellaneous/models.py<gh_stars>0 from django.db import models class Country(models.Model): name = models.CharField(max_length=20) class Meta: verbose_name_plural", ") abbv = models.CharField( max_length=30, blank=False, verbose_name='Abbreviation' ) state = models.ForeignKey(State) @staticmethod def", "max_length=30, blank=False, verbose_name='Abbreviation' ) state = models.ForeignKey(State) @staticmethod def autocomplete_search_fields(): return 'name', 'abbv'", "blank=False, verbose_name='Abbreviation' ) state = models.ForeignKey(State) @staticmethod def autocomplete_search_fields(): return 'name', 'abbv' def", "= models.CharField( max_length=30, blank=False, verbose_name='Abbreviation' ) state = models.ForeignKey(State) @staticmethod def autocomplete_search_fields(): return", "Name' ) abbv = models.CharField( max_length=30, blank=False, verbose_name='Abbreviation' ) state = models.ForeignKey(State) @staticmethod", "= models.CharField( max_length=255, blank=False, verbose_name='College Name' ) abbv = models.CharField( max_length=30, blank=False, verbose_name='Abbreviation'", "class Meta: verbose_name_plural = 'Countries' def __str__(self): return self.name class State(models.Model): name =", "= models.ForeignKey(Country) def __str__(self): return self.name class College(models.Model): name = models.CharField( max_length=255, blank=False,", "abbv = models.CharField( max_length=30, blank=False, verbose_name='Abbreviation' ) state = models.ForeignKey(State) @staticmethod def autocomplete_search_fields():", "__str__(self): return self.name class College(models.Model): name = models.CharField( max_length=255, 
blank=False, verbose_name='College Name' )", "models.CharField(max_length=20) country = models.ForeignKey(Country) def __str__(self): return self.name class College(models.Model): name = models.CharField(" ]
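An illustrative shell session against these models; all record names are invented, and the autocomplete_search_fields hook follows the convention django-grappelli uses for foreign-key autocompletes in the admin:

india = Country.objects.create(name='India')
punjab = State.objects.create(name='Punjab', country=india)
College.objects.create(name='Example Institute of Technology',
                       abbv='EIT', state=punjab)

# Two-hop foreign-key traversal, College -> State -> Country:
College.objects.filter(state__country__name='India')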
[ "import SE_ResNet_18, SE_ResNet_50, SE_ResNet_101, SE_ResNet_152 class TestModelArchitectures(keras_parameterized.TestCase): def test_se_resnet_18(self): model_type = \"SE_ResNet_18\" input_shape", "SE_ResNet_50( include_top=True, weights=None, input_shape=input_shape, classes=num_classes ) plot_model(model, to_file=model_type + \".png\", show_shapes=True) def test_se_resnet_101(self):", "tensorflow.python.platform import test from tensorflow.keras.utils import plot_model from senet.keras_fn.se_resnet import SE_ResNet_18, SE_ResNet_50, SE_ResNet_101,", "224, 3) num_classes = 2 model = SE_ResNet_18( include_top=True, weights=None, input_shape=input_shape, classes=num_classes )", "num_classes = 2 model = SE_ResNet_50( include_top=True, weights=None, input_shape=input_shape, classes=num_classes ) plot_model(model, to_file=model_type", "include_top=True, weights=None, input_shape=input_shape, classes=num_classes ) plot_model(model, to_file=model_type + \".png\", show_shapes=True) def test_se_resnet_101(self): model_type", "num_classes = 2 model = SE_ResNet_18( include_top=True, weights=None, input_shape=input_shape, classes=num_classes ) plot_model(model, to_file=model_type", "+ \".png\", show_shapes=True) def test_se_resnet_152(self): model_type = \"SE_ResNet_152\" input_shape = (224, 224, 3)", "= 2 model = SE_ResNet_50( include_top=True, weights=None, input_shape=input_shape, classes=num_classes ) plot_model(model, to_file=model_type +", "tensorflow as tf from tensorflow.python.keras import keras_parameterized from tensorflow.python.platform import test from tensorflow.keras.utils", "= \"SE_ResNet_152\" input_shape = (224, 224, 3) num_classes = 2 model = SE_ResNet_152(", "Jan-02-21 20:43 # @Author : <NAME> (<EMAIL>) import tensorflow as tf from tensorflow.python.keras", "(<EMAIL>) import tensorflow as tf from tensorflow.python.keras import keras_parameterized from tensorflow.python.platform import test", "def test_se_resnet_101(self): model_type = \"SE_ResNet_101\" input_shape = (224, 224, 3) num_classes = 2", "\"SE_ResNet_101\" input_shape = (224, 224, 3) num_classes = 2 model = SE_ResNet_101( include_top=True,", "SE_ResNet_152 class TestModelArchitectures(keras_parameterized.TestCase): def test_se_resnet_18(self): model_type = \"SE_ResNet_18\" input_shape = (224, 224, 3)", "input_shape=input_shape, classes=num_classes ) plot_model(model, to_file=model_type + \".png\", show_shapes=True) if __name__ == \"__main__\": test.main()", "@Author : <NAME> (<EMAIL>) import tensorflow as tf from tensorflow.python.keras import keras_parameterized from", "model_type = \"SE_ResNet_152\" input_shape = (224, 224, 3) num_classes = 2 model =", "+ \".png\", show_shapes=True) def test_se_resnet_101(self): model_type = \"SE_ResNet_101\" input_shape = (224, 224, 3)", "include_top=True, weights=None, input_shape=input_shape, classes=num_classes ) plot_model(model, to_file=model_type + \".png\", show_shapes=True) if __name__ ==", "def test_se_resnet_152(self): model_type = \"SE_ResNet_152\" input_shape = (224, 224, 3) num_classes = 2", "SE_ResNet_50, SE_ResNet_101, SE_ResNet_152 class TestModelArchitectures(keras_parameterized.TestCase): def test_se_resnet_18(self): model_type = \"SE_ResNet_18\" input_shape = (224,", "senet.keras_fn.se_resnet import SE_ResNet_18, SE_ResNet_50, SE_ResNet_101, SE_ResNet_152 class TestModelArchitectures(keras_parameterized.TestCase): def test_se_resnet_18(self): model_type = \"SE_ResNet_18\"", "test_se_resnet_50(self): model_type = \"SE_ResNet_50\" input_shape = (224, 224, 3) 
num_classes = 2 model", "as tf from tensorflow.python.keras import keras_parameterized from tensorflow.python.platform import test from tensorflow.keras.utils import", "model_type = \"SE_ResNet_101\" input_shape = (224, 224, 3) num_classes = 2 model =", "tensorflow.python.keras import keras_parameterized from tensorflow.python.platform import test from tensorflow.keras.utils import plot_model from senet.keras_fn.se_resnet", "\".png\", show_shapes=True) def test_se_resnet_50(self): model_type = \"SE_ResNet_50\" input_shape = (224, 224, 3) num_classes", ": <NAME> (<EMAIL>) import tensorflow as tf from tensorflow.python.keras import keras_parameterized from tensorflow.python.platform", "SE_ResNet_101, SE_ResNet_152 class TestModelArchitectures(keras_parameterized.TestCase): def test_se_resnet_18(self): model_type = \"SE_ResNet_18\" input_shape = (224, 224,", "input_shape = (224, 224, 3) num_classes = 2 model = SE_ResNet_152( include_top=True, weights=None,", "= SE_ResNet_50( include_top=True, weights=None, input_shape=input_shape, classes=num_classes ) plot_model(model, to_file=model_type + \".png\", show_shapes=True) def", "224, 3) num_classes = 2 model = SE_ResNet_152( include_top=True, weights=None, input_shape=input_shape, classes=num_classes )", "-*- coding: utf-8 -*- # @Date : Jan-02-21 20:43 # @Author : <NAME>", "input_shape=input_shape, classes=num_classes ) plot_model(model, to_file=model_type + \".png\", show_shapes=True) def test_se_resnet_50(self): model_type = \"SE_ResNet_50\"", "\"SE_ResNet_50\" input_shape = (224, 224, 3) num_classes = 2 model = SE_ResNet_50( include_top=True,", "classes=num_classes ) plot_model(model, to_file=model_type + \".png\", show_shapes=True) def test_se_resnet_50(self): model_type = \"SE_ResNet_50\" input_shape", ") plot_model(model, to_file=model_type + \".png\", show_shapes=True) def test_se_resnet_101(self): model_type = \"SE_ResNet_101\" input_shape =", "plot_model(model, to_file=model_type + \".png\", show_shapes=True) def test_se_resnet_101(self): model_type = \"SE_ResNet_101\" input_shape = (224,", "input_shape=input_shape, classes=num_classes ) plot_model(model, to_file=model_type + \".png\", show_shapes=True) def test_se_resnet_152(self): model_type = \"SE_ResNet_152\"", "3) num_classes = 2 model = SE_ResNet_152( include_top=True, weights=None, input_shape=input_shape, classes=num_classes ) plot_model(model,", "input_shape = (224, 224, 3) num_classes = 2 model = SE_ResNet_18( include_top=True, weights=None,", "\".png\", show_shapes=True) def test_se_resnet_101(self): model_type = \"SE_ResNet_101\" input_shape = (224, 224, 3) num_classes", "input_shape=input_shape, classes=num_classes ) plot_model(model, to_file=model_type + \".png\", show_shapes=True) def test_se_resnet_101(self): model_type = \"SE_ResNet_101\"", "tf from tensorflow.python.keras import keras_parameterized from tensorflow.python.platform import test from tensorflow.keras.utils import plot_model", "2 model = SE_ResNet_50( include_top=True, weights=None, input_shape=input_shape, classes=num_classes ) plot_model(model, to_file=model_type + \".png\",", "\"SE_ResNet_18\" input_shape = (224, 224, 3) num_classes = 2 model = SE_ResNet_18( include_top=True,", "def test_se_resnet_50(self): model_type = \"SE_ResNet_50\" input_shape = (224, 224, 3) num_classes = 2", "SE_ResNet_101( include_top=True, weights=None, input_shape=input_shape, classes=num_classes ) plot_model(model, to_file=model_type + \".png\", show_shapes=True) def test_se_resnet_152(self):", "from tensorflow.keras.utils 
import plot_model from senet.keras_fn.se_resnet import SE_ResNet_18, SE_ResNet_50, SE_ResNet_101, SE_ResNet_152 class TestModelArchitectures(keras_parameterized.TestCase):", "2 model = SE_ResNet_101( include_top=True, weights=None, input_shape=input_shape, classes=num_classes ) plot_model(model, to_file=model_type + \".png\",", "show_shapes=True) def test_se_resnet_50(self): model_type = \"SE_ResNet_50\" input_shape = (224, 224, 3) num_classes =", "show_shapes=True) def test_se_resnet_101(self): model_type = \"SE_ResNet_101\" input_shape = (224, 224, 3) num_classes =", "(224, 224, 3) num_classes = 2 model = SE_ResNet_50( include_top=True, weights=None, input_shape=input_shape, classes=num_classes", "weights=None, input_shape=input_shape, classes=num_classes ) plot_model(model, to_file=model_type + \".png\", show_shapes=True) def test_se_resnet_50(self): model_type =", "classes=num_classes ) plot_model(model, to_file=model_type + \".png\", show_shapes=True) def test_se_resnet_101(self): model_type = \"SE_ResNet_101\" input_shape", "3) num_classes = 2 model = SE_ResNet_101( include_top=True, weights=None, input_shape=input_shape, classes=num_classes ) plot_model(model,", "= (224, 224, 3) num_classes = 2 model = SE_ResNet_101( include_top=True, weights=None, input_shape=input_shape,", "2 model = SE_ResNet_152( include_top=True, weights=None, input_shape=input_shape, classes=num_classes ) plot_model(model, to_file=model_type + \".png\",", "coding: utf-8 -*- # @Date : Jan-02-21 20:43 # @Author : <NAME> (<EMAIL>)", "model_type = \"SE_ResNet_50\" input_shape = (224, 224, 3) num_classes = 2 model =", "@Date : Jan-02-21 20:43 # @Author : <NAME> (<EMAIL>) import tensorflow as tf", "classes=num_classes ) plot_model(model, to_file=model_type + \".png\", show_shapes=True) def test_se_resnet_152(self): model_type = \"SE_ResNet_152\" input_shape", "= 2 model = SE_ResNet_18( include_top=True, weights=None, input_shape=input_shape, classes=num_classes ) plot_model(model, to_file=model_type +", "to_file=model_type + \".png\", show_shapes=True) def test_se_resnet_50(self): model_type = \"SE_ResNet_50\" input_shape = (224, 224,", "\".png\", show_shapes=True) def test_se_resnet_152(self): model_type = \"SE_ResNet_152\" input_shape = (224, 224, 3) num_classes", "import plot_model from senet.keras_fn.se_resnet import SE_ResNet_18, SE_ResNet_50, SE_ResNet_101, SE_ResNet_152 class TestModelArchitectures(keras_parameterized.TestCase): def test_se_resnet_18(self):", "test_se_resnet_18(self): model_type = \"SE_ResNet_18\" input_shape = (224, 224, 3) num_classes = 2 model", "model = SE_ResNet_152( include_top=True, weights=None, input_shape=input_shape, classes=num_classes ) plot_model(model, to_file=model_type + \".png\", show_shapes=True)", "to_file=model_type + \".png\", show_shapes=True) def test_se_resnet_152(self): model_type = \"SE_ResNet_152\" input_shape = (224, 224,", "from tensorflow.python.keras import keras_parameterized from tensorflow.python.platform import test from tensorflow.keras.utils import plot_model from", "+ \".png\", show_shapes=True) def test_se_resnet_50(self): model_type = \"SE_ResNet_50\" input_shape = (224, 224, 3)", "import tensorflow as tf from tensorflow.python.keras import keras_parameterized from tensorflow.python.platform import test from", "input_shape = (224, 224, 3) num_classes = 2 model = SE_ResNet_101( include_top=True, weights=None,", "= SE_ResNet_152( include_top=True, weights=None, input_shape=input_shape, classes=num_classes ) plot_model(model, to_file=model_type + 
\".png\", show_shapes=True) if", "weights=None, input_shape=input_shape, classes=num_classes ) plot_model(model, to_file=model_type + \".png\", show_shapes=True) def test_se_resnet_152(self): model_type =", "# -*- coding: utf-8 -*- # @Date : Jan-02-21 20:43 # @Author :", "= SE_ResNet_101( include_top=True, weights=None, input_shape=input_shape, classes=num_classes ) plot_model(model, to_file=model_type + \".png\", show_shapes=True) def", "plot_model(model, to_file=model_type + \".png\", show_shapes=True) def test_se_resnet_50(self): model_type = \"SE_ResNet_50\" input_shape = (224,", "to_file=model_type + \".png\", show_shapes=True) def test_se_resnet_101(self): model_type = \"SE_ResNet_101\" input_shape = (224, 224,", "weights=None, input_shape=input_shape, classes=num_classes ) plot_model(model, to_file=model_type + \".png\", show_shapes=True) if __name__ == \"__main__\":", "from tensorflow.python.platform import test from tensorflow.keras.utils import plot_model from senet.keras_fn.se_resnet import SE_ResNet_18, SE_ResNet_50,", "input_shape = (224, 224, 3) num_classes = 2 model = SE_ResNet_50( include_top=True, weights=None,", "= 2 model = SE_ResNet_152( include_top=True, weights=None, input_shape=input_shape, classes=num_classes ) plot_model(model, to_file=model_type +", "SE_ResNet_152( include_top=True, weights=None, input_shape=input_shape, classes=num_classes ) plot_model(model, to_file=model_type + \".png\", show_shapes=True) if __name__", "= \"SE_ResNet_101\" input_shape = (224, 224, 3) num_classes = 2 model = SE_ResNet_101(", "= 2 model = SE_ResNet_101( include_top=True, weights=None, input_shape=input_shape, classes=num_classes ) plot_model(model, to_file=model_type +", "class TestModelArchitectures(keras_parameterized.TestCase): def test_se_resnet_18(self): model_type = \"SE_ResNet_18\" input_shape = (224, 224, 3) num_classes", "test_se_resnet_152(self): model_type = \"SE_ResNet_152\" input_shape = (224, 224, 3) num_classes = 2 model", "\"SE_ResNet_152\" input_shape = (224, 224, 3) num_classes = 2 model = SE_ResNet_152( include_top=True,", "SE_ResNet_18, SE_ResNet_50, SE_ResNet_101, SE_ResNet_152 class TestModelArchitectures(keras_parameterized.TestCase): def test_se_resnet_18(self): model_type = \"SE_ResNet_18\" input_shape =", "= (224, 224, 3) num_classes = 2 model = SE_ResNet_50( include_top=True, weights=None, input_shape=input_shape,", "= (224, 224, 3) num_classes = 2 model = SE_ResNet_152( include_top=True, weights=None, input_shape=input_shape,", "= SE_ResNet_18( include_top=True, weights=None, input_shape=input_shape, classes=num_classes ) plot_model(model, to_file=model_type + \".png\", show_shapes=True) def", "keras_parameterized from tensorflow.python.platform import test from tensorflow.keras.utils import plot_model from senet.keras_fn.se_resnet import SE_ResNet_18,", "tensorflow.keras.utils import plot_model from senet.keras_fn.se_resnet import SE_ResNet_18, SE_ResNet_50, SE_ResNet_101, SE_ResNet_152 class TestModelArchitectures(keras_parameterized.TestCase): def", "(224, 224, 3) num_classes = 2 model = SE_ResNet_101( include_top=True, weights=None, input_shape=input_shape, classes=num_classes", "from senet.keras_fn.se_resnet import SE_ResNet_18, SE_ResNet_50, SE_ResNet_101, SE_ResNet_152 class TestModelArchitectures(keras_parameterized.TestCase): def test_se_resnet_18(self): model_type =", "num_classes = 2 model = SE_ResNet_101( include_top=True, weights=None, input_shape=input_shape, classes=num_classes ) plot_model(model, to_file=model_type", "3) num_classes = 2 
model = SE_ResNet_18( include_top=True, weights=None, input_shape=input_shape, classes=num_classes ) plot_model(model,", "model = SE_ResNet_50( include_top=True, weights=None, input_shape=input_shape, classes=num_classes ) plot_model(model, to_file=model_type + \".png\", show_shapes=True)", "include_top=True, weights=None, input_shape=input_shape, classes=num_classes ) plot_model(model, to_file=model_type + \".png\", show_shapes=True) def test_se_resnet_152(self): model_type", "20:43 # @Author : <NAME> (<EMAIL>) import tensorflow as tf from tensorflow.python.keras import", "2 model = SE_ResNet_18( include_top=True, weights=None, input_shape=input_shape, classes=num_classes ) plot_model(model, to_file=model_type + \".png\",", "test_se_resnet_101(self): model_type = \"SE_ResNet_101\" input_shape = (224, 224, 3) num_classes = 2 model", "python3 # -*- coding: utf-8 -*- # @Date : Jan-02-21 20:43 # @Author", "(224, 224, 3) num_classes = 2 model = SE_ResNet_18( include_top=True, weights=None, input_shape=input_shape, classes=num_classes", "SE_ResNet_18( include_top=True, weights=None, input_shape=input_shape, classes=num_classes ) plot_model(model, to_file=model_type + \".png\", show_shapes=True) def test_se_resnet_50(self):", "plot_model(model, to_file=model_type + \".png\", show_shapes=True) def test_se_resnet_152(self): model_type = \"SE_ResNet_152\" input_shape = (224,", ") plot_model(model, to_file=model_type + \".png\", show_shapes=True) def test_se_resnet_152(self): model_type = \"SE_ResNet_152\" input_shape =", "(224, 224, 3) num_classes = 2 model = SE_ResNet_152( include_top=True, weights=None, input_shape=input_shape, classes=num_classes", "224, 3) num_classes = 2 model = SE_ResNet_50( include_top=True, weights=None, input_shape=input_shape, classes=num_classes )", "import keras_parameterized from tensorflow.python.platform import test from tensorflow.keras.utils import plot_model from senet.keras_fn.se_resnet import", "= (224, 224, 3) num_classes = 2 model = SE_ResNet_18( include_top=True, weights=None, input_shape=input_shape,", "show_shapes=True) def test_se_resnet_152(self): model_type = \"SE_ResNet_152\" input_shape = (224, 224, 3) num_classes =", "def test_se_resnet_18(self): model_type = \"SE_ResNet_18\" input_shape = (224, 224, 3) num_classes = 2", ") plot_model(model, to_file=model_type + \".png\", show_shapes=True) def test_se_resnet_50(self): model_type = \"SE_ResNet_50\" input_shape =", "import test from tensorflow.keras.utils import plot_model from senet.keras_fn.se_resnet import SE_ResNet_18, SE_ResNet_50, SE_ResNet_101, SE_ResNet_152", "utf-8 -*- # @Date : Jan-02-21 20:43 # @Author : <NAME> (<EMAIL>) import", "= \"SE_ResNet_50\" input_shape = (224, 224, 3) num_classes = 2 model = SE_ResNet_50(", "-*- # @Date : Jan-02-21 20:43 # @Author : <NAME> (<EMAIL>) import tensorflow", "model_type = \"SE_ResNet_18\" input_shape = (224, 224, 3) num_classes = 2 model =", "<NAME> (<EMAIL>) import tensorflow as tf from tensorflow.python.keras import keras_parameterized from tensorflow.python.platform import", "TestModelArchitectures(keras_parameterized.TestCase): def test_se_resnet_18(self): model_type = \"SE_ResNet_18\" input_shape = (224, 224, 3) num_classes =", "# @Date : Jan-02-21 20:43 # @Author : <NAME> (<EMAIL>) import tensorflow as", "include_top=True, weights=None, input_shape=input_shape, classes=num_classes ) plot_model(model, to_file=model_type + \".png\", show_shapes=True) def test_se_resnet_50(self): model_type", "plot_model from senet.keras_fn.se_resnet import 
SE_ResNet_18, SE_ResNet_50, SE_ResNet_101, SE_ResNet_152 class TestModelArchitectures(keras_parameterized.TestCase): def test_se_resnet_18(self): model_type", "model = SE_ResNet_18( include_top=True, weights=None, input_shape=input_shape, classes=num_classes ) plot_model(model, to_file=model_type + \".png\", show_shapes=True)", "test from tensorflow.keras.utils import plot_model from senet.keras_fn.se_resnet import SE_ResNet_18, SE_ResNet_50, SE_ResNet_101, SE_ResNet_152 class", "#!/usr/bin/env python3 # -*- coding: utf-8 -*- # @Date : Jan-02-21 20:43 #", "num_classes = 2 model = SE_ResNet_152( include_top=True, weights=None, input_shape=input_shape, classes=num_classes ) plot_model(model, to_file=model_type", "weights=None, input_shape=input_shape, classes=num_classes ) plot_model(model, to_file=model_type + \".png\", show_shapes=True) def test_se_resnet_101(self): model_type =", "224, 3) num_classes = 2 model = SE_ResNet_101( include_top=True, weights=None, input_shape=input_shape, classes=num_classes )", "3) num_classes = 2 model = SE_ResNet_50( include_top=True, weights=None, input_shape=input_shape, classes=num_classes ) plot_model(model,", "model = SE_ResNet_101( include_top=True, weights=None, input_shape=input_shape, classes=num_classes ) plot_model(model, to_file=model_type + \".png\", show_shapes=True)", "# @Author : <NAME> (<EMAIL>) import tensorflow as tf from tensorflow.python.keras import keras_parameterized", ": Jan-02-21 20:43 # @Author : <NAME> (<EMAIL>) import tensorflow as tf from", "= \"SE_ResNet_18\" input_shape = (224, 224, 3) num_classes = 2 model = SE_ResNet_18(" ]
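# ---------------------------------------------------------------------------
# A minimal sketch, not part of the original test file: besides rendering the
# graph with plot_model, an architecture can be smoke-tested with a forward
# pass on random input. Only SE_ResNet_18 and its constructor signature come
# from the file above; the shape check assumes the usual Keras convention that
# include_top=True with classes=2 yields a (batch, 2) output.
import numpy as np
from senet.keras_fn.se_resnet import SE_ResNet_18


def forward_pass_smoke_test():
    model = SE_ResNet_18(
        include_top=True,
        weights=None,
        input_shape=(224, 224, 3),
        classes=2
    )
    batch = np.random.rand(1, 224, 224, 3).astype("float32")
    preds = model.predict(batch)
    assert preds.shape == (1, 2)  # one score per class for the single image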
# spark-ingest/main.py
"""
Usage:

- From Spark 3.1.1 base container with Python bindings:
docker run --rm -it --name test_pyspark spark-ingest:latest /bin/bash
./bin/spark-submit spark-ingest/main.py --filepath ./examples/src/main/python/pi.py

- From binaries:
./pyspark --packages io.delta:delta-core_2.12:1.0.0 \
    --conf "spark.sql.extensions=io.delta.sql.DeltaSparkSessionExtension" \
    --conf "spark.sql.catalog.spark_catalog=org.apache.spark.sql.delta.catalog.DeltaCatalog"
./spark-sql --packages io.delta:delta-core_2.12:1.0.0 \
    --conf "spark.sql.extensions=io.delta.sql.DeltaSparkSessionExtension" \
    --conf "spark.sql.catalog.spark_catalog=org.apache.spark.sql.delta.catalog.DeltaCatalog"
"""
from datetime import datetime, date, timedelta
import os
import shutil

import boto3
import click
from pyspark.sql import SparkSession

from spark_etl import logger, SPARK_LOG_LEVEL
from spark_etl.etl import (
    create_vitals_delta,
    cache_mpmi,
    save_mpmi,
    load_vitals,
    upsert_vitals,
    time_travel,
)
from spark_etl.secret import get_secret

"""
To configure AWS bucket-specific authorization, use the
`fs.s3a.bucket.[bucket name].access.key` configuration setting.
As specified here:
- https://hadoop.apache.org/docs/current2/hadoop-aws/tools/hadoop-aws/index.html#Configuring_different_S3_buckets

TODO: Consider optimizing the S3A for I/O.
- https://spark.apache.org/docs/3.1.1/cloud-integration.html#recommended-settings-for-writing-to-object-stores
"""
spark_session = (
    SparkSession
    .builder
    .appName("stage_data")
    # AWS general authorization
    # .config("spark.hadoop.fs.s3a.access.key", os.environ['P3_AWS_ACCESS_KEY'])
    # .config("spark.hadoop.fs.s3a.secret.key", os.environ['P3_AWS_SECRET_KEY'])
    # AWS bucket-specific authorization
    # .config(f"fs.s3a.bucket.{os.environ['P3_BUCKET']}.access.key", os.environ['P3_AWS_ACCESS_KEY'])
    # .config(f"fs.s3a.bucket.{os.environ['P3_BUCKET']}.secret.key", os.environ['P3_AWS_SECRET_KEY'])
    # .config(f"fs.s3a.bucket.{os.environ['P3_BUCKET']}.session.token", os.environ['P3_AWS_SESSION_TOKEN'])
    # Or
    .config(f"spark.hadoop.fs.s3a.bucket.{os.environ['P3_BUCKET']}.access.key", os.environ['P3_AWS_ACCESS_KEY'])
    .config(f"spark.hadoop.fs.s3a.bucket.{os.environ['P3_BUCKET']}.secret.key", os.environ['P3_AWS_SECRET_KEY'])
    # .config("spark.hadoop.fs.s3a.bucket.bangkok.access.key", os.environ['BK_AWS_ACCESS_KEY'])
    # .config("spark.hadoop.fs.s3a.bucket.bangkok.secret.key", os.environ['BK_AWS_SECRET_KEY'])
    # .config("spark.hadoop.fs.s3a.bucket.condesa.access.key", os.environ['CO_AWS_ACCESS_KEY'])
    # .config("spark.hadoop.fs.s3a.bucket.condesa.secret.key", os.environ['CO_AWS_SECRET_KEY'])
    # TODO: S3A Optimizations
    .config("spark.hadoop.mapreduce.fileoutputcommitter.algorithm.version", "2")
    .config("spark.hadoop.mapreduce.fileoutputcommitter.cleanup-failures.ignored", "true")
    # TODO: S3A Optimizations: PathOutputCommitProtocol cannot be resolved
    # .config("spark.hadoop.fs.s3a.committer.name", "directory")
    # .config("spark.sql.sources.commitProtocolClass",
    #         "org.apache.spark.internal.io.cloud.PathOutputCommitProtocol")
    # .config("spark.sql.parquet.output.committer.class",
    #         "org.apache.spark.internal.io.cloud.BindingParquetOutputCommitter")
    # TODO: Parquet Optimizations
    .config("spark.hadoop.parquet.enable.summary-metadata", "false")
    .config("spark.sql.parquet.mergeSchema", "false")
    .config("spark.sql.parquet.filterPushdown", "true")
    .config("spark.sql.hive.metastorePartitionPruning", "true")
    # Specify different location for Hive metastore
    # .config("spark.sql.warehouse.dir", "/opt/spark/hive_warehouse")
    # .config("spark.sql.catalogImplementation", "hive")
    # Delta lake integration with Spark DataSourceV2 and Catalog
    # .config("spark.jars.packages", "io.delta:delta-core_2.12:1.0.0")
    # .config("spark.sql.extensions", "io.delta.sql.DeltaSparkSessionExtension")
    # .config("spark.sql.catalog.spark_catalog", "org.apache.spark.sql.delta.catalog.DeltaCatalog")
    .getOrCreate()
)
spark_session.sparkContext.setLogLevel(SPARK_LOG_LEVEL)


@click.group()
def cli():
    pass


@cli.command()
def smoke_test():
    pass


@cli.command()
@click.option('--filepath', required=False, help='The input file path')
@click.option('--filepath2', required=False, help='The input file path')
@click.option('--output-path', required=False, help='The output file path')
@click.option('--delta-truncate/--no-delta-truncate', default=True,
              help='Clear previous delta runs')
def acquire_vitals(
        filepath: str,
        filepath2: str,
        output_path: str,
        delta_truncate: bool) -> None:
    """Load, upsert, and time-travel the vitals Delta table."""
    # TODO: Import spark_etl to Jupyter container
    # TODO: Build Spark 3.2 container with Python bindings
    # TODO: RE: patient matches, load demographics as a Delta and keep sync'd
    # TODO: Partition demographics Delta by prac
    # TODO: Implement "Current" tables as delta lake tables (merge/upsert)
    # TODO: How to write parent/child tables to db at scale?
    #       See here: https://www.youtube.com/watch?v=aF2hRH5WZAU
    #       monotonically_increasing_id() can also be used.
    start = datetime.now()
    delta_path = "{root}/public/vitals/delta".format(root=output_path)

    if delta_truncate:
        logger.info(f"Clearing vitals delta: {delta_path}")
        shutil.rmtree(delta_path, ignore_errors=True)

    # logger.info(f"Creating vitals delta: {output_path}")
    # delta_path = create_vitals_delta(spark_session, output_path)
    # logger.info(f"Create finished in {datetime.now() - start}")

    logger.info("Caching mpmi")
    mpmi = cache_mpmi(spark_session)
    logger.info(f"Cache finished in {datetime.now() - start}")

    # logger.info("Persisting mpmi")
    # mpmi_path = save_mpmi(spark_session, output_path)
    # logger.info(f"Save finished in {datetime.now() - start}")

    logger.info(f"Processing vitals: {filepath}")
    load_vitals(spark_session, mpmi, filepath, output_path)
    logger.info(f"Load process finished in {datetime.now() - start}")

    logger.info(f"Processing vitals: {filepath2}")
    upsert_vitals(spark_session, mpmi, filepath2, output_path)
    logger.info(f"Upsert process finished in {datetime.now() - start}")

    logger.info(f"Time-travel vitals: {delta_path}")
    time_travel(spark_session, delta_path)
    logger.info(f"Time-travel finished in {datetime.now() - start}")

    input("Press enter to exit...")  # keep alive for Spark UI


@cli.command()
@click.option('--source-path', required=False, help='The Delta path')
@click.option('--output-path', required=False, help='The output file path')
def stream_vitals(source_path: str, output_path: str) -> None:
    """
    JDBC streaming is not supported, so I'm not sure how to use this.
    It may be that Kafka is necessary for true streaming.
    """
    logger.info(f"Stream (append mode) to delta on: {source_path}")
    (
        spark_session
        .readStream
        .format("delta")
        # .option("ignoreDeletes", "true")
        # .option("ignoreChanges", "true")
        .load(source_path)
        .writeStream
        # .format("console")  # debug
        .format("delta")
        .outputMode("append")
        .option("checkpointLocation", f"{output_path}/_checkpoints/stream-from-delta")
        .queryName('vitals_stream')
        .start(output_path)
        .awaitTermination(timeout=60 * 5)  # 5 min
    )


if __name__ == "__main__":
    cli()
# See here: https://www.youtube.com/watch?v=aF2hRH5WZAU # monotonically_increasing_id() can also", "\"true\") # Specify different location for Hive metastore # .config(\"spark.sql.warehouse.dir\", \"/opt/spark/hive_warehouse\") # .config(\"spark.sql.catalogImplementation\",", ".appName(\"stage_data\") # AWS general authorization # .config(\"spark.hadoop.fs.s3a.access.key\", os.environ['P3_AWS_ACCESS_KEY']) # .config(\"spark.hadoop.fs.s3a.secret.key\", os.environ['P3_AWS_SECRET_KEY']) # AWS", "input file path') @click.option('--filepath2', required=False, help='The input file path') @click.option( '--output-path', required=False, help='The", "3.1.1 base container with Python bindings: docker run --rm -it --name test_pyspark spark-ingest:latest", "\"true\") .config(\"spark.sql.hive.metastorePartitionPruning\", \"true\") # Specify different location for Hive metastore # .config(\"spark.sql.warehouse.dir\", \"/opt/spark/hive_warehouse\")", "- start}\") logger.info(f\"Caching mpmi\") mpmi = cache_mpmi(spark_session) logger.info(f\"Cache finished in {datetime.now() - start}\")", "--packages io.delta:delta-core_2.12:1.0.0 \\ --conf \"spark.sql.extensions=io.delta.sql.DeltaSparkSessionExtension\" \\ --conf \"spark.sql.catalog.spark_catalog=org.apache.spark.sql.delta.catalog.DeltaCatalog\" ./spark-sql --packages io.delta:delta-core_2.12:1.0.0 \\ --conf", "os.environ['BK_AWS_SECRET_KEY']) # .config(\"spark.hadoop.fs.s3a.bucket.condesa.access.key\", os.environ['CO_AWS_ACCESS_KEY']) # .config(\"spark.hadoop.fs.s3a.bucket.condesa.secret.key\", os.environ['CO_AWS_SECRET_KEY']) # TODO: S3A Optimizations .config(\"spark.hadoop.mapreduce.fileoutputcommitter.algorithm.version\", \"2\")", "a Delta and keep sync'd # TODO: Partition demographics Delta by prac #", "keep alive for Spark UI @cli.command() @click.option('--source-path', required=False, help='The Delta path') @click.option('--output-path', required=False,", "save_mpmi, load_vitals, upsert_vitals, time_travel ) from spark_etl.secret import get_secret \"\"\" To configure AWS", ".option(\"ignoreChanges\", \"true\") .load(source_path) .writeStream # .format(\"console\") # debug .format(\"delta\") .outputMode(\"append\") .option(\"checkpointLocation\", f\"{output_path}/_checkpoints/stream-from-delta\") .queryName('vitals_stream')", ".config(f\"spark.hadoop.fs.s3a.bucket.{os.environ['P3_BUCKET']}.secret.key\", os.environ['P3_AWS_SECRET_KEY']) # .config(\"spark.hadoop.fs.s3a.bucket.bangkok.access.key\", os.environ['BK_AWS_ACCESS_KEY']) # .config(\"spark.hadoop.fs.s3a.bucket.bangkok.secret.key\", os.environ['BK_AWS_SECRET_KEY']) # .config(\"spark.hadoop.fs.s3a.bucket.condesa.access.key\", os.environ['CO_AWS_ACCESS_KEY']) # .config(\"spark.hadoop.fs.s3a.bucket.condesa.secret.key\",", "logger.info(f\"Creating vitals delta: {output_path}\") # delta_path = create_vitals_delta(spark_session, output_path) # logger.info(f\"Create finished in", "# mpmi_path = save_mpmi(spark_session, output_path) # logger.info(f\"Save finished in {datetime.now() - start}\") logger.info(f\"Processing", "in {datetime.now() - start}\") logger.info(f\"Processing vitals: {filepath}\") load_vitals(spark_session, mpmi, filepath, output_path) logger.info(f\"Load process", ".config(\"spark.sql.extensions\", \"io.delta.sql.DeltaSparkSessionExtension\") # .config(\"spark.sql.catalog.spark_catalog\", \"org.apache.spark.sql.delta.catalog.DeltaCatalog\") .getOrCreate() ) spark_session.sparkContext.setLogLevel(SPARK_LOG_LEVEL) @click.group() def cli(): pass @cli.command()", "import boto3 import 
click from pyspark.sql import SparkSession from spark_etl import logger, SPARK_LOG_LEVEL", "lake tables (merge/upsert) # TODO: How to write parent/child tables to db at", ".config(\"spark.sql.sources.commitProtocolClass\", # \"org.apache.spark.internal.io.cloud.PathOutputCommitProtocol\") # .config(\"spark.sql.parquet.output.committer.class\", # \"org.apache.spark.internal.io.cloud.BindingParquetOutputCommitter\") # TODO: Parquet Optimizations .config(\"spark.hadoop.parquet.enable.summary-metadata\", \"false\")", "cache_mpmi, save_mpmi, load_vitals, upsert_vitals, time_travel ) from spark_etl.secret import get_secret \"\"\" To configure", "def smoke_test(): pass @cli.command() @click.option('--filepath', required=False, help='The input file path') @click.option('--filepath2', required=False, help='The", "( SparkSession .builder .appName(\"stage_data\") # AWS general authorization # .config(\"spark.hadoop.fs.s3a.access.key\", os.environ['P3_AWS_ACCESS_KEY']) # .config(\"spark.hadoop.fs.s3a.secret.key\",", "test_pyspark spark-ingest:latest /bin/bash ./bin/spark-submit spark-ingest/main.py --filepath ./examples/src/main/python/pi.py - From binaries: ./pyspark --packages io.delta:delta-core_2.12:1.0.0", "spark-ingest/main.py --filepath ./examples/src/main/python/pi.py - From binaries: ./pyspark --packages io.delta:delta-core_2.12:1.0.0 \\ --conf \"spark.sql.extensions=io.delta.sql.DeltaSparkSessionExtension\" \\", "- From binaries: ./pyspark --packages io.delta:delta-core_2.12:1.0.0 \\ --conf \"spark.sql.extensions=io.delta.sql.DeltaSparkSessionExtension\" \\ --conf \"spark.sql.catalog.spark_catalog=org.apache.spark.sql.delta.catalog.DeltaCatalog\" ./spark-sql", "may be that Kafka is necessary for true streaming. \"\"\" logger.info(f\"Stream (append mode)", "path') def stream_vitals(source_path: str, output_path: str) -> None: \"\"\" JDBC streaming is not", "str) -> None: \"\"\" JDBC streaming is not supported so I'm not sure", "{datetime.now() - start}\") logger.info(f\"Processing vitals: {filepath2}\") upsert_vitals(spark_session, mpmi, filepath2, output_path) logger.info(f\"Upsert process finished", "container with Python bindings: docker run --rm -it --name test_pyspark spark-ingest:latest /bin/bash ./bin/spark-submit", "file path') @click.option( '--output-path', required=False, help='The output file path') @click.option( '--delta-truncate/--no-delta-truncate', default=True, help='Clear", "{datetime.now() - start}\") logger.info(f\"Processing vitals: {filepath}\") load_vitals(spark_session, mpmi, filepath, output_path) logger.info(f\"Load process finished", "# .config(f\"fs.s3a.bucket.{os.environ['P3_BUCKET']}.access.key\", os.environ['P3_AWS_ACCESS_KEY']) # .config(f\"fs.s3a.bucket.{os.environ['P3_BUCKET']}.secret.key\", os.environ['P3_AWS_SECRET_KEY']) # .config(f\"fs.s3a.bucket.{os.environ['P3_BUCKET']}.session.token\", os.environ['P3_AWS_SESSION_TOKEN']) # Or .config(f\"spark.hadoop.fs.s3a.bucket.{os.environ['P3_BUCKET']}.access.key\", os.environ['P3_AWS_ACCESS_KEY'])", "# .format(\"console\") # debug .format(\"delta\") .outputMode(\"append\") .option(\"checkpointLocation\", f\"{output_path}/_checkpoints/stream-from-delta\") .queryName('vitals_stream') .start(output_path) .awaitTermination(timeout=60*5) # 5", "output_path) # logger.info(f\"Save finished in {datetime.now() - start}\") logger.info(f\"Processing vitals: {filepath}\") load_vitals(spark_session, mpmi,", "( spark_session .readStream .format(\"delta\") # .option(\"ignoreDeletes\", \"true\") # .option(\"ignoreChanges\", 
\"true\") .load(source_path) .writeStream #", "finished in {datetime.now() - start}\") logger.info(f\"Processing vitals: {filepath}\") load_vitals(spark_session, mpmi, filepath, output_path) logger.info(f\"Load", "vitals: {filepath}\") load_vitals(spark_session, mpmi, filepath, output_path) logger.info(f\"Load process finished in {datetime.now() - start}\")", "./examples/src/main/python/pi.py - From binaries: ./pyspark --packages io.delta:delta-core_2.12:1.0.0 \\ --conf \"spark.sql.extensions=io.delta.sql.DeltaSparkSessionExtension\" \\ --conf \"spark.sql.catalog.spark_catalog=org.apache.spark.sql.delta.catalog.DeltaCatalog\"", "default=True, help='Clear previous delta runs') def acquire_vitals( filepath: str, filepath2: str, output_path: str,", "resolved # .config(\"spark.hadoop.fs.s3a.committer.name\", \"directory\") # .config(\"spark.sql.sources.commitProtocolClass\", # \"org.apache.spark.internal.io.cloud.PathOutputCommitProtocol\") # .config(\"spark.sql.parquet.output.committer.class\", # \"org.apache.spark.internal.io.cloud.BindingParquetOutputCommitter\") #", "file path') def stream_vitals(source_path: str, output_path: str) -> None: \"\"\" JDBC streaming is", ".config(\"spark.hadoop.fs.s3a.secret.key\", os.environ['P3_AWS_SECRET_KEY']) # AWS bucket-specific authorization # .config(f\"fs.s3a.bucket.{os.environ['P3_BUCKET']}.access.key\", os.environ['P3_AWS_ACCESS_KEY']) # .config(f\"fs.s3a.bucket.{os.environ['P3_BUCKET']}.secret.key\", os.environ['P3_AWS_SECRET_KEY']) #", "mpmi\") # mpmi_path = save_mpmi(spark_session, output_path) # logger.info(f\"Save finished in {datetime.now() - start}\")", "input file path') @click.option( '--output-path', required=False, help='The output file path') @click.option( '--delta-truncate/--no-delta-truncate', default=True,", "SparkSession from spark_etl import logger, SPARK_LOG_LEVEL from spark_etl.etl import ( create_vitals_delta, cache_mpmi, save_mpmi,", ".config(f\"spark.hadoop.fs.s3a.bucket.{os.environ['P3_BUCKET']}.access.key\", os.environ['P3_AWS_ACCESS_KEY']) .config(f\"spark.hadoop.fs.s3a.bucket.{os.environ['P3_BUCKET']}.secret.key\", os.environ['P3_AWS_SECRET_KEY']) # .config(\"spark.hadoop.fs.s3a.bucket.bangkok.access.key\", os.environ['BK_AWS_ACCESS_KEY']) # .config(\"spark.hadoop.fs.s3a.bucket.bangkok.secret.key\", os.environ['BK_AWS_SECRET_KEY']) # .config(\"spark.hadoop.fs.s3a.bucket.condesa.access.key\", os.environ['CO_AWS_ACCESS_KEY'])", "tables as delta lake tables (merge/upsert) # TODO: How to write parent/child tables", "filepath2, output_path) logger.info(f\"Upsert process finished in {datetime.now() - start}\") logger.info(f\"Time-travel vitals: {delta_path}\") time_travel(", "= create_vitals_delta(spark_session, output_path) # logger.info(f\"Create finished in {datetime.now() - start}\") logger.info(f\"Caching mpmi\") mpmi", "click from pyspark.sql import SparkSession from spark_etl import logger, SPARK_LOG_LEVEL from spark_etl.etl import", "specified here: - https://hadoop.apache.org/docs/current2/hadoop-aws/tools/hadoop-aws/index.html#Configuring_different_S3_buckets TODO: Consider optimizing the S3A for I/O. - https://spark.apache.org/docs/3.1.1/cloud-integration.html#recommended-settings-for-writing-to-object-stores", "\"true\") # TODO: S3A Optimizations: PathOutputCommitProtocol cannot be resolved # .config(\"spark.hadoop.fs.s3a.committer.name\", \"directory\") #", "db at scale? 
# See here: https://www.youtube.com/watch?v=aF2hRH5WZAU # monotonically_increasing_id() can also be used.", "import os import shutil import boto3 import click from pyspark.sql import SparkSession from", "# .option(\"ignoreChanges\", \"true\") .load(source_path) .writeStream # .format(\"console\") # debug .format(\"delta\") .outputMode(\"append\") .option(\"checkpointLocation\", f\"{output_path}/_checkpoints/stream-from-delta\")", "filepath: str, filepath2: str, output_path: str, delta_truncate: bool) -> None: \"\"\" \"\"\" #", "debug .format(\"delta\") .outputMode(\"append\") .option(\"checkpointLocation\", f\"{output_path}/_checkpoints/stream-from-delta\") .queryName('vitals_stream') .start(output_path) .awaitTermination(timeout=60*5) # 5 min ) if", "TODO: Parquet Optimizations .config(\"spark.hadoop.parquet.enable.summary-metadata\", \"false\") .config(\"spark.sql.parquet.mergeSchema\", \"false\") .config(\"spark.sql.parquet.filterPushdown\", \"true\") .config(\"spark.sql.hive.metastorePartitionPruning\", \"true\") # Specify", "cache_mpmi(spark_session) logger.info(f\"Cache finished in {datetime.now() - start}\") # logger.info(f\"Persisting mpmi\") # mpmi_path =", "the S3A for I/O. - https://spark.apache.org/docs/3.1.1/cloud-integration.html#recommended-settings-for-writing-to-object-stores \"\"\" spark_session = ( SparkSession .builder .appName(\"stage_data\")", "# .config(\"spark.hadoop.fs.s3a.bucket.condesa.access.key\", os.environ['CO_AWS_ACCESS_KEY']) # .config(\"spark.hadoop.fs.s3a.bucket.condesa.secret.key\", os.environ['CO_AWS_SECRET_KEY']) # TODO: S3A Optimizations .config(\"spark.hadoop.mapreduce.fileoutputcommitter.algorithm.version\", \"2\") .config(\"spark.hadoop.mapreduce.fileoutputcommitter.cleanup-failures.ignored\",", "use the `fs.s3a.bucket.[bucket name].access.key` configuration setting. 
As specified here: - https://hadoop.apache.org/docs/current2/hadoop-aws/tools/hadoop-aws/index.html#Configuring_different_S3_buckets TODO: Consider", "\"io.delta.sql.DeltaSparkSessionExtension\") # .config(\"spark.sql.catalog.spark_catalog\", \"org.apache.spark.sql.delta.catalog.DeltaCatalog\") .getOrCreate() ) spark_session.sparkContext.setLogLevel(SPARK_LOG_LEVEL) @click.group() def cli(): pass @cli.command() def", "# .option(\"ignoreDeletes\", \"true\") # .option(\"ignoreChanges\", \"true\") .load(source_path) .writeStream # .format(\"console\") # debug .format(\"delta\")", "delta_truncate: bool) -> None: \"\"\" \"\"\" # TODO: Import spark_etl to Jupyter container", "\"io.delta:delta-core_2.12:1.0.0\") # .config(\"spark.sql.extensions\", \"io.delta.sql.DeltaSparkSessionExtension\") # .config(\"spark.sql.catalog.spark_catalog\", \"org.apache.spark.sql.delta.catalog.DeltaCatalog\") .getOrCreate() ) spark_session.sparkContext.setLogLevel(SPARK_LOG_LEVEL) @click.group() def cli():", "From binaries: ./pyspark --packages io.delta:delta-core_2.12:1.0.0 \\ --conf \"spark.sql.extensions=io.delta.sql.DeltaSparkSessionExtension\" \\ --conf \"spark.sql.catalog.spark_catalog=org.apache.spark.sql.delta.catalog.DeltaCatalog\" ./spark-sql --packages", "prac # TODO: Implement \"Current\" tables as delta lake tables (merge/upsert) # TODO:", "exit...\") # keep alive for Spark UI @cli.command() @click.option('--source-path', required=False, help='The Delta path')", "pass @cli.command() @click.option('--filepath', required=False, help='The input file path') @click.option('--filepath2', required=False, help='The input file", "smoke_test(): pass @cli.command() @click.option('--filepath', required=False, help='The input file path') @click.option('--filepath2', required=False, help='The input", "# .config(\"spark.sql.parquet.output.committer.class\", # \"org.apache.spark.internal.io.cloud.BindingParquetOutputCommitter\") # TODO: Parquet Optimizations .config(\"spark.hadoop.parquet.enable.summary-metadata\", \"false\") .config(\"spark.sql.parquet.mergeSchema\", \"false\") .config(\"spark.sql.parquet.filterPushdown\",", "cli(): pass @cli.command() def smoke_test(): pass @cli.command() @click.option('--filepath', required=False, help='The input file path')", "\\ --conf \"spark.sql.extensions=io.delta.sql.DeltaSparkSessionExtension\" \\ --conf \"spark.sql.catalog.spark_catalog=org.apache.spark.sql.delta.catalog.DeltaCatalog\" \"\"\" from datetime import datetime, date, timedelta", "TODO: How to write parent/child tables to db at scale? 
# See here:", "in {datetime.now() - start}\") logger.info(f\"Processing vitals: {filepath2}\") upsert_vitals(spark_session, mpmi, filepath2, output_path) logger.info(f\"Upsert process", "def stream_vitals(source_path: str, output_path: str) -> None: \"\"\" JDBC streaming is not supported", "delta_path = \"{root}/public/vitals/delta\".format(root=output_path) if delta_truncate: logger.info(f\"Clearing vitals delta: {delta_path}\") shutil.rmtree(delta_path, ignore_errors=True) # logger.info(f\"Creating", "None: \"\"\" JDBC streaming is not supported so I'm not sure how to", "start}\") logger.info(f\"Processing vitals: {filepath}\") load_vitals(spark_session, mpmi, filepath, output_path) logger.info(f\"Load process finished in {datetime.now()", "general authorization # .config(\"spark.hadoop.fs.s3a.access.key\", os.environ['P3_AWS_ACCESS_KEY']) # .config(\"spark.hadoop.fs.s3a.secret.key\", os.environ['P3_AWS_SECRET_KEY']) # AWS bucket-specific authorization #", "@click.option('--filepath2', required=False, help='The input file path') @click.option( '--output-path', required=False, help='The output file path')", "S3A Optimizations: PathOutputCommitProtocol cannot be resolved # .config(\"spark.hadoop.fs.s3a.committer.name\", \"directory\") # .config(\"spark.sql.sources.commitProtocolClass\", # \"org.apache.spark.internal.io.cloud.PathOutputCommitProtocol\")", "here: https://www.youtube.com/watch?v=aF2hRH5WZAU # monotonically_increasing_id() can also be used. start = datetime.now() delta_path =", "\"hive\") # Delta lake integration with Spark DataSourceV2 and Catalog # .config(\"spark.jars.packages\", \"io.delta:delta-core_2.12:1.0.0\")", "Parquet Optimizations .config(\"spark.hadoop.parquet.enable.summary-metadata\", \"false\") .config(\"spark.sql.parquet.mergeSchema\", \"false\") .config(\"spark.sql.parquet.filterPushdown\", \"true\") .config(\"spark.sql.hive.metastorePartitionPruning\", \"true\") # Specify different", "demographics as a Delta and keep sync'd # TODO: Partition demographics Delta by", "# TODO: Implement \"Current\" tables as delta lake tables (merge/upsert) # TODO: How", "path') @click.option('--filepath2', required=False, help='The input file path') @click.option( '--output-path', required=False, help='The output file", "from datetime import datetime, date, timedelta import os import shutil import boto3 import", "be used. start = datetime.now() delta_path = \"{root}/public/vitals/delta\".format(root=output_path) if delta_truncate: logger.info(f\"Clearing vitals delta:", ".config(\"spark.hadoop.fs.s3a.committer.name\", \"directory\") # .config(\"spark.sql.sources.commitProtocolClass\", # \"org.apache.spark.internal.io.cloud.PathOutputCommitProtocol\") # .config(\"spark.sql.parquet.output.committer.class\", # \"org.apache.spark.internal.io.cloud.BindingParquetOutputCommitter\") # TODO: Parquet", ".option(\"checkpointLocation\", f\"{output_path}/_checkpoints/stream-from-delta\") .queryName('vitals_stream') .start(output_path) .awaitTermination(timeout=60*5) # 5 min ) if __name__ == \"__main__\":", "how to use this. 
It may be that Kafka is necessary for true", "./pyspark --packages io.delta:delta-core_2.12:1.0.0 \\ --conf \"spark.sql.extensions=io.delta.sql.DeltaSparkSessionExtension\" \\ --conf \"spark.sql.catalog.spark_catalog=org.apache.spark.sql.delta.catalog.DeltaCatalog\" ./spark-sql --packages io.delta:delta-core_2.12:1.0.0 \\", ".config(\"spark.hadoop.fs.s3a.bucket.condesa.access.key\", os.environ['CO_AWS_ACCESS_KEY']) # .config(\"spark.hadoop.fs.s3a.bucket.condesa.secret.key\", os.environ['CO_AWS_SECRET_KEY']) # TODO: S3A Optimizations .config(\"spark.hadoop.mapreduce.fileoutputcommitter.algorithm.version\", \"2\") .config(\"spark.hadoop.mapreduce.fileoutputcommitter.cleanup-failures.ignored\", \"true\")", "\"org.apache.spark.sql.delta.catalog.DeltaCatalog\") .getOrCreate() ) spark_session.sparkContext.setLogLevel(SPARK_LOG_LEVEL) @click.group() def cli(): pass @cli.command() def smoke_test(): pass @cli.command()", "file path') @click.option('--filepath2', required=False, help='The input file path') @click.option( '--output-path', required=False, help='The output", "# monotonically_increasing_id() can also be used. start = datetime.now() delta_path = \"{root}/public/vitals/delta\".format(root=output_path) if", "# \"org.apache.spark.internal.io.cloud.BindingParquetOutputCommitter\") # TODO: Parquet Optimizations .config(\"spark.hadoop.parquet.enable.summary-metadata\", \"false\") .config(\"spark.sql.parquet.mergeSchema\", \"false\") .config(\"spark.sql.parquet.filterPushdown\", \"true\") .config(\"spark.sql.hive.metastorePartitionPruning\",", "bindings # TODO: RE: patient matches, load demographics as a Delta and keep", "authorization # .config(f\"fs.s3a.bucket.{os.environ['P3_BUCKET']}.access.key\", os.environ['P3_AWS_ACCESS_KEY']) # .config(f\"fs.s3a.bucket.{os.environ['P3_BUCKET']}.secret.key\", os.environ['P3_AWS_SECRET_KEY']) # .config(f\"fs.s3a.bucket.{os.environ['P3_BUCKET']}.session.token\", os.environ['P3_AWS_SESSION_TOKEN']) # Or .config(f\"spark.hadoop.fs.s3a.bucket.{os.environ['P3_BUCKET']}.access.key\",", "'--output-path', required=False, help='The output file path') @click.option( '--delta-truncate/--no-delta-truncate', default=True, help='Clear previous delta runs')", "setting. 
As specified here: - https://hadoop.apache.org/docs/current2/hadoop-aws/tools/hadoop-aws/index.html#Configuring_different_S3_buckets TODO: Consider optimizing the S3A for I/O.", "{delta_path}\") shutil.rmtree(delta_path, ignore_errors=True) # logger.info(f\"Creating vitals delta: {output_path}\") # delta_path = create_vitals_delta(spark_session, output_path)", "@click.option( '--delta-truncate/--no-delta-truncate', default=True, help='Clear previous delta runs') def acquire_vitals( filepath: str, filepath2: str,", "required=False, help='The input file path') @click.option('--filepath2', required=False, help='The input file path') @click.option( '--output-path',", "Spark UI @cli.command() @click.option('--source-path', required=False, help='The Delta path') @click.option('--output-path', required=False, help='The output file", "start}\") logger.info(f\"Caching mpmi\") mpmi = cache_mpmi(spark_session) logger.info(f\"Cache finished in {datetime.now() - start}\") #", "date, timedelta import os import shutil import boto3 import click from pyspark.sql import", ".config(\"spark.hadoop.parquet.enable.summary-metadata\", \"false\") .config(\"spark.sql.parquet.mergeSchema\", \"false\") .config(\"spark.sql.parquet.filterPushdown\", \"true\") .config(\"spark.sql.hive.metastorePartitionPruning\", \"true\") # Specify different location for", "str, delta_truncate: bool) -> None: \"\"\" \"\"\" # TODO: Import spark_etl to Jupyter", "pyspark.sql import SparkSession from spark_etl import logger, SPARK_LOG_LEVEL from spark_etl.etl import ( create_vitals_delta,", "\"spark.sql.extensions=io.delta.sql.DeltaSparkSessionExtension\" \\ --conf \"spark.sql.catalog.spark_catalog=org.apache.spark.sql.delta.catalog.DeltaCatalog\" \"\"\" from datetime import datetime, date, timedelta import os", "boto3 import click from pyspark.sql import SparkSession from spark_etl import logger, SPARK_LOG_LEVEL from", "logger, SPARK_LOG_LEVEL from spark_etl.etl import ( create_vitals_delta, cache_mpmi, save_mpmi, load_vitals, upsert_vitals, time_travel )", "also be used. start = datetime.now() delta_path = \"{root}/public/vitals/delta\".format(root=output_path) if delta_truncate: logger.info(f\"Clearing vitals", "docker run --rm -it --name test_pyspark spark-ingest:latest /bin/bash ./bin/spark-submit spark-ingest/main.py --filepath ./examples/src/main/python/pi.py -", "- start}\") input(\"Press enter to exit...\") # keep alive for Spark UI @cli.command()", "bucket-specific authorization # .config(f\"fs.s3a.bucket.{os.environ['P3_BUCKET']}.access.key\", os.environ['P3_AWS_ACCESS_KEY']) # .config(f\"fs.s3a.bucket.{os.environ['P3_BUCKET']}.secret.key\", os.environ['P3_AWS_SECRET_KEY']) # .config(f\"fs.s3a.bucket.{os.environ['P3_BUCKET']}.session.token\", os.environ['P3_AWS_SESSION_TOKEN']) # Or", ".option(\"ignoreDeletes\", \"true\") # .option(\"ignoreChanges\", \"true\") .load(source_path) .writeStream # .format(\"console\") # debug .format(\"delta\") .outputMode(\"append\")", "delta_path ) logger.info(f\"Time-travel finished in {datetime.now() - start}\") input(\"Press enter to exit...\") #", "here: - https://hadoop.apache.org/docs/current2/hadoop-aws/tools/hadoop-aws/index.html#Configuring_different_S3_buckets TODO: Consider optimizing the S3A for I/O. 
- https://spark.apache.org/docs/3.1.1/cloud-integration.html#recommended-settings-for-writing-to-object-stores \"\"\"", "spark_session, delta_path ) logger.info(f\"Time-travel finished in {datetime.now() - start}\") input(\"Press enter to exit...\")", "supported so I'm not sure how to use this. It may be that", "To configure AWS bucket-specific authorization, use the `fs.s3a.bucket.[bucket name].access.key` configuration setting. As specified", "--conf \"spark.sql.extensions=io.delta.sql.DeltaSparkSessionExtension\" \\ --conf \"spark.sql.catalog.spark_catalog=org.apache.spark.sql.delta.catalog.DeltaCatalog\" \"\"\" from datetime import datetime, date, timedelta import", "keep sync'd # TODO: Partition demographics Delta by prac # TODO: Implement \"Current\"", "# TODO: Partition demographics Delta by prac # TODO: Implement \"Current\" tables as", "with Spark DataSourceV2 and Catalog # .config(\"spark.jars.packages\", \"io.delta:delta-core_2.12:1.0.0\") # .config(\"spark.sql.extensions\", \"io.delta.sql.DeltaSparkSessionExtension\") # .config(\"spark.sql.catalog.spark_catalog\",", "JDBC streaming is not supported so I'm not sure how to use this.", "Delta and keep sync'd # TODO: Partition demographics Delta by prac # TODO:", "# .config(\"spark.jars.packages\", \"io.delta:delta-core_2.12:1.0.0\") # .config(\"spark.sql.extensions\", \"io.delta.sql.DeltaSparkSessionExtension\") # .config(\"spark.sql.catalog.spark_catalog\", \"org.apache.spark.sql.delta.catalog.DeltaCatalog\") .getOrCreate() ) spark_session.sparkContext.setLogLevel(SPARK_LOG_LEVEL) @click.group()", "@click.option('--filepath', required=False, help='The input file path') @click.option('--filepath2', required=False, help='The input file path') @click.option(", "str, filepath2: str, output_path: str, delta_truncate: bool) -> None: \"\"\" \"\"\" # TODO:", "create_vitals_delta(spark_session, output_path) # logger.info(f\"Create finished in {datetime.now() - start}\") logger.info(f\"Caching mpmi\") mpmi =", "\\ --conf \"spark.sql.catalog.spark_catalog=org.apache.spark.sql.delta.catalog.DeltaCatalog\" ./spark-sql --packages io.delta:delta-core_2.12:1.0.0 \\ --conf \"spark.sql.extensions=io.delta.sql.DeltaSparkSessionExtension\" \\ --conf \"spark.sql.catalog.spark_catalog=org.apache.spark.sql.delta.catalog.DeltaCatalog\" \"\"\"", "delta_path = create_vitals_delta(spark_session, output_path) # logger.info(f\"Create finished in {datetime.now() - start}\") logger.info(f\"Caching mpmi\")", "name].access.key` configuration setting. As specified here: - https://hadoop.apache.org/docs/current2/hadoop-aws/tools/hadoop-aws/index.html#Configuring_different_S3_buckets TODO: Consider optimizing the S3A", "\"false\") .config(\"spark.sql.parquet.mergeSchema\", \"false\") .config(\"spark.sql.parquet.filterPushdown\", \"true\") .config(\"spark.sql.hive.metastorePartitionPruning\", \"true\") # Specify different location for Hive", ".config(\"spark.sql.catalog.spark_catalog\", \"org.apache.spark.sql.delta.catalog.DeltaCatalog\") .getOrCreate() ) spark_session.sparkContext.setLogLevel(SPARK_LOG_LEVEL) @click.group() def cli(): pass @cli.command() def smoke_test(): pass", "logger.info(f\"Time-travel finished in {datetime.now() - start}\") input(\"Press enter to exit...\") # keep alive", "AWS bucket-specific authorization, use the `fs.s3a.bucket.[bucket name].access.key` configuration setting. 
As specified here: -", "be resolved # .config(\"spark.hadoop.fs.s3a.committer.name\", \"directory\") # .config(\"spark.sql.sources.commitProtocolClass\", # \"org.apache.spark.internal.io.cloud.PathOutputCommitProtocol\") # .config(\"spark.sql.parquet.output.committer.class\", # \"org.apache.spark.internal.io.cloud.BindingParquetOutputCommitter\")", "bindings: docker run --rm -it --name test_pyspark spark-ingest:latest /bin/bash ./bin/spark-submit spark-ingest/main.py --filepath ./examples/src/main/python/pi.py", "and keep sync'd # TODO: Partition demographics Delta by prac # TODO: Implement", "TODO: S3A Optimizations: PathOutputCommitProtocol cannot be resolved # .config(\"spark.hadoop.fs.s3a.committer.name\", \"directory\") # .config(\"spark.sql.sources.commitProtocolClass\", #", "It may be that Kafka is necessary for true streaming. \"\"\" logger.info(f\"Stream (append", "# keep alive for Spark UI @cli.command() @click.option('--source-path', required=False, help='The Delta path') @click.option('--output-path',", "authorization, use the `fs.s3a.bucket.[bucket name].access.key` configuration setting. As specified here: - https://hadoop.apache.org/docs/current2/hadoop-aws/tools/hadoop-aws/index.html#Configuring_different_S3_buckets TODO:", "# AWS general authorization # .config(\"spark.hadoop.fs.s3a.access.key\", os.environ['P3_AWS_ACCESS_KEY']) # .config(\"spark.hadoop.fs.s3a.secret.key\", os.environ['P3_AWS_SECRET_KEY']) # AWS bucket-specific", "help='The input file path') @click.option('--filepath2', required=False, help='The input file path') @click.option( '--output-path', required=False,", "\"\"\" Usage: - From Spark 3.1.1 base container with Python bindings: docker run", "Jupyter container # TODO: Build Spark 3.2 container with Python bindings # TODO:", "import ( create_vitals_delta, cache_mpmi, save_mpmi, load_vitals, upsert_vitals, time_travel ) from spark_etl.secret import get_secret", "{source_path}\") ( spark_session .readStream .format(\"delta\") # .option(\"ignoreDeletes\", \"true\") # .option(\"ignoreChanges\", \"true\") .load(source_path) .writeStream", "logger.info(f\"Stream (append mode) to delta on: {source_path}\") ( spark_session .readStream .format(\"delta\") # .option(\"ignoreDeletes\",", "import get_secret \"\"\" To configure AWS bucket-specific authorization, use the `fs.s3a.bucket.[bucket name].access.key` configuration", "TODO: S3A Optimizations .config(\"spark.hadoop.mapreduce.fileoutputcommitter.algorithm.version\", \"2\") .config(\"spark.hadoop.mapreduce.fileoutputcommitter.cleanup-failures.ignored\", \"true\") # TODO: S3A Optimizations: PathOutputCommitProtocol cannot", ".format(\"delta\") # .option(\"ignoreDeletes\", \"true\") # .option(\"ignoreChanges\", \"true\") .load(source_path) .writeStream # .format(\"console\") # debug", "for Spark UI @cli.command() @click.option('--source-path', required=False, help='The Delta path') @click.option('--output-path', required=False, help='The output", "# TODO: Import spark_etl to Jupyter container # TODO: Build Spark 3.2 container", "alive for Spark UI @cli.command() @click.option('--source-path', required=False, help='The Delta path') @click.option('--output-path', required=False, help='The", "in {datetime.now() - start}\") # logger.info(f\"Persisting mpmi\") # mpmi_path = save_mpmi(spark_session, output_path) #", "- https://spark.apache.org/docs/3.1.1/cloud-integration.html#recommended-settings-for-writing-to-object-stores \"\"\" spark_session = ( SparkSession .builder .appName(\"stage_data\") # AWS general 
authorization", "Delta by prac # TODO: Implement \"Current\" tables as delta lake tables (merge/upsert)", "start}\") input(\"Press enter to exit...\") # keep alive for Spark UI @cli.command() @click.option('--source-path',", "run --rm -it --name test_pyspark spark-ingest:latest /bin/bash ./bin/spark-submit spark-ingest/main.py --filepath ./examples/src/main/python/pi.py - From", "/bin/bash ./bin/spark-submit spark-ingest/main.py --filepath ./examples/src/main/python/pi.py - From binaries: ./pyspark --packages io.delta:delta-core_2.12:1.0.0 \\ --conf", "required=False, help='The output file path') @click.option( '--delta-truncate/--no-delta-truncate', default=True, help='Clear previous delta runs') def", "./bin/spark-submit spark-ingest/main.py --filepath ./examples/src/main/python/pi.py - From binaries: ./pyspark --packages io.delta:delta-core_2.12:1.0.0 \\ --conf \"spark.sql.extensions=io.delta.sql.DeltaSparkSessionExtension\"", "Delta lake integration with Spark DataSourceV2 and Catalog # .config(\"spark.jars.packages\", \"io.delta:delta-core_2.12:1.0.0\") # .config(\"spark.sql.extensions\",", "logger.info(f\"Time-travel vitals: {delta_path}\") time_travel( spark_session, delta_path ) logger.info(f\"Time-travel finished in {datetime.now() - start}\")", "help='Clear previous delta runs') def acquire_vitals( filepath: str, filepath2: str, output_path: str, delta_truncate:", "timedelta import os import shutil import boto3 import click from pyspark.sql import SparkSession", "save_mpmi(spark_session, output_path) # logger.info(f\"Save finished in {datetime.now() - start}\") logger.info(f\"Processing vitals: {filepath}\") load_vitals(spark_session,", "delta: {delta_path}\") shutil.rmtree(delta_path, ignore_errors=True) # logger.info(f\"Creating vitals delta: {output_path}\") # delta_path = create_vitals_delta(spark_session,", "file path') @click.option( '--delta-truncate/--no-delta-truncate', default=True, help='Clear previous delta runs') def acquire_vitals( filepath: str,", ".config(\"spark.jars.packages\", \"io.delta:delta-core_2.12:1.0.0\") # .config(\"spark.sql.extensions\", \"io.delta.sql.DeltaSparkSessionExtension\") # .config(\"spark.sql.catalog.spark_catalog\", \"org.apache.spark.sql.delta.catalog.DeltaCatalog\") .getOrCreate() ) spark_session.sparkContext.setLogLevel(SPARK_LOG_LEVEL) @click.group() def", "\"directory\") # .config(\"spark.sql.sources.commitProtocolClass\", # \"org.apache.spark.internal.io.cloud.PathOutputCommitProtocol\") # .config(\"spark.sql.parquet.output.committer.class\", # \"org.apache.spark.internal.io.cloud.BindingParquetOutputCommitter\") # TODO: Parquet Optimizations", ".config(\"spark.sql.hive.metastorePartitionPruning\", \"true\") # Specify different location for Hive metastore # .config(\"spark.sql.warehouse.dir\", \"/opt/spark/hive_warehouse\") #", "TODO: Consider optimizing the S3A for I/O. 
- https://spark.apache.org/docs/3.1.1/cloud-integration.html#recommended-settings-for-writing-to-object-stores \"\"\" spark_session = (", "# TODO: Build Spark 3.2 container with Python bindings # TODO: RE: patient", "by prac # TODO: Implement \"Current\" tables as delta lake tables (merge/upsert) #", ".outputMode(\"append\") .option(\"checkpointLocation\", f\"{output_path}/_checkpoints/stream-from-delta\") .queryName('vitals_stream') .start(output_path) .awaitTermination(timeout=60*5) # 5 min ) if __name__ ==", "tables (merge/upsert) # TODO: How to write parent/child tables to db at scale?", "# TODO: RE: patient matches, load demographics as a Delta and keep sync'd", "# .config(\"spark.sql.sources.commitProtocolClass\", # \"org.apache.spark.internal.io.cloud.PathOutputCommitProtocol\") # .config(\"spark.sql.parquet.output.committer.class\", # \"org.apache.spark.internal.io.cloud.BindingParquetOutputCommitter\") # TODO: Parquet Optimizations .config(\"spark.hadoop.parquet.enable.summary-metadata\",", "filepath2: str, output_path: str, delta_truncate: bool) -> None: \"\"\" \"\"\" # TODO: Import", "# Specify different location for Hive metastore # .config(\"spark.sql.warehouse.dir\", \"/opt/spark/hive_warehouse\") # .config(\"spark.sql.catalogImplementation\", \"hive\")", "https://www.youtube.com/watch?v=aF2hRH5WZAU # monotonically_increasing_id() can also be used. start = datetime.now() delta_path = \"{root}/public/vitals/delta\".format(root=output_path)", "\"\"\" logger.info(f\"Stream (append mode) to delta on: {source_path}\") ( spark_session .readStream .format(\"delta\") #", "= cache_mpmi(spark_session) logger.info(f\"Cache finished in {datetime.now() - start}\") # logger.info(f\"Persisting mpmi\") # mpmi_path", "\"{root}/public/vitals/delta\".format(root=output_path) if delta_truncate: logger.info(f\"Clearing vitals delta: {delta_path}\") shutil.rmtree(delta_path, ignore_errors=True) # logger.info(f\"Creating vitals delta:", "spark_etl to Jupyter container # TODO: Build Spark 3.2 container with Python bindings", "location for Hive metastore # .config(\"spark.sql.warehouse.dir\", \"/opt/spark/hive_warehouse\") # .config(\"spark.sql.catalogImplementation\", \"hive\") # Delta lake", "vitals: {filepath2}\") upsert_vitals(spark_session, mpmi, filepath2, output_path) logger.info(f\"Upsert process finished in {datetime.now() - start}\")", "Python bindings # TODO: RE: patient matches, load demographics as a Delta and", "\"spark.sql.catalog.spark_catalog=org.apache.spark.sql.delta.catalog.DeltaCatalog\" ./spark-sql --packages io.delta:delta-core_2.12:1.0.0 \\ --conf \"spark.sql.extensions=io.delta.sql.DeltaSparkSessionExtension\" \\ --conf \"spark.sql.catalog.spark_catalog=org.apache.spark.sql.delta.catalog.DeltaCatalog\" \"\"\" from datetime", "start = datetime.now() delta_path = \"{root}/public/vitals/delta\".format(root=output_path) if delta_truncate: logger.info(f\"Clearing vitals delta: {delta_path}\") shutil.rmtree(delta_path,", "upsert_vitals, time_travel ) from spark_etl.secret import get_secret \"\"\" To configure AWS bucket-specific authorization,", "spark_etl.etl import ( create_vitals_delta, cache_mpmi, save_mpmi, load_vitals, upsert_vitals, time_travel ) from spark_etl.secret import", "Catalog # .config(\"spark.jars.packages\", \"io.delta:delta-core_2.12:1.0.0\") # .config(\"spark.sql.extensions\", \"io.delta.sql.DeltaSparkSessionExtension\") # .config(\"spark.sql.catalog.spark_catalog\", \"org.apache.spark.sql.delta.catalog.DeltaCatalog\") .getOrCreate() ) 
spark_session.sparkContext.setLogLevel(SPARK_LOG_LEVEL)", "vitals delta: {output_path}\") # delta_path = create_vitals_delta(spark_session, output_path) # logger.info(f\"Create finished in {datetime.now()", "required=False, help='The input file path') @click.option( '--output-path', required=False, help='The output file path') @click.option(", "acquire_vitals( filepath: str, filepath2: str, output_path: str, delta_truncate: bool) -> None: \"\"\" \"\"\"", "if delta_truncate: logger.info(f\"Clearing vitals delta: {delta_path}\") shutil.rmtree(delta_path, ignore_errors=True) # logger.info(f\"Creating vitals delta: {output_path}\")", "# .config(\"spark.hadoop.fs.s3a.secret.key\", os.environ['P3_AWS_SECRET_KEY']) # AWS bucket-specific authorization # .config(f\"fs.s3a.bucket.{os.environ['P3_BUCKET']}.access.key\", os.environ['P3_AWS_ACCESS_KEY']) # .config(f\"fs.s3a.bucket.{os.environ['P3_BUCKET']}.secret.key\", os.environ['P3_AWS_SECRET_KEY'])", "logger.info(f\"Save finished in {datetime.now() - start}\") logger.info(f\"Processing vitals: {filepath}\") load_vitals(spark_session, mpmi, filepath, output_path)", "\"org.apache.spark.internal.io.cloud.BindingParquetOutputCommitter\") # TODO: Parquet Optimizations .config(\"spark.hadoop.parquet.enable.summary-metadata\", \"false\") .config(\"spark.sql.parquet.mergeSchema\", \"false\") .config(\"spark.sql.parquet.filterPushdown\", \"true\") .config(\"spark.sql.hive.metastorePartitionPruning\", \"true\")", "to db at scale? # See here: https://www.youtube.com/watch?v=aF2hRH5WZAU # monotonically_increasing_id() can also be" ]
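The `time_travel` helper imported from spark_etl.etl is only referenced in this file, not defined here. As a minimal sketch of what Delta Lake time travel typically looks like with the Delta-enabled SparkSession built above, the `versionAsOf` read option replays an earlier version of the same table (the function name and the row-count comparison are illustrative assumptions, not the original implementation):

from pyspark.sql import SparkSession

def time_travel_sketch(spark_session: SparkSession, delta_path: str) -> None:
    # Latest state of the Delta table
    current = spark_session.read.format("delta").load(delta_path)
    # Same table as of its first commit (assumes version 0 has not been vacuumed);
    # `timestampAsOf` works the same way with a timestamp instead of a version
    original = (
        spark_session.read.format("delta")
        .option("versionAsOf", 0)
        .load(delta_path)
    )
    print(f"rows now: {current.count()}, rows at version 0: {original.count()}")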
[ "from rest_framework.response import Response from rest_framework import status from runner.tasks import start_flow_task class", "rest_framework import status from runner.tasks import start_flow_task class RunnerStartFlow(APIView): def post(self, request): flow_uuid", "flow_uuid or not flow_repo_url: return Response('Missing parameters', status=status.HTTP_400_BAD_REQUEST) start_flow_task.delay(flow_uuid, flow_repo_url) return Response('Received', status=status.HTTP_202_ACCEPTED)", "import status from runner.tasks import start_flow_task class RunnerStartFlow(APIView): def post(self, request): flow_uuid =", "if not flow_uuid or not flow_repo_url: return Response('Missing parameters', status=status.HTTP_400_BAD_REQUEST) start_flow_task.delay(flow_uuid, flow_repo_url) return", "status from runner.tasks import start_flow_task class RunnerStartFlow(APIView): def post(self, request): flow_uuid = request.POST.get('flow_uuid',", "from rest_framework import status from runner.tasks import start_flow_task class RunnerStartFlow(APIView): def post(self, request):", "Response from rest_framework import status from runner.tasks import start_flow_task class RunnerStartFlow(APIView): def post(self,", "import APIView from rest_framework.response import Response from rest_framework import status from runner.tasks import", "def post(self, request): flow_uuid = request.POST.get('flow_uuid', None) flow_repo_url = request.POST.get('flow_repo_url', None) if not", "APIView from rest_framework.response import Response from rest_framework import status from runner.tasks import start_flow_task", "import start_flow_task class RunnerStartFlow(APIView): def post(self, request): flow_uuid = request.POST.get('flow_uuid', None) flow_repo_url =", "RunnerStartFlow(APIView): def post(self, request): flow_uuid = request.POST.get('flow_uuid', None) flow_repo_url = request.POST.get('flow_repo_url', None) if", "start_flow_task class RunnerStartFlow(APIView): def post(self, request): flow_uuid = request.POST.get('flow_uuid', None) flow_repo_url = request.POST.get('flow_repo_url',", "post(self, request): flow_uuid = request.POST.get('flow_uuid', None) flow_repo_url = request.POST.get('flow_repo_url', None) if not flow_uuid", "request.POST.get('flow_uuid', None) flow_repo_url = request.POST.get('flow_repo_url', None) if not flow_uuid or not flow_repo_url: return", "flow_repo_url = request.POST.get('flow_repo_url', None) if not flow_uuid or not flow_repo_url: return Response('Missing parameters',", "class RunnerStartFlow(APIView): def post(self, request): flow_uuid = request.POST.get('flow_uuid', None) flow_repo_url = request.POST.get('flow_repo_url', None)", "flow_uuid = request.POST.get('flow_uuid', None) flow_repo_url = request.POST.get('flow_repo_url', None) if not flow_uuid or not", "rest_framework.response import Response from rest_framework import status from runner.tasks import start_flow_task class RunnerStartFlow(APIView):", "runner.tasks import start_flow_task class RunnerStartFlow(APIView): def post(self, request): flow_uuid = request.POST.get('flow_uuid', None) flow_repo_url", "rest_framework.views import APIView from rest_framework.response import Response from rest_framework import status from runner.tasks", "= request.POST.get('flow_uuid', None) flow_repo_url = request.POST.get('flow_repo_url', None) if not flow_uuid or not flow_repo_url:", "import Response from rest_framework import status from runner.tasks import start_flow_task class RunnerStartFlow(APIView): def", "from rest_framework.views import APIView from 
rest_framework.response import Response from rest_framework import status from", "request.POST.get('flow_repo_url', None) if not flow_uuid or not flow_repo_url: return Response('Missing parameters', status=status.HTTP_400_BAD_REQUEST) start_flow_task.delay(flow_uuid,", "= request.POST.get('flow_repo_url', None) if not flow_uuid or not flow_repo_url: return Response('Missing parameters', status=status.HTTP_400_BAD_REQUEST)", "from runner.tasks import start_flow_task class RunnerStartFlow(APIView): def post(self, request): flow_uuid = request.POST.get('flow_uuid', None)", "None) flow_repo_url = request.POST.get('flow_repo_url', None) if not flow_uuid or not flow_repo_url: return Response('Missing", "request): flow_uuid = request.POST.get('flow_uuid', None) flow_repo_url = request.POST.get('flow_repo_url', None) if not flow_uuid or", "not flow_uuid or not flow_repo_url: return Response('Missing parameters', status=status.HTTP_400_BAD_REQUEST) start_flow_task.delay(flow_uuid, flow_repo_url) return Response('Received',", "None) if not flow_uuid or not flow_repo_url: return Response('Missing parameters', status=status.HTTP_400_BAD_REQUEST) start_flow_task.delay(flow_uuid, flow_repo_url)" ]
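The view above only enqueues work with `start_flow_task.delay(...)` and answers 202 immediately; the task itself lives in runner.tasks, which is not part of this file. A minimal sketch of a compatible Celery task, assuming a standard Celery setup (the body is a hypothetical placeholder, not the project's actual task):

from celery import shared_task

@shared_task
def start_flow_task(flow_uuid, flow_repo_url):
    # Runs on a worker, outside the request/response cycle, so the HTTP
    # client is not blocked while the flow is fetched and executed.
    print(f"starting flow {flow_uuid} from {flow_repo_url}")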
[ "reader, writer, initiator): # pylint: disable=too-many-arguments self.conn_ip = ip self.conn_port = port self.reader", "self._next_id = 0 if initiator else 1 self.initiator = initiator async def write(self,", "else 1 self.initiator = initiator async def write(self, data): self.writer.write(data) self.writer.write(\"\\n\".encode()) await self.writer.drain()", "disable=too-many-arguments self.conn_ip = ip self.conn_port = port self.reader = reader self.writer = writer", "self.writer.write(data) self.writer.write(\"\\n\".encode()) await self.writer.drain() async def read(self): line = await self.reader.readline() adjusted_line =", "way to remove \\n without going back and forth with # encoding and", "IRawConnection class RawConnection(IRawConnection): def __init__(self, ip, port, reader, writer, initiator): # pylint: disable=too-many-arguments", "initiator async def write(self, data): self.writer.write(data) self.writer.write(\"\\n\".encode()) await self.writer.drain() async def read(self): line", "adjusted_line = line.decode().rstrip('\\n') # TODO: figure out a way to remove \\n without", "def __init__(self, ip, port, reader, writer, initiator): # pylint: disable=too-many-arguments self.conn_ip = ip", "forth with # encoding and decoding return adjusted_line.encode() def close(self): self.writer.close() def next_stream_id(self):", "def close(self): self.writer.close() def next_stream_id(self): \"\"\" Get next available stream id :return: next", "from .raw_connection_interface import IRawConnection class RawConnection(IRawConnection): def __init__(self, ip, port, reader, writer, initiator):", "back and forth with # encoding and decoding return adjusted_line.encode() def close(self): self.writer.close()", ".raw_connection_interface import IRawConnection class RawConnection(IRawConnection): def __init__(self, ip, port, reader, writer, initiator): #", "= line.decode().rstrip('\\n') # TODO: figure out a way to remove \\n without going", "initiator else 1 self.initiator = initiator async def write(self, data): self.writer.write(data) self.writer.write(\"\\n\".encode()) await", "adjusted_line.encode() def close(self): self.writer.close() def next_stream_id(self): \"\"\" Get next available stream id :return:", "def write(self, data): self.writer.write(data) self.writer.write(\"\\n\".encode()) await self.writer.drain() async def read(self): line = await", "available stream id :return: next available stream id for the connection \"\"\" next_id", "self.writer.write(\"\\n\".encode()) await self.writer.drain() async def read(self): line = await self.reader.readline() adjusted_line = line.decode().rstrip('\\n')", "read(self): line = await self.reader.readline() adjusted_line = line.decode().rstrip('\\n') # TODO: figure out a", "figure out a way to remove \\n without going back and forth with", "__init__(self, ip, port, reader, writer, initiator): # pylint: disable=too-many-arguments self.conn_ip = ip self.conn_port", "decoding return adjusted_line.encode() def close(self): self.writer.close() def next_stream_id(self): \"\"\" Get next available stream", "port, reader, writer, initiator): # pylint: disable=too-many-arguments self.conn_ip = ip self.conn_port = port", "\\n without going back and forth with # encoding and decoding return adjusted_line.encode()", "writer, initiator): # pylint: disable=too-many-arguments self.conn_ip = ip self.conn_port = port self.reader =", "writer self._next_id = 0 if initiator else 1 self.initiator = initiator async def", "class RawConnection(IRawConnection): def 
__init__(self, ip, port, reader, writer, initiator): # pylint: disable=too-many-arguments self.conn_ip", "data): self.writer.write(data) self.writer.write(\"\\n\".encode()) await self.writer.drain() async def read(self): line = await self.reader.readline() adjusted_line", "self.writer.drain() async def read(self): line = await self.reader.readline() adjusted_line = line.decode().rstrip('\\n') # TODO:", "encoding and decoding return adjusted_line.encode() def close(self): self.writer.close() def next_stream_id(self): \"\"\" Get next", "self.writer = writer self._next_id = 0 if initiator else 1 self.initiator = initiator", "line.decode().rstrip('\\n') # TODO: figure out a way to remove \\n without going back", "available stream id for the connection \"\"\" next_id = self._next_id self._next_id += 2", "# TODO: figure out a way to remove \\n without going back and", "RawConnection(IRawConnection): def __init__(self, ip, port, reader, writer, initiator): # pylint: disable=too-many-arguments self.conn_ip =", "initiator): # pylint: disable=too-many-arguments self.conn_ip = ip self.conn_port = port self.reader = reader", "id for the connection \"\"\" next_id = self._next_id self._next_id += 2 return next_id", "1 self.initiator = initiator async def write(self, data): self.writer.write(data) self.writer.write(\"\\n\".encode()) await self.writer.drain() async", "import IRawConnection class RawConnection(IRawConnection): def __init__(self, ip, port, reader, writer, initiator): # pylint:", "pylint: disable=too-many-arguments self.conn_ip = ip self.conn_port = port self.reader = reader self.writer =", "= ip self.conn_port = port self.reader = reader self.writer = writer self._next_id =", "self.reader = reader self.writer = writer self._next_id = 0 if initiator else 1", "= port self.reader = reader self.writer = writer self._next_id = 0 if initiator", "line = await self.reader.readline() adjusted_line = line.decode().rstrip('\\n') # TODO: figure out a way", "next_stream_id(self): \"\"\" Get next available stream id :return: next available stream id for", "and forth with # encoding and decoding return adjusted_line.encode() def close(self): self.writer.close() def", "await self.reader.readline() adjusted_line = line.decode().rstrip('\\n') # TODO: figure out a way to remove", "if initiator else 1 self.initiator = initiator async def write(self, data): self.writer.write(data) self.writer.write(\"\\n\".encode())", "= initiator async def write(self, data): self.writer.write(data) self.writer.write(\"\\n\".encode()) await self.writer.drain() async def read(self):", "self.conn_port = port self.reader = reader self.writer = writer self._next_id = 0 if", "async def read(self): line = await self.reader.readline() adjusted_line = line.decode().rstrip('\\n') # TODO: figure", "= reader self.writer = writer self._next_id = 0 if initiator else 1 self.initiator", "self.conn_ip = ip self.conn_port = port self.reader = reader self.writer = writer self._next_id", "= await self.reader.readline() adjusted_line = line.decode().rstrip('\\n') # TODO: figure out a way to", "stream id :return: next available stream id for the connection \"\"\" next_id =", "await self.writer.drain() async def read(self): line = await self.reader.readline() adjusted_line = line.decode().rstrip('\\n') #", ":return: next available stream id for the connection \"\"\" next_id = self._next_id self._next_id", "and decoding return adjusted_line.encode() def close(self): self.writer.close() def next_stream_id(self): \"\"\" Get next available", "= 0 if 
initiator else 1 self.initiator = initiator async def write(self, data):", "return adjusted_line.encode() def close(self): self.writer.close() def next_stream_id(self): \"\"\" Get next available stream id", "reader self.writer = writer self._next_id = 0 if initiator else 1 self.initiator =", "def next_stream_id(self): \"\"\" Get next available stream id :return: next available stream id", "remove \\n without going back and forth with # encoding and decoding return", "ip, port, reader, writer, initiator): # pylint: disable=too-many-arguments self.conn_ip = ip self.conn_port =", "to remove \\n without going back and forth with # encoding and decoding", "self.writer.close() def next_stream_id(self): \"\"\" Get next available stream id :return: next available stream", "0 if initiator else 1 self.initiator = initiator async def write(self, data): self.writer.write(data)", "self.reader.readline() adjusted_line = line.decode().rstrip('\\n') # TODO: figure out a way to remove \\n", "going back and forth with # encoding and decoding return adjusted_line.encode() def close(self):", "close(self): self.writer.close() def next_stream_id(self): \"\"\" Get next available stream id :return: next available", "ip self.conn_port = port self.reader = reader self.writer = writer self._next_id = 0", "write(self, data): self.writer.write(data) self.writer.write(\"\\n\".encode()) await self.writer.drain() async def read(self): line = await self.reader.readline()", "next available stream id for the connection \"\"\" next_id = self._next_id self._next_id +=", "a way to remove \\n without going back and forth with # encoding", "id :return: next available stream id for the connection \"\"\" next_id = self._next_id", "\"\"\" Get next available stream id :return: next available stream id for the", "next available stream id :return: next available stream id for the connection \"\"\"", "Get next available stream id :return: next available stream id for the connection", "out a way to remove \\n without going back and forth with #", "stream id for the connection \"\"\" next_id = self._next_id self._next_id += 2 return", "async def write(self, data): self.writer.write(data) self.writer.write(\"\\n\".encode()) await self.writer.drain() async def read(self): line =", "self.initiator = initiator async def write(self, data): self.writer.write(data) self.writer.write(\"\\n\".encode()) await self.writer.drain() async def", "= writer self._next_id = 0 if initiator else 1 self.initiator = initiator async", "TODO: figure out a way to remove \\n without going back and forth", "# encoding and decoding return adjusted_line.encode() def close(self): self.writer.close() def next_stream_id(self): \"\"\" Get", "# pylint: disable=too-many-arguments self.conn_ip = ip self.conn_port = port self.reader = reader self.writer", "without going back and forth with # encoding and decoding return adjusted_line.encode() def", "with # encoding and decoding return adjusted_line.encode() def close(self): self.writer.close() def next_stream_id(self): \"\"\"", "port self.reader = reader self.writer = writer self._next_id = 0 if initiator else", "def read(self): line = await self.reader.readline() adjusted_line = line.decode().rstrip('\\n') # TODO: figure out" ]
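Worth noting how `next_stream_id` avoids collisions: seeding `_next_id` with 0 for the initiator and 1 for the responder, then stepping by 2, gives one side the even ids and the other the odd ids, so both ends of a connection can allocate stream ids independently without coordinating. A self-contained check of that parity scheme (the helper class below is illustrative, not part of the library):

class _IdAllocator:
    def __init__(self, initiator: bool):
        self._next_id = 0 if initiator else 1

    def next_stream_id(self) -> int:
        next_id = self._next_id
        self._next_id += 2
        return next_id

initiator, responder = _IdAllocator(True), _IdAllocator(False)
assert [initiator.next_stream_id() for _ in range(3)] == [0, 2, 4]
assert [responder.next_stream_id() for _ in range(3)] == [1, 3, 5]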
<reponame>porcelainruler/Image-Search-Engine
import os
from flask import Flask, render_template, request, jsonify
import numpy as np
import cv2
from Searcher import Searcher
from ColorDescriptor import ColorDescriptor

app = Flask(__name__)
INDEX = os.path.join(os.path.dirname(__file__), 'index.csv')
cd = ColorDescriptor()

@app.route('/', methods=['GET', 'POST'])
def search():
    if request.method == 'POST':
        # Decode the uploaded image bytes straight into an OpenCV BGR array.
        # np.frombuffer replaces the deprecated np.fromstring used originally.
        query = cv2.imdecode(np.frombuffer(request.files['img'].read(), np.uint8),
                             cv2.IMREAD_COLOR)
        features = cd.describe(query)
        searcher = Searcher(INDEX)
        results = searcher.search(features, 10)
        res = []
        for (score, resultID) in results:
            res.append({"Image": str(resultID), "Score": str(score)})
        context = {"images": res}
        print(context)
        return render_template('index.html', context=context)
    # Plain GET: render the page with an empty result set.
    res = []
    context = {"images": res}
    return render_template('index.html', context=context)

if __name__ == '__main__':
    app.run('127.0.0.1', debug=True)
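# --- Example (not part of the original file) ---
# A hedged sketch of exercising the search route from a client. The URL
# assumes the dev server above (app.run on 127.0.0.1, Flask's default port
# 5000), and 'query.png' is a hypothetical local file; the 'img' field name
# matches request.files['img'] in the route.
import requests

with open('query.png', 'rb') as fh:
    resp = requests.post('http://127.0.0.1:5000/', files={'img': fh})
print(resp.status_code)  # 200 on success; the body is the rendered index.html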
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***

import json
import warnings

import pulumi
import pulumi.runtime
from typing import Union

from .. import utilities, tables

class GetDefaultServiceAccountResult:
    """
    A collection of values returned by getDefaultServiceAccount.
    """
    def __init__(__self__, display_name=None, email=None, name=None, project=None, unique_id=None, id=None):
        if display_name and not isinstance(display_name, str):
            raise TypeError("Expected argument 'display_name' to be a str")
        __self__.display_name = display_name
        """
        The display name for the service account.
        """
        if email and not isinstance(email, str):
            raise TypeError("Expected argument 'email' to be a str")
        __self__.email = email
        """
        Email address of the default service account used by VMs running in this project
        """
        if name and not isinstance(name, str):
            raise TypeError("Expected argument 'name' to be a str")
        __self__.name = name
        """
        The fully-qualified name of the service account.
        """
        if project and not isinstance(project, str):
            raise TypeError("Expected argument 'project' to be a str")
        __self__.project = project
        if unique_id and not isinstance(unique_id, str):
            raise TypeError("Expected argument 'unique_id' to be a str")
        __self__.unique_id = unique_id
        """
        The unique id of the service account.
        """
        if id and not isinstance(id, str):
            raise TypeError("Expected argument 'id' to be a str")
        __self__.id = id
        """
        id is the provider-assigned unique ID for this managed resource.
        """

class AwaitableGetDefaultServiceAccountResult(GetDefaultServiceAccountResult):
    # pylint: disable=using-constant-test
    def __await__(self):
        if False:
            yield self
        return GetDefaultServiceAccountResult(
            display_name=self.display_name,
            email=self.email,
            name=self.name,
            project=self.project,
            unique_id=self.unique_id,
            id=self.id)

def get_default_service_account(project=None, opts=None):
    """
    Use this data source to retrieve the default service account for this project.

    :param str project: The project ID. If it is not provided, the provider project is used.

    > This content is derived from https://github.com/terraform-providers/terraform-provider-google/blob/master/website/docs/d/compute_default_service_account.html.markdown.
    """
    __args__ = dict()
    __args__['project'] = project
    if opts is None:
        opts = pulumi.InvokeOptions()
    if opts.version is None:
        opts.version = utilities.get_version()
    __ret__ = pulumi.runtime.invoke('gcp:compute/getDefaultServiceAccount:getDefaultServiceAccount', __args__, opts=opts).value

    return AwaitableGetDefaultServiceAccountResult(
        display_name=__ret__.get('displayName'),
        email=__ret__.get('email'),
        name=__ret__.get('name'),
        project=__ret__.get('project'),
        unique_id=__ret__.get('uniqueId'),
        id=__ret__.get('id'))
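# --- Example (not part of the original file) ---
# A short sketch of consuming this data source from a Pulumi program. The
# top-level module path (pulumi_gcp.compute) is how the generated SDK is
# normally packaged; treat it as an assumption here.
import pulumi
from pulumi_gcp import compute

default = compute.get_default_service_account()
# Exports the <project-number>-compute@developer.gserviceaccount.com address.
pulumi.export('default_sa_email', default.email)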
<reponame>sergejsm/pythonCrashCourseProjects
import matplotlib.pyplot as plt

x_values = list(range(1, 1001))
y_values = [x**2 for x in x_values]

plt.title("Square Numbers", fontsize=24)
plt.xlabel("Value", fontsize=14)
plt.ylabel("Square of Value", fontsize=14)

# Set size of tick labels.
plt.tick_params(axis='both', which='major', labelsize=14)

# Axis ranges leave a margin around the data: x up to 1100, y up to 1,100,000.
plt.axis([0, 1100, 0, 1100000])

# Color each point by its y-value using the Reds colormap.
plt.scatter(x_values, y_values, c=y_values, cmap=plt.cm.Reds, edgecolor='none', s=40)

plt.savefig('squares_plot.png', bbox_inches='tight')
plt.show()
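# --- Example (not part of the original file) ---
# The same figure written against matplotlib's object-oriented API, which the
# library documentation recommends over the implicit pyplot state machine.
# A sketch only; the output filename is made up to avoid clobbering the
# original plot.
import matplotlib.pyplot as plt

x_values = range(1, 1001)
y_values = [x**2 for x in x_values]

fig, ax = plt.subplots()
ax.scatter(x_values, y_values, c=y_values, cmap=plt.cm.Reds, edgecolor='none', s=40)
ax.set_title("Square Numbers", fontsize=24)
ax.set_xlabel("Value", fontsize=14)
ax.set_ylabel("Square of Value", fontsize=14)
ax.tick_params(axis='both', which='major', labelsize=14)
ax.axis([0, 1100, 0, 1100000])
fig.savefig('squares_plot_oo.png', bbox_inches='tight')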
import math

def calculate_power_luminance(ambient_area):
    # area in m^2
    # Rule of thumb: 100 VA for the first 6 m^2, plus 60 VA for each whole
    # additional 4 m^2.
    potency = 0
    if ambient_area <= 6:
        print('Lighting Potency: ' + str(100) + ' (VA)')
        potency = 100
    else:
        print('extra potency: ' + str(ambient_area - 6))
        potency = 100 + 60 * int((ambient_area - 6) / 4)
        print('Lighting Potency: ' + str(potency) + ' (VA)')
    print('')
    return potency

"""TUG (general-purpose outlet) sizing"""
def calculate_number_and_power_of_tugs(ambient_name, perimeter=0):
    # perimeter in m
    class1 = ['banheiro']
    class2 = ['cozinha', 'copa', 'copa-cozinha', 'area de servico', 'lavanderia']
    class3 = ['varanda']
    class4 = ['sala', 'quarto', 'dormitorio', 'escritorio']
    number_tugs = 0
    power_tugs = 0
    if ambient_name in class1:
        number_tugs = 1
        power_tugs = number_tugs * 600
    elif ambient_name in class2:
        # One outlet per 3.5 m of perimeter; 600 VA each for the first three,
        # 100 VA for each outlet beyond that.
        number_tugs = math.ceil(perimeter / 3.5)
        if number_tugs <= 3:
            power_tugs = number_tugs * 600
        else:
            power_tugs = 3 * 600 + 100 * (number_tugs - 3)
    elif ambient_name in class3:
        number_tugs = 1
        power_tugs = number_tugs * 100
    elif ambient_name in class4:
        number_tugs = math.ceil(perimeter / 5)
        power_tugs = number_tugs * 100
    else:
        print('No matches found')
        print('warning: ambient is calculated by area, see 54.10 in the norm\nEntry with area: ')
        area = float(input())
        if area <= 2.55:
            number_tugs = 1
            power_tugs = number_tugs * 100
        # This branch originally returned 0 here, discarding the values just
        # computed; fall through instead so the caller always gets the tuple.
    print('Numbers TUG: ' + str(number_tugs) + '\nTUG Potency: ' + str(power_tugs) + ' (VA)')
    print('')
    return number_tugs, power_tugs
print('Lighting Potency: '+ str(100) +' (VA)') potency =", "power_tugs = number_tugs * 100 elif ambient_name in class4: number_tugs = math.ceil(perimeter/5) power_tugs", "\"\"\"#Dimensionamento de TUGs\"\"\" def calculate_number_and_power_of_tugs(ambient_name, perimeter = 0): #area in m^2 #perimeter in", "area: ') area = float(input()) if area <= 2.55: number_tugs = 1 power_tugs", "0): #area in m^2 #perimeter in m class1 = ['banheiro'] class2 = ['cozinha',", "de servico', 'lavanderia'] class3 = ['varanda'] class4 = ['sala', 'quarto', 'dormitorio', 'escritorio'] number_tugs", "int((ambient_area - 6)/4) print('Lighting Potency: '+ str(potency) +' (VA)') print('') return potency \"\"\"#Dimensionamento", "* int((ambient_area - 6)/4) print('Lighting Potency: '+ str(potency) +' (VA)') print('') return potency", "power_tugs = number_tugs * 100 return 0 print('Numbers TUG: ' + str(number_tugs) +", "print('extra potency: ' + str((ambient_area - 6))) potency = 100 + 60 *", "potency = 100 else: print('extra potency: ' + str((ambient_area - 6))) potency =", "0 power_tugs = 0 if ambient_name in class1: number_tugs = 1 power_tugs =", "ambient_name in class1: number_tugs = 1 power_tugs = number_tugs * 600 elif ambient_name", "class1 = ['banheiro'] class2 = ['cozinha', 'copa','copa-cozinha', 'area de servico', 'lavanderia'] class3 =", "100 else: print('No matches found') print('warning: ambient is calculated by area, see in", "= ['varanda'] class4 = ['sala', 'quarto', 'dormitorio', 'escritorio'] number_tugs = 0 power_tugs =", "else: power_tugs = 3 * 600 + 100 * (number_tugs - 3) elif", "1 power_tugs = number_tugs * 100 return 0 print('Numbers TUG: ' + str(number_tugs)", "calculated by area, see in 54.10 norma\\nEntry with area: ') area = float(input())", "str(potency) +' (VA)') print('') return potency \"\"\"#Dimensionamento de TUGs\"\"\" def calculate_number_and_power_of_tugs(ambient_name, perimeter =", "* 100 elif ambient_name in class4: number_tugs = math.ceil(perimeter/5) power_tugs = number_tugs *", "print('No matches found') print('warning: ambient is calculated by area, see in 54.10 norma\\nEntry", "'+ str(100) +' (VA)') potency = 100 else: print('extra potency: ' + str((ambient_area", "') area = float(input()) if area <= 2.55: number_tugs = 1 power_tugs =", "elif ambient_name in class3: number_tugs = 1 power_tugs = number_tugs * 100 elif", "['varanda'] class4 = ['sala', 'quarto', 'dormitorio', 'escritorio'] number_tugs = 0 power_tugs = 0", "= number_tugs * 600 else: power_tugs = 3 * 600 + 100 *", "area <= 2.55: number_tugs = 1 power_tugs = number_tugs * 100 return 0", "if ambient_name in class1: number_tugs = 1 power_tugs = number_tugs * 600 elif", "+' (VA)') potency = 100 else: print('extra potency: ' + str((ambient_area - 6)))", "str(100) +' (VA)') potency = 100 else: print('extra potency: ' + str((ambient_area -", "['sala', 'quarto', 'dormitorio', 'escritorio'] number_tugs = 0 power_tugs = 0 if ambient_name in", "'escritorio'] number_tugs = 0 power_tugs = 0 if ambient_name in class1: number_tugs =", "m^2 #perimeter in m class1 = ['banheiro'] class2 = ['cozinha', 'copa','copa-cozinha', 'area de", "def calculate_number_and_power_of_tugs(ambient_name, perimeter = 0): #area in m^2 #perimeter in m class1 =", "elif ambient_name in class4: number_tugs = math.ceil(perimeter/5) power_tugs = number_tugs * 100 else:", "found') print('warning: ambient is calculated by area, see in 54.10 norma\\nEntry with area:", "['cozinha', 'copa','copa-cozinha', 'area de servico', 'lavanderia'] class3 = ['varanda'] 
class4 = ['sala', 'quarto',", "str((ambient_area - 6))) potency = 100 + 60 * int((ambient_area - 6)/4) print('Lighting", "potency = 100 + 60 * int((ambient_area - 6)/4) print('Lighting Potency: '+ str(potency)" ]
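# Example usage (a sketch -- the room dimensions below are hypothetical, not
# from the original source). A 10.5 m^2 room gets
# 100 + 60 * int((10.5 - 6) / 4) = 160 VA of lighting; a kitchen with a 9 m
# perimeter gets ceil(9 / 3.5) = 3 outlets at 600 VA each.
calculate_power_luminance(10.5)                              # -> 160 (VA)
calculate_number_and_power_of_tugs('cozinha', perimeter=9)   # -> 3 TUGs, 1800 VA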
[ "open(SAVE_PATH, \"w\") as fw: for pair in pairs: support = two_deg[pair] / itemsets_len", "itemset_3[k] for itemset in itemsets: if item_i in itemset and item_j in itemset", "in itemset: two_deg[key] = two_deg.get(key, 0) + 1 pairs = list(two_deg.keys()) two_deg_count =", "three_deg[tup] print(f\"频繁三项集数量: {three_deg_count}\", ) print(f\"频繁三项集保存在`{SAVE_PATH}`\") return three_deg three_deg = build3deg(two_deg, itemsets) def gen3deg_rules(one_deg,", "= list(two_deg.keys()) itemset_3 = set() for pair in pairs: itemset_3.add(pair[0]) itemset_3.add(pair[1]) itemset_3 =", "one_deg[rule[0]] if conf > CONF: rules[rule] = conf with open(SAVE_PATH, \"w\") as fw:", "itemsets: for item in itemset: one_deg[item] = one_deg.get(item, 0) + 1 one_deg_count =", "except: try: conf = three_deg[three] / two_deg[(two[1], two[0])] except: print(two, \"not found\") if", "0.005 CONF = 0.5 def csv2list(): df = pd.read_csv(\"./实验三/数据/Groceries.csv\") itemsets = [] for", "one_deg[one] if conf > CONF: fw.write(f\"{one}->{two}: {conf}\\n\") three_deg_rule_num += 1 for three, two,", "range(0, len(itemset_3)): for j in range(i+1, len(itemset_3)): for k in range(j+1, len(itemset_3)): item_i", "three_deg[three] / two_deg[(two[1], two[0])] except: print(two, \"not found\") if conf > CONF: fw.write(f\"{two}->{one}:", "pair in pairs: itemset_3.add(pair[0]) itemset_3.add(pair[1]) itemset_3 = list(itemset_3) itemset_3.sort() three_deg = {} for", "tup in tups: support = three_deg[tup] / itemsets_len if support > SUPPORT: three_deg[tup]", "one_deg.get(item, 0) + 1 one_deg_count = 0 items = list(one_deg.keys()) with open(SAVE_PATH, \"w\")", "set() for pair in pairs: itemset_3.add(pair[0]) itemset_3.add(pair[1]) itemset_3 = list(itemset_3) itemset_3.sort() three_deg =", "conf = three_deg[three] / one_deg[one] if conf > CONF: fw.write(f\"{one}->{two}: {conf}\\n\") three_deg_rule_num +=", "> SUPPORT: three_deg[tup] = support fw.write(f\"{tup}: {support}\\n\") three_deg_count += 1 else: del three_deg[tup]", "in items: support = one_deg[item] / itemsets_len if support > SUPPORT: one_deg[item] =", "itemset: tup = (item_i, item_j, item_k) three_deg[tup] = three_deg.get(tup, 0)+1 three_deg_count = 0", "(tup[0], tup[2]), tup[1]), (tup, (tup[0], tup[1]), tup[2]), ] three_deg_rule_num = 0 with open(SAVE_PATH,", "for item in itemset: one_deg[item] = one_deg.get(item, 0) + 1 one_deg_count = 0", "if item_i in itemset and item_j in itemset and item_k in itemset: tup", "in itemsets: for item in itemset: one_deg[item] = one_deg.get(item, 0) + 1 one_deg_count", "itemset_str in df[\"items\"]: itemsets.append(set(itemset_str[1:-1].split(\",\"))) return itemsets itemsets = csv2list() itemsets_len = itemsets.__len__() def", "open(SAVE_PATH, \"w\") as fw: for item in items: support = one_deg[item] / itemsets_len", "key[1] in itemset: two_deg[key] = two_deg.get(key, 0) + 1 pairs = list(two_deg.keys()) two_deg_count", "in pairs: rule = (pair[0], pair[1]) conf = two_deg[pair] / one_deg[rule[0]] if conf", "(tup[1], tup[2]), tup[0]), (tup, (tup[0], tup[2]), tup[1]), (tup, (tup[0], tup[1]), tup[2]), ] three_deg_rule_num", "fw.write(f\"{one}->{two}: {conf}\\n\") three_deg_rule_num += 1 for three, two, one in rules[3:]: try: conf", "pd.read_csv(\"./实验三/数据/Groceries.csv\") itemsets = [] for itemset_str in df[\"items\"]: itemsets.append(set(itemset_str[1:-1].split(\",\"))) return itemsets itemsets =", "if conf > CONF: rules[rule] = conf rule = (pair[1], pair[0]) conf =", "build2deg(one_deg, itemsets): SAVE_PATH = \"./two_deg_support.txt\" items = 
list(one_deg.keys()) two_deg = {} for i", "= itemset_3[k] for itemset in itemsets: if item_i in itemset and item_j in", "tups = list(three_deg.keys()) with open(SAVE_PATH, \"w\") as fw: for tup in tups: support", "in rules[:3]: conf = three_deg[three] / one_deg[one] if conf > CONF: fw.write(f\"{one}->{two}: {conf}\\n\")", "+= 1 else: del two_deg[pair] print(f\"频繁二项集数量: {two_deg_count}\", ) print(f\"频繁二项集保存在`{SAVE_PATH}`\") return two_deg two_deg =", "= two_deg[pair] / one_deg[rule[0]] if conf > CONF: rules[rule] = conf with open(SAVE_PATH,", "tup[1])), (tup, (tup[1], tup[2]), tup[0]), (tup, (tup[0], tup[2]), tup[1]), (tup, (tup[0], tup[1]), tup[2]),", "> CONF: rules[rule] = conf with open(SAVE_PATH, \"w\") as fw: for k, v", "pd SUPPORT = 0.005 CONF = 0.5 def csv2list(): df = pd.read_csv(\"./实验三/数据/Groceries.csv\") itemsets", "list(two_deg.keys()) two_deg_count = 0 with open(SAVE_PATH, \"w\") as fw: for pair in pairs:", "= list(three_deg.keys()) rules = {} def enumTup(tup): return [ (tup, tup[0], (tup[1], tup[2])),", "SAVE_PATH = \"./three_deg_rules.txt\" tups = list(three_deg.keys()) rules = {} def enumTup(tup): return [", "= build1deg(itemsets) def build2deg(one_deg, itemsets): SAVE_PATH = \"./two_deg_support.txt\" items = list(one_deg.keys()) two_deg =", "in range(i+1, len(items)): key = (items[i], items[j]) for itemset in itemsets: if key[0]", "CONF: fw.write(f\"{one}->{two}: {conf}\\n\") three_deg_rule_num += 1 for three, two, one in rules[3:]: try:", "len(items)): key = (items[i], items[j]) for itemset in itemsets: if key[0] in itemset", "open(SAVE_PATH, \"w\") as fw: for tup in tups: support = three_deg[tup] / itemsets_len", "with open(SAVE_PATH, \"w\") as fw: for tup in tups: support = three_deg[tup] /", "CONF: fw.write(f\"{two}->{one}: {conf}\\n\") three_deg_rule_num += 1 print(f\"频繁三项集规则数量: {three_deg_rule_num}\", ) print(f\"频繁三项集规则保存在`{SAVE_PATH}`\") gen3deg_rules(one_deg, two_deg, three_deg)", "two_deg): SAVE_PATH = \"./two_deg_rules.txt\" pairs = list(two_deg.keys()) rules = {} for pair in", "print(f\"频繁一项集数量: {one_deg_count}\", ) print(f\"频繁一项集保存在`{SAVE_PATH}`\") return one_deg one_deg = build1deg(itemsets) def build2deg(one_deg, itemsets): SAVE_PATH", "print(f\"频繁三项集保存在`{SAVE_PATH}`\") return three_deg three_deg = build3deg(two_deg, itemsets) def gen3deg_rules(one_deg, two_deg, three_deg): SAVE_PATH =", "= list(two_deg.keys()) two_deg_count = 0 with open(SAVE_PATH, \"w\") as fw: for pair in", "/ one_deg[rule[0]] if conf > CONF: rules[rule] = conf rule = (pair[1], pair[0])", "gen2deg_rules(one_deg, two_deg) def build3deg(two_deg, itemsets): SAVE_PATH = \"./three_deg_support.txt\" pairs = list(two_deg.keys()) itemset_3 =", "if key[0] in itemset and key[1] in itemset: two_deg[key] = two_deg.get(key, 0) +", "fw: for k, v in rules.items(): fw.write(f\"{k[0]}->{k[1]}: {v}\\n\") print(f\"频繁二项集规则数量: {len(rules.keys())}\", ) print(f\"频繁二项集规则保存在`{SAVE_PATH}`\") gen2deg_rules(one_deg,", "{three_deg_count}\", ) print(f\"频繁三项集保存在`{SAVE_PATH}`\") return three_deg three_deg = build3deg(two_deg, itemsets) def gen3deg_rules(one_deg, two_deg, three_deg):", "in df[\"items\"]: itemsets.append(set(itemset_str[1:-1].split(\",\"))) return itemsets itemsets = csv2list() itemsets_len = itemsets.__len__() def build1deg(itemsets):", "conf = three_deg[three] / two_deg[two] except: try: conf = three_deg[three] / two_deg[(two[1], two[0])]", "v in rules.items(): fw.write(f\"{k[0]}->{k[1]}: {v}\\n\") print(f\"频繁二项集规则数量: {len(rules.keys())}\", ) print(f\"频繁二项集规则保存在`{SAVE_PATH}`\") gen2deg_rules(one_deg, 
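# The [1:-1] slice in csv2list() implies each row of the "items" column is
# wrapped in a pair of delimiters -- presumably the R arules export format
# for Groceries, e.g. "{citrus fruit,margarine,ready soups}". A minimal
# sketch of that parse, with a made-up row:
#
#     row = "{citrus fruit,margarine,ready soups}"
#     basket = set(row[1:-1].split(","))
#     # -> {'citrus fruit', 'margarine', 'ready soups'}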
def build1deg(itemsets):
    SAVE_PATH = "./one_deg_support.txt"
    one_deg = {}
    for itemset in itemsets:
        for item in itemset:
            one_deg[item] = one_deg.get(item, 0) + 1
    one_deg_count = 0
    items = list(one_deg.keys())   # copy of the keys, so deleting below is safe
    with open(SAVE_PATH, "w") as fw:
        for item in items:
            support = one_deg[item] / itemsets_len
            if support > SUPPORT:
                one_deg[item] = support          # replace raw count with support
                fw.write(f"{item}: {support}\n")
                one_deg_count += 1
            else:
                del one_deg[item]
    print(f"Number of frequent 1-itemsets: {one_deg_count}")
    print(f"Frequent 1-itemsets saved to `{SAVE_PATH}`")
    return one_deg

one_deg = build1deg(itemsets)

def build2deg(one_deg, itemsets):
    SAVE_PATH = "./two_deg_support.txt"
    items = list(one_deg.keys())
    two_deg = {}
    # count each candidate pair of frequent items across all transactions
    for i in range(0, len(items)):
        for j in range(i + 1, len(items)):
            key = (items[i], items[j])
            for itemset in itemsets:
                if key[0] in itemset and key[1] in itemset:
                    two_deg[key] = two_deg.get(key, 0) + 1
    pairs = list(two_deg.keys())
    two_deg_count = 0
    with open(SAVE_PATH, "w") as fw:
        for pair in pairs:
            support = two_deg[pair] / itemsets_len
            if support > SUPPORT:
                two_deg[pair] = support
                fw.write(f"{pair}: {support}\n")
                two_deg_count += 1
            else:
                del two_deg[pair]
    print(f"Number of frequent 2-itemsets: {two_deg_count}")
    print(f"Frequent 2-itemsets saved to `{SAVE_PATH}`")
    return two_deg

two_deg = build2deg(one_deg, itemsets)
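# build2deg() tests every candidate pair against every transaction, which is
# O(|items|^2 * |transactions|). A common alternative (a sketch, not the
# original code) counts pairs per transaction instead, visiting only pairs
# that actually co-occur; keys are normalized by sorting:
from itertools import combinations

def count_pairs(itemsets, frequent_items):
    # frequent_items: any container with O(1) membership, e.g. the one_deg dict
    counts = {}
    for basket in itemsets:
        kept = sorted(item for item in basket if item in frequent_items)
        for pair in combinations(kept, 2):
            counts[pair] = counts.get(pair, 0) + 1
    return counts

# usage sketch: count_pairs(itemsets, one_deg)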
"three_deg[three] / two_deg[two] except: try: conf = three_deg[three] / two_deg[(two[1], two[0])] except: print(two,", "itemsets_len if support > SUPPORT: two_deg[pair] = support fw.write(f\"{pair}: {support}\\n\") two_deg_count += 1", "= two_deg.get(key, 0) + 1 pairs = list(two_deg.keys()) two_deg_count = 0 with open(SAVE_PATH,", "enumTup(tup): return [ (tup, tup[0], (tup[1], tup[2])), (tup, tup[1], (tup[0], tup[2])), (tup, tup[2],", "1 else: del two_deg[pair] print(f\"频繁二项集数量: {two_deg_count}\", ) print(f\"频繁二项集保存在`{SAVE_PATH}`\") return two_deg two_deg = build2deg(one_deg,", "(tup[0], tup[1]), tup[2]), ] three_deg_rule_num = 0 with open(SAVE_PATH, \"w\") as fw: for", "i in range(0, len(itemset_3)): for j in range(i+1, len(itemset_3)): for k in range(j+1,", "0 items = list(one_deg.keys()) with open(SAVE_PATH, \"w\") as fw: for item in items:", "= (pair[1], pair[0]) conf = two_deg[pair] / one_deg[rule[0]] if conf > CONF: rules[rule]", "> SUPPORT: one_deg[item] = support fw.write(f\"{item}: {support}\\n\") one_deg_count += 1 else: del one_deg[item]", "if conf > CONF: fw.write(f\"{two}->{one}: {conf}\\n\") three_deg_rule_num += 1 print(f\"频繁三项集规则数量: {three_deg_rule_num}\", ) print(f\"频繁三项集规则保存在`{SAVE_PATH}`\")", "= {} def enumTup(tup): return [ (tup, tup[0], (tup[1], tup[2])), (tup, tup[1], (tup[0],", "items = list(one_deg.keys()) two_deg = {} for i in range(0, len(items)): for j", "itemsets.append(set(itemset_str[1:-1].split(\",\"))) return itemsets itemsets = csv2list() itemsets_len = itemsets.__len__() def build1deg(itemsets): SAVE_PATH =", "= three_deg[three] / one_deg[one] if conf > CONF: fw.write(f\"{one}->{two}: {conf}\\n\") three_deg_rule_num += 1", "rules = {} def enumTup(tup): return [ (tup, tup[0], (tup[1], tup[2])), (tup, tup[1],", "item_k in itemset: tup = (item_i, item_j, item_k) three_deg[tup] = three_deg.get(tup, 0)+1 three_deg_count", "if conf > CONF: rules[rule] = conf with open(SAVE_PATH, \"w\") as fw: for", "itemset_3[i] item_j = itemset_3[j] item_k = itemset_3[k] for itemset in itemsets: if item_i", "tup[0]), (tup, (tup[0], tup[2]), tup[1]), (tup, (tup[0], tup[1]), tup[2]), ] three_deg_rule_num = 0", "(tup, (tup[0], tup[1]), tup[2]), ] three_deg_rule_num = 0 with open(SAVE_PATH, \"w\") as fw:", "(tup, (tup[0], tup[2]), tup[1]), (tup, (tup[0], tup[1]), tup[2]), ] three_deg_rule_num = 0 with", "pairs: rule = (pair[0], pair[1]) conf = two_deg[pair] / one_deg[rule[0]] if conf >", "list(two_deg.keys()) rules = {} for pair in pairs: rule = (pair[0], pair[1]) conf", "two_deg) def build3deg(two_deg, itemsets): SAVE_PATH = \"./three_deg_support.txt\" pairs = list(two_deg.keys()) itemset_3 = set()", "as fw: for pair in pairs: support = two_deg[pair] / itemsets_len if support", "itemset: two_deg[key] = two_deg.get(key, 0) + 1 pairs = list(two_deg.keys()) two_deg_count = 0", "rules = {} for pair in pairs: rule = (pair[0], pair[1]) conf =", "item_i = itemset_3[i] item_j = itemset_3[j] item_k = itemset_3[k] for itemset in itemsets:", "\"w\") as fw: for tup in tups: rules = enumTup(tup) for three, one,", "= itemset_3[i] item_j = itemset_3[j] item_k = itemset_3[k] for itemset in itemsets: if", "pairs = list(two_deg.keys()) itemset_3 = set() for pair in pairs: itemset_3.add(pair[0]) itemset_3.add(pair[1]) itemset_3", "three_deg = {} for i in range(0, len(itemset_3)): for j in range(i+1, len(itemset_3)):", "\"w\") as fw: for pair in pairs: support = two_deg[pair] / itemsets_len if", "{support}\\n\") three_deg_count += 1 else: del three_deg[tup] print(f\"频繁三项集数量: {three_deg_count}\", ) 
print(f\"频繁三项集保存在`{SAVE_PATH}`\") return three_deg", "for i in range(0, len(items)): for j in range(i+1, len(items)): key = (items[i],", "print(f\"频繁二项集规则保存在`{SAVE_PATH}`\") gen2deg_rules(one_deg, two_deg) def build3deg(two_deg, itemsets): SAVE_PATH = \"./three_deg_support.txt\" pairs = list(two_deg.keys()) itemset_3", "key = (items[i], items[j]) for itemset in itemsets: if key[0] in itemset and", "tup[1]), tup[2]), ] three_deg_rule_num = 0 with open(SAVE_PATH, \"w\") as fw: for tup", "= (items[i], items[j]) for itemset in itemsets: if key[0] in itemset and key[1]", "{two_deg_count}\", ) print(f\"频繁二项集保存在`{SAVE_PATH}`\") return two_deg two_deg = build2deg(one_deg, itemsets) def gen2deg_rules(one_deg, two_deg): SAVE_PATH", "two_deg[pair] / one_deg[rule[0]] if conf > CONF: rules[rule] = conf with open(SAVE_PATH, \"w\")", "tup[2])), (tup, tup[1], (tup[0], tup[2])), (tup, tup[2], (tup[0], tup[1])), (tup, (tup[1], tup[2]), tup[0]),", "itemsets = [] for itemset_str in df[\"items\"]: itemsets.append(set(itemset_str[1:-1].split(\",\"))) return itemsets itemsets = csv2list()", "rules[3:]: try: conf = three_deg[three] / two_deg[two] except: try: conf = three_deg[three] /", "key[0] in itemset and key[1] in itemset: two_deg[key] = two_deg.get(key, 0) + 1", "+= 1 else: del three_deg[tup] print(f\"频繁三项集数量: {three_deg_count}\", ) print(f\"频繁三项集保存在`{SAVE_PATH}`\") return three_deg three_deg =", "in range(j+1, len(itemset_3)): item_i = itemset_3[i] item_j = itemset_3[j] item_k = itemset_3[k] for", "/ two_deg[two] except: try: conf = three_deg[three] / two_deg[(two[1], two[0])] except: print(two, \"not", "= \"./one_deg_support.txt\" one_deg = {} for itemset in itemsets: for item in itemset:", "def gen2deg_rules(one_deg, two_deg): SAVE_PATH = \"./two_deg_rules.txt\" pairs = list(two_deg.keys()) rules = {} for", "pair in pairs: rule = (pair[0], pair[1]) conf = two_deg[pair] / one_deg[rule[0]] if", "0 with open(SAVE_PATH, \"w\") as fw: for tup in tups: rules = enumTup(tup)", ") print(f\"频繁一项集保存在`{SAVE_PATH}`\") return one_deg one_deg = build1deg(itemsets) def build2deg(one_deg, itemsets): SAVE_PATH = \"./two_deg_support.txt\"", "CONF: rules[rule] = conf rule = (pair[1], pair[0]) conf = two_deg[pair] / one_deg[rule[0]]", "{conf}\\n\") three_deg_rule_num += 1 for three, two, one in rules[3:]: try: conf =", "= \"./two_deg_rules.txt\" pairs = list(two_deg.keys()) rules = {} for pair in pairs: rule", "print(f\"频繁三项集数量: {three_deg_count}\", ) print(f\"频繁三项集保存在`{SAVE_PATH}`\") return three_deg three_deg = build3deg(two_deg, itemsets) def gen3deg_rules(one_deg, two_deg,", "conf with open(SAVE_PATH, \"w\") as fw: for k, v in rules.items(): fw.write(f\"{k[0]}->{k[1]}: {v}\\n\")", "tup[2]), ] three_deg_rule_num = 0 with open(SAVE_PATH, \"w\") as fw: for tup in", "build1deg(itemsets): SAVE_PATH = \"./one_deg_support.txt\" one_deg = {} for itemset in itemsets: for item", "itemset_3.sort() three_deg = {} for i in range(0, len(itemset_3)): for j in range(i+1,", "rules.items(): fw.write(f\"{k[0]}->{k[1]}: {v}\\n\") print(f\"频繁二项集规则数量: {len(rules.keys())}\", ) print(f\"频繁二项集规则保存在`{SAVE_PATH}`\") gen2deg_rules(one_deg, two_deg) def build3deg(two_deg, itemsets): SAVE_PATH", "two_deg[(two[1], two[0])] except: print(two, \"not found\") if conf > CONF: fw.write(f\"{two}->{one}: {conf}\\n\") three_deg_rule_num", "= support fw.write(f\"{item}: {support}\\n\") one_deg_count += 1 else: del one_deg[item] print(f\"频繁一项集数量: {one_deg_count}\", )", "item in items: support = one_deg[item] / itemsets_len if support > SUPPORT: one_deg[item]", 
"as fw: for k, v in rules.items(): fw.write(f\"{k[0]}->{k[1]}: {v}\\n\") print(f\"频繁二项集规则数量: {len(rules.keys())}\", ) print(f\"频繁二项集规则保存在`{SAVE_PATH}`\")", "itemsets_len if support > SUPPORT: one_deg[item] = support fw.write(f\"{item}: {support}\\n\") one_deg_count += 1", "in itemsets: if item_i in itemset and item_j in itemset and item_k in", "in tups: rules = enumTup(tup) for three, one, two in rules[:3]: conf =", "(tup[0], tup[2])), (tup, tup[2], (tup[0], tup[1])), (tup, (tup[1], tup[2]), tup[0]), (tup, (tup[0], tup[2]),", "conf = two_deg[pair] / one_deg[rule[0]] if conf > CONF: rules[rule] = conf with", "three_deg = build3deg(two_deg, itemsets) def gen3deg_rules(one_deg, two_deg, three_deg): SAVE_PATH = \"./three_deg_rules.txt\" tups =", "pair in pairs: support = two_deg[pair] / itemsets_len if support > SUPPORT: two_deg[pair]", "1 else: del three_deg[tup] print(f\"频繁三项集数量: {three_deg_count}\", ) print(f\"频繁三项集保存在`{SAVE_PATH}`\") return three_deg three_deg = build3deg(two_deg,", "+ 1 pairs = list(two_deg.keys()) two_deg_count = 0 with open(SAVE_PATH, \"w\") as fw:", "as fw: for tup in tups: support = three_deg[tup] / itemsets_len if support", "print(f\"频繁一项集保存在`{SAVE_PATH}`\") return one_deg one_deg = build1deg(itemsets) def build2deg(one_deg, itemsets): SAVE_PATH = \"./two_deg_support.txt\" items", "0 with open(SAVE_PATH, \"w\") as fw: for pair in pairs: support = two_deg[pair]", "= three_deg[three] / two_deg[(two[1], two[0])] except: print(two, \"not found\") if conf > CONF:", "range(0, len(items)): for j in range(i+1, len(items)): key = (items[i], items[j]) for itemset", "conf > CONF: fw.write(f\"{one}->{two}: {conf}\\n\") three_deg_rule_num += 1 for three, two, one in", "= two_deg[pair] / one_deg[rule[0]] if conf > CONF: rules[rule] = conf rule =", "SUPPORT: three_deg[tup] = support fw.write(f\"{tup}: {support}\\n\") three_deg_count += 1 else: del three_deg[tup] print(f\"频繁三项集数量:", "itemset and item_j in itemset and item_k in itemset: tup = (item_i, item_j,", "for three, one, two in rules[:3]: conf = three_deg[three] / one_deg[one] if conf", "in pairs: support = two_deg[pair] / itemsets_len if support > SUPPORT: two_deg[pair] =", "for three, two, one in rules[3:]: try: conf = three_deg[three] / two_deg[two] except:", "pairs: support = two_deg[pair] / itemsets_len if support > SUPPORT: two_deg[pair] = support", "= [] for itemset_str in df[\"items\"]: itemsets.append(set(itemset_str[1:-1].split(\",\"))) return itemsets itemsets = csv2list() itemsets_len", "if conf > CONF: fw.write(f\"{one}->{two}: {conf}\\n\") three_deg_rule_num += 1 for three, two, one", ") print(f\"频繁二项集保存在`{SAVE_PATH}`\") return two_deg two_deg = build2deg(one_deg, itemsets) def gen2deg_rules(one_deg, two_deg): SAVE_PATH =", "build3deg(two_deg, itemsets): SAVE_PATH = \"./three_deg_support.txt\" pairs = list(two_deg.keys()) itemset_3 = set() for pair", "{} for i in range(0, len(items)): for j in range(i+1, len(items)): key =", "three, two, one in rules[3:]: try: conf = three_deg[three] / two_deg[two] except: try:", "SAVE_PATH = \"./two_deg_support.txt\" items = list(one_deg.keys()) two_deg = {} for i in range(0,", "rule = (pair[1], pair[0]) conf = two_deg[pair] / one_deg[rule[0]] if conf > CONF:", "{} def enumTup(tup): return [ (tup, tup[0], (tup[1], tup[2])), (tup, tup[1], (tup[0], tup[2])),", "conf > CONF: fw.write(f\"{two}->{one}: {conf}\\n\") three_deg_rule_num += 1 print(f\"频繁三项集规则数量: {three_deg_rule_num}\", ) print(f\"频繁三项集规则保存在`{SAVE_PATH}`\") gen3deg_rules(one_deg,", "three_deg): SAVE_PATH = 
\"./three_deg_rules.txt\" tups = list(three_deg.keys()) rules = {} def enumTup(tup): return", "= 0 items = list(one_deg.keys()) with open(SAVE_PATH, \"w\") as fw: for item in", "item_j in itemset and item_k in itemset: tup = (item_i, item_j, item_k) three_deg[tup]", "= one_deg.get(item, 0) + 1 one_deg_count = 0 items = list(one_deg.keys()) with open(SAVE_PATH,", "tup = (item_i, item_j, item_k) three_deg[tup] = three_deg.get(tup, 0)+1 three_deg_count = 0 tups", "k, v in rules.items(): fw.write(f\"{k[0]}->{k[1]}: {v}\\n\") print(f\"频繁二项集规则数量: {len(rules.keys())}\", ) print(f\"频繁二项集规则保存在`{SAVE_PATH}`\") gen2deg_rules(one_deg, two_deg) def", "if support > SUPPORT: one_deg[item] = support fw.write(f\"{item}: {support}\\n\") one_deg_count += 1 else:", "itemset_3.add(pair[1]) itemset_3 = list(itemset_3) itemset_3.sort() three_deg = {} for i in range(0, len(itemset_3)):", "\"./three_deg_rules.txt\" tups = list(three_deg.keys()) rules = {} def enumTup(tup): return [ (tup, tup[0],", "= \"./two_deg_support.txt\" items = list(one_deg.keys()) two_deg = {} for i in range(0, len(items)):", "support = two_deg[pair] / itemsets_len if support > SUPPORT: two_deg[pair] = support fw.write(f\"{pair}:", "item_k) three_deg[tup] = three_deg.get(tup, 0)+1 three_deg_count = 0 tups = list(three_deg.keys()) with open(SAVE_PATH,", "support fw.write(f\"{pair}: {support}\\n\") two_deg_count += 1 else: del two_deg[pair] print(f\"频繁二项集数量: {two_deg_count}\", ) print(f\"频繁二项集保存在`{SAVE_PATH}`\")", "def build2deg(one_deg, itemsets): SAVE_PATH = \"./two_deg_support.txt\" items = list(one_deg.keys()) two_deg = {} for", "= itemset_3[j] item_k = itemset_3[k] for itemset in itemsets: if item_i in itemset", "pairs = list(two_deg.keys()) rules = {} for pair in pairs: rule = (pair[0],", "list(one_deg.keys()) with open(SAVE_PATH, \"w\") as fw: for item in items: support = one_deg[item]", "tup[2]), tup[1]), (tup, (tup[0], tup[1]), tup[2]), ] three_deg_rule_num = 0 with open(SAVE_PATH, \"w\")", "{} for itemset in itemsets: for item in itemset: one_deg[item] = one_deg.get(item, 0)", "open(SAVE_PATH, \"w\") as fw: for k, v in rules.items(): fw.write(f\"{k[0]}->{k[1]}: {v}\\n\") print(f\"频繁二项集规则数量: {len(rules.keys())}\",", "in itemset: tup = (item_i, item_j, item_k) three_deg[tup] = three_deg.get(tup, 0)+1 three_deg_count =", "tup[1]), (tup, (tup[0], tup[1]), tup[2]), ] three_deg_rule_num = 0 with open(SAVE_PATH, \"w\") as", "else: del two_deg[pair] print(f\"频繁二项集数量: {two_deg_count}\", ) print(f\"频繁二项集保存在`{SAVE_PATH}`\") return two_deg two_deg = build2deg(one_deg, itemsets)", "/ one_deg[rule[0]] if conf > CONF: rules[rule] = conf with open(SAVE_PATH, \"w\") as", "three_deg_count = 0 tups = list(three_deg.keys()) with open(SAVE_PATH, \"w\") as fw: for tup", "for tup in tups: support = three_deg[tup] / itemsets_len if support > SUPPORT:", "with open(SAVE_PATH, \"w\") as fw: for tup in tups: rules = enumTup(tup) for", "/ one_deg[one] if conf > CONF: fw.write(f\"{one}->{two}: {conf}\\n\") three_deg_rule_num += 1 for three,", "two_deg[pair] = support fw.write(f\"{pair}: {support}\\n\") two_deg_count += 1 else: del two_deg[pair] print(f\"频繁二项集数量: {two_deg_count}\",", "= support fw.write(f\"{pair}: {support}\\n\") two_deg_count += 1 else: del two_deg[pair] print(f\"频繁二项集数量: {two_deg_count}\", )", "return [ (tup, tup[0], (tup[1], tup[2])), (tup, tup[1], (tup[0], tup[2])), (tup, tup[2], (tup[0],", "in tups: support = three_deg[tup] / itemsets_len if support > SUPPORT: three_deg[tup] =", "{v}\\n\") print(f\"频繁二项集规则数量: {len(rules.keys())}\", ) 
print(f\"频繁二项集规则保存在`{SAVE_PATH}`\") gen2deg_rules(one_deg, two_deg) def build3deg(two_deg, itemsets): SAVE_PATH = \"./three_deg_support.txt\"", "k in range(j+1, len(itemset_3)): item_i = itemset_3[i] item_j = itemset_3[j] item_k = itemset_3[k]", "item_i in itemset and item_j in itemset and item_k in itemset: tup =", "one_deg_count = 0 items = list(one_deg.keys()) with open(SAVE_PATH, \"w\") as fw: for item", "three_deg_count += 1 else: del three_deg[tup] print(f\"频繁三项集数量: {three_deg_count}\", ) print(f\"频繁三项集保存在`{SAVE_PATH}`\") return three_deg three_deg", "= list(one_deg.keys()) two_deg = {} for i in range(0, len(items)): for j in", "one_deg[item] print(f\"频繁一项集数量: {one_deg_count}\", ) print(f\"频繁一项集保存在`{SAVE_PATH}`\") return one_deg one_deg = build1deg(itemsets) def build2deg(one_deg, itemsets):", "list(one_deg.keys()) two_deg = {} for i in range(0, len(items)): for j in range(i+1,", "= 0.5 def csv2list(): df = pd.read_csv(\"./实验三/数据/Groceries.csv\") itemsets = [] for itemset_str in", "two_deg[pair] print(f\"频繁二项集数量: {two_deg_count}\", ) print(f\"频繁二项集保存在`{SAVE_PATH}`\") return two_deg two_deg = build2deg(one_deg, itemsets) def gen2deg_rules(one_deg,", "\"w\") as fw: for item in items: support = one_deg[item] / itemsets_len if", "support > SUPPORT: two_deg[pair] = support fw.write(f\"{pair}: {support}\\n\") two_deg_count += 1 else: del", "support = one_deg[item] / itemsets_len if support > SUPPORT: one_deg[item] = support fw.write(f\"{item}:", "= conf rule = (pair[1], pair[0]) conf = two_deg[pair] / one_deg[rule[0]] if conf", "as fw: for item in items: support = one_deg[item] / itemsets_len if support", "return two_deg two_deg = build2deg(one_deg, itemsets) def gen2deg_rules(one_deg, two_deg): SAVE_PATH = \"./two_deg_rules.txt\" pairs", "two_deg_count = 0 with open(SAVE_PATH, \"w\") as fw: for pair in pairs: support", "itemsets) def gen3deg_rules(one_deg, two_deg, three_deg): SAVE_PATH = \"./three_deg_rules.txt\" tups = list(three_deg.keys()) rules =", "one in rules[3:]: try: conf = three_deg[three] / two_deg[two] except: try: conf =", "= list(itemset_3) itemset_3.sort() three_deg = {} for i in range(0, len(itemset_3)): for j", "except: print(two, \"not found\") if conf > CONF: fw.write(f\"{two}->{one}: {conf}\\n\") three_deg_rule_num += 1", "tup[2], (tup[0], tup[1])), (tup, (tup[1], tup[2]), tup[0]), (tup, (tup[0], tup[2]), tup[1]), (tup, (tup[0],", "df[\"items\"]: itemsets.append(set(itemset_str[1:-1].split(\",\"))) return itemsets itemsets = csv2list() itemsets_len = itemsets.__len__() def build1deg(itemsets): SAVE_PATH", "fw: for tup in tups: rules = enumTup(tup) for three, one, two in", "in range(0, len(itemset_3)): for j in range(i+1, len(itemset_3)): for k in range(j+1, len(itemset_3)):", "SAVE_PATH = \"./two_deg_rules.txt\" pairs = list(two_deg.keys()) rules = {} for pair in pairs:", "(items[i], items[j]) for itemset in itemsets: if key[0] in itemset and key[1] in", "fw.write(f\"{item}: {support}\\n\") one_deg_count += 1 else: del one_deg[item] print(f\"频繁一项集数量: {one_deg_count}\", ) print(f\"频繁一项集保存在`{SAVE_PATH}`\") return", "> CONF: rules[rule] = conf rule = (pair[1], pair[0]) conf = two_deg[pair] /", "two_deg.get(key, 0) + 1 pairs = list(two_deg.keys()) two_deg_count = 0 with open(SAVE_PATH, \"w\")", "{} for i in range(0, len(itemset_3)): for j in range(i+1, len(itemset_3)): for k", "= (pair[0], pair[1]) conf = two_deg[pair] / one_deg[rule[0]] if conf > CONF: rules[rule]", "range(i+1, len(items)): key = (items[i], items[j]) for itemset in itemsets: if key[0] in", "as pd SUPPORT 
= 0.005 CONF = 0.5 def csv2list(): df = pd.read_csv(\"./实验三/数据/Groceries.csv\")", "0)+1 three_deg_count = 0 tups = list(three_deg.keys()) with open(SAVE_PATH, \"w\") as fw: for", "> CONF: fw.write(f\"{two}->{one}: {conf}\\n\") three_deg_rule_num += 1 print(f\"频繁三项集规则数量: {three_deg_rule_num}\", ) print(f\"频繁三项集规则保存在`{SAVE_PATH}`\") gen3deg_rules(one_deg, two_deg,", "> CONF: fw.write(f\"{one}->{two}: {conf}\\n\") three_deg_rule_num += 1 for three, two, one in rules[3:]:", "two, one in rules[3:]: try: conf = three_deg[three] / two_deg[two] except: try: conf", "two_deg_count += 1 else: del two_deg[pair] print(f\"频繁二项集数量: {two_deg_count}\", ) print(f\"频繁二项集保存在`{SAVE_PATH}`\") return two_deg two_deg", "\"not found\") if conf > CONF: fw.write(f\"{two}->{one}: {conf}\\n\") three_deg_rule_num += 1 print(f\"频繁三项集规则数量: {three_deg_rule_num}\",", "for itemset in itemsets: if key[0] in itemset and key[1] in itemset: two_deg[key]", "conf rule = (pair[1], pair[0]) conf = two_deg[pair] / one_deg[rule[0]] if conf >", "fw.write(f\"{tup}: {support}\\n\") three_deg_count += 1 else: del three_deg[tup] print(f\"频繁三项集数量: {three_deg_count}\", ) print(f\"频繁三项集保存在`{SAVE_PATH}`\") return", "{support}\\n\") one_deg_count += 1 else: del one_deg[item] print(f\"频繁一项集数量: {one_deg_count}\", ) print(f\"频繁一项集保存在`{SAVE_PATH}`\") return one_deg", "j in range(i+1, len(itemset_3)): for k in range(j+1, len(itemset_3)): item_i = itemset_3[i] item_j", "one_deg[item] = support fw.write(f\"{item}: {support}\\n\") one_deg_count += 1 else: del one_deg[item] print(f\"频繁一项集数量: {one_deg_count}\",", "len(items)): for j in range(i+1, len(items)): key = (items[i], items[j]) for itemset in", "two_deg two_deg = build2deg(one_deg, itemsets) def gen2deg_rules(one_deg, two_deg): SAVE_PATH = \"./two_deg_rules.txt\" pairs =", "in itemset and item_k in itemset: tup = (item_i, item_j, item_k) three_deg[tup] =", "enumTup(tup) for three, one, two in rules[:3]: conf = three_deg[three] / one_deg[one] if", "for k in range(j+1, len(itemset_3)): item_i = itemset_3[i] item_j = itemset_3[j] item_k =", "build2deg(one_deg, itemsets) def gen2deg_rules(one_deg, two_deg): SAVE_PATH = \"./two_deg_rules.txt\" pairs = list(two_deg.keys()) rules =", "= 0 tups = list(three_deg.keys()) with open(SAVE_PATH, \"w\") as fw: for tup in", "= (item_i, item_j, item_k) three_deg[tup] = three_deg.get(tup, 0)+1 three_deg_count = 0 tups =", "i in range(0, len(items)): for j in range(i+1, len(items)): key = (items[i], items[j])", "j in range(i+1, len(items)): key = (items[i], items[j]) for itemset in itemsets: if", "/ itemsets_len if support > SUPPORT: three_deg[tup] = support fw.write(f\"{tup}: {support}\\n\") three_deg_count +=", "{} for pair in pairs: rule = (pair[0], pair[1]) conf = two_deg[pair] /", "+= 1 for three, two, one in rules[3:]: try: conf = three_deg[three] /", "and key[1] in itemset: two_deg[key] = two_deg.get(key, 0) + 1 pairs = list(two_deg.keys())", "tup[2])), (tup, tup[2], (tup[0], tup[1])), (tup, (tup[1], tup[2]), tup[0]), (tup, (tup[0], tup[2]), tup[1]),", "itemsets = csv2list() itemsets_len = itemsets.__len__() def build1deg(itemsets): SAVE_PATH = \"./one_deg_support.txt\" one_deg =", "rules[rule] = conf with open(SAVE_PATH, \"w\") as fw: for k, v in rules.items():", "two_deg[key] = two_deg.get(key, 0) + 1 pairs = list(two_deg.keys()) two_deg_count = 0 with", "range(j+1, len(itemset_3)): item_i = itemset_3[i] item_j = itemset_3[j] item_k = itemset_3[k] for itemset", "pair[0]) conf = two_deg[pair] / one_deg[rule[0]] if conf > CONF: rules[rule] = conf", "def 
enumTup(tup): return [ (tup, tup[0], (tup[1], tup[2])), (tup, tup[1], (tup[0], tup[2])), (tup,", "(tup[0], tup[1])), (tup, (tup[1], tup[2]), tup[0]), (tup, (tup[0], tup[2]), tup[1]), (tup, (tup[0], tup[1]),", "for item in items: support = one_deg[item] / itemsets_len if support > SUPPORT:", "{len(rules.keys())}\", ) print(f\"频繁二项集规则保存在`{SAVE_PATH}`\") gen2deg_rules(one_deg, two_deg) def build3deg(two_deg, itemsets): SAVE_PATH = \"./three_deg_support.txt\" pairs =", "two in rules[:3]: conf = three_deg[three] / one_deg[one] if conf > CONF: fw.write(f\"{one}->{two}:", "conf = three_deg[three] / two_deg[(two[1], two[0])] except: print(two, \"not found\") if conf >", "SAVE_PATH = \"./one_deg_support.txt\" one_deg = {} for itemset in itemsets: for item in", "for itemset in itemsets: if item_i in itemset and item_j in itemset and", "fw: for item in items: support = one_deg[item] / itemsets_len if support >", "in itemset and key[1] in itemset: two_deg[key] = two_deg.get(key, 0) + 1 pairs", "tup[0], (tup[1], tup[2])), (tup, tup[1], (tup[0], tup[2])), (tup, tup[2], (tup[0], tup[1])), (tup, (tup[1],", "for itemset in itemsets: for item in itemset: one_deg[item] = one_deg.get(item, 0) +", "support > SUPPORT: one_deg[item] = support fw.write(f\"{item}: {support}\\n\") one_deg_count += 1 else: del", "= two_deg[pair] / itemsets_len if support > SUPPORT: two_deg[pair] = support fw.write(f\"{pair}: {support}\\n\")", "conf = two_deg[pair] / one_deg[rule[0]] if conf > CONF: rules[rule] = conf rule", "if support > SUPPORT: two_deg[pair] = support fw.write(f\"{pair}: {support}\\n\") two_deg_count += 1 else:", "try: conf = three_deg[three] / two_deg[two] except: try: conf = three_deg[three] / two_deg[(two[1],", "[] for itemset_str in df[\"items\"]: itemsets.append(set(itemset_str[1:-1].split(\",\"))) return itemsets itemsets = csv2list() itemsets_len =", "/ two_deg[(two[1], two[0])] except: print(two, \"not found\") if conf > CONF: fw.write(f\"{two}->{one}: {conf}\\n\")", "one_deg = {} for itemset in itemsets: for item in itemset: one_deg[item] =", "for pair in pairs: support = two_deg[pair] / itemsets_len if support > SUPPORT:", "one_deg one_deg = build1deg(itemsets) def build2deg(one_deg, itemsets): SAVE_PATH = \"./two_deg_support.txt\" items = list(one_deg.keys())", "= list(three_deg.keys()) with open(SAVE_PATH, \"w\") as fw: for tup in tups: support =", "for k, v in rules.items(): fw.write(f\"{k[0]}->{k[1]}: {v}\\n\") print(f\"频繁二项集规则数量: {len(rules.keys())}\", ) print(f\"频繁二项集规则保存在`{SAVE_PATH}`\") gen2deg_rules(one_deg, two_deg)", "itemset in itemsets: for item in itemset: one_deg[item] = one_deg.get(item, 0) + 1", "with open(SAVE_PATH, \"w\") as fw: for k, v in rules.items(): fw.write(f\"{k[0]}->{k[1]}: {v}\\n\") print(f\"频繁二项集规则数量:", "0) + 1 pairs = list(two_deg.keys()) two_deg_count = 0 with open(SAVE_PATH, \"w\") as", "\"./one_deg_support.txt\" one_deg = {} for itemset in itemsets: for item in itemset: one_deg[item]", "conf > CONF: rules[rule] = conf rule = (pair[1], pair[0]) conf = two_deg[pair]", "two_deg[pair] / one_deg[rule[0]] if conf > CONF: rules[rule] = conf rule = (pair[1],", "= support fw.write(f\"{tup}: {support}\\n\") three_deg_count += 1 else: del three_deg[tup] print(f\"频繁三项集数量: {three_deg_count}\", )", "in pairs: itemset_3.add(pair[0]) itemset_3.add(pair[1]) itemset_3 = list(itemset_3) itemset_3.sort() three_deg = {} for i", "build1deg(itemsets) def build2deg(one_deg, itemsets): SAVE_PATH = \"./two_deg_support.txt\" items = list(one_deg.keys()) two_deg = {}", "= {} for pair in pairs: rule 
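# build3deg() enumerates all triples over items seen in any frequent pair,
# which ignores most of the Apriori property: a triple can only be frequent
# if all three of its sub-pairs are frequent. A pruning sketch (not the
# original code; pair keys are normalized via frozenset because two_deg
# stores each pair in one orientation only):
def prune_triples(itemset_3, two_deg):
    frequent_pairs = {frozenset(pair) for pair in two_deg}
    for i, a in enumerate(itemset_3):
        for j in range(i + 1, len(itemset_3)):
            b = itemset_3[j]
            if frozenset((a, b)) not in frequent_pairs:
                continue
            for k in range(j + 1, len(itemset_3)):
                c = itemset_3[k]
                if (frozenset((a, c)) in frequent_pairs
                        and frozenset((b, c)) in frequent_pairs):
                    yield (a, b, c)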
= (pair[0], pair[1]) conf = two_deg[pair]", "two_deg, three_deg): SAVE_PATH = \"./three_deg_rules.txt\" tups = list(three_deg.keys()) rules = {} def enumTup(tup):", "tup in tups: rules = enumTup(tup) for three, one, two in rules[:3]: conf", "one_deg_count += 1 else: del one_deg[item] print(f\"频繁一项集数量: {one_deg_count}\", ) print(f\"频繁一项集保存在`{SAVE_PATH}`\") return one_deg one_deg", "/ itemsets_len if support > SUPPORT: one_deg[item] = support fw.write(f\"{item}: {support}\\n\") one_deg_count +=", "for i in range(0, len(itemset_3)): for j in range(i+1, len(itemset_3)): for k in", "for tup in tups: rules = enumTup(tup) for three, one, two in rules[:3]:", "return three_deg three_deg = build3deg(two_deg, itemsets) def gen3deg_rules(one_deg, two_deg, three_deg): SAVE_PATH = \"./three_deg_rules.txt\"", "CONF: rules[rule] = conf with open(SAVE_PATH, \"w\") as fw: for k, v in", "fw: for pair in pairs: support = two_deg[pair] / itemsets_len if support >", "list(two_deg.keys()) itemset_3 = set() for pair in pairs: itemset_3.add(pair[0]) itemset_3.add(pair[1]) itemset_3 = list(itemset_3)", "itemset in itemsets: if key[0] in itemset and key[1] in itemset: two_deg[key] =", "[ (tup, tup[0], (tup[1], tup[2])), (tup, tup[1], (tup[0], tup[2])), (tup, tup[2], (tup[0], tup[1])),", "(tup[1], tup[2])), (tup, tup[1], (tup[0], tup[2])), (tup, tup[2], (tup[0], tup[1])), (tup, (tup[1], tup[2]),", "three_deg[tup] = three_deg.get(tup, 0)+1 three_deg_count = 0 tups = list(three_deg.keys()) with open(SAVE_PATH, \"w\")", "= three_deg[three] / two_deg[two] except: try: conf = three_deg[three] / two_deg[(two[1], two[0])] except:", "rules[:3]: conf = three_deg[three] / one_deg[one] if conf > CONF: fw.write(f\"{one}->{two}: {conf}\\n\") three_deg_rule_num", "print(f\"频繁二项集保存在`{SAVE_PATH}`\") return two_deg two_deg = build2deg(one_deg, itemsets) def gen2deg_rules(one_deg, two_deg): SAVE_PATH = \"./two_deg_rules.txt\"", "itemset and key[1] in itemset: two_deg[key] = two_deg.get(key, 0) + 1 pairs =", "> SUPPORT: two_deg[pair] = support fw.write(f\"{pair}: {support}\\n\") two_deg_count += 1 else: del two_deg[pair]", "(tup, tup[2], (tup[0], tup[1])), (tup, (tup[1], tup[2]), tup[0]), (tup, (tup[0], tup[2]), tup[1]), (tup,", "0) + 1 one_deg_count = 0 items = list(one_deg.keys()) with open(SAVE_PATH, \"w\") as", "for itemset_str in df[\"items\"]: itemsets.append(set(itemset_str[1:-1].split(\",\"))) return itemsets itemsets = csv2list() itemsets_len = itemsets.__len__()", "itemsets itemsets = csv2list() itemsets_len = itemsets.__len__() def build1deg(itemsets): SAVE_PATH = \"./one_deg_support.txt\" one_deg", "= three_deg.get(tup, 0)+1 three_deg_count = 0 tups = list(three_deg.keys()) with open(SAVE_PATH, \"w\") as", "SUPPORT = 0.005 CONF = 0.5 def csv2list(): df = pd.read_csv(\"./实验三/数据/Groceries.csv\") itemsets =", "support fw.write(f\"{item}: {support}\\n\") one_deg_count += 1 else: del one_deg[item] print(f\"频繁一项集数量: {one_deg_count}\", ) print(f\"频繁一项集保存在`{SAVE_PATH}`\")", "itemsets): SAVE_PATH = \"./three_deg_support.txt\" pairs = list(two_deg.keys()) itemset_3 = set() for pair in", "SAVE_PATH = \"./three_deg_support.txt\" pairs = list(two_deg.keys()) itemset_3 = set() for pair in pairs:", "= {} for i in range(0, len(items)): for j in range(i+1, len(items)): key", "= one_deg[item] / itemsets_len if support > SUPPORT: one_deg[item] = support fw.write(f\"{item}: {support}\\n\")", "(pair[0], pair[1]) conf = two_deg[pair] / one_deg[rule[0]] if conf > CONF: rules[rule] =", "itemset in itemsets: if item_i in itemset and item_j in 
itemset and item_k", "= list(two_deg.keys()) rules = {} for pair in pairs: rule = (pair[0], pair[1])", "1 one_deg_count = 0 items = list(one_deg.keys()) with open(SAVE_PATH, \"w\") as fw: for", "three_deg[tup] / itemsets_len if support > SUPPORT: three_deg[tup] = support fw.write(f\"{tup}: {support}\\n\") three_deg_count", "three_deg three_deg = build3deg(two_deg, itemsets) def gen3deg_rules(one_deg, two_deg, three_deg): SAVE_PATH = \"./three_deg_rules.txt\" tups", "1 for three, two, one in rules[3:]: try: conf = three_deg[three] / two_deg[two]", "for j in range(i+1, len(items)): key = (items[i], items[j]) for itemset in itemsets:", "fw.write(f\"{pair}: {support}\\n\") two_deg_count += 1 else: del two_deg[pair] print(f\"频繁二项集数量: {two_deg_count}\", ) print(f\"频繁二项集保存在`{SAVE_PATH}`\") return", "(tup, tup[1], (tup[0], tup[2])), (tup, tup[2], (tup[0], tup[1])), (tup, (tup[1], tup[2]), tup[0]), (tup,", "itemsets.__len__() def build1deg(itemsets): SAVE_PATH = \"./one_deg_support.txt\" one_deg = {} for itemset in itemsets:", "one_deg[item] / itemsets_len if support > SUPPORT: one_deg[item] = support fw.write(f\"{item}: {support}\\n\") one_deg_count", "def build1deg(itemsets): SAVE_PATH = \"./one_deg_support.txt\" one_deg = {} for itemset in itemsets: for", "in range(i+1, len(itemset_3)): for k in range(j+1, len(itemset_3)): item_i = itemset_3[i] item_j =", "\"w\") as fw: for tup in tups: support = three_deg[tup] / itemsets_len if", "item_k = itemset_3[k] for itemset in itemsets: if item_i in itemset and item_j", "list(three_deg.keys()) with open(SAVE_PATH, \"w\") as fw: for tup in tups: support = three_deg[tup]", "for pair in pairs: rule = (pair[0], pair[1]) conf = two_deg[pair] / one_deg[rule[0]]", "item_j = itemset_3[j] item_k = itemset_3[k] for itemset in itemsets: if item_i in", "in range(0, len(items)): for j in range(i+1, len(items)): key = (items[i], items[j]) for", "rules[rule] = conf rule = (pair[1], pair[0]) conf = two_deg[pair] / one_deg[rule[0]] if", "two_deg = build2deg(one_deg, itemsets) def gen2deg_rules(one_deg, two_deg): SAVE_PATH = \"./two_deg_rules.txt\" pairs = list(two_deg.keys())", "tup[1], (tup[0], tup[2])), (tup, tup[2], (tup[0], tup[1])), (tup, (tup[1], tup[2]), tup[0]), (tup, (tup[0],", "item in itemset: one_deg[item] = one_deg.get(item, 0) + 1 one_deg_count = 0 items", "in itemsets: if key[0] in itemset and key[1] in itemset: two_deg[key] = two_deg.get(key,", "else: del one_deg[item] print(f\"频繁一项集数量: {one_deg_count}\", ) print(f\"频繁一项集保存在`{SAVE_PATH}`\") return one_deg one_deg = build1deg(itemsets) def", "0.5 def csv2list(): df = pd.read_csv(\"./实验三/数据/Groceries.csv\") itemsets = [] for itemset_str in df[\"items\"]:", "= build3deg(two_deg, itemsets) def gen3deg_rules(one_deg, two_deg, three_deg): SAVE_PATH = \"./three_deg_rules.txt\" tups = list(three_deg.keys())", "in itemset: one_deg[item] = one_deg.get(item, 0) + 1 one_deg_count = 0 items =", "\"./three_deg_support.txt\" pairs = list(two_deg.keys()) itemset_3 = set() for pair in pairs: itemset_3.add(pair[0]) itemset_3.add(pair[1])", "in itemset and item_j in itemset and item_k in itemset: tup = (item_i,", "three_deg[three] / one_deg[one] if conf > CONF: fw.write(f\"{one}->{two}: {conf}\\n\") three_deg_rule_num += 1 for", "pair[1]) conf = two_deg[pair] / one_deg[rule[0]] if conf > CONF: rules[rule] = conf", "three_deg[tup] = support fw.write(f\"{tup}: {support}\\n\") three_deg_count += 1 else: del three_deg[tup] print(f\"频繁三项集数量: {three_deg_count}\",", "= 0 with open(SAVE_PATH, \"w\") as fw: for pair in pairs: 
support =", "= pd.read_csv(\"./实验三/数据/Groceries.csv\") itemsets = [] for itemset_str in df[\"items\"]: itemsets.append(set(itemset_str[1:-1].split(\",\"))) return itemsets itemsets", "itemsets: if item_i in itemset and item_j in itemset and item_k in itemset:", "tup[2]), tup[0]), (tup, (tup[0], tup[2]), tup[1]), (tup, (tup[0], tup[1]), tup[2]), ] three_deg_rule_num =", "two_deg[pair] / itemsets_len if support > SUPPORT: two_deg[pair] = support fw.write(f\"{pair}: {support}\\n\") two_deg_count", "del one_deg[item] print(f\"频繁一项集数量: {one_deg_count}\", ) print(f\"频繁一项集保存在`{SAVE_PATH}`\") return one_deg one_deg = build1deg(itemsets) def build2deg(one_deg,", "tups: support = three_deg[tup] / itemsets_len if support > SUPPORT: three_deg[tup] = support", "print(two, \"not found\") if conf > CONF: fw.write(f\"{two}->{one}: {conf}\\n\") three_deg_rule_num += 1 print(f\"频繁三项集规则数量:", "itemsets_len if support > SUPPORT: three_deg[tup] = support fw.write(f\"{tup}: {support}\\n\") three_deg_count += 1", "import pandas as pd SUPPORT = 0.005 CONF = 0.5 def csv2list(): df", "and item_j in itemset and item_k in itemset: tup = (item_i, item_j, item_k)", "items: support = one_deg[item] / itemsets_len if support > SUPPORT: one_deg[item] = support", "and item_k in itemset: tup = (item_i, item_j, item_k) three_deg[tup] = three_deg.get(tup, 0)+1", "list(itemset_3) itemset_3.sort() three_deg = {} for i in range(0, len(itemset_3)): for j in", "range(i+1, len(itemset_3)): for k in range(j+1, len(itemset_3)): item_i = itemset_3[i] item_j = itemset_3[j]", "for j in range(i+1, len(itemset_3)): for k in range(j+1, len(itemset_3)): item_i = itemset_3[i]", "= three_deg[tup] / itemsets_len if support > SUPPORT: three_deg[tup] = support fw.write(f\"{tup}: {support}\\n\")", "open(SAVE_PATH, \"w\") as fw: for tup in tups: rules = enumTup(tup) for three,", "del three_deg[tup] print(f\"频繁三项集数量: {three_deg_count}\", ) print(f\"频繁三项集保存在`{SAVE_PATH}`\") return three_deg three_deg = build3deg(two_deg, itemsets) def", "in rules.items(): fw.write(f\"{k[0]}->{k[1]}: {v}\\n\") print(f\"频繁二项集规则数量: {len(rules.keys())}\", ) print(f\"频繁二项集规则保存在`{SAVE_PATH}`\") gen2deg_rules(one_deg, two_deg) def build3deg(two_deg, itemsets):", "df = pd.read_csv(\"./实验三/数据/Groceries.csv\") itemsets = [] for itemset_str in df[\"items\"]: itemsets.append(set(itemset_str[1:-1].split(\",\"))) return itemsets", "def csv2list(): df = pd.read_csv(\"./实验三/数据/Groceries.csv\") itemsets = [] for itemset_str in df[\"items\"]: itemsets.append(set(itemset_str[1:-1].split(\",\")))", "+= 1 else: del one_deg[item] print(f\"频繁一项集数量: {one_deg_count}\", ) print(f\"频繁一项集保存在`{SAVE_PATH}`\") return one_deg one_deg =", "itemset_3[j] item_k = itemset_3[k] for itemset in itemsets: if item_i in itemset and" ]
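# A minimal sketch (separate from the script above) of the support/confidence
# arithmetic that the SUPPORT and CONF thresholds gate on. The five-transaction
# basket and the `toy_support` helper are illustrative only, not from the script.
toy_itemsets = [
    {"milk", "bread"},
    {"milk", "bread", "butter"},
    {"bread"},
    {"milk", "butter"},
    {"milk", "bread"},
]
n = len(toy_itemsets)


def toy_support(items):
    # Fraction of transactions containing every item in `items`.
    return sum(1 for t in toy_itemsets if items <= t) / n


# support({milk, bread}) = 3/5; confidence(milk -> bread) = support(both)/support(milk).
print(toy_support({"milk", "bread"}))                          # 0.6
print(toy_support({"milk", "bread"}) / toy_support({"milk"}))  # 0.75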
[ "import LBFGSOptimizeAcquisition from ..models.meanstd_acqfunc_impl import EIAcquisitionFunction DEFAULT_ACQUISITION_FUNCTION = EIAcquisitionFunction DEFAULT_LOCAL_OPTIMIZER_CLASS = LBFGSOptimizeAcquisition DEFAULT_NUM_INITIAL_CANDIDATES", "from .bo_algorithm_components import LBFGSOptimizeAcquisition from ..models.meanstd_acqfunc_impl import EIAcquisitionFunction DEFAULT_ACQUISITION_FUNCTION = EIAcquisitionFunction DEFAULT_LOCAL_OPTIMIZER_CLASS =", "from ..models.meanstd_acqfunc_impl import EIAcquisitionFunction DEFAULT_ACQUISITION_FUNCTION = EIAcquisitionFunction DEFAULT_LOCAL_OPTIMIZER_CLASS = LBFGSOptimizeAcquisition DEFAULT_NUM_INITIAL_CANDIDATES = 250", ".bo_algorithm_components import LBFGSOptimizeAcquisition from ..models.meanstd_acqfunc_impl import EIAcquisitionFunction DEFAULT_ACQUISITION_FUNCTION = EIAcquisitionFunction DEFAULT_LOCAL_OPTIMIZER_CLASS = LBFGSOptimizeAcquisition", "LBFGSOptimizeAcquisition from ..models.meanstd_acqfunc_impl import EIAcquisitionFunction DEFAULT_ACQUISITION_FUNCTION = EIAcquisitionFunction DEFAULT_LOCAL_OPTIMIZER_CLASS = LBFGSOptimizeAcquisition DEFAULT_NUM_INITIAL_CANDIDATES =", "EIAcquisitionFunction DEFAULT_ACQUISITION_FUNCTION = EIAcquisitionFunction DEFAULT_LOCAL_OPTIMIZER_CLASS = LBFGSOptimizeAcquisition DEFAULT_NUM_INITIAL_CANDIDATES = 250 DEFAULT_NUM_INITIAL_RANDOM_EVALUATIONS = 3", "<gh_stars>1000+ from .bo_algorithm_components import LBFGSOptimizeAcquisition from ..models.meanstd_acqfunc_impl import EIAcquisitionFunction DEFAULT_ACQUISITION_FUNCTION = EIAcquisitionFunction DEFAULT_LOCAL_OPTIMIZER_CLASS", "import EIAcquisitionFunction DEFAULT_ACQUISITION_FUNCTION = EIAcquisitionFunction DEFAULT_LOCAL_OPTIMIZER_CLASS = LBFGSOptimizeAcquisition DEFAULT_NUM_INITIAL_CANDIDATES = 250 DEFAULT_NUM_INITIAL_RANDOM_EVALUATIONS =", "..models.meanstd_acqfunc_impl import EIAcquisitionFunction DEFAULT_ACQUISITION_FUNCTION = EIAcquisitionFunction DEFAULT_LOCAL_OPTIMIZER_CLASS = LBFGSOptimizeAcquisition DEFAULT_NUM_INITIAL_CANDIDATES = 250 DEFAULT_NUM_INITIAL_RANDOM_EVALUATIONS" ]
[ "# Patients with Autism autism = patients.with_these_clinical_events( Autism_codes, on_or_before=index_date, returning=\"binary_flag\", return_expectations={\"incidence\": 0.5}, )", ") # Patients with Autism autism = patients.with_these_clinical_events( Autism_codes, on_or_before=index_date, returning=\"binary_flag\", return_expectations={\"incidence\": 0.5},", "{\"earliest\": \"2019-01-01\", \"latest\": end_date}, \"rate\": \"uniform\", \"incidence\": 0.5, }, population=patients.registered_with_one_practice_between( \"2019-01-01\", end_date ),", ") # Define patient populations # Patients with a learning disability learning_disability =", "\"2019-01-01\", end_date ), ) # Set index date index_date = \"2019-01-01\" # Define", "combine_codelists, Measure # NOQA # Import codelists from codelist.py folder from codelists import", "Define Study population and variables study = StudyDefinition( default_expectations={ \"date\": {\"earliest\": \"2019-01-01\", \"latest\":", "population and variables study = StudyDefinition( default_expectations={ \"date\": {\"earliest\": \"2019-01-01\", \"latest\": end_date}, \"rate\":", "codelist, codelist_from_csv, filter_codes_by_category, combine_codelists, Measure # NOQA # Import codelists from codelist.py folder", "patients, codelist, codelist_from_csv, filter_codes_by_category, combine_codelists, Measure # NOQA # Import codelists from codelist.py", "StudyDefinition, patients, codelist, codelist_from_csv, filter_codes_by_category, combine_codelists, Measure # NOQA # Import codelists from", "Set index date index_date = \"2019-01-01\" # Define Medication variables # Patients who", "Study population and variables study = StudyDefinition( default_expectations={ \"date\": {\"earliest\": \"2019-01-01\", \"latest\": end_date},", "variables # Patients who are taking SSRIs SSRI_cohort = patients.with_these_medications( SSRI_codes, on_or_before=index_date, returning=\"binary_flag\",", "= patients.with_these_clinical_events( LD_codes, on_or_before=index_date, returning=\"binary_flag\", return_expectations={\"incidence\": 0.5} ) # Patients with Autism autism", "index_date = \"2019-01-01\" # Define Medication variables # Patients who are taking SSRIs", "returning=\"binary_flag\", return_expectations={\"incidence\": 0.5} ) # Patients with Autism autism = patients.with_these_clinical_events( Autism_codes, on_or_before=index_date,", "# Define Medication variables # Patients who are taking SSRIs SSRI_cohort = patients.with_these_medications(", "patients.with_these_medications( SSRI_codes, on_or_before=index_date, returning=\"binary_flag\", return_expectations={\"incidence\": 0.5} ) # Define patient populations # Patients", "0.5} ) # Define patient populations # Patients with a learning disability learning_disability", "learning disability learning_disability = patients.with_these_clinical_events( LD_codes, on_or_before=index_date, returning=\"binary_flag\", return_expectations={\"incidence\": 0.5} ) # Patients", "codelists from codelist.py folder from codelists import SSRI_codes, LD_codes, Autism_codes # Define Study", "Study time variables from datetime import datetime end_date = datetime.today().strftime('%Y-%m-%d') # Define Study", "0.5} ) # Patients with Autism autism = patients.with_these_clinical_events( Autism_codes, on_or_before=index_date, returning=\"binary_flag\", return_expectations={\"incidence\":", "\"date\": {\"earliest\": \"2019-01-01\", \"latest\": end_date}, \"rate\": \"uniform\", \"incidence\": 0.5, }, 
population=patients.registered_with_one_practice_between( \"2019-01-01\", end_date", "study = StudyDefinition( default_expectations={ \"date\": {\"earliest\": \"2019-01-01\", \"latest\": end_date}, \"rate\": \"uniform\", \"incidence\": 0.5,", "variables study = StudyDefinition( default_expectations={ \"date\": {\"earliest\": \"2019-01-01\", \"latest\": end_date}, \"rate\": \"uniform\", \"incidence\":", "variables from datetime import datetime end_date = datetime.today().strftime('%Y-%m-%d') # Define Study population and", "# Define Study time variables from datetime import datetime end_date = datetime.today().strftime('%Y-%m-%d') #", "\"incidence\": 0.5, }, population=patients.registered_with_one_practice_between( \"2019-01-01\", end_date ), ) # Set index date index_date", "Define Medication variables # Patients who are taking SSRIs SSRI_cohort = patients.with_these_medications( SSRI_codes,", "returning=\"binary_flag\", return_expectations={\"incidence\": 0.5} ) # Define patient populations # Patients with a learning", "are taking SSRIs SSRI_cohort = patients.with_these_medications( SSRI_codes, on_or_before=index_date, returning=\"binary_flag\", return_expectations={\"incidence\": 0.5} ) #", "patient populations # Patients with a learning disability learning_disability = patients.with_these_clinical_events( LD_codes, on_or_before=index_date,", "Import codelists from codelist.py folder from codelists import SSRI_codes, LD_codes, Autism_codes # Define", "SSRI_codes, LD_codes, Autism_codes # Define Study time variables from datetime import datetime end_date", "Medication variables # Patients who are taking SSRIs SSRI_cohort = patients.with_these_medications( SSRI_codes, on_or_before=index_date,", "from cohortextractor import StudyDefinition, patients, codelist, codelist_from_csv, filter_codes_by_category, combine_codelists, Measure # NOQA #", "= StudyDefinition( default_expectations={ \"date\": {\"earliest\": \"2019-01-01\", \"latest\": end_date}, \"rate\": \"uniform\", \"incidence\": 0.5, },", "LD_codes, on_or_before=index_date, returning=\"binary_flag\", return_expectations={\"incidence\": 0.5} ) # Patients with Autism autism = patients.with_these_clinical_events(", "= datetime.today().strftime('%Y-%m-%d') # Define Study population and variables study = StudyDefinition( default_expectations={ \"date\":", "# Define Study population and variables study = StudyDefinition( default_expectations={ \"date\": {\"earliest\": \"2019-01-01\",", "return_expectations={\"incidence\": 0.5} ) # Patients with Autism autism = patients.with_these_clinical_events( Autism_codes, on_or_before=index_date, returning=\"binary_flag\",", "\"latest\": end_date}, \"rate\": \"uniform\", \"incidence\": 0.5, }, population=patients.registered_with_one_practice_between( \"2019-01-01\", end_date ), ) #", "import datetime end_date = datetime.today().strftime('%Y-%m-%d') # Define Study population and variables study =", "codelist.py folder from codelists import SSRI_codes, LD_codes, Autism_codes # Define Study time variables", "cohortextractor import StudyDefinition, patients, codelist, codelist_from_csv, filter_codes_by_category, combine_codelists, Measure # NOQA # Import", "codelist_from_csv, filter_codes_by_category, combine_codelists, Measure # NOQA # Import codelists from codelist.py folder from", "datetime end_date = datetime.today().strftime('%Y-%m-%d') # Define Study population and variables study = StudyDefinition(", "datetime import datetime end_date = datetime.today().strftime('%Y-%m-%d') # Define Study population 
and variables study", "0.5, }, population=patients.registered_with_one_practice_between( \"2019-01-01\", end_date ), ) # Set index date index_date =", "SSRI_cohort = patients.with_these_medications( SSRI_codes, on_or_before=index_date, returning=\"binary_flag\", return_expectations={\"incidence\": 0.5} ) # Define patient populations", "on_or_before=index_date, returning=\"binary_flag\", return_expectations={\"incidence\": 0.5} ) # Patients with Autism autism = patients.with_these_clinical_events( Autism_codes,", "}, population=patients.registered_with_one_practice_between( \"2019-01-01\", end_date ), ) # Set index date index_date = \"2019-01-01\"", "Autism_codes # Define Study time variables from datetime import datetime end_date = datetime.today().strftime('%Y-%m-%d')", "from datetime import datetime end_date = datetime.today().strftime('%Y-%m-%d') # Define Study population and variables", "SSRI_codes, on_or_before=index_date, returning=\"binary_flag\", return_expectations={\"incidence\": 0.5} ) # Define patient populations # Patients with", "with a learning disability learning_disability = patients.with_these_clinical_events( LD_codes, on_or_before=index_date, returning=\"binary_flag\", return_expectations={\"incidence\": 0.5} )", "on_or_before=index_date, returning=\"binary_flag\", return_expectations={\"incidence\": 0.5} ) # Define patient populations # Patients with a", "populations # Patients with a learning disability learning_disability = patients.with_these_clinical_events( LD_codes, on_or_before=index_date, returning=\"binary_flag\",", "datetime.today().strftime('%Y-%m-%d') # Define Study population and variables study = StudyDefinition( default_expectations={ \"date\": {\"earliest\":", "end_date ), ) # Set index date index_date = \"2019-01-01\" # Define Medication", "Define Study time variables from datetime import datetime end_date = datetime.today().strftime('%Y-%m-%d') # Define", "disability learning_disability = patients.with_these_clinical_events( LD_codes, on_or_before=index_date, returning=\"binary_flag\", return_expectations={\"incidence\": 0.5} ) # Patients with", "time variables from datetime import datetime end_date = datetime.today().strftime('%Y-%m-%d') # Define Study population", "learning_disability = patients.with_these_clinical_events( LD_codes, on_or_before=index_date, returning=\"binary_flag\", return_expectations={\"incidence\": 0.5} ) # Patients with Autism", "Define patient populations # Patients with a learning disability learning_disability = patients.with_these_clinical_events( LD_codes,", "population=patients.registered_with_one_practice_between( \"2019-01-01\", end_date ), ) # Set index date index_date = \"2019-01-01\" #", "LD_codes, Autism_codes # Define Study time variables from datetime import datetime end_date =", "# Set index date index_date = \"2019-01-01\" # Define Medication variables # Patients", "\"uniform\", \"incidence\": 0.5, }, population=patients.registered_with_one_practice_between( \"2019-01-01\", end_date ), ) # Set index date", "import SSRI_codes, LD_codes, Autism_codes # Define Study time variables from datetime import datetime", "end_date}, \"rate\": \"uniform\", \"incidence\": 0.5, }, population=patients.registered_with_one_practice_between( \"2019-01-01\", end_date ), ) # Set", "StudyDefinition( default_expectations={ \"date\": {\"earliest\": \"2019-01-01\", \"latest\": end_date}, \"rate\": \"uniform\", \"incidence\": 0.5, }, population=patients.registered_with_one_practice_between(", "codelists import SSRI_codes, LD_codes, 
Autism_codes # Define Study time variables from datetime import", "Patients who are taking SSRIs SSRI_cohort = patients.with_these_medications( SSRI_codes, on_or_before=index_date, returning=\"binary_flag\", return_expectations={\"incidence\": 0.5}", "import StudyDefinition, patients, codelist, codelist_from_csv, filter_codes_by_category, combine_codelists, Measure # NOQA # Import codelists", "a learning disability learning_disability = patients.with_these_clinical_events( LD_codes, on_or_before=index_date, returning=\"binary_flag\", return_expectations={\"incidence\": 0.5} ) #", "from codelists import SSRI_codes, LD_codes, Autism_codes # Define Study time variables from datetime", "end_date = datetime.today().strftime('%Y-%m-%d') # Define Study population and variables study = StudyDefinition( default_expectations={", "NOQA # Import codelists from codelist.py folder from codelists import SSRI_codes, LD_codes, Autism_codes", "from codelist.py folder from codelists import SSRI_codes, LD_codes, Autism_codes # Define Study time", "), ) # Set index date index_date = \"2019-01-01\" # Define Medication variables", "Measure # NOQA # Import codelists from codelist.py folder from codelists import SSRI_codes,", "folder from codelists import SSRI_codes, LD_codes, Autism_codes # Define Study time variables from", "<filename>analysis/study_definition.py<gh_stars>0 from cohortextractor import StudyDefinition, patients, codelist, codelist_from_csv, filter_codes_by_category, combine_codelists, Measure # NOQA", "return_expectations={\"incidence\": 0.5} ) # Define patient populations # Patients with a learning disability", "\"rate\": \"uniform\", \"incidence\": 0.5, }, population=patients.registered_with_one_practice_between( \"2019-01-01\", end_date ), ) # Set index", "= patients.with_these_medications( SSRI_codes, on_or_before=index_date, returning=\"binary_flag\", return_expectations={\"incidence\": 0.5} ) # Define patient populations #", "# Define patient populations # Patients with a learning disability learning_disability = patients.with_these_clinical_events(", "date index_date = \"2019-01-01\" # Define Medication variables # Patients who are taking", "who are taking SSRIs SSRI_cohort = patients.with_these_medications( SSRI_codes, on_or_before=index_date, returning=\"binary_flag\", return_expectations={\"incidence\": 0.5} )", "SSRIs SSRI_cohort = patients.with_these_medications( SSRI_codes, on_or_before=index_date, returning=\"binary_flag\", return_expectations={\"incidence\": 0.5} ) # Define patient", ") # Set index date index_date = \"2019-01-01\" # Define Medication variables #", "default_expectations={ \"date\": {\"earliest\": \"2019-01-01\", \"latest\": end_date}, \"rate\": \"uniform\", \"incidence\": 0.5, }, population=patients.registered_with_one_practice_between( \"2019-01-01\",", "\"2019-01-01\" # Define Medication variables # Patients who are taking SSRIs SSRI_cohort =", "filter_codes_by_category, combine_codelists, Measure # NOQA # Import codelists from codelist.py folder from codelists", "# Import codelists from codelist.py folder from codelists import SSRI_codes, LD_codes, Autism_codes #", "\"2019-01-01\", \"latest\": end_date}, \"rate\": \"uniform\", \"incidence\": 0.5, }, population=patients.registered_with_one_practice_between( \"2019-01-01\", end_date ), )", "Patients with a learning disability learning_disability = patients.with_these_clinical_events( LD_codes, on_or_before=index_date, returning=\"binary_flag\", return_expectations={\"incidence\": 0.5}", "= \"2019-01-01\" # Define 
Medication variables # Patients who are taking SSRIs SSRI_cohort", "taking SSRIs SSRI_cohort = patients.with_these_medications( SSRI_codes, on_or_before=index_date, returning=\"binary_flag\", return_expectations={\"incidence\": 0.5} ) # Define", "and variables study = StudyDefinition( default_expectations={ \"date\": {\"earliest\": \"2019-01-01\", \"latest\": end_date}, \"rate\": \"uniform\",", "index date index_date = \"2019-01-01\" # Define Medication variables # Patients who are", "# Patients with a learning disability learning_disability = patients.with_these_clinical_events( LD_codes, on_or_before=index_date, returning=\"binary_flag\", return_expectations={\"incidence\":", "# NOQA # Import codelists from codelist.py folder from codelists import SSRI_codes, LD_codes,", "# Patients who are taking SSRIs SSRI_cohort = patients.with_these_medications( SSRI_codes, on_or_before=index_date, returning=\"binary_flag\", return_expectations={\"incidence\":", "patients.with_these_clinical_events( LD_codes, on_or_before=index_date, returning=\"binary_flag\", return_expectations={\"incidence\": 0.5} ) # Patients with Autism autism =" ]
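# `Measure` is imported above but never used; a hedged sketch of how it is
# commonly wired up in cohortextractor, computing the SSRI rate grouped by
# learning disability status. The measure id and this `measures` list are
# assumptions for illustration, not part of the original study definition.
measures = [
    Measure(
        id="ssri_by_ld",
        numerator="SSRI_cohort",
        denominator="population",
        group_by="learning_disability",
    ),
]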
[ "@ GitHub : https://github.com/JackyPJB @ Contact : <EMAIL> ------------------------------------------------- Description : ------------------------------------------------- \"\"\"", ": ------------------------------------------------- \"\"\" import datetime __author__ = 'Max_Pengjb' from app.models import db from", "required=True, verbose_name='用户id') streetName = db.StringField(max_length=512, required=True, verbose_name='地址') userName = db.StringField(max_length=128, required=True, verbose_name='收货人姓名') tel", "db.StringField(max_length=512, required=True, verbose_name='地址') userName = db.StringField(max_length=128, required=True, verbose_name='收货人姓名') tel = db.StringField(max_length=64, required=True, verbose_name='收货人手机号')", "db.BooleanField(default=False, required=True, verbose_name='是否默认地址') create_time = db.DateTimeField(default=datetime.datetime.now, verbose_name='创建时间') def __unicode__(self): return str(self.streetName) + str(self.userName)", "date : 2019/12/10 15:45 @ IDE : PyCharm @ GitHub : https://github.com/JackyPJB @", "datetime __author__ = 'Max_Pengjb' from app.models import db from app.models.User import User class", "= 'Max_Pengjb' from app.models import db from app.models.User import User class Address(db.Document): user_id", "pengj @ date : 2019/12/10 15:45 @ IDE : PyCharm @ GitHub :", "-*- \"\"\" ------------------------------------------------- @ Author : pengj @ date : 2019/12/10 15:45 @", "streetName = db.StringField(max_length=512, required=True, verbose_name='地址') userName = db.StringField(max_length=128, required=True, verbose_name='收货人姓名') tel = db.StringField(max_length=64,", "user_id = db.ReferenceField(User, required=True, verbose_name='用户id') streetName = db.StringField(max_length=512, required=True, verbose_name='地址') userName = db.StringField(max_length=128,", "Author : pengj @ date : 2019/12/10 15:45 @ IDE : PyCharm @", "= db.StringField(max_length=64, required=True, verbose_name='收货人手机号') isDefault = db.BooleanField(default=False, required=True, verbose_name='是否默认地址') create_time = db.DateTimeField(default=datetime.datetime.now, verbose_name='创建时间')", "isDefault = db.BooleanField(default=False, required=True, verbose_name='是否默认地址') create_time = db.DateTimeField(default=datetime.datetime.now, verbose_name='创建时间') def __unicode__(self): return str(self.streetName)", "required=True, verbose_name='收货人姓名') tel = db.StringField(max_length=64, required=True, verbose_name='收货人手机号') isDefault = db.BooleanField(default=False, required=True, verbose_name='是否默认地址') create_time", "Address(db.Document): user_id = db.ReferenceField(User, required=True, verbose_name='用户id') streetName = db.StringField(max_length=512, required=True, verbose_name='地址') userName =", "class Address(db.Document): user_id = db.ReferenceField(User, required=True, verbose_name='用户id') streetName = db.StringField(max_length=512, required=True, verbose_name='地址') userName", "verbose_name='收货人姓名') tel = db.StringField(max_length=64, required=True, verbose_name='收货人手机号') isDefault = db.BooleanField(default=False, required=True, verbose_name='是否默认地址') create_time =", "import datetime __author__ = 'Max_Pengjb' from app.models import db from app.models.User import User", "from app.models import db from app.models.User import User class Address(db.Document): user_id = db.ReferenceField(User,", "db from app.models.User import User class Address(db.Document): user_id = db.ReferenceField(User, required=True, verbose_name='用户id') streetName", "utf-8 -*- \"\"\" 
------------------------------------------------- @ Author : pengj @ date : 2019/12/10 15:45", "#!/usr/bin/env python # -*- coding: utf-8 -*- \"\"\" ------------------------------------------------- @ Author : pengj", "IDE : PyCharm @ GitHub : https://github.com/JackyPJB @ Contact : <EMAIL> ------------------------------------------------- Description", "https://github.com/JackyPJB @ Contact : <EMAIL> ------------------------------------------------- Description : ------------------------------------------------- \"\"\" import datetime __author__", "------------------------------------------------- Description : ------------------------------------------------- \"\"\" import datetime __author__ = 'Max_Pengjb' from app.models import", "------------------------------------------------- \"\"\" import datetime __author__ = 'Max_Pengjb' from app.models import db from app.models.User", "from app.models.User import User class Address(db.Document): user_id = db.ReferenceField(User, required=True, verbose_name='用户id') streetName =", "required=True, verbose_name='收货人手机号') isDefault = db.BooleanField(default=False, required=True, verbose_name='是否默认地址') create_time = db.DateTimeField(default=datetime.datetime.now, verbose_name='创建时间') def __unicode__(self):", "tel = db.StringField(max_length=64, required=True, verbose_name='收货人手机号') isDefault = db.BooleanField(default=False, required=True, verbose_name='是否默认地址') create_time = db.DateTimeField(default=datetime.datetime.now,", "Description : ------------------------------------------------- \"\"\" import datetime __author__ = 'Max_Pengjb' from app.models import db", ": PyCharm @ GitHub : https://github.com/JackyPJB @ Contact : <EMAIL> ------------------------------------------------- Description :", "2019/12/10 15:45 @ IDE : PyCharm @ GitHub : https://github.com/JackyPJB @ Contact :", ": pengj @ date : 2019/12/10 15:45 @ IDE : PyCharm @ GitHub", "\"\"\" import datetime __author__ = 'Max_Pengjb' from app.models import db from app.models.User import", "= db.StringField(max_length=512, required=True, verbose_name='地址') userName = db.StringField(max_length=128, required=True, verbose_name='收货人姓名') tel = db.StringField(max_length=64, required=True,", "-*- coding: utf-8 -*- \"\"\" ------------------------------------------------- @ Author : pengj @ date :", "# -*- coding: utf-8 -*- \"\"\" ------------------------------------------------- @ Author : pengj @ date", "verbose_name='地址') userName = db.StringField(max_length=128, required=True, verbose_name='收货人姓名') tel = db.StringField(max_length=64, required=True, verbose_name='收货人手机号') isDefault =", "GitHub : https://github.com/JackyPJB @ Contact : <EMAIL> ------------------------------------------------- Description : ------------------------------------------------- \"\"\" import", ": https://github.com/JackyPJB @ Contact : <EMAIL> ------------------------------------------------- Description : ------------------------------------------------- \"\"\" import datetime", "import User class Address(db.Document): user_id = db.ReferenceField(User, required=True, verbose_name='用户id') streetName = db.StringField(max_length=512, required=True,", "= db.BooleanField(default=False, required=True, verbose_name='是否默认地址') create_time = db.DateTimeField(default=datetime.datetime.now, verbose_name='创建时间') def __unicode__(self): return str(self.streetName) +", "\"\"\" ------------------------------------------------- @ Author : pengj @ date : 2019/12/10 15:45 @ IDE", "<EMAIL> ------------------------------------------------- 
Description : ------------------------------------------------- \"\"\" import datetime __author__ = 'Max_Pengjb' from app.models", "@ Contact : <EMAIL> ------------------------------------------------- Description : ------------------------------------------------- \"\"\" import datetime __author__ =", "userName = db.StringField(max_length=128, required=True, verbose_name='收货人姓名') tel = db.StringField(max_length=64, required=True, verbose_name='收货人手机号') isDefault = db.BooleanField(default=False,", "import db from app.models.User import User class Address(db.Document): user_id = db.ReferenceField(User, required=True, verbose_name='用户id')", "verbose_name='用户id') streetName = db.StringField(max_length=512, required=True, verbose_name='地址') userName = db.StringField(max_length=128, required=True, verbose_name='收货人姓名') tel =", "= db.ReferenceField(User, required=True, verbose_name='用户id') streetName = db.StringField(max_length=512, required=True, verbose_name='地址') userName = db.StringField(max_length=128, required=True,", "verbose_name='收货人手机号') isDefault = db.BooleanField(default=False, required=True, verbose_name='是否默认地址') create_time = db.DateTimeField(default=datetime.datetime.now, verbose_name='创建时间') def __unicode__(self): return", "db.StringField(max_length=64, required=True, verbose_name='收货人手机号') isDefault = db.BooleanField(default=False, required=True, verbose_name='是否默认地址') create_time = db.DateTimeField(default=datetime.datetime.now, verbose_name='创建时间') def", "Contact : <EMAIL> ------------------------------------------------- Description : ------------------------------------------------- \"\"\" import datetime __author__ = 'Max_Pengjb'", "User class Address(db.Document): user_id = db.ReferenceField(User, required=True, verbose_name='用户id') streetName = db.StringField(max_length=512, required=True, verbose_name='地址')", "= db.StringField(max_length=128, required=True, verbose_name='收货人姓名') tel = db.StringField(max_length=64, required=True, verbose_name='收货人手机号') isDefault = db.BooleanField(default=False, required=True,", "@ IDE : PyCharm @ GitHub : https://github.com/JackyPJB @ Contact : <EMAIL> -------------------------------------------------", "required=True, verbose_name='地址') userName = db.StringField(max_length=128, required=True, verbose_name='收货人姓名') tel = db.StringField(max_length=64, required=True, verbose_name='收货人手机号') isDefault", "app.models import db from app.models.User import User class Address(db.Document): user_id = db.ReferenceField(User, required=True,", "PyCharm @ GitHub : https://github.com/JackyPJB @ Contact : <EMAIL> ------------------------------------------------- Description : -------------------------------------------------", ": 2019/12/10 15:45 @ IDE : PyCharm @ GitHub : https://github.com/JackyPJB @ Contact", "db.ReferenceField(User, required=True, verbose_name='用户id') streetName = db.StringField(max_length=512, required=True, verbose_name='地址') userName = db.StringField(max_length=128, required=True, verbose_name='收货人姓名')", "@ Author : pengj @ date : 2019/12/10 15:45 @ IDE : PyCharm", "python # -*- coding: utf-8 -*- \"\"\" ------------------------------------------------- @ Author : pengj @", "__author__ = 'Max_Pengjb' from app.models import db from app.models.User import User class Address(db.Document):", "db.StringField(max_length=128, required=True, verbose_name='收货人姓名') tel = db.StringField(max_length=64, required=True, verbose_name='收货人手机号') isDefault = db.BooleanField(default=False, required=True, verbose_name='是否默认地址')", "@ date : 2019/12/10 
15:45 @ IDE : PyCharm @ GitHub : https://github.com/JackyPJB", "15:45 @ IDE : PyCharm @ GitHub : https://github.com/JackyPJB @ Contact : <EMAIL>", "app.models.User import User class Address(db.Document): user_id = db.ReferenceField(User, required=True, verbose_name='用户id') streetName = db.StringField(max_length=512,", "------------------------------------------------- @ Author : pengj @ date : 2019/12/10 15:45 @ IDE :", "'Max_Pengjb' from app.models import db from app.models.User import User class Address(db.Document): user_id =", ": <EMAIL> ------------------------------------------------- Description : ------------------------------------------------- \"\"\" import datetime __author__ = 'Max_Pengjb' from", "coding: utf-8 -*- \"\"\" ------------------------------------------------- @ Author : pengj @ date : 2019/12/10" ]
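# A minimal usage sketch for the Address document above, using only standard
# MongoEngine calls. It assumes a MongoDB connection is already configured and
# at least one User exists; `some_user` is a hypothetical name.
some_user = User.objects.first()

addr = Address(
    user_id=some_user,
    streetName='1 Example Road',
    userName='Zhang San',
    tel='13800000000',
    isDefault=True,
)
addr.save()  # persists the document to MongoDB

# Fetch the user's default shipping address, if any.
default_addr = Address.objects(user_id=some_user, isDefault=True).first()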
[ "import StreamExecutionEnvironment, TimeCharacteristic from pyflink.table import StreamTableEnvironment, DataTypes, EnvironmentSettings from pyflink.table.descriptors import (", "as fopen: dic = json.load(fopen) sentences = [string] x = np.zeros((len(sentences), maxlen)) for", "sess import tensorflow as tf import json import numpy as np def load_graph(frozen_graph_filename):", "or sess is None: g = load_graph('/notebooks/frozen_model.pb') X = g.get_tensor_by_name('import/Placeholder:0') Y = g.get_tensor_by_name('import/logits:0')", "-1 - no] = dic.get(k, UNK) indices = np.argmax(sess.run(Y, feed_dict = {X: x}),", ".property('bootstrap.servers', 'kafka:9092') ).with_format( Json() .fail_on_missing_field(True) .schema( DataTypes.ROW( [ DataTypes.FIELD('datetime', DataTypes.STRING()), DataTypes.FIELD('text', DataTypes.STRING()), ]", "np.argmax(sess.run(Y, feed_dict = {X: x}), axis = 1) return label[indices[0]] st_env.set_python_requirements('/notebooks/requirements.txt') st_env.register_function('predict', predict)", "= dic.get(k, UNK) indices = np.argmax(sess.run(Y, feed_dict = {X: x}), axis = 1)", "['negative', 'positive'] maxlen = 50 UNK = 3 with open('/notebooks/dictionary-test.json', 'r') as fopen:", "tensorflow as tf import json import numpy as np def load_graph(frozen_graph_filename): with tf.gfile.GFile(frozen_graph_filename,", "import udf s_env = StreamExecutionEnvironment.get_execution_environment() s_env.set_stream_time_characteristic(TimeCharacteristic.EventTime) s_env.set_parallelism(1) st_env = StreamTableEnvironment.create( s_env, environment_settings =", "DataTypes.STRING()) ).in_append_mode().register_table_source( 'source' ) result_path = '/notebooks/output-tensorflow.csv' t_env.connect(FileSystem().path(result_path)).with_format( OldCsv() .field_delimiter(',') .field('datetime', DataTypes.STRING()) .field('sentence',", "= 50 UNK = 3 with open('/notebooks/dictionary-test.json', 'r') as fopen: dic = json.load(fopen)", "load_graph(frozen_graph_filename): with tf.gfile.GFile(frozen_graph_filename, 'rb') as f: graph_def = tf.GraphDef() graph_def.ParseFromString(f.read()) with tf.Graph().as_default() as", "if X is None or Y is None or sess is None: g", "load_graph('/notebooks/frozen_model.pb') X = g.get_tensor_by_name('import/Placeholder:0') Y = g.get_tensor_by_name('import/logits:0') sess = tf.Session(graph = g) label", "[string] x = np.zeros((len(sentences), maxlen)) for i, sentence in enumerate(sentences): for no, k", "is None or sess is None: g = load_graph('/notebooks/frozen_model.pb') X = g.get_tensor_by_name('import/Placeholder:0') Y", "no, k in enumerate(sentence.split()[:maxlen][::-1]): x[i, -1 - no] = dic.get(k, UNK) indices =", ".use_blink_planner() .build(), ) X, Y, sess = None, None, None @udf(result_type = DataTypes.STRING())", ") X, Y, sess = None, None, None @udf(result_type = DataTypes.STRING()) def predict(string):", "= tf.Session(graph = g) label = ['negative', 'positive'] maxlen = 50 UNK =", "graph: tf.import_graph_def(graph_def) return graph if X is None or Y is None or", "sentences = [string] x = np.zeros((len(sentences), maxlen)) for i, sentence in enumerate(sentences): for", "i, sentence in enumerate(sentences): for no, k in enumerate(sentence.split()[:maxlen][::-1]): x[i, -1 - no]", "X = g.get_tensor_by_name('import/Placeholder:0') Y = g.get_tensor_by_name('import/logits:0') sess = tf.Session(graph = g) label =", "tf import json import numpy as np def load_graph(frozen_graph_filename): with tf.gfile.GFile(frozen_graph_filename, 'rb') as", "from 
pyflink.table.descriptors import ( Schema, Kafka, Json, Rowtime, OldCsv, FileSystem, ) from pyflink.table.udf", ").in_append_mode().register_table_source( 'source' ) result_path = '/notebooks/output-tensorflow.csv' t_env.connect(FileSystem().path(result_path)).with_format( OldCsv() .field_delimiter(',') .field('datetime', DataTypes.STRING()) .field('sentence', DataTypes.STRING())", ".field_delimiter(',') .field('datetime', DataTypes.STRING()) .field('sentence', DataTypes.STRING()) .field('label', DataTypes.STRING()) ).with_schema( Schema() .field('datetime', DataTypes.STRING()) .field('sentence', DataTypes.STRING())", "pyflink.table.descriptors import ( Schema, Kafka, Json, Rowtime, OldCsv, FileSystem, ) from pyflink.table.udf import", "= [string] x = np.zeros((len(sentences), maxlen)) for i, sentence in enumerate(sentences): for no,", "label[indices[0]] st_env.set_python_requirements('/notebooks/requirements.txt') st_env.register_function('predict', predict) st_env.connect( Kafka() .version('universal') .topic('test') .start_from_earliest() .property('zookeeper.connect', 'zookeeper:2181') .property('bootstrap.servers', 'kafka:9092')", "@udf(result_type = DataTypes.STRING()) def predict(string): global X, Y, sess import tensorflow as tf", "None or Y is None or sess is None: g = load_graph('/notebooks/frozen_model.pb') X", "global X, Y, sess import tensorflow as tf import json import numpy as", "is None or Y is None or sess is None: g = load_graph('/notebooks/frozen_model.pb')", "UNK) indices = np.argmax(sess.run(Y, feed_dict = {X: x}), axis = 1) return label[indices[0]]", "DataTypes.STRING()) .field('sentence', DataTypes.STRING()) .field('label', DataTypes.STRING()) ).in_append_mode().register_table_sink( 'sink' ) st_env.from_path('source').select( 'datetime, sentence, predict(sentence)' ).insert_into('sink')", "DataTypes.ROW( [ DataTypes.FIELD('datetime', DataTypes.STRING()), DataTypes.FIELD('text', DataTypes.STRING()), ] ) ) ).with_schema( Schema() .field('datetime', DataTypes.STRING())", "enumerate(sentence.split()[:maxlen][::-1]): x[i, -1 - no] = dic.get(k, UNK) indices = np.argmax(sess.run(Y, feed_dict =", "for no, k in enumerate(sentence.split()[:maxlen][::-1]): x[i, -1 - no] = dic.get(k, UNK) indices", "graph_def.ParseFromString(f.read()) with tf.Graph().as_default() as graph: tf.import_graph_def(graph_def) return graph if X is None or", "in enumerate(sentences): for no, k in enumerate(sentence.split()[:maxlen][::-1]): x[i, -1 - no] = dic.get(k,", ".field('label', DataTypes.STRING()) ).with_schema( Schema() .field('datetime', DataTypes.STRING()) .field('sentence', DataTypes.STRING()) .field('label', DataTypes.STRING()) ).in_append_mode().register_table_sink( 'sink' )", "= 3 with open('/notebooks/dictionary-test.json', 'r') as fopen: dic = json.load(fopen) sentences = [string]", "json import numpy as np def load_graph(frozen_graph_filename): with tf.gfile.GFile(frozen_graph_filename, 'rb') as f: graph_def", "= 1) return label[indices[0]] st_env.set_python_requirements('/notebooks/requirements.txt') st_env.register_function('predict', predict) st_env.connect( Kafka() .version('universal') .topic('test') .start_from_earliest() .property('zookeeper.connect',", "Kafka() .version('universal') .topic('test') .start_from_earliest() .property('zookeeper.connect', 'zookeeper:2181') .property('bootstrap.servers', 'kafka:9092') ).with_format( Json() .fail_on_missing_field(True) .schema( DataTypes.ROW(", "StreamExecutionEnvironment, TimeCharacteristic from pyflink.table import 
StreamTableEnvironment, DataTypes, EnvironmentSettings from pyflink.table.descriptors import ( Schema,", "from pyflink.table import StreamTableEnvironment, DataTypes, EnvironmentSettings from pyflink.table.descriptors import ( Schema, Kafka, Json,", "import ( Schema, Kafka, Json, Rowtime, OldCsv, FileSystem, ) from pyflink.table.udf import udf", "fopen: dic = json.load(fopen) sentences = [string] x = np.zeros((len(sentences), maxlen)) for i,", "- no] = dic.get(k, UNK) indices = np.argmax(sess.run(Y, feed_dict = {X: x}), axis", "import numpy as np def load_graph(frozen_graph_filename): with tf.gfile.GFile(frozen_graph_filename, 'rb') as f: graph_def =", "DataTypes.STRING()) .field('label', DataTypes.STRING()) ).with_schema( Schema() .field('datetime', DataTypes.STRING()) .field('sentence', DataTypes.STRING()) .field('label', DataTypes.STRING()) ).in_append_mode().register_table_sink( 'sink'", "maxlen = 50 UNK = 3 with open('/notebooks/dictionary-test.json', 'r') as fopen: dic =", "udf s_env = StreamExecutionEnvironment.get_execution_environment() s_env.set_stream_time_characteristic(TimeCharacteristic.EventTime) s_env.set_parallelism(1) st_env = StreamTableEnvironment.create( s_env, environment_settings = EnvironmentSettings.new_instance()", "dic = json.load(fopen) sentences = [string] x = np.zeros((len(sentences), maxlen)) for i, sentence", "as f: graph_def = tf.GraphDef() graph_def.ParseFromString(f.read()) with tf.Graph().as_default() as graph: tf.import_graph_def(graph_def) return graph", "= load_graph('/notebooks/frozen_model.pb') X = g.get_tensor_by_name('import/Placeholder:0') Y = g.get_tensor_by_name('import/logits:0') sess = tf.Session(graph = g)", "is None: g = load_graph('/notebooks/frozen_model.pb') X = g.get_tensor_by_name('import/Placeholder:0') Y = g.get_tensor_by_name('import/logits:0') sess =", "import StreamTableEnvironment, DataTypes, EnvironmentSettings from pyflink.table.descriptors import ( Schema, Kafka, Json, Rowtime, OldCsv,", "'r') as fopen: dic = json.load(fopen) sentences = [string] x = np.zeros((len(sentences), maxlen))", "result_path = '/notebooks/output-tensorflow.csv' t_env.connect(FileSystem().path(result_path)).with_format( OldCsv() .field_delimiter(',') .field('datetime', DataTypes.STRING()) .field('sentence', DataTypes.STRING()) .field('label', DataTypes.STRING()) ).with_schema(", ".field('sentence', DataTypes.STRING()) .field('label', DataTypes.STRING()) ).in_append_mode().register_table_sink( 'sink' ) st_env.from_path('source').select( 'datetime, sentence, predict(sentence)' ).insert_into('sink') st_env.execute('predict')", "'positive'] maxlen = 50 UNK = 3 with open('/notebooks/dictionary-test.json', 'r') as fopen: dic", ".build(), ) X, Y, sess = None, None, None @udf(result_type = DataTypes.STRING()) def", "s_env = StreamExecutionEnvironment.get_execution_environment() s_env.set_stream_time_characteristic(TimeCharacteristic.EventTime) s_env.set_parallelism(1) st_env = StreamTableEnvironment.create( s_env, environment_settings = EnvironmentSettings.new_instance() .in_streaming_mode()", "] ) ) ).with_schema( Schema() .field('datetime', DataTypes.STRING()) .field('text', DataTypes.STRING()) ).in_append_mode().register_table_source( 'source' ) result_path", "DataTypes.STRING()), DataTypes.FIELD('text', DataTypes.STRING()), ] ) ) ).with_schema( Schema() .field('datetime', DataTypes.STRING()) .field('text', DataTypes.STRING()) ).in_append_mode().register_table_source(", "Y is None or sess is None: g = load_graph('/notebooks/frozen_model.pb') X = 
g.get_tensor_by_name('import/Placeholder:0')", "in enumerate(sentence.split()[:maxlen][::-1]): x[i, -1 - no] = dic.get(k, UNK) indices = np.argmax(sess.run(Y, feed_dict", "as np def load_graph(frozen_graph_filename): with tf.gfile.GFile(frozen_graph_filename, 'rb') as f: graph_def = tf.GraphDef() graph_def.ParseFromString(f.read())", "None: g = load_graph('/notebooks/frozen_model.pb') X = g.get_tensor_by_name('import/Placeholder:0') Y = g.get_tensor_by_name('import/logits:0') sess = tf.Session(graph", "DataTypes.STRING()) .field('sentence', DataTypes.STRING()) .field('label', DataTypes.STRING()) ).with_schema( Schema() .field('datetime', DataTypes.STRING()) .field('sentence', DataTypes.STRING()) .field('label', DataTypes.STRING())", "pyflink.table import StreamTableEnvironment, DataTypes, EnvironmentSettings from pyflink.table.descriptors import ( Schema, Kafka, Json, Rowtime,", "= StreamExecutionEnvironment.get_execution_environment() s_env.set_stream_time_characteristic(TimeCharacteristic.EventTime) s_env.set_parallelism(1) st_env = StreamTableEnvironment.create( s_env, environment_settings = EnvironmentSettings.new_instance() .in_streaming_mode() .use_blink_planner()", "OldCsv, FileSystem, ) from pyflink.table.udf import udf s_env = StreamExecutionEnvironment.get_execution_environment() s_env.set_stream_time_characteristic(TimeCharacteristic.EventTime) s_env.set_parallelism(1) st_env", "None, None @udf(result_type = DataTypes.STRING()) def predict(string): global X, Y, sess import tensorflow", ".schema( DataTypes.ROW( [ DataTypes.FIELD('datetime', DataTypes.STRING()), DataTypes.FIELD('text', DataTypes.STRING()), ] ) ) ).with_schema( Schema() .field('datetime',", "DataTypes.STRING()) .field('text', DataTypes.STRING()) ).in_append_mode().register_table_source( 'source' ) result_path = '/notebooks/output-tensorflow.csv' t_env.connect(FileSystem().path(result_path)).with_format( OldCsv() .field_delimiter(',') .field('datetime',", ").with_schema( Schema() .field('datetime', DataTypes.STRING()) .field('text', DataTypes.STRING()) ).in_append_mode().register_table_source( 'source' ) result_path = '/notebooks/output-tensorflow.csv' t_env.connect(FileSystem().path(result_path)).with_format(", "s_env, environment_settings = EnvironmentSettings.new_instance() .in_streaming_mode() .use_blink_planner() .build(), ) X, Y, sess = None,", "tf.gfile.GFile(frozen_graph_filename, 'rb') as f: graph_def = tf.GraphDef() graph_def.ParseFromString(f.read()) with tf.Graph().as_default() as graph: tf.import_graph_def(graph_def)", "tf.GraphDef() graph_def.ParseFromString(f.read()) with tf.Graph().as_default() as graph: tf.import_graph_def(graph_def) return graph if X is None", "Schema() .field('datetime', DataTypes.STRING()) .field('sentence', DataTypes.STRING()) .field('label', DataTypes.STRING()) ).in_append_mode().register_table_sink( 'sink' ) st_env.from_path('source').select( 'datetime, sentence,", "X, Y, sess import tensorflow as tf import json import numpy as np", ") ) ).with_schema( Schema() .field('datetime', DataTypes.STRING()) .field('text', DataTypes.STRING()) ).in_append_mode().register_table_source( 'source' ) result_path =", "predict) st_env.connect( Kafka() .version('universal') .topic('test') .start_from_earliest() .property('zookeeper.connect', 'zookeeper:2181') .property('bootstrap.servers', 'kafka:9092') ).with_format( Json() .fail_on_missing_field(True)", "'zookeeper:2181') .property('bootstrap.servers', 'kafka:9092') ).with_format( Json() .fail_on_missing_field(True) .schema( DataTypes.ROW( [ 
DataTypes.FIELD('datetime', DataTypes.STRING()), DataTypes.FIELD('text', DataTypes.STRING()),", ") result_path = '/notebooks/output-tensorflow.csv' t_env.connect(FileSystem().path(result_path)).with_format( OldCsv() .field_delimiter(',') .field('datetime', DataTypes.STRING()) .field('sentence', DataTypes.STRING()) .field('label', DataTypes.STRING())", ".topic('test') .start_from_earliest() .property('zookeeper.connect', 'zookeeper:2181') .property('bootstrap.servers', 'kafka:9092') ).with_format( Json() .fail_on_missing_field(True) .schema( DataTypes.ROW( [ DataTypes.FIELD('datetime',", "environment_settings = EnvironmentSettings.new_instance() .in_streaming_mode() .use_blink_planner() .build(), ) X, Y, sess = None, None,", "graph_def = tf.GraphDef() graph_def.ParseFromString(f.read()) with tf.Graph().as_default() as graph: tf.import_graph_def(graph_def) return graph if X", "'/notebooks/output-tensorflow.csv' t_env.connect(FileSystem().path(result_path)).with_format( OldCsv() .field_delimiter(',') .field('datetime', DataTypes.STRING()) .field('sentence', DataTypes.STRING()) .field('label', DataTypes.STRING()) ).with_schema( Schema() .field('datetime',", "'source' ) result_path = '/notebooks/output-tensorflow.csv' t_env.connect(FileSystem().path(result_path)).with_format( OldCsv() .field_delimiter(',') .field('datetime', DataTypes.STRING()) .field('sentence', DataTypes.STRING()) .field('label',", "as graph: tf.import_graph_def(graph_def) return graph if X is None or Y is None", "TimeCharacteristic from pyflink.table import StreamTableEnvironment, DataTypes, EnvironmentSettings from pyflink.table.descriptors import ( Schema, Kafka,", "{X: x}), axis = 1) return label[indices[0]] st_env.set_python_requirements('/notebooks/requirements.txt') st_env.register_function('predict', predict) st_env.connect( Kafka() .version('universal')", "return label[indices[0]] st_env.set_python_requirements('/notebooks/requirements.txt') st_env.register_function('predict', predict) st_env.connect( Kafka() .version('universal') .topic('test') .start_from_earliest() .property('zookeeper.connect', 'zookeeper:2181') .property('bootstrap.servers',", "tf.Session(graph = g) label = ['negative', 'positive'] maxlen = 50 UNK = 3", "g = load_graph('/notebooks/frozen_model.pb') X = g.get_tensor_by_name('import/Placeholder:0') Y = g.get_tensor_by_name('import/logits:0') sess = tf.Session(graph =", "g.get_tensor_by_name('import/Placeholder:0') Y = g.get_tensor_by_name('import/logits:0') sess = tf.Session(graph = g) label = ['negative', 'positive']", "DataTypes, EnvironmentSettings from pyflink.table.descriptors import ( Schema, Kafka, Json, Rowtime, OldCsv, FileSystem, )", "t_env.connect(FileSystem().path(result_path)).with_format( OldCsv() .field_delimiter(',') .field('datetime', DataTypes.STRING()) .field('sentence', DataTypes.STRING()) .field('label', DataTypes.STRING()) ).with_schema( Schema() .field('datetime', DataTypes.STRING())", "Schema, Kafka, Json, Rowtime, OldCsv, FileSystem, ) from pyflink.table.udf import udf s_env =", "sess = None, None, None @udf(result_type = DataTypes.STRING()) def predict(string): global X, Y,", "OldCsv() .field_delimiter(',') .field('datetime', DataTypes.STRING()) .field('sentence', DataTypes.STRING()) .field('label', DataTypes.STRING()) ).with_schema( Schema() .field('datetime', DataTypes.STRING()) .field('sentence',", "def predict(string): global X, Y, sess import tensorflow as tf import json import", "x}), axis = 1) return label[indices[0]] 
st_env.set_python_requirements('/notebooks/requirements.txt') st_env.register_function('predict', predict) st_env.connect( Kafka() .version('universal') .topic('test')", "g.get_tensor_by_name('import/logits:0') sess = tf.Session(graph = g) label = ['negative', 'positive'] maxlen = 50", "open('/notebooks/dictionary-test.json', 'r') as fopen: dic = json.load(fopen) sentences = [string] x = np.zeros((len(sentences),", "pyflink.table.udf import udf s_env = StreamExecutionEnvironment.get_execution_environment() s_env.set_stream_time_characteristic(TimeCharacteristic.EventTime) s_env.set_parallelism(1) st_env = StreamTableEnvironment.create( s_env, environment_settings", "sess = tf.Session(graph = g) label = ['negative', 'positive'] maxlen = 50 UNK", "tf.import_graph_def(graph_def) return graph if X is None or Y is None or sess", ") ).with_schema( Schema() .field('datetime', DataTypes.STRING()) .field('text', DataTypes.STRING()) ).in_append_mode().register_table_source( 'source' ) result_path = '/notebooks/output-tensorflow.csv'", "None or sess is None: g = load_graph('/notebooks/frozen_model.pb') X = g.get_tensor_by_name('import/Placeholder:0') Y =", "json.load(fopen) sentences = [string] x = np.zeros((len(sentences), maxlen)) for i, sentence in enumerate(sentences):", "np.zeros((len(sentences), maxlen)) for i, sentence in enumerate(sentences): for no, k in enumerate(sentence.split()[:maxlen][::-1]): x[i,", "k in enumerate(sentence.split()[:maxlen][::-1]): x[i, -1 - no] = dic.get(k, UNK) indices = np.argmax(sess.run(Y,", "= StreamTableEnvironment.create( s_env, environment_settings = EnvironmentSettings.new_instance() .in_streaming_mode() .use_blink_planner() .build(), ) X, Y, sess", ".property('zookeeper.connect', 'zookeeper:2181') .property('bootstrap.servers', 'kafka:9092') ).with_format( Json() .fail_on_missing_field(True) .schema( DataTypes.ROW( [ DataTypes.FIELD('datetime', DataTypes.STRING()), DataTypes.FIELD('text',", "DataTypes.STRING()), ] ) ) ).with_schema( Schema() .field('datetime', DataTypes.STRING()) .field('text', DataTypes.STRING()) ).in_append_mode().register_table_source( 'source' )", "label = ['negative', 'positive'] maxlen = 50 UNK = 3 with open('/notebooks/dictionary-test.json', 'r')", "g) label = ['negative', 'positive'] maxlen = 50 UNK = 3 with open('/notebooks/dictionary-test.json',", "Kafka, Json, Rowtime, OldCsv, FileSystem, ) from pyflink.table.udf import udf s_env = StreamExecutionEnvironment.get_execution_environment()", ".in_streaming_mode() .use_blink_planner() .build(), ) X, Y, sess = None, None, None @udf(result_type =", "50 UNK = 3 with open('/notebooks/dictionary-test.json', 'r') as fopen: dic = json.load(fopen) sentences", "from pyflink.table.udf import udf s_env = StreamExecutionEnvironment.get_execution_environment() s_env.set_stream_time_characteristic(TimeCharacteristic.EventTime) s_env.set_parallelism(1) st_env = StreamTableEnvironment.create( s_env,", ") from pyflink.table.udf import udf s_env = StreamExecutionEnvironment.get_execution_environment() s_env.set_stream_time_characteristic(TimeCharacteristic.EventTime) s_env.set_parallelism(1) st_env = StreamTableEnvironment.create(", "st_env.set_python_requirements('/notebooks/requirements.txt') st_env.register_function('predict', predict) st_env.connect( Kafka() .version('universal') .topic('test') .start_from_earliest() .property('zookeeper.connect', 'zookeeper:2181') .property('bootstrap.servers', 'kafka:9092') ).with_format(", "no] = dic.get(k, UNK) indices = np.argmax(sess.run(Y, feed_dict = {X: x}), 
axis =", "sentence in enumerate(sentences): for no, k in enumerate(sentence.split()[:maxlen][::-1]): x[i, -1 - no] =", "UNK = 3 with open('/notebooks/dictionary-test.json', 'r') as fopen: dic = json.load(fopen) sentences =", "= ['negative', 'positive'] maxlen = 50 UNK = 3 with open('/notebooks/dictionary-test.json', 'r') as", "3 with open('/notebooks/dictionary-test.json', 'r') as fopen: dic = json.load(fopen) sentences = [string] x", "from pyflink.datastream import StreamExecutionEnvironment, TimeCharacteristic from pyflink.table import StreamTableEnvironment, DataTypes, EnvironmentSettings from pyflink.table.descriptors", "DataTypes.STRING()) ).with_schema( Schema() .field('datetime', DataTypes.STRING()) .field('sentence', DataTypes.STRING()) .field('label', DataTypes.STRING()) ).in_append_mode().register_table_sink( 'sink' ) st_env.from_path('source').select(", ".field('datetime', DataTypes.STRING()) .field('sentence', DataTypes.STRING()) .field('label', DataTypes.STRING()) ).in_append_mode().register_table_sink( 'sink' ) st_env.from_path('source').select( 'datetime, sentence, predict(sentence)'", "EnvironmentSettings.new_instance() .in_streaming_mode() .use_blink_planner() .build(), ) X, Y, sess = None, None, None @udf(result_type", "feed_dict = {X: x}), axis = 1) return label[indices[0]] st_env.set_python_requirements('/notebooks/requirements.txt') st_env.register_function('predict', predict) st_env.connect(", "X, Y, sess = None, None, None @udf(result_type = DataTypes.STRING()) def predict(string): global", "= g.get_tensor_by_name('import/Placeholder:0') Y = g.get_tensor_by_name('import/logits:0') sess = tf.Session(graph = g) label = ['negative',", "= {X: x}), axis = 1) return label[indices[0]] st_env.set_python_requirements('/notebooks/requirements.txt') st_env.register_function('predict', predict) st_env.connect( Kafka()", "return graph if X is None or Y is None or sess is", "st_env.register_function('predict', predict) st_env.connect( Kafka() .version('universal') .topic('test') .start_from_earliest() .property('zookeeper.connect', 'zookeeper:2181') .property('bootstrap.servers', 'kafka:9092') ).with_format( Json()", "as tf import json import numpy as np def load_graph(frozen_graph_filename): with tf.gfile.GFile(frozen_graph_filename, 'rb')", "or Y is None or sess is None: g = load_graph('/notebooks/frozen_model.pb') X =", "= None, None, None @udf(result_type = DataTypes.STRING()) def predict(string): global X, Y, sess", "StreamExecutionEnvironment.get_execution_environment() s_env.set_stream_time_characteristic(TimeCharacteristic.EventTime) s_env.set_parallelism(1) st_env = StreamTableEnvironment.create( s_env, environment_settings = EnvironmentSettings.new_instance() .in_streaming_mode() .use_blink_planner() .build(),", ".field('sentence', DataTypes.STRING()) .field('label', DataTypes.STRING()) ).with_schema( Schema() .field('datetime', DataTypes.STRING()) .field('sentence', DataTypes.STRING()) .field('label', DataTypes.STRING()) ).in_append_mode().register_table_sink(", "f: graph_def = tf.GraphDef() graph_def.ParseFromString(f.read()) with tf.Graph().as_default() as graph: tf.import_graph_def(graph_def) return graph if", "Y, sess = None, None, None @udf(result_type = DataTypes.STRING()) def predict(string): global X,", "numpy as np def load_graph(frozen_graph_filename): with tf.gfile.GFile(frozen_graph_filename, 'rb') as f: graph_def = tf.GraphDef()", "Y, sess import tensorflow as tf import json import numpy as np def", ".field('datetime', DataTypes.STRING()) .field('sentence', 
DataTypes.STRING()) .field('label', DataTypes.STRING()) ).with_schema( Schema() .field('datetime', DataTypes.STRING()) .field('sentence', DataTypes.STRING()) .field('label',", "graph if X is None or Y is None or sess is None:", "FileSystem, ) from pyflink.table.udf import udf s_env = StreamExecutionEnvironment.get_execution_environment() s_env.set_stream_time_characteristic(TimeCharacteristic.EventTime) s_env.set_parallelism(1) st_env =", "import tensorflow as tf import json import numpy as np def load_graph(frozen_graph_filename): with", "import json import numpy as np def load_graph(frozen_graph_filename): with tf.gfile.GFile(frozen_graph_filename, 'rb') as f:", "np def load_graph(frozen_graph_filename): with tf.gfile.GFile(frozen_graph_filename, 'rb') as f: graph_def = tf.GraphDef() graph_def.ParseFromString(f.read()) with", "= np.zeros((len(sentences), maxlen)) for i, sentence in enumerate(sentences): for no, k in enumerate(sentence.split()[:maxlen][::-1]):", "= EnvironmentSettings.new_instance() .in_streaming_mode() .use_blink_planner() .build(), ) X, Y, sess = None, None, None", "'kafka:9092') ).with_format( Json() .fail_on_missing_field(True) .schema( DataTypes.ROW( [ DataTypes.FIELD('datetime', DataTypes.STRING()), DataTypes.FIELD('text', DataTypes.STRING()), ] )", "StreamTableEnvironment.create( s_env, environment_settings = EnvironmentSettings.new_instance() .in_streaming_mode() .use_blink_planner() .build(), ) X, Y, sess =", "for i, sentence in enumerate(sentences): for no, k in enumerate(sentence.split()[:maxlen][::-1]): x[i, -1 -", "pyflink.datastream import StreamExecutionEnvironment, TimeCharacteristic from pyflink.table import StreamTableEnvironment, DataTypes, EnvironmentSettings from pyflink.table.descriptors import", "Rowtime, OldCsv, FileSystem, ) from pyflink.table.udf import udf s_env = StreamExecutionEnvironment.get_execution_environment() s_env.set_stream_time_characteristic(TimeCharacteristic.EventTime) s_env.set_parallelism(1)", "with tf.Graph().as_default() as graph: tf.import_graph_def(graph_def) return graph if X is None or Y", "= json.load(fopen) sentences = [string] x = np.zeros((len(sentences), maxlen)) for i, sentence in", "st_env = StreamTableEnvironment.create( s_env, environment_settings = EnvironmentSettings.new_instance() .in_streaming_mode() .use_blink_planner() .build(), ) X, Y,", "with open('/notebooks/dictionary-test.json', 'r') as fopen: dic = json.load(fopen) sentences = [string] x =", ".start_from_earliest() .property('zookeeper.connect', 'zookeeper:2181') .property('bootstrap.servers', 'kafka:9092') ).with_format( Json() .fail_on_missing_field(True) .schema( DataTypes.ROW( [ DataTypes.FIELD('datetime', DataTypes.STRING()),", ").with_schema( Schema() .field('datetime', DataTypes.STRING()) .field('sentence', DataTypes.STRING()) .field('label', DataTypes.STRING()) ).in_append_mode().register_table_sink( 'sink' ) st_env.from_path('source').select( 'datetime,", "= DataTypes.STRING()) def predict(string): global X, Y, sess import tensorflow as tf import", "= '/notebooks/output-tensorflow.csv' t_env.connect(FileSystem().path(result_path)).with_format( OldCsv() .field_delimiter(',') .field('datetime', DataTypes.STRING()) .field('sentence', DataTypes.STRING()) .field('label', DataTypes.STRING()) ).with_schema( Schema()", "tf.Graph().as_default() as graph: tf.import_graph_def(graph_def) return graph if X is None or Y is", "1) return label[indices[0]] st_env.set_python_requirements('/notebooks/requirements.txt') st_env.register_function('predict', 
predict) st_env.connect( Kafka() .version('universal') .topic('test') .start_from_earliest() .property('zookeeper.connect', 'zookeeper:2181')", "None, None, None @udf(result_type = DataTypes.STRING()) def predict(string): global X, Y, sess import", "st_env.connect( Kafka() .version('universal') .topic('test') .start_from_earliest() .property('zookeeper.connect', 'zookeeper:2181') .property('bootstrap.servers', 'kafka:9092') ).with_format( Json() .fail_on_missing_field(True) .schema(", "'rb') as f: graph_def = tf.GraphDef() graph_def.ParseFromString(f.read()) with tf.Graph().as_default() as graph: tf.import_graph_def(graph_def) return", ".field('datetime', DataTypes.STRING()) .field('text', DataTypes.STRING()) ).in_append_mode().register_table_source( 'source' ) result_path = '/notebooks/output-tensorflow.csv' t_env.connect(FileSystem().path(result_path)).with_format( OldCsv() .field_delimiter(',')", "enumerate(sentences): for no, k in enumerate(sentence.split()[:maxlen][::-1]): x[i, -1 - no] = dic.get(k, UNK)", "with tf.gfile.GFile(frozen_graph_filename, 'rb') as f: graph_def = tf.GraphDef() graph_def.ParseFromString(f.read()) with tf.Graph().as_default() as graph:", ".version('universal') .topic('test') .start_from_earliest() .property('zookeeper.connect', 'zookeeper:2181') .property('bootstrap.servers', 'kafka:9092') ).with_format( Json() .fail_on_missing_field(True) .schema( DataTypes.ROW( [", "x[i, -1 - no] = dic.get(k, UNK) indices = np.argmax(sess.run(Y, feed_dict = {X:", "DataTypes.FIELD('datetime', DataTypes.STRING()), DataTypes.FIELD('text', DataTypes.STRING()), ] ) ) ).with_schema( Schema() .field('datetime', DataTypes.STRING()) .field('text', DataTypes.STRING())", ".field('text', DataTypes.STRING()) ).in_append_mode().register_table_source( 'source' ) result_path = '/notebooks/output-tensorflow.csv' t_env.connect(FileSystem().path(result_path)).with_format( OldCsv() .field_delimiter(',') .field('datetime', DataTypes.STRING())", "s_env.set_parallelism(1) st_env = StreamTableEnvironment.create( s_env, environment_settings = EnvironmentSettings.new_instance() .in_streaming_mode() .use_blink_planner() .build(), ) X,", "s_env.set_stream_time_characteristic(TimeCharacteristic.EventTime) s_env.set_parallelism(1) st_env = StreamTableEnvironment.create( s_env, environment_settings = EnvironmentSettings.new_instance() .in_streaming_mode() .use_blink_planner() .build(), )", "predict(string): global X, Y, sess import tensorflow as tf import json import numpy", "DataTypes.STRING()) def predict(string): global X, Y, sess import tensorflow as tf import json", "DataTypes.FIELD('text', DataTypes.STRING()), ] ) ) ).with_schema( Schema() .field('datetime', DataTypes.STRING()) .field('text', DataTypes.STRING()) ).in_append_mode().register_table_source( 'source'", "indices = np.argmax(sess.run(Y, feed_dict = {X: x}), axis = 1) return label[indices[0]] st_env.set_python_requirements('/notebooks/requirements.txt')", "def load_graph(frozen_graph_filename): with tf.gfile.GFile(frozen_graph_filename, 'rb') as f: graph_def = tf.GraphDef() graph_def.ParseFromString(f.read()) with tf.Graph().as_default()", ".fail_on_missing_field(True) .schema( DataTypes.ROW( [ DataTypes.FIELD('datetime', DataTypes.STRING()), DataTypes.FIELD('text', DataTypes.STRING()), ] ) ) ).with_schema( Schema()", ").with_format( Json() .fail_on_missing_field(True) .schema( DataTypes.ROW( [ DataTypes.FIELD('datetime', DataTypes.STRING()), DataTypes.FIELD('text', DataTypes.STRING()), ] ) )", "Y = g.get_tensor_by_name('import/logits:0') 
sess = tf.Session(graph = g) label = ['negative', 'positive'] maxlen", "X is None or Y is None or sess is None: g =", "maxlen)) for i, sentence in enumerate(sentences): for no, k in enumerate(sentence.split()[:maxlen][::-1]): x[i, -1", "axis = 1) return label[indices[0]] st_env.set_python_requirements('/notebooks/requirements.txt') st_env.register_function('predict', predict) st_env.connect( Kafka() .version('universal') .topic('test') .start_from_earliest()", "( Schema, Kafka, Json, Rowtime, OldCsv, FileSystem, ) from pyflink.table.udf import udf s_env", "= tf.GraphDef() graph_def.ParseFromString(f.read()) with tf.Graph().as_default() as graph: tf.import_graph_def(graph_def) return graph if X is", "[ DataTypes.FIELD('datetime', DataTypes.STRING()), DataTypes.FIELD('text', DataTypes.STRING()), ] ) ) ).with_schema( Schema() .field('datetime', DataTypes.STRING()) .field('text',", "dic.get(k, UNK) indices = np.argmax(sess.run(Y, feed_dict = {X: x}), axis = 1) return", "None @udf(result_type = DataTypes.STRING()) def predict(string): global X, Y, sess import tensorflow as", "Json, Rowtime, OldCsv, FileSystem, ) from pyflink.table.udf import udf s_env = StreamExecutionEnvironment.get_execution_environment() s_env.set_stream_time_characteristic(TimeCharacteristic.EventTime)", "Json() .fail_on_missing_field(True) .schema( DataTypes.ROW( [ DataTypes.FIELD('datetime', DataTypes.STRING()), DataTypes.FIELD('text', DataTypes.STRING()), ] ) ) ).with_schema(", "= np.argmax(sess.run(Y, feed_dict = {X: x}), axis = 1) return label[indices[0]] st_env.set_python_requirements('/notebooks/requirements.txt') st_env.register_function('predict',", "Schema() .field('datetime', DataTypes.STRING()) .field('text', DataTypes.STRING()) ).in_append_mode().register_table_source( 'source' ) result_path = '/notebooks/output-tensorflow.csv' t_env.connect(FileSystem().path(result_path)).with_format( OldCsv()", "= g) label = ['negative', 'positive'] maxlen = 50 UNK = 3 with", "x = np.zeros((len(sentences), maxlen)) for i, sentence in enumerate(sentences): for no, k in", "EnvironmentSettings from pyflink.table.descriptors import ( Schema, Kafka, Json, Rowtime, OldCsv, FileSystem, ) from", "= g.get_tensor_by_name('import/logits:0') sess = tf.Session(graph = g) label = ['negative', 'positive'] maxlen =", "StreamTableEnvironment, DataTypes, EnvironmentSettings from pyflink.table.descriptors import ( Schema, Kafka, Json, Rowtime, OldCsv, FileSystem,", "sess is None: g = load_graph('/notebooks/frozen_model.pb') X = g.get_tensor_by_name('import/Placeholder:0') Y = g.get_tensor_by_name('import/logits:0') sess" ]
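# Sketch: feeding the 'test' topic that the job above consumes. This is an
# illustration, not part of the original job; it assumes the kafka-python
# client (pip install kafka-python) and reuses the broker address from the
# job's 'bootstrap.servers' property. Field names must match the Json()
# source schema ('datetime', 'text').
import json
from datetime import datetime
from kafka import KafkaProducer

producer = KafkaProducer(
    bootstrap_servers='kafka:9092',
    value_serializer=lambda v: json.dumps(v).encode('utf-8'),
)
for text in ['i really love this film', 'what a waste of time']:
    producer.send('test', {'datetime': datetime.now().isoformat(), 'text': text})
producer.flush()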
# -*- coding: utf-8 -*-
# Generated by Django 1.11 on 2017-05-24 12:07
from __future__ import unicode_literals

from django.db import migrations, models
import django.db.models.deletion


class Migration(migrations.Migration):

    dependencies = [
        ('servers', '0003_auto_20170523_1409'),
    ]

    operations = [
        migrations.AlterField(
            model_name='server',
            name='operating_system',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='servers', to='servers.OperatingSystem'),
        ),
    ]
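# Hypothetical sketch: a models.py definition consistent with the AlterField
# above. Only the operating_system field is taken from the migration; any
# other fields on Server are unknown and omitted here.
from django.db import models


class Server(models.Model):
    # CASCADE deletes mirror django.db.models.deletion.CASCADE in the
    # migration; related_name='servers' enables os.servers.all() lookups.
    operating_system = models.ForeignKey(
        'servers.OperatingSystem',
        on_delete=models.CASCADE,
        related_name='servers',
    )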
<filename>src/datasets.py<gh_stars>10-100
# MIT License
# Copyright (c) 2020 <NAME>
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.

import pathlib
import pandas as pd
import visualization as tree_view
from IPython.display import display

dataset_names = [
    "Breast-Cancer-Wisconsin",
    "COMPAS-ProPublica",
    "FICO",
    "HTRU2",
    "Pima-Diabetes",
    "Seeds",
]


def create_dataset_selection(show=True, no_fico_sa=True):
    import ipywidgets as widgets
    selected_datasets = widgets.Select(
        options=dataset_names,
        value=dataset_names[0],
        description="Datasets",
        disabled=False,
    )
    if show:
        display(selected_datasets)
    return selected_datasets


def create_kfold_selection(min_v=1, max_v=10, show=True):
    import ipywidgets as widgets
    select = widgets.IntSlider(
        value=1,
        min=min_v,
        max=max_v,
        step=1,
        description='Fold:',
        disabled=False,
        continuous_update=False,
        orientation='horizontal',
        readout=True,
        readout_format='d',
    )
    if show:
        display(select)
    return select


def create_cplex_linking_selection(show=True):
    import ipywidgets as widgets
    select = widgets.Checkbox(
        value=False,
        description='CPLEX linking',
        disabled=False,
        indent=False,
    )
    if show:
        display(select)
    return select


def load_info(dataset, df_train, fn):
    info = {
        'classes': {},
        'filename': fn,
        'colors': None,
    }
    # Map feature index -> column name; the last column is the target.
    info['features'] = {k: v for k, v in zip(range(len(df_train.columns) - 1), df_train.columns[:-1])}
    return info


def load(dataset, fold, F=None, S=None):
    respath = str(pathlib.Path(__file__).parent.absolute()) + '/resources/datasets/'
    if F or S:
        fn = respath + '{}/F{}.S{}/{}.F{}.S{}.train{}.csv'.format(dataset, F, S, dataset, F, S, fold)
        df_train = pd.read_csv(fn)
        df_test = pd.read_csv(respath + '{}/F{}.S{}/{}.F{}.S{}.test{}.csv'.format(dataset, F, S, dataset, F, S, fold))
    else:
        fn = respath + '{}/{}.train{}.csv'.format(dataset, dataset, fold)
        df_train = pd.read_csv(fn)
        df_test = pd.read_csv(respath + '{}/{}.test{}.csv'.format(dataset, dataset, fold))
    return df_train, df_test, load_info(dataset, df_train, fn)
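# Usage sketch for the helpers above, as a notebook would drive them. The
# widgets returned by the create_*_selection helpers expose the current
# choice via .value; everything else comes from this module.
if __name__ == '__main__':
    datasets_w = create_dataset_selection(show=False)
    fold_w = create_kfold_selection(show=False)
    df_train, df_test, info = load(datasets_w.value, fold_w.value)
    print(info['filename'], df_train.shape, df_test.shape)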
#   Solution of;
#   Project Euler Problem 558: Irrational base
#   https://projecteuler.net/problem=558
#
#   Let r be the real root of the equation x^3 = x^2 + 1. Every positive
#   integer can be written as the sum of distinct increasing powers of r.
#   If we require the number of terms to be finite and the difference
#   between any two exponents to be three or more, then the representation
#   is unique. For example, 3 = r^-10 + r^-5 + r^-1 + r^2 and
#   10 = r^-10 + r^-7 + r^6.
#   Interestingly, the relation holds for the complex roots of the equation.
#   Let w(n) be the number of terms in this unique representation of n.
#   Thus w(3) = 4 and w(10) = 3.
#   More formally, for all positive integers n, we have:
#       n = sum_{k = -inf..inf} b_k * r^k
#   under the conditions that:
#       b_k is 0 or 1 for all k;
#       b_k + b_{k+1} + b_{k+2} <= 1 for all k;
#       w(n) = sum_{k = -inf..inf} b_k is finite.
#   Let S(m) = sum_{j = 1..m} w(j^2). You are given S(10) = 61 and
#   S(1000) = 19403. Find S(5 000 000).
#
#   by lcsm29 http://github.com/lcsm29/project-euler
import timed


def dummy(n):
    pass


if __name__ == '__main__':
    n = 1000
    i = 10000
    prob_id = 558
    timed.caller(dummy, n, i, prob_id)
If we require", "terms to be finite and the difference between any two # exponents to", "# exponents to be three or more, then the representation is unique. For", "w(n) be the number of terms in this unique representation of n. Thus", "= $\\displaystyle # \\sum_{k=-\\infty}^{\\infty}$ bk is finite. Let S(m) = $\\displaystyle # \\sum_{j=1}^{m}$", "# \\sum_{k=-\\infty}^{\\infty}$ bk is finite. Let S(m) = $\\displaystyle # \\sum_{j=1}^{m}$ w(j2). You", "given S(10) = 61 and S(1000) = 19403. Find # S(5 000 000).", "be three or more, then the representation is unique. For # example, 3", "r -1 + r 2 and 10 = r -10 + r -7", "1 for all k;w(n) = $\\displaystyle # \\sum_{k=-\\infty}^{\\infty}$ bk is finite. Let S(m)", "and S(1000) = 19403. Find # S(5 000 000). # # by lcsm29", "number of terms in this unique representation of n. Thus w(3) = #", "of terms to be finite and the difference between any two # exponents", "w(3) = # 4 and w(10) = 3. More formally, for all positive", "the equation x3 = x2 + 1. Every positive integer # can be", "1 for all k;bk + bk+1 + bk+2 ≤ 1 for all k;w(n)", "# Solution of; # Project Euler Problem 558: Irrational base # https://projecteuler.net/problem=558 #", "and the difference between any two # exponents to be three or more,", "Let # w(n) be the number of terms in this unique representation of", "000). # # by lcsm29 http://github.com/lcsm29/project-euler import timed def dummy(n): pass if __name__" ]
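The row above fragments the header of a Project Euler Problem 558 solution, and extraction flattened its mathematics ("x3 = x2 + 1", "r -10", "w(j2)" have all lost their superscripts). Reassembled from the fragments themselves, with r the real root of x^3 = x^2 + 1, the statement's formulas read:

\[ n = \sum_{k=-\infty}^{\infty} b_k \, r^k, \qquad b_k \in \{0,1\}, \qquad b_k + b_{k+1} + b_{k+2} \le 1 \ \text{for all } k, \]
\[ w(n) = \sum_{k=-\infty}^{\infty} b_k \ \text{(finite)}, \qquad S(m) = \sum_{j=1}^{m} w(j^2), \]
\[ 3 = r^{-10} + r^{-5} + r^{-1} + r^{2}, \qquad 10 = r^{-10} + r^{-7} + r^{6}, \qquad S(10) = 61, \quad S(1000) = 19403. \]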
[ "# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR # A", "materials provided with the # distribution. # * Neither the name of the", "of the nor the names of its # contributors may be used to", "ANY # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT #", "in the documentation and/or other materials provided with the # distribution. # *", "* #~ from component import * #~ from comp_lib import * #~ #from", "import library #~ import mat_lib #~ import shape #~ import surface #~ import", "mat_lib import * #~ from ray import * #~ from shape import *", "All rights reserved. # # Redistribution and use in source and binary forms,", "software without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY", "mat_lib #~ import shape #~ import surface #~ import system #~ #~ __all__=[\"calc\",", "<reponame>fcichos/pyoptools<gh_stars>1-10 #!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2007, 2008,", "# -*- coding: utf-8 -*- # Copyright (c) 2007, 2008, 2009,2010 <NAME> #", "# this software without specific prior written permission. # # THIS SOFTWARE IS", "python # -*- coding: utf-8 -*- # Copyright (c) 2007, 2008, 2009,2010 <NAME>", "* Redistributions of source code must retain the above copyright # notice, this", "<NAME> # <<EMAIL>>, # All rights reserved. # # Redistribution and use in", "#~ import comp_lib #~ import library #~ import mat_lib #~ import shape #~", "\"system\"] #~ from calc import * #~ from component import * #~ from", "# modification, are permitted provided that the following conditions are # met: #", "an *API* that can be used to describe Optical surfaces, components and systems.", "-*- coding: utf-8 -*- # Copyright (c) 2007, 2008, 2009,2010 <NAME> # <<EMAIL>>,", "of conditions and the following disclaimer. # * Redistributions in binary form must", "\"surface\", #~ \"system\"] #~ from calc import * #~ from component import *", "IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY", "PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT # OWNER OR", "BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES", "THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # (INCLUDING NEGLIGENCE", "#from config import * #~ from library import * #~ from mat_lib import", "this software without specific prior written permission. # # THIS SOFTWARE IS PROVIDED", "TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, # DATA, OR", "without # modification, are permitted provided that the following conditions are # met:", "this list of conditions and the following disclaimer. # * Redistributions in binary", "ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED", "NOT # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,", "and/or other materials provided with the # distribution. # * Neither the name", "with or without # modification, are permitted provided that the following conditions are", "conditions and the following disclaimer. # * Redistributions in binary form must reproduce", "other materials provided with the # distribution. 
# * Neither the name of", "# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY #", "IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR # A PARTICULAR PURPOSE ARE DISCLAIMED.", "BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY # THEORY OF LIABILITY, WHETHER IN", "# \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT #", "endorse or promote products derived from # this software without specific prior written", "THE POSSIBILITY OF SUCH DAMAGE. ''' Package containing modules and submodules defining an", "A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT # OWNER", "the documentation and/or other materials provided with the # distribution. # * Neither", "import * #~ #from config import * #~ from library import * #~", "2007, 2008, 2009,2010 <NAME> # <<EMAIL>>, # All rights reserved. # # Redistribution", "WAY OUT OF THE USE # OF THIS SOFTWARE, EVEN IF ADVISED OF", "above copyright # notice, this list of conditions and the following disclaimer. #", "DIRECT, INDIRECT, INCIDENTAL, # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT #", "NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR #", "list of conditions and the following disclaimer. # * Redistributions in binary form", "rights reserved. # # Redistribution and use in source and binary forms, with", "OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY # THEORY OF LIABILITY, WHETHER", "permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS", "EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. ''' Package containing modules", "OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # OF THIS SOFTWARE,", "config import * #~ from library import * #~ from mat_lib import *", "THE USE # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF", "OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, # DATA, OR PROFITS; OR", "or without # modification, are permitted provided that the following conditions are #", "from ray import * #~ from shape import * #~ from surface import", "coding: utf-8 -*- # Copyright (c) 2007, 2008, 2009,2010 <NAME> # <<EMAIL>>, #", "# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, #", "LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # (INCLUDING NEGLIGENCE OR OTHERWISE)", "#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2007, 2008, 2009,2010", "and submodules defining an *API* that can be used to describe Optical surfaces,", "SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # \"AS IS\" AND", "#~ from shape import * #~ from surface import * #~ from system", "notice, this list of conditions and the following disclaimer # in the documentation", "POSSIBILITY OF SUCH DAMAGE. ''' Package containing modules and submodules defining an *API*", "SHALL THE COPYRIGHT # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,", "NO EVENT SHALL THE COPYRIGHT # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY", "COPYRIGHT HOLDERS AND CONTRIBUTORS # \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES,", "FOR ANY DIRECT, INDIRECT, INCIDENTAL, # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT", "without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE", "ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. ''' Package containing modules and submodules", "code must retain the above copyright # notice, this list of conditions and", "# in the documentation and/or other materials provided with the # distribution. 
#", "ANY DIRECT, INDIRECT, INCIDENTAL, # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT", "conditions are # met: # # * Redistributions of source code must retain", "component import * #~ from comp_lib import * #~ #from config import *", "following conditions are # met: # # * Redistributions of source code must", "and systems. ''' #~ import calc #~ import component #~ import comp_lib #~", "SERVICES; LOSS OF USE, # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED", "Redistributions in binary form must reproduce the above # copyright notice, this list", "* Neither the name of the nor the names of its # contributors", "OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # (INCLUDING NEGLIGENCE OR", "name of the nor the names of its # contributors may be used", "SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, # DATA, OR PROFITS; OR BUSINESS", "DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY # THEORY", "component #~ import comp_lib #~ import library #~ import mat_lib #~ import shape", "#~ import calc #~ import component #~ import comp_lib #~ import library #~", "#~ \"mat_lib\", #~ \"shape\", #~ \"surface\", #~ \"system\"] #~ from calc import *", "* #~ from shape import * #~ from surface import * #~ from", "* #~ from comp_lib import * #~ #from config import * #~ from", "PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY # THEORY OF LIABILITY,", "NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # OF", "AND CONTRIBUTORS # \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT", "and use in source and binary forms, with or without # modification, are", "THE COPYRIGHT HOLDERS AND CONTRIBUTORS # \"AS IS\" AND ANY EXPRESS OR IMPLIED", "* #~ from ray import * #~ from shape import * #~ from", "the following conditions are # met: # # * Redistributions of source code", "CONTRACT, STRICT LIABILITY, OR TORT # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY", "submodules defining an *API* that can be used to describe Optical surfaces, components", "EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES", "containing modules and submodules defining an *API* that can be used to describe", "# * Redistributions of source code must retain the above copyright # notice,", "from comp_lib import * #~ #from config import * #~ from library import", "SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. ''' Package containing", "IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. ''' Package containing modules and", "#~ import component #~ import comp_lib #~ import library #~ import mat_lib #~", "of its # contributors may be used to endorse or promote products derived", "its # contributors may be used to endorse or promote products derived from", "(INCLUDING, BUT NOT # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS", "the # distribution. # * Neither the name of the nor the names", "import surface #~ import system #~ #~ __all__=[\"calc\", #~ \"component\", #~ \"comp_lib\", #~", "# met: # # * Redistributions of source code must retain the above", "with the # distribution. # * Neither the name of the nor the", "LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, # DATA,", "OUT OF THE USE # OF THIS SOFTWARE, EVEN IF ADVISED OF THE", "OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # OF THIS", "OF THE POSSIBILITY OF SUCH DAMAGE. 
''' Package containing modules and submodules defining", "WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING", "be used to describe Optical surfaces, components and systems. ''' #~ import calc", "contributors may be used to endorse or promote products derived from # this", "permitted provided that the following conditions are # met: # # * Redistributions", "must retain the above copyright # notice, this list of conditions and the", "* Redistributions in binary form must reproduce the above # copyright notice, this", "Redistribution and use in source and binary forms, with or without # modification,", "import * #~ from mat_lib import * #~ from ray import * #~", "comp_lib #~ import library #~ import mat_lib #~ import shape #~ import surface", "source and binary forms, with or without # modification, are permitted provided that", "#~ import mat_lib #~ import shape #~ import surface #~ import system #~", "# distribution. # * Neither the name of the nor the names of", "import comp_lib #~ import library #~ import mat_lib #~ import shape #~ import", "BUT NOT # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF", "THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # \"AS IS\"", "calc import * #~ from component import * #~ from comp_lib import *", "Redistributions of source code must retain the above copyright # notice, this list", "modification, are permitted provided that the following conditions are # met: # #", "STRICT LIABILITY, OR TORT # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY", "#~ from comp_lib import * #~ #from config import * #~ from library", "notice, this list of conditions and the following disclaimer. # * Redistributions in", "the name of the nor the names of its # contributors may be", "derived from # this software without specific prior written permission. # # THIS", "distribution. # * Neither the name of the nor the names of its", "WARRANTIES OF MERCHANTABILITY AND FITNESS FOR # A PARTICULAR PURPOSE ARE DISCLAIMED. IN", "from calc import * #~ from component import * #~ from comp_lib import", "IN NO EVENT SHALL THE COPYRIGHT # OWNER OR CONTRIBUTORS BE LIABLE FOR", "#~ \"component\", #~ \"comp_lib\", #~ \"library\", #~ \"mat_lib\", #~ \"shape\", #~ \"surface\", #~", "are permitted provided that the following conditions are # met: # # *", "# contributors may be used to endorse or promote products derived from #", "shape #~ import surface #~ import system #~ #~ __all__=[\"calc\", #~ \"component\", #~", "#~ \"system\"] #~ from calc import * #~ from component import * #~", "PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # \"AS IS\" AND ANY EXPRESS", "DAMAGE. ''' Package containing modules and submodules defining an *API* that can be", "EVENT SHALL THE COPYRIGHT # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,", "(c) 2007, 2008, 2009,2010 <NAME> # <<EMAIL>>, # All rights reserved. # #", "ray import * #~ from shape import * #~ from surface import *", "forms, with or without # modification, are permitted provided that the following conditions", "import * #~ from shape import * #~ from surface import * #~", "source code must retain the above copyright # notice, this list of conditions", "FOR # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT", "CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR", "in source and binary forms, with or without # modification, are permitted provided", "following disclaimer. 
# * Redistributions in binary form must reproduce the above #", "disclaimer. # * Redistributions in binary form must reproduce the above # copyright", "IN ANY WAY OUT OF THE USE # OF THIS SOFTWARE, EVEN IF", "disclaimer # in the documentation and/or other materials provided with the # distribution.", "''' #~ import calc #~ import component #~ import comp_lib #~ import library", "# # Redistribution and use in source and binary forms, with or without", "and the following disclaimer # in the documentation and/or other materials provided with", "AND ON ANY # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR", "from mat_lib import * #~ from ray import * #~ from shape import", "#~ import surface #~ import system #~ #~ __all__=[\"calc\", #~ \"component\", #~ \"comp_lib\",", "above # copyright notice, this list of conditions and the following disclaimer #", "OR SERVICES; LOSS OF USE, # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER", "form must reproduce the above # copyright notice, this list of conditions and", "used to endorse or promote products derived from # this software without specific", "import * #~ from comp_lib import * #~ #from config import * #~", "IN CONTRACT, STRICT LIABILITY, OR TORT # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN", "list of conditions and the following disclaimer # in the documentation and/or other", "BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # \"AS IS\" AND ANY EXPRESS OR", "#~ from mat_lib import * #~ from ray import * #~ from shape", "promote products derived from # this software without specific prior written permission. #", "COPYRIGHT # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, #", "\"comp_lib\", #~ \"library\", #~ \"mat_lib\", #~ \"shape\", #~ \"surface\", #~ \"system\"] #~ from", "prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS", "ON ANY # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT", "components and systems. ''' #~ import calc #~ import component #~ import comp_lib", "# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE", "(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE #", "HOWEVER CAUSED AND ON ANY # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT", "#~ from calc import * #~ from component import * #~ from comp_lib", "-*- # Copyright (c) 2007, 2008, 2009,2010 <NAME> # <<EMAIL>>, # All rights", "can be used to describe Optical surfaces, components and systems. ''' #~ import", "AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE", "IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO,", "use in source and binary forms, with or without # modification, are permitted", "OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY # THEORY OF", "to describe Optical surfaces, components and systems. ''' #~ import calc #~ import", "#~ __all__=[\"calc\", #~ \"component\", #~ \"comp_lib\", #~ \"library\", #~ \"mat_lib\", #~ \"shape\", #~", "# All rights reserved. 
# # Redistribution and use in source and binary", "CAUSED AND ON ANY # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,", "#~ \"surface\", #~ \"system\"] #~ from calc import * #~ from component import", "to endorse or promote products derived from # this software without specific prior", "# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # \"AS", "#~ from component import * #~ from comp_lib import * #~ #from config", "# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, # SPECIAL,", "\"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED", "''' Package containing modules and submodules defining an *API* that can be used", "import component #~ import comp_lib #~ import library #~ import mat_lib #~ import", "Package containing modules and submodules defining an *API* that can be used to", "HOLDERS AND CONTRIBUTORS # \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING,", "# <<EMAIL>>, # All rights reserved. # # Redistribution and use in source", "that can be used to describe Optical surfaces, components and systems. ''' #~", "products derived from # this software without specific prior written permission. # #", "USE, # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY", "and the following disclaimer. # * Redistributions in binary form must reproduce the", "# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # (INCLUDING", "must reproduce the above # copyright notice, this list of conditions and the", "import system #~ #~ __all__=[\"calc\", #~ \"component\", #~ \"comp_lib\", #~ \"library\", #~ \"mat_lib\",", "# copyright notice, this list of conditions and the following disclaimer # in", "documentation and/or other materials provided with the # distribution. # * Neither the", "from # this software without specific prior written permission. # # THIS SOFTWARE", "comp_lib import * #~ #from config import * #~ from library import *", "LOSS OF USE, # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND", "import mat_lib #~ import shape #~ import surface #~ import system #~ #~", "be used to endorse or promote products derived from # this software without", "OF MERCHANTABILITY AND FITNESS FOR # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO", "USE # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH", "__all__=[\"calc\", #~ \"component\", #~ \"comp_lib\", #~ \"library\", #~ \"mat_lib\", #~ \"shape\", #~ \"surface\",", "<<EMAIL>>, # All rights reserved. # # Redistribution and use in source and", "BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR", "OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS", "and binary forms, with or without # modification, are permitted provided that the", "names of its # contributors may be used to endorse or promote products", "import shape #~ import surface #~ import system #~ #~ __all__=[\"calc\", #~ \"component\",", "# * Neither the name of the nor the names of its #", "AND FITNESS FOR # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL", "SUCH DAMAGE. ''' Package containing modules and submodules defining an *API* that can", "#~ \"comp_lib\", #~ \"library\", #~ \"mat_lib\", #~ \"shape\", #~ \"surface\", #~ \"system\"] #~", "#~ from ray import * #~ from shape import * #~ from surface", "systems. 
''' #~ import calc #~ import component #~ import comp_lib #~ import", "the nor the names of its # contributors may be used to endorse", "library #~ import mat_lib #~ import shape #~ import surface #~ import system", "defining an *API* that can be used to describe Optical surfaces, components and", "2008, 2009,2010 <NAME> # <<EMAIL>>, # All rights reserved. # # Redistribution and", "IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # \"AS IS\" AND ANY", "import calc #~ import component #~ import comp_lib #~ import library #~ import", "TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR # A PARTICULAR PURPOSE", "WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND", "# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT #", "copyright notice, this list of conditions and the following disclaimer # in the", "* #~ #from config import * #~ from library import * #~ from", "ANY WAY OUT OF THE USE # OF THIS SOFTWARE, EVEN IF ADVISED", "ARISING IN ANY WAY OUT OF THE USE # OF THIS SOFTWARE, EVEN", "provided with the # distribution. # * Neither the name of the nor", "provided that the following conditions are # met: # # * Redistributions of", "OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, # SPECIAL, EXEMPLARY, OR", "FITNESS FOR # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE", "* #~ from mat_lib import * #~ from ray import * #~ from", "INTERRUPTION) HOWEVER CAUSED AND ON ANY # THEORY OF LIABILITY, WHETHER IN CONTRACT,", "OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. '''", "CONTRIBUTORS # \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT", "OR TORT # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF", "met: # # * Redistributions of source code must retain the above copyright", "\"shape\", #~ \"surface\", #~ \"system\"] #~ from calc import * #~ from component", "LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR # A PARTICULAR", "# Copyright (c) 2007, 2008, 2009,2010 <NAME> # <<EMAIL>>, # All rights reserved.", "following disclaimer # in the documentation and/or other materials provided with the #", "THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. ''' Package", "describe Optical surfaces, components and systems. ''' #~ import calc #~ import component", "OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF", "#~ \"library\", #~ \"mat_lib\", #~ \"shape\", #~ \"surface\", #~ \"system\"] #~ from calc", "#~ #from config import * #~ from library import * #~ from mat_lib", "THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR # A PARTICULAR PURPOSE ARE", "# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT # LIMITED TO, PROCUREMENT", "utf-8 -*- # Copyright (c) 2007, 2008, 2009,2010 <NAME> # <<EMAIL>>, # All", "in binary form must reproduce the above # copyright notice, this list of", "of conditions and the following disclaimer # in the documentation and/or other materials", "EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT # LIMITED TO, PROCUREMENT OF SUBSTITUTE", "shape import * #~ from surface import * #~ from system import *", "DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT # OWNER OR CONTRIBUTORS BE LIABLE", "* #~ from library import * #~ from mat_lib import * #~ from", "PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT # OWNER OR CONTRIBUTORS", "CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL", "the above # copyright notice, this list of conditions and the following disclaimer", "used to describe Optical surfaces, components and systems. ''' #~ import calc #~", "ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT # OWNER OR CONTRIBUTORS BE", "the following disclaimer. # * Redistributions in binary form must reproduce the above", "# * Redistributions in binary form must reproduce the above # copyright notice,", "are # met: # # * Redistributions of source code must retain the", "MERCHANTABILITY AND FITNESS FOR # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT", "calc #~ import component #~ import comp_lib #~ import library #~ import mat_lib", "the names of its # contributors may be used to endorse or promote", "retain the above copyright # notice, this list of conditions and the following", "conditions and the following disclaimer # in the documentation and/or other materials provided", "Optical surfaces, components and systems. ''' #~ import calc #~ import component #~", "reserved. # # Redistribution and use in source and binary forms, with or", "# Redistribution and use in source and binary forms, with or without #", "binary form must reproduce the above # copyright notice, this list of conditions", "nor the names of its # contributors may be used to endorse or", "*API* that can be used to describe Optical surfaces, components and systems. '''", "the following disclaimer # in the documentation and/or other materials provided with the", "that the following conditions are # met: # # * Redistributions of source", "DAMAGES (INCLUDING, BUT NOT # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;", "SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT # LIMITED TO, PROCUREMENT OF", "Neither the name of the nor the names of its # contributors may", "PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, # DATA, OR PROFITS;", "#~ from library import * #~ from mat_lib import * #~ from ray", "OF USE, # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON", "#~ #~ __all__=[\"calc\", #~ \"component\", #~ \"comp_lib\", #~ \"library\", #~ \"mat_lib\", #~ \"shape\",", "\"library\", #~ \"mat_lib\", #~ \"shape\", #~ \"surface\", #~ \"system\"] #~ from calc import", "LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,", "Copyright (c) 2007, 2008, 2009,2010 <NAME> # <<EMAIL>>, # All rights reserved. #", "system #~ #~ __all__=[\"calc\", #~ \"component\", #~ \"comp_lib\", #~ \"library\", #~ \"mat_lib\", #~", "TORT # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE", "OF SUCH DAMAGE. ''' Package containing modules and submodules defining an *API* that", "binary forms, with or without # modification, are permitted provided that the following", "of source code must retain the above copyright # notice, this list of", "OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, # SPECIAL, EXEMPLARY,", "\"mat_lib\", #~ \"shape\", #~ \"surface\", #~ \"system\"] #~ from calc import * #~", "reproduce the above # copyright notice, this list of conditions and the following", "# notice, this list of conditions and the following disclaimer. 
# * Redistributions", "may be used to endorse or promote products derived from # this software", "this list of conditions and the following disclaimer # in the documentation and/or", "#~ import library #~ import mat_lib #~ import shape #~ import surface #~", "2009,2010 <NAME> # <<EMAIL>>, # All rights reserved. # # Redistribution and use", "# # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS #", "#~ \"shape\", #~ \"surface\", #~ \"system\"] #~ from calc import * #~ from", "import * #~ from component import * #~ from comp_lib import * #~", "the above copyright # notice, this list of conditions and the following disclaimer.", "from library import * #~ from mat_lib import * #~ from ray import", "from component import * #~ from comp_lib import * #~ #from config import", "THE COPYRIGHT # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,", "INDIRECT, INCIDENTAL, # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT # LIMITED", "import * #~ from ray import * #~ from shape import * #~", "GOODS OR SERVICES; LOSS OF USE, # DATA, OR PROFITS; OR BUSINESS INTERRUPTION)", "LIABILITY, OR TORT # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT", "OF THE USE # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY", "INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS", "#~ import system #~ #~ __all__=[\"calc\", #~ \"component\", #~ \"comp_lib\", #~ \"library\", #~", "surfaces, components and systems. ''' #~ import calc #~ import component #~ import", "# # * Redistributions of source code must retain the above copyright #", "import * #~ from library import * #~ from mat_lib import * #~", "# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.", "modules and submodules defining an *API* that can be used to describe Optical", "\"component\", #~ \"comp_lib\", #~ \"library\", #~ \"mat_lib\", #~ \"shape\", #~ \"surface\", #~ \"system\"]", "library import * #~ from mat_lib import * #~ from ray import *", "written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND", "surface #~ import system #~ #~ __all__=[\"calc\", #~ \"component\", #~ \"comp_lib\", #~ \"library\",", "or promote products derived from # this software without specific prior written permission.", "INCIDENTAL, # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT # LIMITED TO,", "#~ import shape #~ import surface #~ import system #~ #~ __all__=[\"calc\", #~", "specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT", "from shape import * #~ from surface import * #~ from system import", "copyright # notice, this list of conditions and the following disclaimer. # *" ]
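Minus its BSD license text (reproduced verbatim in the fragments above), the pyoptools row reassembles to roughly the module preamble below. Ordering is recovered from the n-gram overlaps, so treat this as a sketch rather than the verbatim file:

#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2007, 2008, 2009,2010 <NAME>
# <<EMAIL>>, All rights reserved.
# (BSD-style license conditions and disclaimer as given in the fragments above.)

'''
Package containing modules and submodules defining an *API* that can be used
to describe Optical surfaces, components and systems.
'''
#~ import calc
#~ import component
#~ import comp_lib
#~ import library
#~ import mat_lib
#~ import shape
#~ import surface
#~ import system
#~
#~ __all__=["calc", "component", "comp_lib", "library",
#~          "mat_lib", "shape", "surface", "system"]
#~ from calc import *
#~ from component import *
#~ from comp_lib import *
#~ #from config import *
#~ from library import *
#~ from mat_lib import *
#~ from ray import *
#~ from shape import *
#~ from surface import *
#~ from system import *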
[ "np from message_ix import Scenario msg_args = ('canning problem (MESSAGE scheme)', 'standard') msg_multiyear_args", "message_ix import Scenario msg_args = ('canning problem (MESSAGE scheme)', 'standard') msg_multiyear_args = ('canning", "('canning problem (MESSAGE scheme)', 'standard') msg_multiyear_args = ('canning problem (MESSAGE scheme)', 'multi-year') def", "('canning problem (MESSAGE scheme)', 'multi-year') def test_solve_legacy_scenario(test_legacy_mp): scen = Scenario(test_legacy_mp, *msg_args) exp =", "(MESSAGE scheme)', 'multi-year') def test_solve_legacy_scenario(test_legacy_mp): scen = Scenario(test_legacy_mp, *msg_args) exp = scen.var('OBJ')['lvl'] #", "Scenario(test_legacy_mp, *msg_args) exp = scen.var('OBJ')['lvl'] # solve scenario, assert that the new objective", "scheme)', 'standard') msg_multiyear_args = ('canning problem (MESSAGE scheme)', 'multi-year') def test_solve_legacy_scenario(test_legacy_mp): scen =", "problem (MESSAGE scheme)', 'standard') msg_multiyear_args = ('canning problem (MESSAGE scheme)', 'multi-year') def test_solve_legacy_scenario(test_legacy_mp):", "= Scenario(test_legacy_mp, *msg_args) exp = scen.var('OBJ')['lvl'] # solve scenario, assert that the new", "problem (MESSAGE scheme)', 'multi-year') def test_solve_legacy_scenario(test_legacy_mp): scen = Scenario(test_legacy_mp, *msg_args) exp = scen.var('OBJ')['lvl']", "from message_ix import Scenario msg_args = ('canning problem (MESSAGE scheme)', 'standard') msg_multiyear_args =", "Scenario msg_args = ('canning problem (MESSAGE scheme)', 'standard') msg_multiyear_args = ('canning problem (MESSAGE", "= ('canning problem (MESSAGE scheme)', 'multi-year') def test_solve_legacy_scenario(test_legacy_mp): scen = Scenario(test_legacy_mp, *msg_args) exp", "that the new objective value is close to previous scen.remove_solution() scen.solve() assert np.isclose(exp,", "test_solve_legacy_scenario(test_legacy_mp): scen = Scenario(test_legacy_mp, *msg_args) exp = scen.var('OBJ')['lvl'] # solve scenario, assert that", "solve scenario, assert that the new objective value is close to previous scen.remove_solution()", "numpy as np from message_ix import Scenario msg_args = ('canning problem (MESSAGE scheme)',", "msg_multiyear_args = ('canning problem (MESSAGE scheme)', 'multi-year') def test_solve_legacy_scenario(test_legacy_mp): scen = Scenario(test_legacy_mp, *msg_args)", "'multi-year') def test_solve_legacy_scenario(test_legacy_mp): scen = Scenario(test_legacy_mp, *msg_args) exp = scen.var('OBJ')['lvl'] # solve scenario,", "# solve scenario, assert that the new objective value is close to previous", "'standard') msg_multiyear_args = ('canning problem (MESSAGE scheme)', 'multi-year') def test_solve_legacy_scenario(test_legacy_mp): scen = Scenario(test_legacy_mp,", "the new objective value is close to previous scen.remove_solution() scen.solve() assert np.isclose(exp, scen.var('OBJ')['lvl'])", "msg_args = ('canning problem (MESSAGE scheme)', 'standard') msg_multiyear_args = ('canning problem (MESSAGE scheme)',", "scheme)', 'multi-year') def test_solve_legacy_scenario(test_legacy_mp): scen = Scenario(test_legacy_mp, *msg_args) exp = scen.var('OBJ')['lvl'] # solve", "scen = Scenario(test_legacy_mp, *msg_args) exp = scen.var('OBJ')['lvl'] # solve scenario, assert that the", "<reponame>GamzeUnlu95/message_ix import numpy as np from message_ix import Scenario msg_args = ('canning problem", "as np from message_ix import Scenario msg_args = ('canning problem (MESSAGE scheme)', 'standard')", "scenario, assert that the new 
objective value is close to previous scen.remove_solution() scen.solve()", "def test_solve_legacy_scenario(test_legacy_mp): scen = Scenario(test_legacy_mp, *msg_args) exp = scen.var('OBJ')['lvl'] # solve scenario, assert", "*msg_args) exp = scen.var('OBJ')['lvl'] # solve scenario, assert that the new objective value", "import Scenario msg_args = ('canning problem (MESSAGE scheme)', 'standard') msg_multiyear_args = ('canning problem", "(MESSAGE scheme)', 'standard') msg_multiyear_args = ('canning problem (MESSAGE scheme)', 'multi-year') def test_solve_legacy_scenario(test_legacy_mp): scen", "scen.var('OBJ')['lvl'] # solve scenario, assert that the new objective value is close to", "exp = scen.var('OBJ')['lvl'] # solve scenario, assert that the new objective value is", "= scen.var('OBJ')['lvl'] # solve scenario, assert that the new objective value is close", "assert that the new objective value is close to previous scen.remove_solution() scen.solve() assert", "= ('canning problem (MESSAGE scheme)', 'standard') msg_multiyear_args = ('canning problem (MESSAGE scheme)', 'multi-year')", "import numpy as np from message_ix import Scenario msg_args = ('canning problem (MESSAGE" ]
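The message_ix row is short enough that its overlaps pin down the whole file. A best-effort reconstruction of the test module it encodes (a sketch recovered from the n-gram overlaps, not guaranteed verbatim; the <reponame> marker is left out):

import numpy as np
from message_ix import Scenario

msg_args = ('canning problem (MESSAGE scheme)', 'standard')
msg_multiyear_args = ('canning problem (MESSAGE scheme)', 'multi-year')


def test_solve_legacy_scenario(test_legacy_mp):
    scen = Scenario(test_legacy_mp, *msg_args)
    exp = scen.var('OBJ')['lvl']

    # solve scenario, assert that the new objective value is close to previous
    scen.remove_solution()
    scen.solve()
    assert np.isclose(exp, scen.var('OBJ')['lvl'])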
[ "yourself...\", ) return await ctx.send(embed=embed69) em = nextcord.Embed( title=\"Are you sure?\", description=\"This is", "is None: embed = nextcord.Embed( title=\"Add Role Error\", description=\"Please ping a user to", "sync class AllConfirm(nextcord.ui.View): def __init__(self,ctx): super().__init__(timeout=200) self.value = None self.ctx=ctx @nextcord.ui.button( label=\"Confirm\", style=nextcord.ButtonStyle.grey,emoji=\"<a:yes:909765403801182208>\")", "self.value = False self.stop() async def interaction_check(self, interaction) -> bool: if interaction.user !=self.ctx.author:", "to mute - Not Found\" ) return await ctx.send(embed=em1) elif member.id == ctx.author.id:", "\", value=reason) view=AllConfirm(ctx) await ctx.send(embed=banEmbed,view=view) await view.wait() if view.value==False: em = nextcord.Embed(title=f\"<a:no:909765403872481280> |", "= nextcord.Embed(title=f\"<a:yes:909765403801182208> | {user.name} Was Banned indefinitely\") await ctx.send(embed=em) await ctx.guild.ban(user) if time", "ctx.send(embed=em3) embed = nextcord.Embed( title=\"Mute Success\", description=f\"{member.mention} was muted Indefinitly \", colour=nextcord.Colour.blue(), )", "| Reason: **{reason}**\") await member.ban(reason=reason) @commands.command(description=\"Lucas unban method\") @commands.has_permissions(ban_members=True) async def unban(self, ctx,*,member):", "Embed from nextcord.ext import commands from nextcord.ext.commands.cooldowns import BucketType from nextcord.ui.view import View", "await ctx.send(embed=em) await member.send(f\"You got kicked in **{ctx.guild}** | Reason: **{reason}**\") await member.kick(reason=reason)", "slowmode(self, ctx, time: int): try: if time == 0: em1 = nextcord.Embed( title=\"Slowmode", "situations such as, `NSFW or NSFLPosting` or `Raid on the Server`. 
Only use", "(member_name, member_discriminator): await ctx.guild.unban(user) view=AllConfirm(ctx) if view.value==False: em = nextcord.Embed(title=f\"<a:no:909765403872481280> | *{member.name} Was", "addRole: embed = nextcord.Embed( title=\"Add Role Error\", description=f\"{member.mention} already has the role you", "nextcord.Embed( title=\"Add Role Success\", description=f\"{role.mention} has been assigned to {member.mention}\", ) await ctx.send(embed=em)", "member.top_role.position: em3 = nextcord.Embed( title=\"Unmute Error\", description=\"Member **higher** than you in the role", "= None): f\"\"\" **Info**: Get ChannelStats *Syntax*: \"{self.ctx.prefix}\" channelstats [channel] \"\"\" if channel", ") @commands.has_permissions(manage_messages=True) async def mute(self, ctx, member: nextcord.Member = None, *, reason=None): guild", "have enough permissions to remove this role\", ) return await ctx.send(embed=em) if ctx.guild.me.top_role.position", "ctx.guild.me.top_role.position < mutedRole.position: em3 = nextcord.Embed( title=\"Tempmute Error\", description=\"Muted role too high to", "return await ctx.send(embed=em4) if ctx.guild.me.top_role.position < mutedRole.position: em3 = nextcord.Embed( title=\"Tempmute Error\", description=\"Muted", "nextcord.embeds import Embed from nextcord.ext import commands from nextcord.ext.commands.cooldowns import BucketType from nextcord.ui.view", "banned in **{guild}** | Reason: **{reason}**\") await member.ban(reason=reason) @commands.command(description=\"Lucas unban method\") @commands.has_permissions(ban_members=True) async", "member.send( f\"You have been unmuted from: **{guild.name}** | Reason: **{reason}**\" ) return @commands.command(description=\"Clears", "role: nextcord.Role = None, *, reason=None, ): if member is None: embed =", "3600, \"d\": 86400} tempmute = int(time[0]) * time_convert[time[-1]] embed = nextcord.Embed( title=\"Tempmute Success\",", "= nextcord.utils.get(guild.roles, name=\"Muted\") if ctx.guild.me.top_role.position < mutedRole.position: em3 = nextcord.Embed( title=\"Mute Error\", description=\"Muted", "await channel.set_permissions( ctx.guild.default_role, reason=f\"{ctx.author.name} unlocked {channel.name} using --server override\", send_messages=None, ) embed =", ") return await ctx.send(embed=embed69) em = nextcord.Embed( title=\"Are you sure?\", description=\"This is a", "member.ban(reason=reason) await member.send(f\"You got banned in **{guild}** | Reason: **{reason}**\") else: banEmbed =", "async def tempban(self,ctx, user:nextcord.User, time=None,reason=None): if reason==None: reason=\"No Reason\" if user!= None: if", "unban method\") @commands.has_permissions(ban_members=True) async def unban(self, ctx,*,member): f\"\"\" **Info**: Unbans a member \"\"\"", "return await ctx.send(embed=em5) elif ctx.author.top_role.position < member.top_role.position: em3 = nextcord.Embed( title=\"Mute Error\", description=\"Member", "reason==None: reason=\"No Reason\" if user!= None: if time==None: em = nextcord.Embed(title=f\"<a:yes:909765403801182208> | {user.name}", "(user.name, user.discriminator) == (member_name, member_discriminator): await ctx.guild.unban(user) view=AllConfirm(ctx) if view.value==False: em = nextcord.Embed(title=f\"<a:no:909765403872481280>", "channelstats(self, ctx, channel: nextcord.TextChannel = None): f\"\"\" **Info**: Get ChannelStats *Syntax*: \"{self.ctx.prefix}\" channelstats", "is Annoucement?\", value=channel.is_news(), inline=True) embed.add_field(name=\"Channel Hash:\", value=hash(channel), 
inline=True) embed.add_field(name=\"Channel Creation Time:\", value=channel.created_at.strftime(\"%a, %d", "= nextcord.Embed( title=\"Slowmode Error\", description=\"Slowmode over 6 hours\" ) await ctx.send(embed=em2) else: await", "risky command only to be used in important situations such as, `NSFW or", "ctx.author.send(embed=banEmbed) await member.ban(reason=reason) await member.send(f\"You got banned in **{guild}** | Reason: **{reason}**\") else:", "member for a specific amount of time.\" ) @commands.has_permissions(manage_messages=True) async def mute(self, ctx,", "ctx.send(embed=em4) if not (ctx.guild.me.guild_permissions.manage_roles): embed2 = nextcord.Embed( title=\"Mute Error\", description=\"I require the **Manage", "title=\"Slowmode Error\", description=\"Slowmode over 6 hours\" ) await ctx.send(embed=em2) else: await ctx.channel.edit(slowmode_delay=time) em3", ") return await ctx.send(embed=em4) if not (ctx.guild.me.guild_permissions.manage_roles): embed2 = nextcord.Embed( title=\"Mute Error\", description=\"I", "is None: reason = f\"{ctx.author.name} modbanned {member.name}\" else: reason = ( f\"{ctx.author.name} modbanned", "commands.BucketType.user) async def modban(self, ctx, member, *, reason=None): if reason is None: reason", "self, ctx, member: nextcord.Member = None, role: nextcord.Role = None, *, reason=None, ):", "role_ in member.roles: if role_ == role: addRole = False break if not", "None): f\"\"\" **Info**: Get ChannelStats *Syntax*: \"{self.ctx.prefix}\" channelstats [channel] \"\"\" if channel ==", "Invalid Permission\", ) return await ctx.send(embed=em4) if not (ctx.guild.me.guild_permissions.manage_roles): embed2 = nextcord.Embed( title=\"Unmute", "elif ctx.author.top_role.position < member.top_role.position: em3 = nextcord.Embed( title=\"Ban Error\", description=\"Member **higher** than you", "= nextcord.Embed( title=\"Unlock Success\", description=f\"Unlocked {channel.mention} \", ) await ctx.send(embed=embed) @commands.command(description=\"Modbans the member.\")", "Guild:-\", value=ctx.guild.name, inline=True) embed.add_field(name=\"Channel Id:-\", value=channel.id, inline=False) embed.add_field(name=\"Channel Topic:-\",value=f\"{channel.topic if channel.topic else 'No", "= nextcord.Embed( title=\"Remove Role Success!\", description=f\"{role.mention} has been removed from {member.mention}\", ) await", ") return @commands.command(name=\"unmute\", description=\"Unmutes a muted member.\") @commands.has_permissions(manage_messages=True) async def unmute(self, ctx, member:", "Error\", description=\"Please ping a user to give them a role!\", ) await ctx.send(embed=embed)", "await ctx.send(embed=embed69) elif ctx.author.top_role.position < member.top_role.position: em3 = nextcord.Embed( title=\"Ban Error\", description=\"Member **higher**", "em = nextcord.Embed(title=f\"<a:yes:909765403801182208> | {user.name} Was Banned indefinitely\") await ctx.send(embed=em) await ctx.guild.ban(user) if", "*, reason=None): \"\"\" **Info**: Bans a member \"\"\" if member == None: embed1", "interaction) -> bool: if interaction.user !=self.ctx.author: await interaction.response.send_message(\"You can't use that!!\" , ephemeral=True)", "== None: em1 = nextcord.Embed( title=\"Mute Error\", description=\"Member to mute - Not Found\"", "| *{member.name} Was Kicked!*\") await ctx.send(embed=em) await member.send(f\"You got kicked in **{ctx.guild}** |", "description=\"Unmutes a muted member.\") @commands.has_permissions(manage_messages=True) async def unmute(self, ctx, member: 
"""Moderation cog for a nextcord bot: ban, kick, mute, role and channel-lock commands."""

import asyncio
import json
import random
from datetime import datetime
from difflib import get_close_matches

import nextcord
from nextcord.channel import CategoryChannel, DMChannel
from nextcord.colour import Color
from nextcord.components import Button
from nextcord.embeds import Embed
from nextcord.ext import commands
from nextcord.ext.commands.cooldowns import BucketType
from nextcord.ui.view import View
from nextcord.webhook import sync


class AllConfirm(nextcord.ui.View):
    """Confirm/Cancel button pair that only the invoking author may press."""

    def __init__(self, ctx):
        super().__init__()
        self.value = None
        self.ctx = ctx

    @nextcord.ui.button(label="Confirm", style=nextcord.ButtonStyle.grey, emoji="<a:yes:909765403801182208>")
    async def confirm(self, button: nextcord.ui.Button, interaction: nextcord.Interaction):
        self.value = True
        self.stop()

    @nextcord.ui.button(label="Cancel", style=nextcord.ButtonStyle.grey, emoji="<a:no:909765403872481280>")
    async def cancel(self, button: nextcord.ui.Button, interaction: nextcord.Interaction):
        self.value = False
        self.stop()

    async def interaction_check(self, interaction) -> bool:
        if interaction.user != self.ctx.author:
            await interaction.response.send_message("You can't use that!!", ephemeral=True)
            return False
        return True


class LockConfirm(nextcord.ui.View):
    # Referenced by `lock --server`; assumed to mirror AllConfirm minus the
    # author check, since its body is not recoverable from this file.
    def __init__(self):
        super().__init__()
        self.value = None

    @nextcord.ui.button(label="Confirm", style=nextcord.ButtonStyle.grey, emoji="<a:yes:909765403801182208>")
    async def confirm(self, button: nextcord.ui.Button, interaction: nextcord.Interaction):
        self.value = True
        self.stop()

    @nextcord.ui.button(label="Cancel", style=nextcord.ButtonStyle.grey, emoji="<a:no:909765403872481280>")
    async def cancel(self, button: nextcord.ui.Button, interaction: nextcord.Interaction):
        self.value = False
        self.stop()


BOT_USER_ID = "897762972603150346"


class Moderation(commands.Cog):
    def __init__(self, bot):
        self.bot = bot

    @commands.Cog.listener()
    async def on_message(self, message):
        # Ignore the bot's own messages; only an alias of channel.send is set up here.
        if str(message.author.id) != str(BOT_USER_ID):
            send = message.channel.send

    @commands.command(name="tempban")
    @commands.has_permissions(ban_members=True)
    async def tempban(self, ctx, user: nextcord.User, time=None, reason=None):
        if reason is None:
            reason = "No Reason"
        if user is not None:
            if time is None:
                em = nextcord.Embed(title=f"<a:yes:909765403801182208> | {user.name} Was Banned indefinitely")
                await ctx.send(embed=em)
                await ctx.guild.ban(user)
                return
            # "30m" -> 1800 seconds, "2d" -> 172800 seconds, and so on
            time_convert = {"s": 1, "m": 60, "h": 3600, "d": 86400}
            tempban1 = int(time[:-1]) * time_convert[time[-1]]
            em = nextcord.Embed(
                title=f"<a:yes:909765403801182208> | `{user.name}` Was Banned | Duration: {time} | Reason: {reason}"
            )
            await ctx.send(embed=em)
            if user.bot:
                await ctx.guild.ban(user)
                await asyncio.sleep(tempban1)
                await ctx.guild.unban(user)
            else:
                # DM the user before banning, while they are still reachable
                await user.send(f"**{ctx.guild.name}**: You have been banned for {time}\n**Reason:** {reason}")
                await ctx.guild.ban(user)
                await asyncio.sleep(tempban1)
                await ctx.guild.unban(user)
        else:
            em = nextcord.Embed(title="<a:no:909765403872481280> | Member To Ban Was Not Found")
            await ctx.send(embed=em)

    @commands.command(name="ban", description="Bans the member from your server.")
    async def ban(self, ctx, member: nextcord.Member = None, *, reason=None):
        """ **Info**: Bans a member """
        if member is None:
            embed1 = nextcord.Embed(title="Ban Error", description="Member to ban - Not Found")
            return await ctx.send(embed=embed1)
        if member.id == ctx.author.id:
            embed69 = nextcord.Embed(title="Ban Error", description="You can't do that XD !")
            return await ctx.send(embed=embed69)
        elif ctx.author.top_role.position < member.top_role.position:
            em3 = nextcord.Embed(
                title="Ban Error",
                description="Member **higher** than you in the role hierarchy - Invalid Permission",
            )
            return await ctx.send(embed=em3)
        elif ctx.author.top_role.position == member.top_role.position:
            em3 = nextcord.Embed(
                title="Ban Error",
                description="Member has same role as you in the role hierarchy - Invalid Permission",
            )
            return await ctx.send(embed=em3)
        guild = ctx.guild
        banEmbed = nextcord.Embed(
            title="Moderation Action <:moderation:910472145824542721> | Ban Case ",
            color=nextcord.Color.red(),
        )
        banEmbed.add_field(name="Reason: ", value=reason)
        view = AllConfirm(ctx)
        await ctx.send(embed=banEmbed, view=view)
        await view.wait()
        if view.value is False:
            em = nextcord.Embed(title=f"<a:no:909765403872481280> | *{member.name} Was Not Banned!*")
            await ctx.send(embed=em)
        elif view.value is True:
            em = nextcord.Embed(title=f"<a:yes:909765403801182208> | *{member.name} Was Banned!*")
            await ctx.send(embed=em)
            await member.send(f"You got banned in **{guild}** | Reason: **{reason}**")
            await member.ban(reason=reason)

    @commands.command(description="Lucas unban method")
    @commands.has_permissions(ban_members=True)
    async def unban(self, ctx, *, member):
        """ **Info**: Unbans a member """
        banned_user = await ctx.guild.bans()
        member_name, member_discriminator = member.split("#")
        for ban_entry in banned_user:
            user = ban_entry.user
            if (user.name, user.discriminator) == (member_name, member_discriminator):
                view = AllConfirm(ctx)
                await ctx.send(f"Unban **{user}**?", view=view)
                await view.wait()
                if view.value is False:
                    em = nextcord.Embed(title=f"<a:no:909765403872481280> | *{user.name} Was Not Unbanned!*")
                    await ctx.send(embed=em)
                elif view.value is True:
                    await ctx.guild.unban(user)
                    em = nextcord.Embed(title=f"<a:yes:909765403801182208> | *{user.name} Was Unbanned!*")
                    await ctx.send(embed=em)
                return

    @commands.command(name="kick", description="Kicks the member from your server.")
    @commands.has_permissions(kick_members=True)
    async def kick(self, ctx, member: nextcord.Member = None, *, reason=None):
        if member is None:
            embed1 = nextcord.Embed(title="Kick Error", description="Member to kick - Not Found")
            return await ctx.send(embed=embed1)
        if not ctx.guild.me.guild_permissions.kick_members:
            embed2 = nextcord.Embed(
                title="Kick Error",
                description="I require the ``Kick Members`` permission to run this command - Missing Permission",
            )
            return await ctx.send(embed=embed2)
        if member.id == ctx.author.id:
            embed = nextcord.Embed(title="Kick Error", description="Can't kick yourself ")
            return await ctx.send(embed=embed)
        elif ctx.author.top_role.position < member.top_role.position:
            em3 = nextcord.Embed(
                title="Kick Error",
                description="Member **higher** than you in the role hierarchy - Invalid Permission",
            )
            return await ctx.send(embed=em3)
        kickEmbed = nextcord.Embed(
            title="Moderation Action <:moderation:910472145824542721> | Kick Case ",
            color=nextcord.Color.red(),
        )
        kickEmbed.add_field(name="Reason: ", value=reason)
        view = AllConfirm(ctx)
        await ctx.send(embed=kickEmbed, view=view)
        await view.wait()
        if view.value is False:
            em = nextcord.Embed(title=f"<a:no:909765403872481280> | *{member.name} Was Not Kicked!*")
            await ctx.send(embed=em)
        elif view.value is True:
            em = nextcord.Embed(title=f"<a:yes:909765403801182208> | *{member.name} Was Kicked!*")
            await ctx.send(embed=em)
            await member.send(f"You got kicked in **{ctx.guild}** | Reason: **{reason}**")
            await member.kick(reason=reason)

    @commands.command(aliases=["cs", "ci", "channelinfo"])
    async def channelstats(self, ctx, channel: nextcord.TextChannel = None):
        if channel is None:
            channel = ctx.channel
        embed = nextcord.Embed(
            title=f"**ChannelStats for {channel.name}**",
            description=f"Category : {channel.category.name}" if channel.category else "Channel is not in any category",
            color=nextcord.Color.random(),
        )
        embed.add_field(name="Channel Guild:-", value=ctx.guild.name, inline=True)
        embed.add_field(name="Channel Id:-", value=channel.id, inline=False)
        embed.add_field(name="Channel Topic:-", value=channel.topic if channel.topic else "No topic.", inline=False)
        embed.add_field(name="Channel Position:-", value=channel.position, inline=True)
        embed.add_field(name="Channel Slowmode?", value=channel.slowmode_delay, inline=True)
        embed.add_field(name="Channel is NSFW?", value=channel.is_nsfw(), inline=True)
        embed.add_field(name="Channel Permissions Synced?", value=bool(channel.permissions_synced), inline=True)
        embed.add_field(name="Channel is Announcement?", value=channel.is_news(), inline=True)
        embed.add_field(name="Channel Hash:", value=hash(channel), inline=True)
        embed.add_field(
            name="Channel Creation Time:",
            value=channel.created_at.strftime("%a, %d %B %Y , %I:%M %p"),
            inline=False,
        )
        embed.set_thumbnail(url=ctx.guild.icon.url)
        await ctx.send(embed=embed)

    @commands.command(name="tempmute", description="Mutes a member for a specific amount of time.")
    @commands.has_permissions(manage_messages=True)
    async def tempmute(self, ctx, member: nextcord.Member = None, time=None, *, reason=None):
        guild = ctx.guild
        mutedRole = nextcord.utils.get(guild.roles, name="Muted")
        if member is None:
            em1 = nextcord.Embed(title="Tempmute Error", description="Member to mute - Not Found")
            return await ctx.send(embed=em1)
        elif member.id == ctx.author.id:
            em5 = nextcord.Embed(title="Tempmute Error", description="Don't bother, ive tried")
            return await ctx.send(embed=em5)
        if time is None:
            em2 = nextcord.Embed(title="Tempmute Error", description="Time to mute - Not Found")
            return await ctx.send(embed=em2)
        elif ctx.author.top_role.position < member.top_role.position:
            em3 = nextcord.Embed(
                title="Tempmute Error",
                description="Member **higher** than you in the role hierarchy - Invalid Permission",
            )
            return await ctx.send(embed=em3)
        elif ctx.author.top_role.position == member.top_role.position:
            em4 = nextcord.Embed(
                title="Tempmute Error",
                description="Member has same role as you in the role hierarchy - Invalid Permission",
            )
            return await ctx.send(embed=em4)
        if not ctx.guild.me.guild_permissions.manage_roles:
            embed2 = nextcord.Embed(
                title="Tempmute Error",
                description="I require the ``Manage Roles`` permission to run this command - Missing Permission",
            )
            return await ctx.send(embed=embed2)
        if not mutedRole:
            mutedRole = await guild.create_role(name="Muted")
            await ctx.send("No mute role found. Creating mute role...")
            for channel in guild.channels:
                await channel.set_permissions(mutedRole, speak=False, send_messages=False, read_message_history=True)
        if ctx.guild.me.top_role.position < mutedRole.position:
            em3 = nextcord.Embed(title="Tempmute Error", description="Muted role too high to give to a member")
            return await ctx.send(embed=em3)
        time_convert = {"s": 1, "m": 60, "h": 3600, "d": 86400}
        tempmute = int(time[:-1]) * time_convert[time[-1]]
        embed = nextcord.Embed(
            title="Tempmute Success",
            description=f"{member.mention} was muted ",
            colour=nextcord.Colour.blue(),
        )
        embed.add_field(name="Reason:", value=reason, inline=False)
        embed.add_field(name="Duration", value=time)
        await ctx.send(embed=embed)
        await member.add_roles(mutedRole, reason=reason)
        await member.send(f"You have been muted from: **{guild.name}** | Reason: **{reason}** | Time: **{time}**")
        await asyncio.sleep(tempmute)
        await member.remove_roles(mutedRole)
        await member.send(f"You have been unmuted from **{guild}**")
        return

    @commands.command(name="mute", description="Mutes a member indefinitely.")
    @commands.has_permissions(manage_messages=True)
    async def mute(self, ctx, member: nextcord.Member = None, *, reason=None):
        guild = ctx.guild
        mutedRole = nextcord.utils.get(guild.roles, name="Muted")
        if member is None:
            em1 = nextcord.Embed(title="Mute Error", description="Member to mute - Not Found")
            return await ctx.send(embed=em1)
        elif member.id == ctx.author.id:
            em5 = nextcord.Embed(title="Mute Error", description="Error")
            return await ctx.send(embed=em5)
        elif ctx.author.top_role.position < member.top_role.position:
            em3 = nextcord.Embed(
                title="Mute Error",
                description="Member **higher** than you in the role hierarchy - Invalid Permission",
            )
            return await ctx.send(embed=em3)
        elif ctx.author.top_role.position == member.top_role.position:
            em4 = nextcord.Embed(
                title="Mute Error",
                description="Member has same role as you in the role hierarchy - Invalid Permission",
            )
            return await ctx.send(embed=em4)
        if not ctx.guild.me.guild_permissions.manage_roles:
            embed2 = nextcord.Embed(
                title="Mute Error",
                description="I require the **Manage Roles** permission to run this command - Missing Permission",
            )
            return await ctx.send(embed=embed2)
        if not mutedRole:
            mutedRole = await guild.create_role(name="Muted")
            await ctx.send("No mute role found. Creating mute role...")
            for channel in guild.channels:
                await channel.set_permissions(mutedRole, speak=False, send_messages=False, read_message_history=True)
        if ctx.guild.me.top_role.position < mutedRole.position:
            em3 = nextcord.Embed(title="Mute Error", description="Muted role too high to give to a member")
            return await ctx.send(embed=em3)
        embed = nextcord.Embed(
            title="Mute Success",
            description=f"{member.mention} was muted indefinitely ",
            colour=nextcord.Colour.blue(),
        )
        embed.add_field(name="Reason:", value=reason, inline=False)
        await ctx.send(embed=embed)
        await member.add_roles(mutedRole, reason=reason)
        await member.send(f"You have been muted from: **{guild.name}** | Reason: **{reason}**")
        return

    @commands.command(name="unmute", description="Unmutes a muted member.")
    @commands.has_permissions(manage_messages=True)
    async def unmute(self, ctx, member: nextcord.Member = None, *, reason=None):
        guild = ctx.guild
        if member is None:
            em1 = nextcord.Embed(title="Unmute Error", description="Member to unmute - Not Found")
            return await ctx.send(embed=em1)
        elif member.id == ctx.author.id:
            em5 = nextcord.Embed(title="Unmute Error", description="wHat? <:WHA:815331017854025790>")
            return await ctx.send(embed=em5)
        elif ctx.author.top_role.position < member.top_role.position:
            em3 = nextcord.Embed(
                title="Unmute Error",
                description="Member **higher** than you in the role hierarchy - Invalid Permission",
            )
            return await ctx.send(embed=em3)
        elif ctx.author.top_role.position == member.top_role.position:
            em4 = nextcord.Embed(
                title="Unmute Error",
                description="Member has same role as you in the role hierarchy - Invalid Permission",
            )
            return await ctx.send(embed=em4)
        if not ctx.guild.me.guild_permissions.manage_roles:
            embed2 = nextcord.Embed(
                title="Unmute Error",
                description="I require the ``Manage Roles`` permission to run this command - Missing Permission",
            )
            return await ctx.send(embed=embed2)
        mutedRole = nextcord.utils.get(guild.roles, name="Muted")
        if not mutedRole:
            mutedRole = await guild.create_role(name="Muted")
            await ctx.send("No mute role found. Creating mute role...")
            for channel in guild.channels:
                await channel.set_permissions(mutedRole, speak=False, send_messages=False, read_message_history=True)
        if ctx.guild.me.top_role.position < mutedRole.position:
            em3 = nextcord.Embed(title="Unmute Error", description="Muted role too high to remove from a member")
            return await ctx.send(embed=em3)
        embed = nextcord.Embed(
            title="Unmute Success",
            description=f"{member.mention} was unmuted ",
            colour=nextcord.Colour.blue(),
        )
        embed.add_field(name="Reason:", value=reason, inline=False)
        await ctx.send(embed=embed)
        await member.remove_roles(mutedRole, reason=reason)
        await member.send(f"You have been unmuted from: **{guild.name}** | Reason: **{reason}**")
        return

    @commands.command(description="Clears a bundle of messages.", aliases=["purge"])
    @commands.has_permissions(manage_messages=True)
    async def clear(self, ctx, amount: int = 10):
        amount = amount + 1  # include the invoking command message
        if amount > 101:
            em1 = nextcord.Embed(title="Clear Error", description="Purge limit exceeded - Greater than 100")
            return await ctx.send(embed=em1)
        await ctx.channel.purge(limit=amount)
        msg = await ctx.send("Cleared Messages")
        await asyncio.sleep(10)
        await msg.delete()

    @commands.command(description="Change the channels slowmode.")
    @commands.has_permissions(manage_channels=True)
    async def slowmode(self, ctx, time: int):
        try:
            if time == 0:
                em1 = nextcord.Embed(title="Slowmode Success", description="Slowmode turned off")
                await ctx.send(embed=em1)
                await ctx.channel.edit(slowmode_delay=0)
            elif time > 21600:
                em2 = nextcord.Embed(title="Slowmode Error", description="Slowmode over 6 hours")
                await ctx.send(embed=em2)
            else:
                await ctx.channel.edit(slowmode_delay=time)
                em3 = nextcord.Embed(title="Slowmode Success", description=f"Slowmode set to {time} seconds")
                await ctx.send(embed=em3)
        except Exception as exc:
            await ctx.send("Error has occurred, notifying dev team")
            print(exc)

    @commands.command(aliases=["giverole", "addr"], description="Gives a member a certain role.")
    @commands.has_permissions(manage_roles=True)
    async def addrole(self, ctx, member: nextcord.Member = None, *, role: nextcord.Role = None):
        if member is None:
            embed = nextcord.Embed(title="Add Role Error", description="Please ping a user to give them a role!")
            return await ctx.send(embed=embed)
        if role is None:
            embed = nextcord.Embed(
                title="Add Role Error",
                description=f"Please ping a role to give {member.mention} that role!",
            )
            return await ctx.send(embed=embed)
        if ctx.author.top_role.position < role.position:
            em = nextcord.Embed(title="Add Role Error", description="You do not have enough permissions to give this role")
            return await ctx.send(embed=em)
        if ctx.guild.me.top_role.position < role.position:
            embed = nextcord.Embed(title="Add Role Error", description="That role is too high for me to perform this action")
            return await ctx.send(embed=embed)
        try:
            if role in member.roles:
                embed = nextcord.Embed(
                    title="Add Role Error",
                    description=f"{member.mention} already has the role you are trying to give",
                )
                return await ctx.send(embed=embed)
            em = nextcord.Embed(title="Add Role Success", description=f"{role.mention} has been assigned to {member.mention}")
            await ctx.send(embed=em)
            await member.add_roles(role)
        except Exception as exc:
            print(exc)

    @commands.command(aliases=["takerole", "remover"], description="Removes a certain role from a member.")
    @commands.has_permissions(manage_roles=True)
    async def removerole(self, ctx, member: nextcord.Member = None, role: nextcord.Role = None, *, reason=None):
        if member is None:
            embed = nextcord.Embed(title="Remove Role Error", description="Please ping a user to remove a role from them!")
            return await ctx.send(embed=embed)
        if role is None:
            embed = nextcord.Embed(
                title="Remove Role Error",
                description=f"Please ping a role to remove the role from {member.mention}!",
            )
            return await ctx.send(embed=embed)
        if ctx.author.top_role.position < role.position:
            em = nextcord.Embed(title="Remove Role Error", description="You do not have enough permissions to remove this role")
            return await ctx.send(embed=em)
        if ctx.guild.me.top_role.position < role.position:
            embed = nextcord.Embed(title="Remove Role Error", description="That role is too high for me to perform this action")
            return await ctx.send(embed=embed)
        try:
            if role not in member.roles:
                embed = nextcord.Embed(
                    title="Remove Role Error",
                    description=f"{member.mention} doesn't have the role you are trying to remove",
                )
                return await ctx.send(embed=embed)
            await member.remove_roles(role)
            em = nextcord.Embed(title="Remove Role Success!", description=f"{role.mention} has been removed from {member.mention}")
            await ctx.send(embed=em)
        except Exception as exc:
            print(exc)

    @commands.command(description="Locks the channel.")
    @commands.has_permissions(kick_members=True)
    async def lock(self, ctx, channel: nextcord.TextChannel = None, setting=None):
        if setting == "--server":
            view = LockConfirm()
            em = nextcord.Embed(
                title="Are you sure?",
                description="This is a very risky command only to be used in important situations "
                "such as, `Raid on the Server`. **If this command is used for the wrong purpose "
                "you may risk getting demoted if not banned from the staff team.**",
            )
            await ctx.author.send(embed=em, view=view)
            await view.wait()
            if view.value is None:
                await ctx.author.send("Command has been Timed Out, please try again.")
            elif view.value:
                for channel in ctx.guild.channels:
                    await channel.set_permissions(
                        ctx.guild.default_role,
                        reason=f"{ctx.author.name} locked {channel.name} using --server override",
                        send_messages=False,
                    )
                embed = nextcord.Embed(title="Lockdown Success", description="Locked entire server ")
                await ctx.send(embed=embed)
            else:
                # Cancel path; only the trailing ":I" of the original wording survives.
                lockEmbed = nextcord.Embed(title="Lock Cancelled", description="Server lockdown cancelled :I")
                await ctx.author.send(embed=lockEmbed)
            return
        if channel is None:
            channel = ctx.message.channel
        await channel.set_permissions(
            ctx.guild.default_role,
            reason=f"{ctx.author.name} locked {channel.name}",
            send_messages=False,
        )
        embed = nextcord.Embed(title="Lockdown Success", description=f"Locked {channel.mention} ")
        await ctx.send(embed=embed)

    @commands.command(description="Unlocks the channel.")
    @commands.has_permissions(kick_members=True)
    async def unlock(self, ctx, channel: nextcord.TextChannel = None, setting=None):
        if setting == "--server":
            for channel in ctx.guild.channels:
                # send_messages=None resets the overwrite to the role default
                await channel.set_permissions(
                    ctx.guild.default_role,
                    reason=f"{ctx.author.name} unlocked {channel.name} using --server override",
                    send_messages=None,
                )
            embed = nextcord.Embed(title="Unlock Success", description="Unlocked entire server ")
            await ctx.send(embed=embed)
            return
        if channel is None:
            channel = ctx.channel
        await channel.set_permissions(
            ctx.guild.default_role,
            reason=f"{ctx.author.name} unlocked {channel.name}",
            send_messages=True,
        )
        embed = nextcord.Embed(title="Unlock Success", description=f"Unlocked {channel.mention} ")
        await ctx.send(embed=embed)

    @commands.command(description="Modbans the member.")
    @commands.has_permissions(kick_members=True)
    @commands.cooldown(1, 21600, commands.BucketType.user)
    async def modban(self, ctx, member: nextcord.Member = None, *, reason=None):
        if member is None:
            embed1 = nextcord.Embed(title="Ban Error", description="Member to ban - Not Found")
            return await ctx.send(embed=embed1)
        if reason is None:
            reason = f"{ctx.author.name} modbanned {member.name}"
        else:
            reason = f"{ctx.author.name} modbanned {member.name} for the reason of {reason}"
        if member.id == ctx.author.id:
            embed69 = nextcord.Embed(title="Ban Error", description="No banning yourself...")
            return await ctx.send(embed=embed69)
        view = AllConfirm(ctx)
        em = nextcord.Embed(
            title="Are you sure?",
            description="This is a very risky command only to be used in important situations "
            "such as, `NSFW or NSFLPosting` or `Raid on the Server`. Only use this command if no "
            "admin is online or responding. **If this command is used for the wrong purpose you "
            "may risk getting demoted if not banned from the staff team.**",
        )
        await ctx.author.send(embed=em, view=view)
        await view.wait()
        if view.value is None:
            await ctx.author.send("Command has been Timed Out, please try again.")
        elif view.value:
            guild = ctx.guild
            # random.choice over a list of ban messages; only "BANNED" is recoverable here
            banMsg = random.choice(["BANNED"])
            banEmbed = nextcord.Embed(title="Ban Success", description=f"{member.mention} {banMsg}")
            banEmbed.add_field(name="Reason", value=reason)
            await ctx.author.send(embed=banEmbed)
            # DM the member before banning, while they are still reachable
            await member.send(f"You got banned in **{guild}** | Reason: **{reason}**")
            await member.ban(reason=reason)
        else:
            banEmbed = nextcord.Embed(title="Ban Cancelled", description="Let's pretend like this never happened then")
            await ctx.author.send(embed=banEmbed)
action\", ) return await ctx.send(embed=embed) try: roleRemoved =", "\"\"\" if channel == None: channel = ctx.channel embed = nextcord.Embed( title=f\"**ChannelStats for", "ctx.author.send(embed=lockEmbed) return if channel is None: channel = ctx.message.channel await channel.set_permissions( ctx.guild.default_role, reason=f\"{ctx.author.name}", "== None: time_convert = {\"s\": 1, \"m\": 60, \"h\": 3600, \"d\": 86400} tempmute", "nextcord.Embed(title=f\"Moderation Action <:moderation:910472145824542721> | Kick Case \",color=nextcord.Color.red()) kickEmbed.add_field(name=\"Reason: \", value=reason) view=AllConfirm(ctx) await ctx.send(embed=kickEmbed,view=view)", "from the staff team.**\", ) await ctx.author.send(embed=em, view=view) await view.wait() if view.value is", "nextcord.ui.Button, interaction: nextcord.Interaction): self.value = True self.stop() @nextcord.ui.button(label=\"Cancel\", style=nextcord.ButtonStyle.grey, emoji=\"<a:no:909765403872481280>\") async def cancel(", "Creating mute role...\") for channel in guild.channels: await channel.set_permissions( mutedRole, speak=False, send_messages=False, read_message_history=True,", "Error\", description=\"Member **higher** than you in the role heirarchy - Invalid Permission\", )", "Error\", description=\"Please ping a user to remove a role from them!\", ) await", "await ctx.send(embed=em) await member.add_roles(role) return except Exception: print(Exception) @commands.command( aliases=[\"takerole\", \"remover\"], description=\"Removes a", "!= str(BOT_USER_ID): send = message.channel.send @commands.command(name=\"tempban\") @commands.has_permissions(ban_members=True) async def tempban(self,ctx, user:nextcord.User, time=None,reason=None): if", "from nextcord.embeds import Embed from nextcord.ext import commands from nextcord.ext.commands.cooldowns import BucketType from", "ctx.send(embed=banEmbed,view=view) await view.wait() if view.value==False: em = nextcord.Embed(title=f\"<a:no:909765403872481280> | *{member.name} Was Not Banned!*\")", "unlock(self, ctx, channel: nextcord.TextChannel = None, setting=None): if setting == '--server': for channel", "nextcord.channel import CategoryChannel,DMChannel from nextcord.colour import Color from nextcord.components import Button from nextcord.embeds", "< member.top_role.position: em3 = nextcord.Embed( title=\"Ban Error\", description=\"Member **higher** than you in the", "\"\"\" banned_user = await ctx.guild.bans() member_name, member_discriminator = member.split('#') for ban_entry in banned_user:", "guild.channels: await channel.set_permissions( mutedRole, speak=False, send_messages=False, read_message_history=True, ) if not time == None:", "View from nextcord.ext import commands import json import random import asyncio from datetime", "!=self.ctx.author: await interaction.response.send_message(\"You can't use that!!\" , ephemeral=True) else: return True BOT_USER_ID=\"897762972603150346\" class", "mute(self, ctx, member: nextcord.Member = None, *, reason=None): guild = ctx.guild mutedRole =", "that!!\" , ephemeral=True) else: return True BOT_USER_ID=\"897762972603150346\" class Moderation(commands.Cog): def __init__(self, bot): self.bot", "user:nextcord.User, time=None,reason=None): if reason==None: reason=\"No Reason\" if user!= None: if time==None: em =", "\", value=reason) view=AllConfirm(ctx) await ctx.send(embed=kickEmbed,view=view) await view.wait() view=AllConfirm(ctx) if view.value==False: em = nextcord.Embed(title=f\"<a:no:909765403872481280>", "a member.\", ) 
@commands.has_permissions(manage_roles=True) async def removerole( self, ctx, member: nextcord.Member = None,", "guild.channels: await channel.set_permissions( mutedRole, speak=False, send_messages=False, read_message_history=True, ) embed = nextcord.Embed( title=\"Unmute Success\",", "nextcord.Embed( title=\"Kick Error\", description=\"Member has same role as you in the role heirarchy", "nextcord.Interaction): self.value = False self.stop() async def interaction_check(self, interaction) -> bool: if interaction.user", "await ctx.send(embed=em) elif view.value== True: em = nextcord.Embed(title=f\"<a:yes:909765403801182208> | *{member.name} Was Banned!*\") await", "title=\"Tempmute Success\", description=f\"{member.mention} was muted \", colour=nextcord.Colour.blue(), ) embed.add_field(name=\"Reason:\", value=reason, inline=False) embed.add_field(name=\"Duration\", value=time)", "channel.\") @commands.has_permissions(kick_members=True) async def unlock(self, ctx, channel: nextcord.TextChannel = None, setting=None): if setting", "if member is None: embed = nextcord.Embed( title=\"Remove Role Error\", description=\"Please ping a", "setting = None): if setting == '--server': view = LockConfirm() em = nextcord.Embed(", "ctx.author.top_role.position < role.position: em = nextcord.Embed( title=\"Remove Role Error\", description=\"You do not have", "too high to give to a member\", ) return await ctx.send(embed=em3) if not", "'No topic.'}\",inline=False,) embed.add_field(name=\"Channel Position:-\", value=channel.position, inline=True) embed.add_field(name=\"Channel Slowmode?\", value=channel.slowmode_delay, inline=True) embed.add_field(name=\"Channel is NSFW?\",", "role_ in member.roles: if role_ == role: await member.remove_roles(role) roleRemoved = True break", "banEmbed = nextcord.Embed( title=\"Ban Success\", description=f\"{member.mention} {banMsg}\" ) banEmbed.add_field(name=\"Reason\", value=reason) await ctx.author.send(embed=banEmbed) await", "ctx.author.top_role.position == member.top_role.position: em3 = nextcord.Embed( title=\"Kick Error\", description=\"Member has same role as", "channel.category else 'Channel is not in any category'}\", color=nextcord.Color.random()) embed.add_field(name=\"Channel Guild:-\", value=ctx.guild.name, inline=True)", "no admin is online or responding. **If this command is used for the", "await ctx.send(embed=em3) if not (ctx.guild.me.guild_permissions.manage_roles): embed2 = nextcord.Embed( title=\"Tempmute Error\", description=\"I require the", "seconds\", ) await ctx.send(embed=em3) except Exception: await ctx.send(\"Error has occoured, notifying dev team\")", "await ctx.send(embed=em3) elif ctx.author.top_role.position == member.top_role.position: em3 = nextcord.Embed( title=\"Kick Error\", description=\"Member has", "inline=True) embed.add_field(name=\"Channel Id:-\", value=channel.id, inline=False) embed.add_field(name=\"Channel Topic:-\",value=f\"{channel.topic if channel.topic else 'No topic.'}\",inline=False,) embed.add_field(name=\"Channel", "- Invalid Permission\",) return await ctx.send(embed=em3) guild = ctx.guild banEmbed = nextcord.Embed(title=f\"Moderation Action", "= nextcord.Embed( title=\"Mute Error\", description=\"Error\" ) return await ctx.send(embed=em5) elif ctx.author.top_role.position < member.top_role.position:", "ctx.send(embed=em1) elif member.id == ctx.author.id: em5 = nextcord.Embed( title=\"Unmute Error\", description=\"wHat? 
<:WHA:815331017854025790>\" )", "time==None: em = nextcord.Embed(title=f\"<a:yes:909765403801182208> | {user.name} Was Banned indefinitely\") await ctx.send(embed=em) await ctx.guild.ban(user)", "description=\"I require the **Manage Roles** permisson to run this command - Missing Permission\",", "= nextcord.utils.get(guild.roles, name=\"Muted\") if not mutedRole: mutedRole = await guild.create_role(name=\"Muted\") await ctx.send(\"No mute", "await member.send(f\"You have been unmuted from **{guild}**\") return @commands.command( name=\"mute\", description=\"Mutes a member", "ctx.author.id: em5 = nextcord.Embed( title=\"Mute Error\", description=\"Error\" ) return await ctx.send(embed=em5) elif ctx.author.top_role.position", "channel.set_permissions( mutedRole, speak=False, send_messages=False, read_message_history=True, ) if member == None: em1 = nextcord.Embed(", ", ephemeral=True) else: return True BOT_USER_ID=\"897762972603150346\" class Moderation(commands.Cog): def __init__(self, bot): self.bot =", "member.roles: if role_ == role: addRole = False break if not addRole: embed", "{channel.name} using --server override\", send_messages=None, ) embed = nextcord.Embed( title=\"Unlock Success\", description=f\"Unlocked entire", "name=\"Muted\") if ctx.guild.me.top_role.position < mutedRole.position: em3 = nextcord.Embed( title=\"Mute Error\", description=\"Muted role too", "return await ctx.send(embed=em1) elif member.id == ctx.author.id: em5 = nextcord.Embed( title=\"Unmute Error\", description=\"wHat?", "view.value: for channel in ctx.guild.channels: await channel.set_permissions( ctx.guild.default_role, reason=f\"{ctx.author.name} locked {channel.name} using --server", "self.stop() @nextcord.ui.button(label=\"Cancel\", style=nextcord.ButtonStyle.grey, emoji=\"<a:no:909765403872481280>\") async def cancel( self, button: nextcord.ui.Button, interaction: nextcord.Interaction): self.value", ") @commands.has_permissions(manage_roles=True) async def removerole( self, ctx, member: nextcord.Member = None, role: nextcord.Role", "Kick Case \",color=nextcord.Color.red()) kickEmbed.add_field(name=\"Reason: \", value=reason) view=AllConfirm(ctx) await ctx.send(embed=kickEmbed,view=view) await view.wait() view=AllConfirm(ctx) if", "async def unmute(self, ctx, member: nextcord.Member = None, *, reason=None): guild = ctx.guild", "view.wait() if view.value is None: await ctx.author.send(\"Command has been Timed Out, please try", "user.discriminator) == (member_name, member_discriminator): await ctx.guild.unban(user) view=AllConfirm(ctx) if view.value==False: em = nextcord.Embed(title=f\"<a:no:909765403872481280> |", "if member.id == ctx.author.id: embed69 = nextcord.Embed( title=\"Ban Error\", description=\"No banning yourself...\", )", "title=\"Ban Error\", description=\"Ban yourself... 
only a skid would do that XD !\", )", "elif time > 21600: em2 = nextcord.Embed( title=\"Slowmode Error\", description=\"Slowmode over 6 hours\"", "== ctx.author.id: em5 = nextcord.Embed( title=\"Mute Error\", description=\"Error\" ) return await ctx.send(embed=em5) elif", "101: em1 = nextcord.Embed( title=\"Clear Error\", description=\"Purge limit exedeed - Greater than 100\",", "Permission\", ) return await ctx.send(embed=em3) elif ctx.author.top_role.position == member.top_role.position: em4 = nextcord.Embed( title=\"Unmute", "role is None: embed = nextcord.Embed( title=\"Remove Role Error\", description=\"Please ping a role", "permisson to run this command - Missing Permission\") return await ctx.send(embed=embed2) if member.id", "= message.channel.send @commands.command(name=\"tempban\") @commands.has_permissions(ban_members=True) async def tempban(self,ctx, user:nextcord.User, time=None,reason=None): if reason==None: reason=\"No Reason\"", "Error\",description=\"I require the ``Kick Members`` permisson to run this command - Missing Permission\")", "speak=False, send_messages=False, read_message_history=True, ) if not time == None: time_convert = {\"s\": 1,", "= nextcord.utils.get(guild.roles, name=\"Muted\") if member == None: em1 = nextcord.Embed( title=\"Tempmute Error\", description=\"Member", "give them a role!\", ) await ctx.send(embed=embed) return if role is None: embed", "give this role\", ) return await ctx.send(embed=em) if ctx.guild.me.top_role.position < role.position: embed =", "Kicked!*\") await ctx.send(embed=em) await member.send(f\"You got kicked in **{ctx.guild}** | Reason: **{reason}**\") await", "True break if not roleRemoved: embed = nextcord.Embed( title=\"Remove Role Error\", description=f\"{member.mention} already", "await ctx.send(embed=em1) else: await ctx.channel.purge(limit=amount) msg = await ctx.send(\"Cleared Messages\") asyncio.sleep(10) await msg.delete()", "demoted if not banned from the staff team.**\", ) await ctx.author.send(embed = em,", "**{reason}**\") else: banEmbed = nextcord.Embed( title=\"Ban Cancelled\", description=\"Lets pretend like this never happened", "channel.set_permissions( ctx.guild.default_role, reason=f\"{ctx.author.name} locked {channel.name}\", send_messages=False, # ) embed = nextcord.Embed( title=\"Lockdown Success\",", "To Ban Was Found\") await ctx.send(embed=em) @commands.command(name=\"ban\", description=\"Bans the member from your server.\")", "86400} tempban1 = int(time[0]) * time_convert[time[-1]] em = nextcord.Embed(title=f\"<a:yes:909765403801182208> | `{user.name}` Was Banned", "Action <:moderation:910472145824542721> | Kick Case \",color=nextcord.Color.red()) kickEmbed.add_field(name=\"Reason: \", value=reason) view=AllConfirm(ctx) await ctx.send(embed=kickEmbed,view=view) await", "await ctx.send(embed=embed2) if member.id == ctx.author.id: embed = nextcord.Embed(title=\"Kick Error\", description=\"Can't kick yourself", "role: nextcord.Role = None ): if member is None: embed = nextcord.Embed( title=\"Add", "= nextcord.Embed(title=f\"Moderation Action <:moderation:910472145824542721> | Ban Case \",color=nextcord.Color.red()) banEmbed.add_field(name=\"Reason: \", value=reason) view=AllConfirm(ctx) await", "== None: em1 = nextcord.Embed( title=\"Unmute Error\", description=\"Member to unmute - Not Found\"", "def __init__(self,ctx): super().__init__(timeout=200) self.value = None self.ctx=ctx @nextcord.ui.button( label=\"Confirm\", style=nextcord.ButtonStyle.grey,emoji=\"<a:yes:909765403801182208>\") async def 
confirm(", "reason=f\"{ctx.author.name} locked {channel.name} using --server override\", send_messages=False, ) embed = nextcord.Embed( title=\"Lockdown Success\",", "Role Error\", description=\"You do not have enough permissions to remove this role\", )", "= None, *, reason=None): if member == None: embed1 = nextcord.Embed( title=\"Kick Error\",", "@commands.command( aliases=[\"giverole\", \"addr\"], description=\"Gives a member a certain role.\" ) @commands.has_permissions(manage_roles=True) async def", "member \"\"\" banned_user = await ctx.guild.bans() member_name, member_discriminator = member.split('#') for ban_entry in", "Duration: {tempban1}{time[1:]} | Reason:{reason}\") await ctx.send(embed=em) if bool(user.bot)==True: await ctx.guild.ban(user) await asyncio.sleep(tempban1) await", "to {member.mention}\", ) await ctx.send(embed=em) await member.add_roles(role) return except Exception: print(Exception) @commands.command( aliases=[\"takerole\",", "await asyncio.sleep(tempban1) await ctx.guild.unban(user) else: em = nextcord.Embed(title=f\"<a:no:909765403872481280> | Member To Ban Was", "await ctx.author.send(\"Command has been Timed Out, please try again.\") elif view.value: for channel", "indefinitely\") await ctx.send(embed=em) await ctx.guild.ban(user) if time !=None : time_convert = {\"s\": 1,", "Error\", description=\"Member to unmute - Not Found\" ) return await ctx.send(embed=em1) elif member.id", "ctx.guild.bans() member_name, member_discriminator = member.split('#') for ban_entry in banned_user: user = ban_entry.user if", "ctx.send(embed=embed1) if member.id == ctx.author.id: embed69 = nextcord.Embed( title=\"Ban Error\", description=\"Ban yourself... only", "role to give {} that role!\".format( member.mention ), ) await ctx.send(embed=embed) return if", "= None, *, reason=None): \"\"\" **Info**: Bans a member \"\"\" if member ==", "def addrole( self, ctx, member: nextcord.Member = None, *, role: nextcord.Role = None", "elif view.value== True: em = nextcord.Embed(title=f\"<a:yes:909765403801182208> | *{member.name} Was Kicked!*\") await ctx.send(embed=em) await", "has been removed from {member.mention}\", ) await ctx.send(embed=em) return except Exception: print(Exception) @commands.command(description=\"Locks", "member_name, member_discriminator = member.split('#') for ban_entry in banned_user: user = ban_entry.user if (user.name,", "to run this command - Missing Permission\") return await ctx.send(embed=embed2) if member.id ==", "= nextcord.Embed( title=\"Unmute Error\", description=\"Member has same role as you in the role", ") await ctx.send(embed=embed) return if ctx.author.top_role.position < role.position: em = nextcord.Embed( title=\"Remove Role", "lockEmbed = nextcord.Embed( title=\"Lock Cancelled\", description=\"Lets pretend like this never happened them :I\",", "title=\"Add Role Error\", description=\"Please ping a role to give {} that role!\".format( member.mention", "member == None: em1 = nextcord.Embed( title=\"Unmute Error\", description=\"Member to unmute - Not", "nextcord.Embed( title=\"Unmute Error\", description=\"I require the ``Manage Roles`` permisson to run this command", "turned off\" ) await ctx.send(embed=em1) await ctx.channel.edit(slowmode_delay=0) elif time > 21600: em2 =", ") await ctx.send(embed=em) await member.add_roles(role) return except Exception: print(Exception) @commands.command( aliases=[\"takerole\", \"remover\"], description=\"Removes", "reason=f\"{ctx.author.name} unlocked {channel.name}\", send_messages=True, ) embed = 
nextcord.Embed( title=\"Unlock Success\", description=f\"Unlocked {channel.mention} \",", "limit exedeed - Greater than 100\", ) return await ctx.send(embed=em1) else: await ctx.channel.purge(limit=amount)", "getting demoted if not banned from the staff team.**\", ) await ctx.author.send(embed=em, view=view)", "override\", send_messages=None, ) embed = nextcord.Embed( title=\"Unlock Success\", description=f\"Unlocked entire server \", )", "ctx.guild banEmbed = nextcord.Embed(title=f\"Moderation Action <:moderation:910472145824542721> | Ban Case \",color=nextcord.Color.red()) banEmbed.add_field(name=\"Reason: \", value=reason)", "member.id == ctx.author.id: em5 = nextcord.Embed( title=\"Unmute Error\", description=\"wHat? <:WHA:815331017854025790>\" ) return await", "from: **{guild.name}** | Reason: **{reason}**\" ) return @commands.command(description=\"Clears a bundle of messages.\",aliases=['purge']) @commands.has_permissions(manage_messages=True)", "embed = nextcord.Embed( title=\"Add Role Error\", description=f\"{member.mention} already has the role you are", "- Invalid Permission\", ) return await ctx.send(embed=em3) elif ctx.author.top_role.position == member.top_role.position: em4 =", "**Info**: Get ChannelStats *Syntax*: \"{self.ctx.prefix}\" channelstats [channel] \"\"\" if channel == None: channel", "None: channel = ctx.channel embed = nextcord.Embed( title=f\"**ChannelStats for {channel.name}**\", description=f\"{'Category :{}'.format(channel.category.name) if", "time == None: time_convert = {\"s\": 1, \"m\": 60, \"h\": 3600, \"d\": 86400}", "speak=False, send_messages=False, read_message_history=True, ) if member == None: em1 = nextcord.Embed( title=\"Mute Error\",", "description=\"Member has same role as you in the role heirarchy - Invalid Permission\",", ") return await ctx.send(embed=em3) elif ctx.author.top_role.position == member.top_role.position: em4 = nextcord.Embed( title=\"Unmute Error\",", "ctx.send(embed=em) await member.send(f\"You got banned in **{guild}** | Reason: **{reason}**\") await member.ban(reason=reason) @commands.command(description=\"Lucas", "embed = nextcord.Embed( title=\"Unlock Success\", description=f\"Unlocked {channel.mention} \", ) await ctx.send(embed=embed) @commands.command(description=\"Modbans the", "unlocked {channel.name}\", send_messages=True, ) embed = nextcord.Embed( title=\"Unlock Success\", description=f\"Unlocked {channel.mention} \", )", "ctx.send(embed=embed) return if role is None: embed = nextcord.Embed( title=\"Remove Role Error\", description=\"Please", "user!= None: if time==None: em = nextcord.Embed(title=f\"<a:yes:909765403801182208> | {user.name} Was Banned indefinitely\") await", "ctx.guild.unban(user) else: em = nextcord.Embed(title=f\"<a:no:909765403872481280> | Member To Ban Was Found\") await ctx.send(embed=em)", "muted from: **{guild.name}** | Reason: **{reason}** | Time: **{time}**\" ) if not time", "await ctx.send(embed=em4) if not (ctx.guild.me.guild_permissions.manage_roles): embed2 = nextcord.Embed( title=\"Mute Error\", description=\"I require the", "embed.add_field(name=\"Channel Topic:-\",value=f\"{channel.topic if channel.topic else 'No topic.'}\",inline=False,) embed.add_field(name=\"Channel Position:-\", value=channel.position, inline=True) embed.add_field(name=\"Channel Slowmode?\",", "title=\"Unmute Success\", description=f\"{member.mention} was unmuted \", colour=nextcord.Colour.blue(), ) embed.add_field(name=\"Reason:\", value=reason, inline=False) await ctx.send(embed=embed)", "slowmode.\") 
@commands.has_permissions(manage_channels=True) async def slowmode(self, ctx, time: int): try: if time == 0:", "from them!\", ) await ctx.send(embed=embed) return if role is None: embed = nextcord.Embed(", "Not Found\" ) return await ctx.send(embed=em1) elif member.id == ctx.author.id: em5 = nextcord.Embed(", "title=\"Mute Error\", description=\"Member **higher** than you in the role heirarchy - Invalid Permission\",", "__init__(self,ctx): super().__init__(timeout=200) self.value = None self.ctx=ctx @nextcord.ui.button( label=\"Confirm\", style=nextcord.ButtonStyle.grey,emoji=\"<a:yes:909765403801182208>\") async def confirm( self,", "name=\"Muted\") if member == None: em1 = nextcord.Embed( title=\"Tempmute Error\", description=\"Member to mute", "embed2 = nextcord.Embed( title=\"Mute Error\", description=\"I require the **Manage Roles** permisson to run", ":{}'.format(channel.category.name) if channel.category else 'Channel is not in any category'}\", color=nextcord.Color.random()) embed.add_field(name=\"Channel Guild:-\",", "em = nextcord.Embed( title=\"Add Role Success\", description=f\"{role.mention} has been assigned to {member.mention}\", )", "= bot @commands.Cog.listener() async def on_message(self, message): if str(message.author.id) != str(BOT_USER_ID): send =", "elif ctx.author.top_role.position == member.top_role.position: em4 = nextcord.Embed( title=\"Mute Error\", description=\"Member has same role", "reason = f\"{ctx.author.name} modbanned {member.name}\" else: reason = ( f\"{ctx.author.name} modbanned {member.name} for", "'--server': for channel in ctx.guild.channels: await channel.set_permissions( ctx.guild.default_role, reason=f\"{ctx.author.name} unlocked {channel.name} using --server", "if view.value==False: em = nextcord.Embed(title=f\"<a:no:909765403872481280> | *{member.name} Was Not Banned!*\") await ctx.send(embed=em) elif", "title=\"Kick Error\", description=\"Member **higher** than you in the role heirarchy - Invalid Permission\",", "import asyncio from datetime import datetime from difflib import get_close_matches from nextcord.webhook import", "ctx.guild mutedRole = nextcord.utils.get(guild.roles, name=\"Muted\") if member == None: em1 = nextcord.Embed( title=\"Tempmute", "return if ctx.author.top_role.position < role.position: em = nextcord.Embed( title=\"Remove Role Error\", description=\"You do", "return except Exception: print(Exception) @commands.command(description=\"Locks the channel.\") @commands.has_permissions(kick_members=True) async def lock(self, ctx, channel:", "await member.send( f\"You have been muted from: **{guild.name}** | Reason: **{reason}** | Time:", "Color from nextcord.components import Button from nextcord.embeds import Embed from nextcord.ext import commands", "member.remove_roles(mutedRole, reason=reason) await member.send( f\"You have been unmuted from: **{guild.name}** | Reason: **{reason}**\"", "\", ) await ctx.send(embed=embed) @commands.command(description=\"Modbans the member.\") @commands.has_permissions(kick_members=True) @commands.cooldown(1, 21600, commands.BucketType.user) async def", "title=\"Add Role Error\", description=\"That role is too high for me to perform this", "if not addRole: embed = nextcord.Embed( title=\"Add Role Error\", description=f\"{member.mention} already has the", "banEmbed = nextcord.Embed(title=f\"Moderation Action <:moderation:910472145824542721> | Ban Case \",color=nextcord.Color.red()) banEmbed.add_field(name=\"Reason: \", value=reason) view=AllConfirm(ctx)", "role heirarchy - Invalid 
Permission\",) return await ctx.send(embed=em3) guild = ctx.guild banEmbed =", "nextcord.Embed( title=\"Ban Error\", description=\"Member has same role as you in the role heirarchy", "await interaction.response.send_message(\"You can't use that!!\" , ephemeral=True) else: return True BOT_USER_ID=\"897762972603150346\" class Moderation(commands.Cog):", "elif ctx.author.top_role.position == member.top_role.position: em4 = nextcord.Embed( title=\"Unmute Error\", description=\"Member has same role", ") await ctx.send(embed=embed) else: lockEmbed = nextcord.Embed( title=\"Lock Cancelled\", description=\"Lets pretend like this", "ctx.send(embed=embed) @commands.command(description=\"Unlocks the channel.\") @commands.has_permissions(kick_members=True) async def unlock(self, ctx, channel: nextcord.TextChannel = None,", "self.bot = bot @commands.Cog.listener() async def on_message(self, message): if str(message.author.id) != str(BOT_USER_ID): send", "Only use this command if no admin is online or responding. **If this", "if member == None: em1 = nextcord.Embed( title=\"Mute Error\", description=\"Member to mute -", "view.wait() if view.value==False: em = nextcord.Embed(title=f\"<a:no:909765403872481280> | *{member.name} Was Not Banned!*\") await ctx.send(embed=em)", "commands import json import random import asyncio from datetime import datetime from difflib", "in the role heirarchy - Invalid Permission\",) return await ctx.send(embed=em3) guild = ctx.guild", "em1 = nextcord.Embed( title=\"Slowmode Success\", description=\"Slowmode turned off\" ) await ctx.send(embed=em1) await ctx.channel.edit(slowmode_delay=0)", "in banned_user: user = ban_entry.user if (user.name, user.discriminator) == (member_name, member_discriminator): await ctx.guild.unban(user)", "embed.add_field(name=\"Reason:\", value=reason, inline=False) embed.add_field(name=\"Duration\", value=time) await ctx.send(embed=embed) await member.add_roles(mutedRole, reason=reason) await member.send( f\"You", "a bundle of messages.\",aliases=['purge']) @commands.has_permissions(manage_messages=True) async def clear(self, ctx, amount=10): amount = amount", "None, setting=None): if setting == '--server': for channel in ctx.guild.channels: await channel.set_permissions( ctx.guild.default_role,", "await ctx.send(embed=em5) if time == None: em2 = nextcord.Embed( title=\"Tempmute Error\", description=\"Time to", "await member.ban(reason=reason) await member.send(f\"You got banned in **{guild}** | Reason: **{reason}**\") else: banEmbed", "print(Exception) @commands.command(description=\"Locks the channel.\") @commands.has_permissions(kick_members=True) async def lock(self, ctx, channel: nextcord.TextChannel = None,", "member.\", ) @commands.has_permissions(manage_roles=True) async def removerole( self, ctx, member: nextcord.Member = None, role:", "nextcord.Embed( title=\"Remove Role Error\", description=\"You do not have enough permissions to remove this", "{channel.name} using --server override\", send_messages=False, ) embed = nextcord.Embed( title=\"Lockdown Success\", description=f\"Locked entire", "if interaction.user !=self.ctx.author: await interaction.response.send_message(\"You can't use that!!\" , ephemeral=True) else: return True", "LockConfirm() em = nextcord.Embed( title=\"Are you sure?\", description=\"This is a very risky command", "channel.topic else 'No topic.'}\",inline=False,) embed.add_field(name=\"Channel Position:-\", value=channel.position, inline=True) embed.add_field(name=\"Channel Slowmode?\", value=channel.slowmode_delay, 
inline=True) embed.add_field(name=\"Channel", "await ctx.send(embed=embed) return if channel is None: channel = ctx.channel await channel.set_permissions( ctx.guild.default_role,", "indefinitely.\") @commands.has_permissions(manage_messages=True) async def tempmute( self, ctx, member: nextcord.Member = None, time=None, *,", "channel.set_permissions( ctx.guild.default_role, reason=f\"{ctx.author.name} unlocked {channel.name} using --server override\", send_messages=None, ) embed = nextcord.Embed(", ") return await ctx.send(embed=em5) if time == None: em2 = nextcord.Embed( title=\"Tempmute Error\",", "a specific amount of time.\" ) @commands.has_permissions(manage_messages=True) async def mute(self, ctx, member: nextcord.Member", "a member indefinitely.\") @commands.has_permissions(manage_messages=True) async def tempmute( self, ctx, member: nextcord.Member = None,", "ping a user to remove a role from them!\", ) await ctx.send(embed=embed) return", "in the role heirarchy - Invalid Permission\", ) return await ctx.send(embed=em3) elif ctx.author.top_role.position", "online or responding. **If this command is used for the wrong purpose you", "description=\"Lets pretend like this never happened them :I\", ) await ctx.author.send(embed=banEmbed) def setup(bot):", "Success\", description=f\"{member.mention} {banMsg}\" ) banEmbed.add_field(name=\"Reason\", value=reason) await ctx.author.send(embed=banEmbed) await member.ban(reason=reason) await member.send(f\"You got", "inline=False) embed.add_field(name=\"Duration\", value=time) await ctx.send(embed=embed) await member.add_roles(mutedRole, reason=reason) await member.send( f\"You have been", "- Invalid Permission\", ) return await ctx.send(embed=em4) if ctx.guild.me.top_role.position < mutedRole.position: em3 =", "Permission\", ) return await ctx.send(embed=embed2) elif ctx.author.top_role.position == member.top_role.position: em4 = nextcord.Embed( title=\"Tempmute", "view.value: guild = ctx.guild banMsg = random.choice(\"BANNED\") banEmbed = nextcord.Embed( title=\"Ban Success\", description=f\"{member.mention}", "to run this command - Missing Permission\", ) return await ctx.send(embed=embed2) elif ctx.author.top_role.position", "await ctx.send(embed=em3) elif ctx.author.top_role.position == member.top_role.position: em4 = nextcord.Embed( title=\"Mute Error\", description=\"Member has", "`NSFW or NSFLPosting` or `Raid on the Server`. 
Only use this command if", "the reason of {reason}\" ) if member == None: embed1 = nextcord.Embed( title=\"Ban", "nextcord.Embed(title=f\"<a:yes:909765403801182208> | *{member.name} Was Unbanned!*\") await ctx.send(embed=em) @commands.command(name=\"kick\", description=\"Kicks the member from your", "if member == None: em1 = nextcord.Embed( title=\"Unmute Error\", description=\"Member to unmute -", "banned in **{guild}** | Reason: **{reason}**\") else: banEmbed = nextcord.Embed( title=\"Ban Cancelled\", description=\"Lets", "action\", ) return await ctx.send(embed=embed) try: roleRemoved = False for role_ in member.roles:", "title=\"Are you sure?\", description=\"This is a very risky command only to be used", "else: em = nextcord.Embed(title=f\"<a:no:909765403872481280> | Member To Ban Was Found\") await ctx.send(embed=em) @commands.command(name=\"ban\",", "= nextcord.Embed( title=\"Ban Error\", description=\"Member has same role as you in the role", "nextcord.Embed( title=\"Add Role Error\", description=\"Please ping a user to give them a role!\",", "ctx.guild.me.top_role.position < role.position: embed = nextcord.Embed( title=\"Remove Role Error\", description=\"That role is too", "time=None,reason=None): if reason==None: reason=\"No Reason\" if user!= None: if time==None: em = nextcord.Embed(title=f\"<a:yes:909765403801182208>", "reason is None: reason = f\"{ctx.author.name} modbanned {member.name}\" else: reason = ( f\"{ctx.author.name}", "embed.add_field(name=\"Duration\", value=time) await ctx.send(embed=embed) await member.add_roles(mutedRole, reason=reason) await member.send( f\"You have been muted", "amount of time.\" ) @commands.has_permissions(manage_messages=True) async def mute(self, ctx, member: nextcord.Member = None,", "nextcord.Embed( title=\"Remove Role Success!\", description=f\"{role.mention} has been removed from {member.mention}\", ) await ctx.send(embed=em)", "in the role heirarchy - Invalid Permission\", ) return await ctx.send(embed=em4) if ctx.guild.me.top_role.position", "@commands.command( name=\"mute\", description=\"Mutes a member for a specific amount of time.\" ) @commands.has_permissions(manage_messages=True)", "nextcord.Embed( title=\"Add Role Error\", description=\"That role is too high for me to perform", "return await ctx.send(embed=em2) elif ctx.author.top_role.position < member.top_role.position: em3 = nextcord.Embed( title=\"Tempmute Error\", description=\"Member", "await channel.set_permissions( ctx.guild.default_role, reason=f\"{ctx.author.name} locked {channel.name}\", send_messages=False, # ) embed = nextcord.Embed( title=\"Lockdown", "None: time_convert = {\"s\": 1, \"m\": 60, \"h\": 3600, \"d\": 86400} tempmute =", "member_discriminator): await ctx.guild.unban(user) view=AllConfirm(ctx) if view.value==False: em = nextcord.Embed(title=f\"<a:no:909765403872481280> | *{member.name} Was Not", ") if member == None: em1 = nextcord.Embed( title=\"Mute Error\", description=\"Member to mute", "await member.send( f\"You have been unmuted from: **{guild.name}** | Reason: **{reason}**\" ) return", "Reason: **{reason}**\") await member.ban(reason=reason) @commands.command(description=\"Lucas unban method\") @commands.has_permissions(ban_members=True) async def unban(self, ctx,*,member): f\"\"\"", "else: em = nextcord.Embed( title=\"Remove Role Success!\", description=f\"{role.mention} has been removed from {member.mention}\",", "ctx.guild.unban(user) else: await DMChannel.send(user,f\"**{ctx.guild.name}**: You have been banned for 
{tempban1}{time[1:]}\\n**Reason:** {reason}\") await ctx.guild.ban(user)", "\"h\": 3600, \"d\": 86400} tempmute = int(time[0]) * time_convert[time[-1]] embed = nextcord.Embed( title=\"Tempmute", "(ctx.guild.me.guild_permissions.manage_roles): embed2 = nextcord.Embed( title=\"Mute Error\", description=\"I require the **Manage Roles** permisson to", "Timed Out, please try again.\") elif view.value: for channel in ctx.guild.channels: await channel.set_permissions(", "await ctx.send(embed=embed) return if role is None: embed = nextcord.Embed( title=\"Remove Role Error\",", "title=\"Remove Role Error\", description=\"Please ping a role to remove the role from {}!\".format(", "return await ctx.send(embed=em5) if time == None: em2 = nextcord.Embed( title=\"Tempmute Error\", description=\"Time", "= True for role_ in member.roles: if role_ == role: addRole = False", "6 hours\" ) await ctx.send(embed=em2) else: await ctx.channel.edit(slowmode_delay=time) em3 = nextcord.Embed( title=\"Slowmode Success\",", "``Manage Roles`` permisson to run this command - Missing Permission\", ) return await", "member: nextcord.Member = None, *, reason=None): \"\"\" **Info**: Bans a member \"\"\" if", "nextcord.Role = None, *, reason=None, ): if member is None: embed = nextcord.Embed(", "if role is None: embed = nextcord.Embed( title=\"Add Role Error\", description=\"Please ping a", "Case \",color=nextcord.Color.red()) kickEmbed.add_field(name=\"Reason: \", value=reason) view=AllConfirm(ctx) await ctx.send(embed=kickEmbed,view=view) await view.wait() view=AllConfirm(ctx) if view.value==False:", "channel.set_permissions( ctx.guild.default_role, reason=f\"{ctx.author.name} locked {channel.name} using --server override\", send_messages=False, ) embed = nextcord.Embed(", "interaction.response.send_message(\"You can't use that!!\" , ephemeral=True) else: return True BOT_USER_ID=\"897762972603150346\" class Moderation(commands.Cog): def", "reason=None): \"\"\" **Info**: Bans a member \"\"\" if member == None: embed1 =", "a role from them!\", ) await ctx.send(embed=embed) return if role is None: embed", "__init__(self, bot): self.bot = bot @commands.Cog.listener() async def on_message(self, message): if str(message.author.id) !=", "| Reason:{reason}\") await ctx.send(embed=em) if bool(user.bot)==True: await ctx.guild.ban(user) await asyncio.sleep(tempban1) await ctx.guild.unban(user) else:", "view.value==False: em = nextcord.Embed(title=f\"<a:no:909765403872481280> | *{member.name} Was Not Unbanned!*\") await ctx.send(embed=em) elif view.value==", "await ctx.send(embed=embed) return else: em = nextcord.Embed( title=\"Add Role Success\", description=f\"{role.mention} has been", "a very risky command only to be used in important situations such as,", "import Button from nextcord.embeds import Embed from nextcord.ext import commands from nextcord.ext.commands.cooldowns import", "ctx.send(embed=embed) return else: em = nextcord.Embed( title=\"Remove Role Success!\", description=f\"{role.mention} has been removed", "please try again.\") elif view.value: for channel in ctx.guild.channels: await channel.set_permissions( ctx.guild.default_role, reason=f\"{ctx.author.name}", "got banned in **{guild}** | Reason: **{reason}**\") else: banEmbed = nextcord.Embed( title=\"Ban Cancelled\",", "<:moderation:910472145824542721> | Kick Case \",color=nextcord.Color.red()) kickEmbed.add_field(name=\"Reason: \", value=reason) view=AllConfirm(ctx) await ctx.send(embed=kickEmbed,view=view) await view.wait()", "ctx.send(embed=kickEmbed,view=view) 
await view.wait() view=AllConfirm(ctx) if view.value==False: em = nextcord.Embed(title=f\"<a:no:909765403872481280> | *{member.name} Was Not", "muted member.\") @commands.has_permissions(manage_messages=True) async def unmute(self, ctx, member: nextcord.Member = None, *, reason=None):", "description=\"Member to unmute - Not Found\" ) return await ctx.send(embed=em1) elif member.id ==", "muted from: **{guild.name}** | Reason: **{reason}**\" ) return @commands.command(name=\"unmute\", description=\"Unmutes a muted member.\")", "await view.wait() if view.value==False: em = nextcord.Embed(title=f\"<a:no:909765403872481280> | *{member.name} Was Not Banned!*\") await", "*, reason=None ): guild = ctx.guild mutedRole = nextcord.utils.get(guild.roles, name=\"Muted\") if member ==", "a member for a specific amount of time.\" ) @commands.has_permissions(manage_messages=True) async def mute(self,", "nextcord.Embed( title=\"Lock Cancelled\", description=\"Lets pretend like this never happened them :I\", ) await", "setting == '--server': view = LockConfirm() em = nextcord.Embed( title=\"Are you sure?\", description=\"This", "@commands.cooldown(1, 21600, commands.BucketType.user) async def modban(self, ctx, member, *, reason=None): if reason is", "return await ctx.send(embed=em3) embed = nextcord.Embed( title=\"Mute Success\", description=f\"{member.mention} was muted Indefinitly \",", "used for the wrong purpose you may risk getting demoted if not banned", "description=\"Time to mute - Not Found\" ) return await ctx.send(embed=em2) elif ctx.author.top_role.position <", "await ctx.send(embed=embed) @commands.command(name=\"tempmute\", description=\"Mutes a member indefinitely.\") @commands.has_permissions(manage_messages=True) async def tempmute( self, ctx,", "colour=nextcord.Colour.blue(), ) embed.add_field(name=\"Reason:\", value=reason, inline=False) await ctx.send(embed=embed) await member.remove_roles(mutedRole, reason=reason) await member.send( f\"You", "if ctx.guild.me.top_role.position < mutedRole.position: em3 = nextcord.Embed( title=\"Mute Error\", description=\"Muted role too high", "**If this command is used for the wrong purpose you may risk getting", "value=channel.created_at.strftime(\"%a, %d %B %Y , %I:%M %p\"), inline=False) embed.set_thumbnail(url=ctx.guild.icon.url) await ctx.send(embed=embed) @commands.command(name=\"tempmute\", description=\"Mutes", ") await ctx.send(embed=embed) return if channel is None: channel = ctx.channel await channel.set_permissions(", "ctx, channel: nextcord.TextChannel = None): f\"\"\" **Info**: Get ChannelStats *Syntax*: \"{self.ctx.prefix}\" channelstats [channel]", "Error\", description=\"Member to ban - Not Found\" ) return await ctx.send(embed=embed1) if member.id", "title=\"Mute Error\", description=\"Member has same role as you in the role heirarchy -", "else: em = nextcord.Embed( title=\"Add Role Success\", description=f\"{role.mention} has been assigned to {member.mention}\",", "== ctx.author.id: embed69 = nextcord.Embed( title=\"Ban Error\", description=\"No banning yourself...\", ) return await", "member from your server.\") @commands.has_permissions(kick_members=True) async def kick(self, ctx, member: nextcord.Member = None,", "description=f\"Unlocked {channel.mention} \", ) await ctx.send(embed=embed) @commands.command(description=\"Modbans the member.\") @commands.has_permissions(kick_members=True) @commands.cooldown(1, 21600, commands.BucketType.user)", "if time == None: em2 = nextcord.Embed( title=\"Tempmute Error\", description=\"Time to 
mute -", ") return await ctx.send(embed=em3) elif ctx.author.top_role.position == member.top_role.position: em3 = nextcord.Embed( title=\"Ban Error\",", "bot): self.bot = bot @commands.Cog.listener() async def on_message(self, message): if str(message.author.id) != str(BOT_USER_ID):", "aliases=[\"giverole\", \"addr\"], description=\"Gives a member a certain role.\" ) @commands.has_permissions(manage_roles=True) async def addrole(", "your server.\") @commands.has_permissions(kick_members=True) async def kick(self, ctx, member: nextcord.Member = None, *, reason=None):", "permisson to run this command - Missing Permission\", ) return await ctx.send(embed=embed2) elif", "Exception: print(Exception) @commands.command( aliases=[\"takerole\", \"remover\"], description=\"Removes a certain role from a member.\", )", "on the Server`. **If this command is used for the wrong purpose you", "await ctx.send(embed=em) await member.send(f\"You got banned in **{guild}** | Reason: **{reason}**\") await member.ban(reason=reason)", "reason=f\"{ctx.author.name} unlocked {channel.name} using --server override\", send_messages=None, ) embed = nextcord.Embed( title=\"Unlock Success\",", "from nextcord.colour import Color from nextcord.components import Button from nextcord.embeds import Embed from", "return await ctx.send(embed=embed1) if not (ctx.guild.me.guild_permissions.kick_members): embed2 = nextcord.Embed(title=\"Kick Error\",description=\"I require the ``Kick", "role...\") for channel in guild.channels: await channel.set_permissions( mutedRole, speak=False, send_messages=False, read_message_history=True, ) embed", "= nextcord.Embed(title=\"Kick Error\",description=\"I require the ``Kick Members`` permisson to run this command -", "member, *, reason=None): if reason is None: reason = f\"{ctx.author.name} modbanned {member.name}\" else:", "def tempban(self,ctx, user:nextcord.User, time=None,reason=None): if reason==None: reason=\"No Reason\" if user!= None: if time==None:", "banned from the staff team.**\", ) await ctx.author.send(embed = em, view=view) await view.wait()", "channel.set_permissions( ctx.guild.default_role, reason=f\"{ctx.author.name} unlocked {channel.name}\", send_messages=True, ) embed = nextcord.Embed( title=\"Unlock Success\", description=f\"Unlocked", "ctx.author.send(\"Command has been Timed Out, please try again.\") elif view.value: guild = ctx.guild", "None: reason = f\"{ctx.author.name} modbanned {member.name}\" else: reason = ( f\"{ctx.author.name} modbanned {member.name}", "* time_convert[time[-1]] em = nextcord.Embed(title=f\"<a:yes:909765403801182208> | `{user.name}` Was Banned | Duration: {tempban1}{time[1:]} |", "21600, commands.BucketType.user) async def modban(self, ctx, member, *, reason=None): if reason is None:", "== member.top_role.position: em4 = nextcord.Embed( title=\"Tempmute Error\", description=\"Member has same role as you", "Permission\", ) return await ctx.send(embed=em3) elif ctx.author.top_role.position == member.top_role.position: em4 = nextcord.Embed( title=\"Mute", "the role heirarchy - Invalid Permission\", ) return await ctx.send(embed=em3) kickEmbed = nextcord.Embed(title=f\"Moderation", "enough permissions to give this role\", ) return await ctx.send(embed=em) if ctx.guild.me.top_role.position <", "a skid would do that XD !\", ) return await ctx.send(embed=embed69) elif ctx.author.top_role.position", "ctx.send(embed=embed2) mutedRole = nextcord.utils.get(guild.roles, name=\"Muted\") if ctx.guild.me.top_role.position < mutedRole.position: em3 = 
nextcord.Embed( title=\"Mute", ") return await ctx.send(embed=em1) elif member.id == ctx.author.id: em5 = nextcord.Embed( title=\"Tempmute Error\",", "title=\"Tempmute Error\", description=\"Member to mute - Not Found\" ) return await ctx.send(embed=em1) elif", "= nextcord.Embed(title=f\"<a:yes:909765403801182208> | *{member.name} Was Banned!*\") await ctx.send(embed=em) await member.send(f\"You got banned in", "False self.stop() async def interaction_check(self, interaction) -> bool: if interaction.user !=self.ctx.author: await interaction.response.send_message(\"You", "True: em = nextcord.Embed(title=f\"<a:yes:909765403801182208> | *{member.name} Was Unbanned!*\") await ctx.send(embed=em) @commands.command(name=\"kick\", description=\"Kicks the", "role: await member.remove_roles(role) roleRemoved = True break if not roleRemoved: embed = nextcord.Embed(", "- Not Found\" ) return await ctx.send(embed=em1) elif member.id == ctx.author.id: em5 =", "channels slowmode.\") @commands.has_permissions(manage_channels=True) async def slowmode(self, ctx, time: int): try: if time ==", "role.position: embed = nextcord.Embed( title=\"Add Role Error\", description=\"That role is too high for", "Found\" ) return await ctx.send(embed=em1) elif member.id == ctx.author.id: em5 = nextcord.Embed( title=\"Mute", "ctx.guild.unban(user) view=AllConfirm(ctx) if view.value==False: em = nextcord.Embed(title=f\"<a:no:909765403872481280> | *{member.name} Was Not Unbanned!*\") await", "for {channel.name}**\", description=f\"{'Category :{}'.format(channel.category.name) if channel.category else 'Channel is not in any category'}\",", "await ctx.send(embed=embed) await member.add_roles(mutedRole, reason=reason) await member.send( f\"You have been muted from: **{guild.name}**", "Reason: **{reason}**\" ) return @commands.command(name=\"unmute\", description=\"Unmutes a muted member.\") @commands.has_permissions(manage_messages=True) async def unmute(self,", "this action\", ) return await ctx.send(embed=embed) try: roleRemoved = False for role_ in", "Found\" ) return await ctx.send(embed=em1) elif member.id == ctx.author.id: em5 = nextcord.Embed( title=\"Tempmute", "to give to a member\", ) return await ctx.send(embed=em3) embed = nextcord.Embed( title=\"Mute", "situations such as, `Raid on the Server`. 
**If this command is used for", "BOT_USER_ID=\"897762972603150346\" class Moderation(commands.Cog): def __init__(self, bot): self.bot = bot @commands.Cog.listener() async def on_message(self,", "await ctx.send(embed=em3) if not mutedRole: mutedRole = await guild.create_role(name=\"Muted\") await ctx.send(\"No mute role", "for role_ in member.roles: if role_ == role: await member.remove_roles(role) roleRemoved = True", "title=\"Ban Error\", description=\"No banning yourself...\", ) return await ctx.send(embed=embed69) em = nextcord.Embed( title=\"Are", "in ctx.guild.channels: await channel.set_permissions( ctx.guild.default_role, reason=f\"{ctx.author.name} unlocked {channel.name} using --server override\", send_messages=None, )", "ctx.send(embed=embed) else: lockEmbed = nextcord.Embed( title=\"Lock Cancelled\", description=\"Lets pretend like this never happened", "nextcord.Embed( title=\"Mute Error\", description=\"Member to mute - Not Found\" ) return await ctx.send(embed=em1)", "None: await asyncio.sleep(tempmute) await member.remove_roles(mutedRole) await member.send(f\"You have been unmuted from **{guild}**\") return", "nextcord.Embed( title=\"Tempmute Error\", description=\"Member has same role as you in the role heirarchy", "as, `Raid on the Server`. **If this command is used for the wrong", "modbanned {member.name}\" else: reason = ( f\"{ctx.author.name} modbanned {member.name} for the reason of", "None: await ctx.author.send(\"Command has been Timed Out, please try again.\") elif view.value: guild", "nextcord.utils.get(guild.roles, name=\"Muted\") if not mutedRole: mutedRole = await guild.create_role(name=\"Muted\") await ctx.send(\"No mute role", "been muted from: **{guild.name}** | Reason: **{reason}**\" ) return @commands.command(name=\"unmute\", description=\"Unmutes a muted", "role_ == role: addRole = False break if not addRole: embed = nextcord.Embed(", "await ctx.send(embed=embed) return else: em = nextcord.Embed( title=\"Remove Role Success!\", description=f\"{role.mention} has been", "not (ctx.guild.me.guild_permissions.manage_roles): embed2 = nextcord.Embed( title=\"Tempmute Error\", description=\"I require the ``Manage Roles`` permisson", "await ctx.send(embed=em1) await ctx.channel.edit(slowmode_delay=0) elif time > 21600: em2 = nextcord.Embed( title=\"Slowmode Error\",", "= nextcord.Embed( title=\"Add Role Error\", description=\"Please ping a role to give {} that", "-> bool: if interaction.user !=self.ctx.author: await interaction.response.send_message(\"You can't use that!!\" , ephemeral=True) else:", "high for me to perform this action\", ) return await ctx.send(embed=embed) try: roleRemoved", "be used in important situations such as, `NSFW or NSFLPosting` or `Raid on", "ctx, member: nextcord.Member = None, *, role: nextcord.Role = None ): if member", "f\"You have been muted from: **{guild.name}** | Reason: **{reason}**\" ) return @commands.command(name=\"unmute\", description=\"Unmutes", "import Embed from nextcord.ext import commands from nextcord.ext.commands.cooldowns import BucketType from nextcord.ui.view import", "role.position: embed = nextcord.Embed( title=\"Remove Role Error\", description=\"That role is too high for", "*{member.name} Was Banned!*\") await ctx.send(embed=em) await member.send(f\"You got banned in **{guild}** | Reason:", "except Exception: print(Exception) @commands.command(description=\"Locks the channel.\") @commands.has_permissions(kick_members=True) async def lock(self, ctx, channel: nextcord.TextChannel", "muted Indefinitly \", 
    @commands.command(description="Unlocks the channel.")
    @commands.has_permissions(kick_members=True)
    async def unlock(self, ctx, channel: nextcord.TextChannel = None, setting=None):
        if setting == '--server':
            for channel in ctx.guild.channels:
                await channel.set_permissions(
                    ctx.guild.default_role,
                    reason=f"{ctx.author.name} unlocked {channel.name} using --server override",
                    send_messages=None,
                )
            embed = nextcord.Embed(
                title="Unlock Success",
                description="Unlocked entire server",
            )
            await ctx.send(embed=embed)
            return
        if channel is None:
            channel = ctx.channel
        await channel.set_permissions(
            ctx.guild.default_role,
            reason=f"{ctx.author.name} unlocked {channel.name}",
            send_messages=True,
        )
        embed = nextcord.Embed(
            title="Unlock Success",
            description=f"Unlocked {channel.mention}",
        )
        await ctx.send(embed=embed)
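    # --- Illustrative sketch (not in the original cog) ----------------------
    # lock's --server branch waits on LockConfirm, presumably defined earlier
    # in the file. A minimal stand-in following the same pattern as the
    # AllConfirm view used elsewhere in this cog (confirm/cancel buttons,
    # .value left as None on timeout) could look like this:
    class _LockConfirmSketch(nextcord.ui.View):
        def __init__(self):
            super().__init__(timeout=200)
            self.value = None

        @nextcord.ui.button(label="Confirm", style=nextcord.ButtonStyle.grey)
        async def confirm(self, button: nextcord.ui.Button, interaction: nextcord.Interaction):
            self.value = True
            self.stop()

        @nextcord.ui.button(label="Cancel", style=nextcord.ButtonStyle.grey)
        async def cancel(self, button: nextcord.ui.Button, interaction: nextcord.Interaction):
            self.value = False
            self.stop()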
    @commands.command(description="Modbans a member.")
    @commands.has_permissions(kick_members=True)
    @commands.cooldown(1, 21600, commands.BucketType.user)
    async def modban(self, ctx, member: nextcord.Member, *, reason=None):
        # Annotated as nextcord.Member so the converter resolves an actual
        # member (the original signature took a bare argument).
        if reason is None:
            reason = f"{ctx.author.name} modbanned {member.name}"
        else:
            reason = f"{ctx.author.name} modbanned {member.name} for the reason of {reason}"
        if member.id == ctx.author.id:
            embed69 = nextcord.Embed(
                title="Ban Error",
                description="No banning yourself...",
            )
            return await ctx.send(embed=embed69)
        view = AllConfirm(ctx)
        em = nextcord.Embed(
            title="Are you sure?",
            description="This is a very risky command only to be used in important "
            "situations such as, `NSFW or NSFL Posting` or `Raid on the Server`. "
            "Only use this command if no admin is online or responding. "
            "**If this command is used for the wrong purpose you may risk getting "
            "demoted if not banned from the staff team.**",
        )
        await ctx.author.send(embed=em, view=view)
        await view.wait()
        if view.value is None:
            await ctx.author.send("Command has been Timed Out, please try again.")
        elif view.value:
            guild = ctx.guild
            # random.choice over a list; the original passed a bare string,
            # which picks a single letter.
            banMsg = random.choice(["BANNED"])
            banEmbed = nextcord.Embed(
                title="Ban Success",
                description=f"{member.mention} {banMsg}",
            )
            banEmbed.add_field(name="Reason", value=reason)
            await ctx.author.send(embed=banEmbed)
            # DM before banning; after the ban the bot may share no guild
            # with the member and the DM would fail.
            await member.send(f"You got banned in **{guild}** | Reason: **{reason}**")
            await member.ban(reason=reason)
        else:
            banEmbed = nextcord.Embed(
                title="Ban Cancelled",
                description="Let's pretend like this never happened, then :I",
            )
            await ctx.author.send(embed=banEmbed)
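    # --- Illustrative sketch (not in the original cog) ----------------------
    # modban's @commands.cooldown(1, 21600, BucketType.user) allows one use
    # per user every six hours; a second invocation raises
    # commands.CommandOnCooldown. The original file defines no error handler,
    # but a hypothetical listener could surface the remaining time:
    @commands.Cog.listener()
    async def on_command_error(self, ctx, error):
        if isinstance(error, commands.CommandOnCooldown):
            await ctx.send(f"Command on cooldown - retry in {error.retry_after:.0f}s.")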
def setup(bot):
    # Standard nextcord extension entry point.
    bot.add_cog(Moderation(bot))
not (ctx.guild.me.guild_permissions.kick_members): embed2 = nextcord.Embed(title=\"Kick Error\",description=\"I require the ``Kick Members`` permisson to", "description=\"Kicks the member from your server.\") @commands.has_permissions(kick_members=True) async def kick(self, ctx, member: nextcord.Member", "| Reason: **{reason}**\" ) return @commands.command(description=\"Clears a bundle of messages.\",aliases=['purge']) @commands.has_permissions(manage_messages=True) async def", "give to a member\", ) return await ctx.send(embed=em3) embed = nextcord.Embed( title=\"Mute Success\",", "ctx.send(embed=em2) else: await ctx.channel.edit(slowmode_delay=time) em3 = nextcord.Embed( title=\"Slowmode Success\", description=f\"Slowmode set to {time}", "def kick(self, ctx, member: nextcord.Member = None, *, reason=None): if member == None:", "description=\"Muted role too high to remove from a member\", ) return await ctx.send(embed=em3)", "= nextcord.Embed( title=\"Mute Error\", description=\"Muted role too high to give to a member\",", "= None, time=None, *, reason=None ): guild = ctx.guild mutedRole = nextcord.utils.get(guild.roles, name=\"Muted\")", "Permission\", ) return await ctx.send(embed=em3) kickEmbed = nextcord.Embed(title=f\"Moderation Action <:moderation:910472145824542721> | Kick Case", "send = message.channel.send @commands.command(name=\"tempban\") @commands.has_permissions(ban_members=True) async def tempban(self,ctx, user:nextcord.User, time=None,reason=None): if reason==None: reason=\"No", "Success\", description=f\"Locked {channel.mention} \", ) await ctx.send(embed=embed) @commands.command(description=\"Unlocks the channel.\") @commands.has_permissions(kick_members=True) async def", "None, role: nextcord.Role = None, *, reason=None, ): if member is None: embed", "you in the role heirarchy - Invalid Permission\", ) return await ctx.send(embed=em4) if", "**{guild.name}** | Reason: **{reason}**\" ) return @commands.command(name=\"unmute\", description=\"Unmutes a muted member.\") @commands.has_permissions(manage_messages=True) async", "def unban(self, ctx,*,member): f\"\"\" **Info**: Unbans a member \"\"\" banned_user = await ctx.guild.bans()", "await ctx.send(embed=embed69) em = nextcord.Embed( title=\"Are you sure?\", description=\"This is a very risky", ") return await ctx.send(embed=em) if ctx.guild.me.top_role.position < role.position: embed = nextcord.Embed( title=\"Remove Role", "member.top_role.position: em3 = nextcord.Embed( title=\"Kick Error\", description=\"Member has same role as you in", ") if member == None: embed1 = nextcord.Embed( title=\"Ban Error\", description=\"Member to ban", "await ctx.send(embed=embed) return if ctx.author.top_role.position < role.position: em = nextcord.Embed( title=\"Remove Role Error\",", "title=\"Ban Error\", description=\"Member has same role as you in the role heirarchy -", "value=channel.slowmode_delay, inline=True) embed.add_field(name=\"Channel is NSFW?\", value=channel.is_nsfw(), inline=True) embed.add_field(name=\"Channel Permissions Synced?\", value=bool(CategoryChannel.permissions_synced), inline=True) embed.add_field(name=\"Channel", "async def interaction_check(self, interaction) -> bool: if interaction.user !=self.ctx.author: await interaction.response.send_message(\"You can't use", "nextcord.Embed( title=\"Unmute Error\", description=\"wHat? 
<:WHA:815331017854025790>\" ) return await ctx.send(embed=em5) elif ctx.author.top_role.position < member.top_role.position:", "elif view.value== True: em = nextcord.Embed(title=f\"<a:yes:909765403801182208> | *{member.name} Was Unbanned!*\") await ctx.send(embed=em) @commands.command(name=\"kick\",", "json import random import asyncio from datetime import datetime from difflib import get_close_matches", "def slowmode(self, ctx, time: int): try: if time == 0: em1 = nextcord.Embed(", "nextcord.Embed( title=\"Tempmute Error\", description=\"Member to mute - Not Found\" ) return await ctx.send(embed=em1)", ") await ctx.author.send(embed=em, view=view) await view.wait() if view.value is None: await ctx.author.send(\"Command has", "member.top_role.position: em3 = nextcord.Embed( title=\"Ban Error\", description=\"Member has same role as you in", ") return await ctx.send(embed=em2) elif ctx.author.top_role.position < member.top_role.position: em3 = nextcord.Embed( title=\"Tempmute Error\",", "method\") @commands.has_permissions(ban_members=True) async def unban(self, ctx,*,member): f\"\"\" **Info**: Unbans a member \"\"\" banned_user", "def channelstats(self, ctx, channel: nextcord.TextChannel = None): f\"\"\" **Info**: Get ChannelStats *Syntax*: \"{self.ctx.prefix}\"", "if channel == None: channel = ctx.channel embed = nextcord.Embed( title=f\"**ChannelStats for {channel.name}**\",", "member.add_roles(role) return except Exception: print(Exception) @commands.command( aliases=[\"takerole\", \"remover\"], description=\"Removes a certain role from", "setting=None): if setting == '--server': for channel in ctx.guild.channels: await channel.set_permissions( ctx.guild.default_role, reason=f\"{ctx.author.name}", "return if channel is None: channel = ctx.message.channel await channel.set_permissions( ctx.guild.default_role, reason=f\"{ctx.author.name} locked", "nextcord.Embed( title=\"Ban Success\", description=f\"{member.mention} {banMsg}\" ) banEmbed.add_field(name=\"Reason\", value=reason) await ctx.author.send(embed=banEmbed) await member.ban(reason=reason) await", "style=nextcord.ButtonStyle.grey, emoji=\"<a:no:909765403872481280>\") async def cancel( self, button: nextcord.ui.Button, interaction: nextcord.Interaction): self.value = False", "title=\"Slowmode Success\", description=f\"Slowmode set to {time} seconds\", ) await ctx.send(embed=em3) except Exception: await", "ctx.author.id: em5 = nextcord.Embed( title=\"Tempmute Error\", description=\"Don't bother, ive tried\" ) return await", "== ctx.author.id: embed69 = nextcord.Embed( title=\"Ban Error\", description=\"Ban yourself... 
only a skid would", "reason=None): guild = ctx.guild if member == None: em1 = nextcord.Embed( title=\"Unmute Error\",", "class AllConfirm(nextcord.ui.View): def __init__(self,ctx): super().__init__(timeout=200) self.value = None self.ctx=ctx @nextcord.ui.button( label=\"Confirm\", style=nextcord.ButtonStyle.grey,emoji=\"<a:yes:909765403801182208>\") async", "Reason: **{reason}**\" ) return @commands.command(description=\"Clears a bundle of messages.\",aliases=['purge']) @commands.has_permissions(manage_messages=True) async def clear(self,", "description=\"Gives a member a certain role.\" ) @commands.has_permissions(manage_roles=True) async def addrole( self, ctx,", "Reason:{reason}\") await ctx.send(embed=em) if bool(user.bot)==True: await ctx.guild.ban(user) await asyncio.sleep(tempban1) await ctx.guild.unban(user) else: await", "confirm( self, button: nextcord.ui.Button, interaction: nextcord.Interaction): self.value = True self.stop() @nextcord.ui.button(label=\"Cancel\", style=nextcord.ButtonStyle.grey, emoji=\"<a:no:909765403872481280>\")", "Invalid Permission\", ) return await ctx.send(embed=em3) elif ctx.author.top_role.position == member.top_role.position: em4 = nextcord.Embed(", "return if ctx.author.top_role.position < role.position: em = nextcord.Embed( title=\"Add Role Error\", description=\"You do", "role as you in the role heirarchy - Invalid Permission\", ) return await", "too high for me to perform this action\", ) return await ctx.send(embed=embed) try:", "role heirarchy - Invalid Permission\", ) return await ctx.send(embed=em3) kickEmbed = nextcord.Embed(title=f\"Moderation Action", "or NSFLPosting` or `Raid on the Server`. Only use this command if no", "print(Exception) @commands.command( aliases=[\"giverole\", \"addr\"], description=\"Gives a member a certain role.\" ) @commands.has_permissions(manage_roles=True) async", "= nextcord.utils.get(guild.roles, name=\"Muted\") if ctx.guild.me.top_role.position < mutedRole.position: em3 = nextcord.Embed( title=\"Unmute Error\", description=\"Muted", "lock(self, ctx, channel: nextcord.TextChannel = None, setting = None): if setting == '--server':", "*{member.name} Was Kicked!*\") await ctx.send(embed=em) await member.send(f\"You got kicked in **{ctx.guild}** | Reason:", "a role!\", ) await ctx.send(embed=embed) return if role is None: embed = nextcord.Embed(", "send_messages=None, ) embed = nextcord.Embed( title=\"Unlock Success\", description=f\"Unlocked entire server \", ) await", "``Kick Members`` permisson to run this command - Missing Permission\") return await ctx.send(embed=embed2)", "await ctx.send(embed=em1) elif member.id == ctx.author.id: em5 = nextcord.Embed( title=\"Tempmute Error\", description=\"Don't bother,", "await ctx.send(embed=em) if ctx.guild.me.top_role.position < role.position: embed = nextcord.Embed( title=\"Add Role Error\", description=\"That", "return await ctx.send(embed=embed1) if member.id == ctx.author.id: embed69 = nextcord.Embed( title=\"Ban Error\", description=\"Ban", "elif view.value== True: em = nextcord.Embed(title=f\"<a:yes:909765403801182208> | *{member.name} Was Banned!*\") await ctx.send(embed=em) await", "from **{guild}**\") return @commands.command( name=\"mute\", description=\"Mutes a member for a specific amount of", "have been muted from: **{guild.name}** | Reason: **{reason}** | Time: **{time}**\" ) if", "title=\"Remove Role Error\", description=\"That role is too high for me to perform this", "None): if setting == '--server': view = LockConfirm() em = nextcord.Embed( title=\"Are 
you", "using --server override\", send_messages=False, ) embed = nextcord.Embed( title=\"Lockdown Success\", description=f\"Locked entire server", "in the role heirarchy - Invalid Permission\", ) return await ctx.send(embed=em3) kickEmbed =", "async def slowmode(self, ctx, time: int): try: if time == 0: em1 =", "reason=None, ): if member is None: embed = nextcord.Embed( title=\"Remove Role Error\", description=\"Please", "in ctx.guild.channels: await channel.set_permissions( ctx.guild.default_role, reason=f\"{ctx.author.name} locked {channel.name} using --server override\", send_messages=False, )", "= nextcord.Embed( title=\"Mute Error\", description=\"Member **higher** than you in the role heirarchy -", "this command is used for the wrong purpose you may risk getting demoted", "set to {time} seconds\", ) await ctx.send(embed=em3) except Exception: await ctx.send(\"Error has occoured,", "time == None: em2 = nextcord.Embed( title=\"Tempmute Error\", description=\"Time to mute - Not", "member.kick(reason=reason) @commands.command(aliases=[\"cs\", \"ci\", \"channelinfo\"]) async def channelstats(self, ctx, channel: nextcord.TextChannel = None): f\"\"\"", "ctx.author.top_role.position < role.position: em = nextcord.Embed( title=\"Add Role Error\", description=\"You do not have", "return @commands.command(description=\"Clears a bundle of messages.\",aliases=['purge']) @commands.has_permissions(manage_messages=True) async def clear(self, ctx, amount=10): amount", "= nextcord.Embed( title=\"Add Role Error\", description=\"You do not have enough permissions to give", "to give this role\", ) return await ctx.send(embed=em) if ctx.guild.me.top_role.position < role.position: embed", "return await ctx.send(embed=em4) if not (ctx.guild.me.guild_permissions.manage_roles): embed2 = nextcord.Embed( title=\"Unmute Error\", description=\"I require", "action\", ) return await ctx.send(embed=embed) try: addRole = True for role_ in member.roles:", "Permission\", ) return await ctx.send(embed=embed2) mutedRole = nextcord.utils.get(guild.roles, name=\"Muted\") if ctx.guild.me.top_role.position < mutedRole.position:", "Error\", description=\"Muted role too high to remove from a member\", ) return await", "nextcord.Embed( title=\"Tempmute Error\", description=\"Muted role too high to give to a member\", )", ") embed = nextcord.Embed( title=\"Unlock Success\", description=f\"Unlocked entire server \", ) await ctx.send(embed=embed)", "print(Exception) @commands.command( aliases=[\"takerole\", \"remover\"], description=\"Removes a certain role from a member.\", ) @commands.has_permissions(manage_roles=True)", "if not (ctx.guild.me.guild_permissions.manage_roles): embed2 = nextcord.Embed( title=\"Mute Error\", description=\"I require the **Manage Roles**", "await ctx.send(embed=em) @commands.command(name=\"ban\", description=\"Bans the member from your server.\") async def ban(self, ctx,", "to mute - Not Found\" ) return await ctx.send(embed=em2) elif ctx.author.top_role.position < member.top_role.position:", "use this command if no admin is online or responding. 
**If this command", "elif member.id == ctx.author.id: em5 = nextcord.Embed( title=\"Mute Error\", description=\"Error\" ) return await", "guild = ctx.guild if member == None: em1 = nextcord.Embed( title=\"Unmute Error\", description=\"Member", "= nextcord.Embed( title=\"Are you sure?\", description=\"This is a very risky command only to", "from difflib import get_close_matches from nextcord.webhook import sync class AllConfirm(nextcord.ui.View): def __init__(self,ctx): super().__init__(timeout=200)", "Role Success\", description=f\"{role.mention} has been assigned to {member.mention}\", ) await ctx.send(embed=em) await member.add_roles(role)", "| *{member.name} Was Not Kicked!*\") await ctx.send(embed=em) elif view.value== True: em = nextcord.Embed(title=f\"<a:yes:909765403801182208>", "title=\"Unmute Error\", description=\"Muted role too high to remove from a member\", ) return", "nextcord.Embed( title=\"Add Role Error\", description=\"You do not have enough permissions to give this", "only to be used in important situations such as, `Raid on the Server`.", ") return await ctx.send(embed=em3) if not mutedRole: mutedRole = await guild.create_role(name=\"Muted\") await ctx.send(\"No", "if ctx.author.top_role.position < role.position: em = nextcord.Embed( title=\"Add Role Error\", description=\"You do not", "value=reason) await ctx.author.send(embed=banEmbed) await member.ban(reason=reason) await member.send(f\"You got banned in **{guild}** | Reason:", "amount = amount + 1 if amount > 101: em1 = nextcord.Embed( title=\"Clear", "to give them a role!\", ) await ctx.send(embed=embed) return if role is None:", "= nextcord.Embed( title=\"Unmute Error\", description=\"wHat? <:WHA:815331017854025790>\" ) return await ctx.send(embed=em5) elif ctx.author.top_role.position <", "em1 = nextcord.Embed( title=\"Clear Error\", description=\"Purge limit exedeed - Greater than 100\", )", "channel: nextcord.TextChannel = None): f\"\"\" **Info**: Get ChannelStats *Syntax*: \"{self.ctx.prefix}\" channelstats [channel] \"\"\"", "role heirarchy - Invalid Permission\", ) return await ctx.send(embed=em3) if not (ctx.guild.me.guild_permissions.manage_roles): embed2", "a member \"\"\" if member == None: embed1 = nextcord.Embed( title=\"Ban Error\", description=\"Member", "addRole = False break if not addRole: embed = nextcord.Embed( title=\"Add Role Error\",", "occoured, notifying dev team\") print(Exception) @commands.command( aliases=[\"giverole\", \"addr\"], description=\"Gives a member a certain", "Error\", description=\"No banning yourself...\", ) return await ctx.send(embed=embed69) em = nextcord.Embed( title=\"Are you", "Unbanned!*\") await ctx.send(embed=em) elif view.value== True: em = nextcord.Embed(title=f\"<a:yes:909765403801182208> | *{member.name} Was Unbanned!*\")", "demoted if not banned from the staff team.**\", ) await ctx.author.send(embed=em, view=view) await", "= nextcord.Embed(title=f\"<a:yes:909765403801182208> | `{user.name}` Was Banned | Duration: {tempban1}{time[1:]} | Reason:{reason}\") await ctx.send(embed=em)", "None: channel = ctx.channel await channel.set_permissions( ctx.guild.default_role, reason=f\"{ctx.author.name} unlocked {channel.name}\", send_messages=True, ) embed", "been Timed Out, please try again.\") elif view.value: for channel in ctx.guild.channels: await", "if view.value==False: em = nextcord.Embed(title=f\"<a:no:909765403872481280> | *{member.name} Was Not Unbanned!*\") await ctx.send(embed=em) elif", "Synced?\", value=bool(CategoryChannel.permissions_synced), 
inline=True) embed.add_field(name=\"Channel is Annoucement?\", value=channel.is_news(), inline=True) embed.add_field(name=\"Channel Hash:\", value=hash(channel), inline=True) embed.add_field(name=\"Channel", "for me to perform this action\", ) return await ctx.send(embed=embed) try: addRole =", "mutedRole, speak=False, send_messages=False, read_message_history=True, ) if member == None: em1 = nextcord.Embed( title=\"Mute", ") return await ctx.send(embed=embed2) mutedRole = nextcord.utils.get(guild.roles, name=\"Muted\") if ctx.guild.me.top_role.position < mutedRole.position: em3", "member.remove_roles(role) roleRemoved = True break if not roleRemoved: embed = nextcord.Embed( title=\"Remove Role", "@commands.command(description=\"Modbans the member.\") @commands.has_permissions(kick_members=True) @commands.cooldown(1, 21600, commands.BucketType.user) async def modban(self, ctx, member, *,", "description=f\"{member.mention} {banMsg}\" ) banEmbed.add_field(name=\"Reason\", value=reason) await ctx.author.send(embed=banEmbed) await member.ban(reason=reason) await member.send(f\"You got banned", "title=\"Unlock Success\", description=f\"Unlocked entire server \", ) await ctx.send(embed=embed) return if channel is", "\",) return await ctx.send(embed=embed) elif ctx.author.top_role.position < member.top_role.position: em3 = nextcord.Embed( title=\"Kick Error\",", "await ctx.send(embed=embed1) if member.id == ctx.author.id: embed69 = nextcord.Embed( title=\"Ban Error\", description=\"Ban yourself...", "nextcord.Embed( title=\"Mute Error\", description=\"I require the **Manage Roles** permisson to run this command", "from nextcord.ext import commands import json import random import asyncio from datetime import", "Permission\", ) return await ctx.send(embed=em4) if not (ctx.guild.me.guild_permissions.manage_roles): embed2 = nextcord.Embed( title=\"Mute Error\",", "nextcord.Embed( title=f\"**ChannelStats for {channel.name}**\", description=f\"{'Category :{}'.format(channel.category.name) if channel.category else 'Channel is not in", "Error\", description=\"You do not have enough permissions to remove this role\", ) return", "as, `NSFW or NSFLPosting` or `Raid on the Server`. 
Only use this command", "value=ctx.guild.name, inline=True) embed.add_field(name=\"Channel Id:-\", value=channel.id, inline=False) embed.add_field(name=\"Channel Topic:-\",value=f\"{channel.topic if channel.topic else 'No topic.'}\",inline=False,)", "nextcord.Embed( title=\"Tempmute Error\", description=\"Member **higher** than you in the role heirarchy - Invalid", "@commands.command(name=\"kick\", description=\"Kicks the member from your server.\") @commands.has_permissions(kick_members=True) async def kick(self, ctx, member:", "await ctx.send(embed=em3) embed = nextcord.Embed( title=\"Mute Success\", description=f\"{member.mention} was muted Indefinitly \", colour=nextcord.Colour.blue(),", "ctx.send(embed=em1) await ctx.channel.edit(slowmode_delay=0) elif time > 21600: em2 = nextcord.Embed( title=\"Slowmode Error\", description=\"Slowmode", "*, reason=None): guild = ctx.guild if member == None: em1 = nextcord.Embed( title=\"Unmute", "if ctx.author.top_role.position < role.position: em = nextcord.Embed( title=\"Remove Role Error\", description=\"You do not", "async def on_message(self, message): if str(message.author.id) != str(BOT_USER_ID): send = message.channel.send @commands.command(name=\"tempban\") @commands.has_permissions(ban_members=True)", "mutedRole = nextcord.utils.get(guild.roles, name=\"Muted\") if member == None: em1 = nextcord.Embed( title=\"Tempmute Error\",", "== None: embed1 = nextcord.Embed( title=\"Ban Error\", description=\"Member to ban - Not Found\"", "Out, please try again.\") elif view.value: for channel in ctx.guild.channels: await channel.set_permissions( ctx.guild.default_role,", "channelstats [channel] \"\"\" if channel == None: channel = ctx.channel embed = nextcord.Embed(", "{channel.mention} \", ) await ctx.send(embed=embed) @commands.command(description=\"Unlocks the channel.\") @commands.has_permissions(kick_members=True) async def unlock(self, ctx,", "user to give them a role!\", ) await ctx.send(embed=embed) return if role is", "the role heirarchy - Invalid Permission\", ) return await ctx.send(embed=em4) if ctx.guild.me.top_role.position <", "addrole( self, ctx, member: nextcord.Member = None, *, role: nextcord.Role = None ):", "are trying to give\", ) await ctx.send(embed=embed) return else: em = nextcord.Embed( title=\"Add", "in **{guild}** | Reason: **{reason}**\") else: banEmbed = nextcord.Embed( title=\"Ban Cancelled\", description=\"Lets pretend", "Success\", description=f\"{member.mention} was unmuted \", colour=nextcord.Colour.blue(), ) embed.add_field(name=\"Reason:\", value=reason, inline=False) await ctx.send(embed=embed) await", "| *{member.name} Was Not Unbanned!*\") await ctx.send(embed=em) elif view.value== True: em = nextcord.Embed(title=f\"<a:yes:909765403801182208>", "exedeed - Greater than 100\", ) return await ctx.send(embed=em1) else: await ctx.channel.purge(limit=amount) msg", "Not Kicked!*\") await ctx.send(embed=em) elif view.value== True: em = nextcord.Embed(title=f\"<a:yes:909765403801182208> | *{member.name} Was", "ctx.author.id: embed69 = nextcord.Embed( title=\"Ban Error\", description=\"Ban yourself... 
only a skid would do", "| `{user.name}` Was Banned | Duration: {tempban1}{time[1:]} | Reason:{reason}\") await ctx.send(embed=em) if bool(user.bot)==True:", "unmuted \", colour=nextcord.Colour.blue(), ) embed.add_field(name=\"Reason:\", value=reason, inline=False) await ctx.send(embed=embed) await member.remove_roles(mutedRole, reason=reason) await", "role\", ) return await ctx.send(embed=em) if ctx.guild.me.top_role.position < role.position: embed = nextcord.Embed( title=\"Add", "embed = nextcord.Embed( title=\"Unlock Success\", description=f\"Unlocked entire server \", ) await ctx.send(embed=embed) return", "description=\"Please ping a role to give {} that role!\".format( member.mention ), ) await", "True for role_ in member.roles: if role_ == role: addRole = False break", "description=\"That role is too high for me to perform this action\", ) return", "ctx.author.top_role.position < member.top_role.position: em3 = nextcord.Embed( title=\"Ban Error\", description=\"Member **higher** than you in", "= ctx.guild if member == None: em1 = nextcord.Embed( title=\"Unmute Error\", description=\"Member to", "description=\"You do not have enough permissions to give this role\", ) return await", "{reason}\") await ctx.guild.ban(user) await asyncio.sleep(tempban1) await ctx.guild.unban(user) else: em = nextcord.Embed(title=f\"<a:no:909765403872481280> | Member", "value=reason, inline=False) embed.add_field(name=\"Duration\", value=time) await ctx.send(embed=embed) await member.add_roles(mutedRole, reason=reason) await member.send( f\"You have", "= nextcord.Embed( title=\"Tempmute Error\", description=\"Don't bother, ive tried\" ) return await ctx.send(embed=em5) if", "await channel.set_permissions( ctx.guild.default_role, reason=f\"{ctx.author.name} locked {channel.name} using --server override\", send_messages=False, ) embed =", "member.ban(reason=reason) @commands.command(description=\"Lucas unban method\") @commands.has_permissions(ban_members=True) async def unban(self, ctx,*,member): f\"\"\" **Info**: Unbans a", "role from them!\", ) await ctx.send(embed=embed) return if role is None: embed =", "elif ctx.author.top_role.position == member.top_role.position: em3 = nextcord.Embed( title=\"Kick Error\", description=\"Member has same role", ") await ctx.send(embed=embed) @commands.command(description=\"Modbans the member.\") @commands.has_permissions(kick_members=True) @commands.cooldown(1, 21600, commands.BucketType.user) async def modban(self,", "if not banned from the staff team.**\", ) await ctx.author.send(embed = em, view=view)", "await ctx.send(embed=em1) elif member.id == ctx.author.id: em5 = nextcord.Embed( title=\"Mute Error\", description=\"Error\" )", "speak=False, send_messages=False, read_message_history=True, ) embed = nextcord.Embed( title=\"Unmute Success\", description=f\"{member.mention} was unmuted \",", "async def lock(self, ctx, channel: nextcord.TextChannel = None, setting = None): if setting", "Banned | Duration: {tempban1}{time[1:]} | Reason:{reason}\") await ctx.send(embed=em) if bool(user.bot)==True: await ctx.guild.ban(user) await", "return await ctx.send(embed=em3) if not mutedRole: mutedRole = await guild.create_role(name=\"Muted\") await ctx.send(\"No mute", "me to perform this action\", ) return await ctx.send(embed=embed) try: roleRemoved = False", "ban_entry.user if (user.name, user.discriminator) == (member_name, member_discriminator): await ctx.guild.unban(user) view=AllConfirm(ctx) if view.value==False: em", "== 0: em1 = nextcord.Embed( title=\"Slowmode 
Success\", description=\"Slowmode turned off\" ) await ctx.send(embed=em1)", "reason=None): if reason is None: reason = f\"{ctx.author.name} modbanned {member.name}\" else: reason =", "async def addrole( self, ctx, member: nextcord.Member = None, *, role: nextcord.Role =", "await ctx.send(embed=embed) elif ctx.author.top_role.position < member.top_role.position: em3 = nextcord.Embed( title=\"Kick Error\", description=\"Member **higher**", "- Greater than 100\", ) return await ctx.send(embed=em1) else: await ctx.channel.purge(limit=amount) msg =", "happened them :I\", ) await ctx.author.send(embed=lockEmbed) return if channel is None: channel =", "), ) await ctx.send(embed=embed) return if ctx.author.top_role.position < role.position: em = nextcord.Embed( title=\"Remove", "a user to remove a role from them!\", ) await ctx.send(embed=embed) return if", "embed.add_field(name=\"Reason:\", value=reason, inline=False) await ctx.send(embed=embed) await member.add_roles(mutedRole, reason=reason) await member.send( f\"You have been", ") embed.add_field(name=\"Reason:\", value=reason, inline=False) await ctx.send(embed=embed) await member.remove_roles(mutedRole, reason=reason) await member.send( f\"You have", "< mutedRole.position: em3 = nextcord.Embed( title=\"Tempmute Error\", description=\"Muted role too high to give", "in guild.channels: await channel.set_permissions( mutedRole, speak=False, send_messages=False, read_message_history=True, ) embed = nextcord.Embed( title=\"Unmute", "await ctx.send(embed=em4) if not (ctx.guild.me.guild_permissions.manage_roles): embed2 = nextcord.Embed( title=\"Unmute Error\", description=\"I require the", "inline=True) embed.add_field(name=\"Channel is Annoucement?\", value=channel.is_news(), inline=True) embed.add_field(name=\"Channel Hash:\", value=hash(channel), inline=True) embed.add_field(name=\"Channel Creation Time:\",", "def on_message(self, message): if str(message.author.id) != str(BOT_USER_ID): send = message.channel.send @commands.command(name=\"tempban\") @commands.has_permissions(ban_members=True) async", "title=\"Mute Error\", description=\"Error\" ) return await ctx.send(embed=em5) elif ctx.author.top_role.position < member.top_role.position: em3 =", "pretend like this never happened them :I\", ) await ctx.author.send(embed=lockEmbed) return if channel", "Success\", description=f\"{role.mention} has been assigned to {member.mention}\", ) await ctx.send(embed=em) await member.add_roles(role) return", "off\" ) await ctx.send(embed=em1) await ctx.channel.edit(slowmode_delay=0) elif time > 21600: em2 = nextcord.Embed(", "*{member.name} Was Not Kicked!*\") await ctx.send(embed=em) elif view.value== True: em = nextcord.Embed(title=f\"<a:yes:909765403801182208> |", "nextcord.Embed( title=\"Unlock Success\", description=f\"Unlocked entire server \", ) await ctx.send(embed=embed) return if channel", ") embed = nextcord.Embed( title=\"Unmute Success\", description=f\"{member.mention} was unmuted \", colour=nextcord.Colour.blue(), ) embed.add_field(name=\"Reason:\",", "already has the role you are trying to give\", ) await ctx.send(embed=embed) return", "if setting == '--server': view = LockConfirm() em = nextcord.Embed( title=\"Are you sure?\",", "if bool(user.bot)==True: await ctx.guild.ban(user) await asyncio.sleep(tempban1) await ctx.guild.unban(user) else: await DMChannel.send(user,f\"**{ctx.guild.name}**: You have", "ctx.message.channel await channel.set_permissions( ctx.guild.default_role, reason=f\"{ctx.author.name} locked {channel.name}\", 
send_messages=False, # ) embed = nextcord.Embed(", "has been Timed Out, please try again.\") elif view.value: guild = ctx.guild banMsg", "role too high to give to a member\", ) return await ctx.send(embed=em3) if", "= nextcord.Embed( title=\"Lockdown Success\", description=f\"Locked entire server \", ) await ctx.send(embed=embed) else: lockEmbed", "def interaction_check(self, interaction) -> bool: if interaction.user !=self.ctx.author: await interaction.response.send_message(\"You can't use that!!\"", "ctx.send(embed=embed) @commands.command(description=\"Modbans the member.\") @commands.has_permissions(kick_members=True) @commands.cooldown(1, 21600, commands.BucketType.user) async def modban(self, ctx, member,", "description=\"Can't kick yourself \",) return await ctx.send(embed=embed) elif ctx.author.top_role.position < member.top_role.position: em3 =", "await member.send(f\"You got kicked in **{ctx.guild}** | Reason: **{reason}**\") await member.kick(reason=reason) @commands.command(aliases=[\"cs\", \"ci\",", "Not Found\") return await ctx.send(embed=embed1) if not (ctx.guild.me.guild_permissions.kick_members): embed2 = nextcord.Embed(title=\"Kick Error\",description=\"I require", "try: if time == 0: em1 = nextcord.Embed( title=\"Slowmode Success\", description=\"Slowmode turned off\"", "name=\"Muted\") if ctx.guild.me.top_role.position < mutedRole.position: em3 = nextcord.Embed( title=\"Unmute Error\", description=\"Muted role too", "require the ``Manage Roles`` permisson to run this command - Missing Permission\", )", "Error\", description=\"Please ping a role to give {} that role!\".format( member.mention ), )", "ctx.send(embed=em3) elif ctx.author.top_role.position == member.top_role.position: em4 = nextcord.Embed( title=\"Unmute Error\", description=\"Member has same", "= nextcord.Embed(title=f\"<a:no:909765403872481280> | *{member.name} Was Not Kicked!*\") await ctx.send(embed=em) elif view.value== True: em", "ctx.send(embed=em3) if not (ctx.guild.me.guild_permissions.manage_roles): embed2 = nextcord.Embed( title=\"Tempmute Error\", description=\"I require the ``Manage", "Permissions Synced?\", value=bool(CategoryChannel.permissions_synced), inline=True) embed.add_field(name=\"Channel is Annoucement?\", value=channel.is_news(), inline=True) embed.add_field(name=\"Channel Hash:\", value=hash(channel), inline=True)", "time !=None : time_convert = {\"s\": 1, \"m\": 60, \"h\": 3600, \"d\": 86400}", "mutedRole.position: em3 = nextcord.Embed( title=\"Mute Error\", description=\"Muted role too high to give to", "title=\"Unmute Error\", description=\"wHat? 
<:WHA:815331017854025790>\" ) return await ctx.send(embed=em5) elif ctx.author.top_role.position < member.top_role.position: em3", "<:WHA:815331017854025790>\" ) return await ctx.send(embed=em5) elif ctx.author.top_role.position < member.top_role.position: em3 = nextcord.Embed( title=\"Unmute", "| Reason: **{reason}**\") else: banEmbed = nextcord.Embed( title=\"Ban Cancelled\", description=\"Lets pretend like this", "= nextcord.Embed( title=\"Ban Success\", description=f\"{member.mention} {banMsg}\" ) banEmbed.add_field(name=\"Reason\", value=reason) await ctx.author.send(embed=banEmbed) await member.ban(reason=reason)", "if not banned from the staff team.**\", ) await ctx.author.send(embed=em, view=view) await view.wait()", "channel == None: channel = ctx.channel embed = nextcord.Embed( title=f\"**ChannelStats for {channel.name}**\", description=f\"{'Category", "ping a role to give {} that role!\".format( member.mention ), ) await ctx.send(embed=embed)", "is None: embed = nextcord.Embed( title=\"Remove Role Error\", description=\"Please ping a role to", "from nextcord.components import Button from nextcord.embeds import Embed from nextcord.ext import commands from", "got banned in **{guild}** | Reason: **{reason}**\") await member.ban(reason=reason) @commands.command(description=\"Lucas unban method\") @commands.has_permissions(ban_members=True)", "this never happened them :I\", ) await ctx.author.send(embed=lockEmbed) return if channel is None:", "import CategoryChannel,DMChannel from nextcord.colour import Color from nextcord.components import Button from nextcord.embeds import", "await ctx.guild.ban(user) await asyncio.sleep(tempban1) await ctx.guild.unban(user) else: em = nextcord.Embed(title=f\"<a:no:909765403872481280> | Member To", "to give to a member\", ) return await ctx.send(embed=em3) if not mutedRole: mutedRole", ") await ctx.send(embed=em2) else: await ctx.channel.edit(slowmode_delay=time) em3 = nextcord.Embed( title=\"Slowmode Success\", description=f\"Slowmode set", "*, role: nextcord.Role = None ): if member is None: embed = nextcord.Embed(", "): if member is None: embed = nextcord.Embed( title=\"Add Role Error\", description=\"Please ping", "@commands.has_permissions(kick_members=True) @commands.cooldown(1, 21600, commands.BucketType.user) async def modban(self, ctx, member, *, reason=None): if reason", "nextcord.Embed( title=\"Tempmute Error\", description=\"I require the ``Manage Roles`` permisson to run this command", "tempban1 = int(time[0]) * time_convert[time[-1]] em = nextcord.Embed(title=f\"<a:yes:909765403801182208> | `{user.name}` Was Banned |", "channel: nextcord.TextChannel = None, setting=None): if setting == '--server': for channel in ctx.guild.channels:", "await guild.create_role(name=\"Muted\") await ctx.send(\"No mute role found. Creating mute role...\") for channel in", "em2 = nextcord.Embed( title=\"Slowmode Error\", description=\"Slowmode over 6 hours\" ) await ctx.send(embed=em2) else:", "time_convert[time[-1]] em = nextcord.Embed(title=f\"<a:yes:909765403801182208> | `{user.name}` Was Banned | Duration: {tempban1}{time[1:]} | Reason:{reason}\")", "yourself... 
only a skid would do that XD !\", ) return await ctx.send(embed=embed69)", "await ctx.guild.unban(user) else: await DMChannel.send(user,f\"**{ctx.guild.name}**: You have been banned for {tempban1}{time[1:]}\\n**Reason:** {reason}\") await", "int(time[0]) * time_convert[time[-1]] embed = nextcord.Embed( title=\"Tempmute Success\", description=f\"{member.mention} was muted \", colour=nextcord.Colour.blue(),", "async def mute(self, ctx, member: nextcord.Member = None, *, reason=None): guild = ctx.guild", "%p\"), inline=False) embed.set_thumbnail(url=ctx.guild.icon.url) await ctx.send(embed=embed) @commands.command(name=\"tempmute\", description=\"Mutes a member indefinitely.\") @commands.has_permissions(manage_messages=True) async def", "perform this action\", ) return await ctx.send(embed=embed) try: addRole = True for role_", "time == None: await asyncio.sleep(tempmute) await member.remove_roles(mutedRole) await member.send(f\"You have been unmuted from", "member: nextcord.Member = None, role: nextcord.Role = None, *, reason=None, ): if member", "embed.add_field(name=\"Channel Position:-\", value=channel.position, inline=True) embed.add_field(name=\"Channel Slowmode?\", value=channel.slowmode_delay, inline=True) embed.add_field(name=\"Channel is NSFW?\", value=channel.is_nsfw(), inline=True)", "def confirm( self, button: nextcord.ui.Button, interaction: nextcord.Interaction): self.value = True self.stop() @nextcord.ui.button(label=\"Cancel\", style=nextcord.ButtonStyle.grey,", "member.top_role.position: em4 = nextcord.Embed( title=\"Mute Error\", description=\"Member has same role as you in", "== None: em1 = nextcord.Embed( title=\"Tempmute Error\", description=\"Member to mute - Not Found\"", "asyncio from datetime import datetime from difflib import get_close_matches from nextcord.webhook import sync", "name=\"mute\", description=\"Mutes a member for a specific amount of time.\" ) @commands.has_permissions(manage_messages=True) async", "Invalid Permission\", ) return await ctx.send(embed=em3) kickEmbed = nextcord.Embed(title=f\"Moderation Action <:moderation:910472145824542721> | Kick", "channel is None: channel = ctx.channel await channel.set_permissions( ctx.guild.default_role, reason=f\"{ctx.author.name} unlocked {channel.name}\", send_messages=True,", "= nextcord.Embed(title=f\"<a:yes:909765403801182208> | *{member.name} Was Kicked!*\") await ctx.send(embed=em) await member.send(f\"You got kicked in", "**{guild}** | Reason: **{reason}**\") else: banEmbed = nextcord.Embed( title=\"Ban Cancelled\", description=\"Lets pretend like", "view.value is None: await ctx.author.send(\"Command has been Timed Out, please try again.\") elif", "em4 = nextcord.Embed( title=\"Mute Error\", description=\"Member has same role as you in the", "member.remove_roles(mutedRole) await member.send(f\"You have been unmuted from **{guild}**\") return @commands.command( name=\"mute\", description=\"Mutes a", "ctx.send(embed=embed) await member.add_roles(mutedRole, reason=reason) await member.send( f\"You have been muted from: **{guild.name}** |", "ctx.send(embed=em1) elif member.id == ctx.author.id: em5 = nextcord.Embed( title=\"Mute Error\", description=\"Error\" ) return", ") return await ctx.send(embed=em3) elif ctx.author.top_role.position == member.top_role.position: em4 = nextcord.Embed( title=\"Mute Error\",", "a muted member.\") @commands.has_permissions(manage_messages=True) async def unmute(self, ctx, member: nextcord.Member = None, *,", "ephemeral=True) else: return True 
BOT_USER_ID=\"897762972603150346\" class Moderation(commands.Cog): def __init__(self, bot): self.bot = bot", "description=\"This is a very risky command only to be used in important situations", "time.\" ) @commands.has_permissions(manage_messages=True) async def mute(self, ctx, member: nextcord.Member = None, *, reason=None):", "nextcord.Embed( title=\"Remove Role Error\", description=\"Please ping a user to remove a role from", "= nextcord.Embed(title=f\"Moderation Action <:moderation:910472145824542721> | Kick Case \",color=nextcord.Color.red()) kickEmbed.add_field(name=\"Reason: \", value=reason) view=AllConfirm(ctx) await", "None, time=None, *, reason=None ): guild = ctx.guild mutedRole = nextcord.utils.get(guild.roles, name=\"Muted\") if", "= ctx.guild mutedRole = nextcord.utils.get(guild.roles, name=\"Muted\") if not mutedRole: mutedRole = await guild.create_role(name=\"Muted\")", "nextcord.Embed( title=\"Kick Error\", description=\"Member to kick - Not Found\") return await ctx.send(embed=embed1) if", "from nextcord.webhook import sync class AllConfirm(nextcord.ui.View): def __init__(self,ctx): super().__init__(timeout=200) self.value = None self.ctx=ctx", "nextcord.ui.view import View from nextcord.ext import commands import json import random import asyncio", "@commands.command(description=\"Clears a bundle of messages.\",aliases=['purge']) @commands.has_permissions(manage_messages=True) async def clear(self, ctx, amount=10): amount =", "@commands.has_permissions(manage_messages=True) async def mute(self, ctx, member: nextcord.Member = None, *, reason=None): guild =", "value=reason) view=AllConfirm(ctx) await ctx.send(embed=banEmbed,view=view) await view.wait() if view.value==False: em = nextcord.Embed(title=f\"<a:no:909765403872481280> | *{member.name}", "certain role.\" ) @commands.has_permissions(manage_roles=True) async def addrole( self, ctx, member: nextcord.Member = None,", "this command - Missing Permission\") return await ctx.send(embed=embed2) if member.id == ctx.author.id: embed", "@commands.has_permissions(kick_members=True) async def lock(self, ctx, channel: nextcord.TextChannel = None, setting = None): if", "bool: if interaction.user !=self.ctx.author: await interaction.response.send_message(\"You can't use that!!\" , ephemeral=True) else: return", "ctx.send(embed=em5) elif ctx.author.top_role.position < member.top_role.position: em3 = nextcord.Embed( title=\"Mute Error\", description=\"Member **higher** than", "in any category'}\", color=nextcord.Color.random()) embed.add_field(name=\"Channel Guild:-\", value=ctx.guild.name, inline=True) embed.add_field(name=\"Channel Id:-\", value=channel.id, inline=False) embed.add_field(name=\"Channel", "await ctx.send(\"Cleared Messages\") asyncio.sleep(10) await msg.delete() @commands.command(description=\"Change the channels slowmode.\") @commands.has_permissions(manage_channels=True) async def", "Success\", description=f\"{member.mention} was muted \", colour=nextcord.Colour.blue(), ) embed.add_field(name=\"Reason:\", value=reason, inline=False) embed.add_field(name=\"Duration\", value=time) await", "got kicked in **{ctx.guild}** | Reason: **{reason}**\") await member.kick(reason=reason) @commands.command(aliases=[\"cs\", \"ci\", \"channelinfo\"]) async", "cancel( self, button: nextcord.ui.Button, interaction: nextcord.Interaction): self.value = False self.stop() async def interaction_check(self,", "Error\", description=\"Ban yourself... 
only a skid would do that XD !\", ) return", "not (ctx.guild.me.guild_permissions.manage_roles): embed2 = nextcord.Embed( title=\"Unmute Error\", description=\"I require the ``Manage Roles`` permisson", "view=AllConfirm(ctx) await ctx.send(embed=kickEmbed,view=view) await view.wait() view=AllConfirm(ctx) if view.value==False: em = nextcord.Embed(title=f\"<a:no:909765403872481280> | *{member.name}", "{member.mention}\", ) await ctx.send(embed=em) return except Exception: print(Exception) @commands.command(description=\"Locks the channel.\") @commands.has_permissions(kick_members=True) async", "ctx.send(embed=em2) elif ctx.author.top_role.position < member.top_role.position: em3 = nextcord.Embed( title=\"Tempmute Error\", description=\"Member **higher** than", "trying to give\", ) await ctx.send(embed=embed) return else: em = nextcord.Embed( title=\"Add Role", "risk getting demoted if not banned from the staff team.**\", ) await ctx.author.send(embed", "nextcord.ext import commands import json import random import asyncio from datetime import datetime", "require the **Manage Roles** permisson to run this command - Missing Permission\", )", "None: embed1 = nextcord.Embed( title=\"Kick Error\", description=\"Member to kick - Not Found\") return", "description=f\"{role.mention} has been assigned to {member.mention}\", ) await ctx.send(embed=em) await member.add_roles(role) return except", "): if member is None: embed = nextcord.Embed( title=\"Remove Role Error\", description=\"Please ping", "a certain role from a member.\", ) @commands.has_permissions(manage_roles=True) async def removerole( self, ctx,", "await ctx.guild.ban(user) await asyncio.sleep(tempban1) await ctx.guild.unban(user) else: await DMChannel.send(user,f\"**{ctx.guild.name}**: You have been banned", "if amount > 101: em1 = nextcord.Embed( title=\"Clear Error\", description=\"Purge limit exedeed -", "Timed Out, please try again.\") elif view.value: guild = ctx.guild banMsg = random.choice(\"BANNED\")", "await view.wait() view=AllConfirm(ctx) if view.value==False: em = nextcord.Embed(title=f\"<a:no:909765403872481280> | *{member.name} Was Not Kicked!*\")", "been unmuted from: **{guild.name}** | Reason: **{reason}**\" ) return @commands.command(description=\"Clears a bundle of", "Role Error\", description=\"You do not have enough permissions to give this role\", )", "= int(time[0]) * time_convert[time[-1]] em = nextcord.Embed(title=f\"<a:yes:909765403801182208> | `{user.name}` Was Banned | Duration:", "member a certain role.\" ) @commands.has_permissions(manage_roles=True) async def addrole( self, ctx, member: nextcord.Member", "{channel.name}\", send_messages=True, ) embed = nextcord.Embed( title=\"Unlock Success\", description=f\"Unlocked {channel.mention} \", ) await", "**{reason}**\" ) return @commands.command(description=\"Clears a bundle of messages.\",aliases=['purge']) @commands.has_permissions(manage_messages=True) async def clear(self, ctx,", "command - Missing Permission\", ) return await ctx.send(embed=embed2) mutedRole = nextcord.utils.get(guild.roles, name=\"Muted\") if", "nextcord.Embed( title=\"Unlock Success\", description=f\"Unlocked {channel.mention} \", ) await ctx.send(embed=embed) @commands.command(description=\"Modbans the member.\") @commands.has_permissions(kick_members=True)", "if user!= None: if time==None: em = nextcord.Embed(title=f\"<a:yes:909765403801182208> | {user.name} Was Banned indefinitely\")", "if ctx.guild.me.top_role.position < role.position: embed = nextcord.Embed( title=\"Remove Role 
<gh_stars>1-10
import nextcord
from nextcord.channel import CategoryChannel, DMChannel
from nextcord.colour import Color
from nextcord.components import Button
from nextcord.ext import commands
from nextcord.ext.commands.cooldowns import BucketType
from nextcord.ui.view import View
from nextcord.webhook import sync
import json
import random
import asyncio
from datetime import datetime
from difflib import get_close_matches

BOT_USER_ID = 0  # the bot's own user id; defined from config elsewhere in the original source


class AllConfirm(nextcord.ui.View):
    """Confirm/Cancel button pair; `value` stays None if the view times out."""

    def __init__(self, ctx):
        super().__init__(timeout=200)
        self.value = None
        self.ctx = ctx

    @nextcord.ui.button(label="Confirm", style=nextcord.ButtonStyle.grey, emoji="<a:yes:909765403801182208>")
    async def confirm(self, button: nextcord.ui.Button, interaction: nextcord.Interaction):
        self.value = True
        self.stop()

    @nextcord.ui.button(label="Cancel", style=nextcord.ButtonStyle.grey, emoji="<a:no:909765403872481280>")
    async def cancel(self, button: nextcord.ui.Button, interaction: nextcord.Interaction):
        self.value = False
        self.stop()

    async def interaction_check(self, interaction) -> bool:
        # Only the command invoker may answer the confirmation.
        if interaction.user == self.ctx.author:
            return True
        return False


class Moderation(commands.Cog):
    def __init__(self, bot):
        self.bot = bot

    @commands.Cog.listener()
    async def on_message(self, message):
        if str(message.author.id) != str(BOT_USER_ID):
            send = message.channel.send

    @commands.command(name="tempban")
    @commands.has_permissions(ban_members=True)
    async def tempban(self, ctx, user: nextcord.User, time=None, reason=None):
        if reason is None:
            reason = "No Reason"
        if user is not None:
            if time is None:
                em = nextcord.Embed(title=f"<a:yes:909765403801182208> | `{user.name}` Was Banned | Reason:{reason}")
                await ctx.send(embed=em)
                await ctx.guild.ban(user)
            if time is not None:
                # "10m" -> 10 * 60 seconds, "2h" -> 2 * 3600 seconds, etc.
                time_convert = {"s": 1, "m": 60, "h": 3600, "d": 86400}
                tempban1 = int(time[:-1]) * time_convert[time[-1]]
                em = nextcord.Embed(title=f"<a:yes:909765403801182208> | `{user.name}` Was Banned | Duration: {time} | Reason:{reason}")
                await ctx.send(embed=em)
                if bool(user.bot) is True:
                    await ctx.guild.ban(user)
                    await asyncio.sleep(tempban1)
                    await ctx.guild.unban(user)
                else:
                    # DM humans before the temporary ban; bots cannot receive DMs
                    await user.send(f"**{ctx.guild.name}**: You have been banned for {time}\n**Reason:** {reason}")
                    await ctx.guild.ban(user)
                    await asyncio.sleep(tempban1)
                    await ctx.guild.unban(user)
        else:
            em = nextcord.Embed(title=f"<a:no:909765403872481280> | Member To Ban Was Not Found")
            await ctx.send(embed=em)

    @commands.command(name="ban", description="Bans the member from your server.")
    async def ban(self, ctx, member: nextcord.Member = None, *, reason=None):
        """ **Info**: Bans a member """
        if member is None:
            embed1 = nextcord.Embed(title="Ban Error", description="Member to ban - Not Found")
            return await ctx.send(embed=embed1)
        if member.id == ctx.author.id:
            embed69 = nextcord.Embed(title="Ban Error", description="Ban yourself... only a skid would do that XD !",)
            return await ctx.send(embed=embed69)
        elif ctx.author.top_role.position < member.top_role.position:
            em3 = nextcord.Embed(title="Ban Error", description="Member **higher** than you in the role hierarchy - Invalid Permission",)
            return await ctx.send(embed=em3)
        elif ctx.author.top_role.position == member.top_role.position:
            em3 = nextcord.Embed(title="Ban Error", description="Member has same role as you in the role hierarchy - Invalid Permission",)
            return await ctx.send(embed=em3)
        guild = ctx.guild
        banEmbed = nextcord.Embed(title=f"Moderation Action <:moderation:910472145824542721> | Ban Case ", color=nextcord.Color.red())
        banEmbed.add_field(name="Reason: ", value=reason)
        view = AllConfirm(ctx)
        await ctx.send(embed=banEmbed, view=view)
        await view.wait()
        if view.value is False:
            em = nextcord.Embed(title=f"<a:no:909765403872481280> | *{member.name} Was Not Banned!*")
            await ctx.send(embed=em)
        elif view.value is True:
            em = nextcord.Embed(title=f"<a:yes:909765403801182208> | *{member.name} Was Banned!*")
            await ctx.send(embed=em)
            await member.send(f"You got banned in **{guild}** | Reason: **{reason}**")
            await member.ban(reason=reason)

    @commands.command(description="Lucas unban method")
    @commands.has_permissions(ban_members=True)
    async def unban(self, ctx, *, member):
        """ **Info**: Unbans a member """
        banned_user = await ctx.guild.bans()
        member_name, member_discriminator = member.split('#')
        for ban_entry in banned_user:
            user = ban_entry.user
            if (user.name, user.discriminator) == (member_name, member_discriminator):
                view = AllConfirm(ctx)
                await ctx.send(f"Confirm unban for `{user.name}`:", view=view)
                await view.wait()
                if view.value is False:
                    em = nextcord.Embed(title=f"<a:no:909765403872481280> | *{user.name} Was Not Unbanned!*")
                    await ctx.send(embed=em)
                elif view.value is True:
                    await ctx.guild.unban(user)
                    em = nextcord.Embed(title=f"<a:yes:909765403801182208> | *{user.name} Was Unbanned!*")
                    await ctx.send(embed=em)
                return

    @commands.command(name="kick", description="Kicks the member from your server.")
    @commands.has_permissions(kick_members=True)
    async def kick(self, ctx, member: nextcord.Member = None, *, reason=None):
        if member is None:
            embed1 = nextcord.Embed(title="Kick Error", description="Member to kick - Not Found")
            return await ctx.send(embed=embed1)
        if not (ctx.guild.me.guild_permissions.kick_members):
            embed2 = nextcord.Embed(title="Kick Error", description="I require the ``Kick Members`` permission to run this command - Missing Permission")
            return await ctx.send(embed=embed2)
        if member.id == ctx.author.id:
            embed = nextcord.Embed(title="Kick Error", description="Can't kick yourself ",)
            return await ctx.send(embed=embed)
        elif ctx.author.top_role.position < member.top_role.position:
            em3 = nextcord.Embed(title="Kick Error", description="Member **higher** than you in the role hierarchy - Invalid Permission",)
            return await ctx.send(embed=em3)
        elif ctx.author.top_role.position == member.top_role.position:
            em3 = nextcord.Embed(title="Kick Error", description="Member has same role as you in the role hierarchy - Invalid Permission",)
            return await ctx.send(embed=em3)
        kickEmbed = nextcord.Embed(title=f"Moderation Action <:moderation:910472145824542721> | Kick Case ", color=nextcord.Color.red())
        kickEmbed.add_field(name="Reason: ", value=reason)
        view = AllConfirm(ctx)
        await ctx.send(embed=kickEmbed, view=view)
        await view.wait()
        if view.value is False:
            em = nextcord.Embed(title=f"<a:no:909765403872481280> | *{member.name} Was Not Kicked!*")
            await ctx.send(embed=em)
        elif view.value is True:
            em = nextcord.Embed(title=f"<a:yes:909765403801182208> | *{member.name} Was Kicked!*")
            await ctx.send(embed=em)
            await member.send(f"You got kicked in **{ctx.guild}** | Reason: **{reason}**")
            await member.kick(reason=reason)

    @commands.command(aliases=["cs", "ci", "channelinfo"])
    async def channelstats(self, ctx, channel: nextcord.TextChannel = None):
        """ **Info**: Get ChannelStats *Syntax*: "{prefix}" channelstats [channel] """
        if channel is None:
            channel = ctx.channel
        embed = nextcord.Embed(
            title=f"**ChannelStats for {channel.name}**",
            description=f"{'Category: {}'.format(channel.category.name) if channel.category else 'Channel is not in any category'}",
            color=nextcord.Color.random())
        embed.add_field(name="Channel Guild:-", value=ctx.guild.name, inline=True)
        embed.add_field(name="Channel Id:-", value=channel.id, inline=False)
        embed.add_field(name="Channel Topic:-", value=f"{channel.topic if channel.topic else 'No topic.'}", inline=False,)
        embed.add_field(name="Channel Position:-", value=channel.position, inline=True)
        embed.add_field(name="Channel Slowmode?", value=channel.slowmode_delay, inline=True)
        embed.add_field(name="Channel is NSFW?", value=channel.is_nsfw(), inline=True)
        embed.add_field(name="Channel Permissions Synced?", value=bool(channel.permissions_synced), inline=True)
        embed.add_field(name="Channel is Announcement?", value=channel.is_news(), inline=True)
        embed.add_field(name="Channel Creation Time:", value=channel.created_at.strftime("%a, %d %B %Y , %I:%M %p"), inline=False)
        embed.set_thumbnail(url=ctx.guild.icon.url)
        await ctx.send(embed=embed)

    @commands.command(name="tempmute", description="Mutes a member indefinitely.")
    @commands.has_permissions(manage_messages=True)
    async def tempmute(self, ctx, member: nextcord.Member = None, time=None, *, reason=None):
        guild = ctx.guild
        mutedRole = nextcord.utils.get(guild.roles, name="Muted")
        if member is None:
            em1 = nextcord.Embed(title="Tempmute Error", description="Member to mute - Not Found")
            return await ctx.send(embed=em1)
        elif member.id == ctx.author.id:
            em5 = nextcord.Embed(title="Tempmute Error", description="Don't bother, I've tried")
            return await ctx.send(embed=em5)
        if time is None:
            em2 = nextcord.Embed(title="Tempmute Error", description="Time to mute - Not Found")
            return await ctx.send(embed=em2)
        elif ctx.author.top_role.position < member.top_role.position:
            em3 = nextcord.Embed(title="Tempmute Error", description="Member **higher** than you in the role hierarchy - Invalid Permission",)
            return await ctx.send(embed=em3)
        if not (ctx.guild.me.guild_permissions.manage_roles):
            embed2 = nextcord.Embed(title="Tempmute Error", description="I require the ``Manage Roles`` permission to run this command - Missing Permission",)
            return await ctx.send(embed=embed2)
        elif ctx.author.top_role.position == member.top_role.position:
            em4 = nextcord.Embed(title="Tempmute Error", description="Member has same role as you in the role hierarchy - Invalid Permission",)
            return await ctx.send(embed=em4)
        if not mutedRole:
            # create the Muted role on first use
            mutedRole = await guild.create_role(name="Muted")
            await ctx.send("No mute role found. Creating mute role...")
            for channel in guild.channels:
                await channel.set_permissions(mutedRole, speak=False, send_messages=False, read_message_history=True,)
        if ctx.guild.me.top_role.position < mutedRole.position:
            em3 = nextcord.Embed(title="Tempmute Error", description="Muted role too high to give to a member",)
            return await ctx.send(embed=em3)
        time_convert = {"s": 1, "m": 60, "h": 3600, "d": 86400}
        tempmute = int(time[:-1]) * time_convert[time[-1]]  # e.g. "10m" -> 600 seconds
        embed = nextcord.Embed(title="Tempmute Success", description=f"{member.mention} was muted ",)
        embed.add_field(name="Reason:", value=reason, inline=False)
        embed.add_field(name="Duration", value=time)
        await ctx.send(embed=embed)
        await member.add_roles(mutedRole, reason=reason)
        await member.send(f"You have been muted from: **{guild.name}** | Reason: **{reason}** | Time: **{time}**")
        await asyncio.sleep(tempmute)
        await member.remove_roles(mutedRole)
        await member.send(f"You have been unmuted from **{guild}**")
        return

    @commands.command(name="mute", description="Mutes a member forever.")
    @commands.has_permissions(manage_messages=True)
    async def mute(self, ctx, member: nextcord.Member = None, *, reason=None):
        guild = ctx.guild
        mutedRole = nextcord.utils.get(guild.roles, name="Muted")
        if not mutedRole:
            mutedRole = await guild.create_role(name="Muted")
            await ctx.send("No mute role found. Creating mute role...")
            for channel in guild.channels:
                await channel.set_permissions(mutedRole, speak=False, send_messages=False, read_message_history=True,)
        if member is None:
            em1 = nextcord.Embed(title="Mute Error", description="Member to mute - Not Found")
            return await ctx.send(embed=em1)
        elif ctx.author.top_role.position < member.top_role.position:
            em3 = nextcord.Embed(title="Mute Error", description="Member **higher** than you in the role hierarchy - Invalid Permission",)
            return await ctx.send(embed=em3)
        elif ctx.author.top_role.position == member.top_role.position:
            em4 = nextcord.Embed(title="Mute Error", description="Member has same role as you in the role hierarchy - Invalid Permission",)
            return await ctx.send(embed=em4)
        if ctx.guild.me.top_role.position < mutedRole.position:
            em3 = nextcord.Embed(title="Mute Error", description="Muted role too high to give to a member",)
            return await ctx.send(embed=em3)
        embed = nextcord.Embed(title="Mute Success", description=f"{member.mention} was muted Indefinitely ", colour=nextcord.Colour.blue(),)
        embed.add_field(name="Reason:", value=reason, inline=False)
        await ctx.send(embed=embed)
        await member.add_roles(mutedRole, reason=reason)
        await member.send(f"You have been muted from: **{guild.name}** | Reason: **{reason}**")

    @commands.command(description="Unmutes a member.")
    @commands.has_permissions(manage_messages=True)
    async def unmute(self, ctx, member: nextcord.Member = None, *, reason=None):
        guild = ctx.guild
        if member is None:
            em1 = nextcord.Embed(title="Unmute Error", description="Member to unmute - Not Found")
            return await ctx.send(embed=em1)
        elif member.id == ctx.author.id:
            em5 = nextcord.Embed(title="Unmute Error", description="wHat? <:WHA:815331017854025790>")
            return await ctx.send(embed=em5)
        elif ctx.author.top_role.position < member.top_role.position:
            em3 = nextcord.Embed(title="Unmute Error", description="Member **higher** than you in the role hierarchy - Invalid Permission",)
            return await ctx.send(embed=em3)
        elif ctx.author.top_role.position == member.top_role.position:
            em4 = nextcord.Embed(title="Unmute Error", description="Member has same role as you in the role hierarchy - Invalid Permission",)
            return await ctx.send(embed=em4)
        if not (ctx.guild.me.guild_permissions.manage_roles):
            embed2 = nextcord.Embed(title="Unmute Error", description="I require the ``Manage Roles`` permission to run this command - Missing Permission",)
            return await ctx.send(embed=embed2)
        mutedRole = nextcord.utils.get(guild.roles, name="Muted")
        if not mutedRole:
            mutedRole = await guild.create_role(name="Muted")
            await ctx.send("No mute role found. Creating mute role...")
            for channel in guild.channels:
                await channel.set_permissions(mutedRole, speak=False, send_messages=False, read_message_history=True,)
        if ctx.guild.me.top_role.position < mutedRole.position:
            em3 = nextcord.Embed(title="Unmute Error", description="Muted role too high to remove from a member",)
            return await ctx.send(embed=em3)
        embed = nextcord.Embed(title="Unmute Success", description=f"{member.mention} was unmuted ", colour=nextcord.Colour.blue(),)
        embed.add_field(name="Reason:", value=reason, inline=False)
        await ctx.send(embed=embed)
        await member.remove_roles(mutedRole, reason=reason)
        await member.send(f"You have been unmuted from **{guild}**")
        return

    @commands.command(description="Gives a certain role to a member.",)
    @commands.has_permissions(manage_roles=True)
    async def addrole(self, ctx, member: nextcord.Member = None, *, role: nextcord.Role = None):
        if member is None:
            embed = nextcord.Embed(title="Add Role Error", description="Please ping a user to give them a role!",)
            await ctx.send(embed=embed)
            return
        if role is None:
            embed = nextcord.Embed(title="Add Role Error", description="Please ping a role to give them!",)
            await ctx.send(embed=embed)
            return
        if ctx.author.top_role.position < role.position:
            em = nextcord.Embed(title="Add Role Error", description="You do not have enough permissions to give this role",)
            return await ctx.send(embed=em)
        if ctx.guild.me.top_role.position < role.position:
            embed = nextcord.Embed(title="Add Role Error", description="That role is too high for me to perform this action",)
            return await ctx.send(embed=embed)
        try:
            addRole = True
            for role_ in member.roles:
                if role_ == role:
                    addRole = False
                    break
            if not addRole:
                embed = nextcord.Embed(title="Add Role Error", description=f"{member.mention} already has the role you are trying to give",)
                await ctx.send(embed=embed)
                return
            else:
                em = nextcord.Embed(title="Add Role Success", description=f"{role.mention} has been assigned to {member.mention}",)
                await ctx.send(embed=em)
                await member.add_roles(role)
                return
        except Exception:
            print(Exception)

    @commands.command(aliases=["takerole", "remover"], description="Removes a certain role from a member.",)
    @commands.has_permissions(manage_roles=True)
    async def removerole(self, ctx, member: nextcord.Member = None, role: nextcord.Role = None):
        if member is None:
            embed = nextcord.Embed(title="Remove Role Error", description="Please ping a user to remove a role from them!",)
            await ctx.send(embed=embed)
            return
        if role is None:
            embed = nextcord.Embed(title="Remove Role Error", description="Please ping a role to remove the role from {}!".format(member.mention),)
            await ctx.send(embed=embed)
            return
        if ctx.author.top_role.position < role.position:
            em = nextcord.Embed(title="Remove Role Error", description="You do not have enough permissions to remove this role",)
            return await ctx.send(embed=em)
        if ctx.guild.me.top_role.position < role.position:
            embed = nextcord.Embed(title="Remove Role Error", description="That role is too high for me to perform this action",)
            return await ctx.send(embed=embed)
        try:
            roleRemoved = False
            for role_ in member.roles:
                if role_ == role:
                    await member.remove_roles(role)
                    roleRemoved = True
                    break
            if not roleRemoved:
                embed = nextcord.Embed(title="Remove Role Error", description=f"{member.mention} already has the role removed",)
                await ctx.send(embed=embed)
                return
        except Exception:
            print(Exception)

    @commands.command(description="Clears a bundle of messages.", aliases=['purge'])
    @commands.has_permissions(manage_messages=True)
    async def clear(self, ctx, amount=10):
        amount = amount + 1  # include the invoking message
        if amount > 101:
            em1 = nextcord.Embed(title="Clear Error", description="Purge limit exceeded",)
            return await ctx.send(embed=em1)
        else:
            await ctx.channel.purge(limit=amount)
            msg = await ctx.send("Cleared Messages")
            await asyncio.sleep(10)
            await msg.delete()

    @commands.command(description="Change the channels slowmode.")
    @commands.has_permissions(manage_channels=True)
    async def slowmode(self, ctx, time: int):
        try:
            if time == 0:
                em1 = nextcord.Embed(title="Slowmode Success", description="Slowmode turned off")
                await ctx.send(embed=em1)
                await ctx.channel.edit(slowmode_delay=0)
            elif time > 21600:
                em2 = nextcord.Embed(title="Slowmode Error", description="Slowmode over 6 hours")
                await ctx.send(embed=em2)
            else:
                await ctx.channel.edit(slowmode_delay=time)
                em3 = nextcord.Embed(title="Slowmode Success", description=f"Slowmode set to {time} seconds",)
                await ctx.send(embed=em3)
        except Exception:
            await ctx.send("Error has occurred")

    @commands.command(description="Locks the channel.")
    @commands.has_permissions(kick_members=True)
    async def lock(self, ctx, channel: nextcord.TextChannel = None, setting=None):
        if setting == '--server':
            # LockConfirm is a confirmation view defined elsewhere in the bot.
            view = LockConfirm()
            em = nextcord.Embed(
                title="Are you sure?",
                description="This is a very risky command only to be used in important situations "
                            "such as, `Raid on the Server`. "
                            "**If this command is used for the wrong purpose you may risk getting "
                            "demoted if not banned from the staff team.**",)
            await ctx.author.send(embed=em, view=view)
            await view.wait()
            if view.value is None:
                await ctx.author.send("Command has been Timed Out, please try again.")
            elif view.value:
                for channel in ctx.guild.channels:
                    await channel.set_permissions(
                        ctx.guild.default_role,
                        reason=f"{ctx.author.name} locked {channel.name} using --server override",
                        send_messages=False,
                    )
                embed = nextcord.Embed(title="Lockdown Success", description=f"Locked entire server ",)
                await ctx.send(embed=embed)
            else:
                lockEmbed = nextcord.Embed(title="Lock Cancelled", description="Let's pretend like this never happened then :I",)
                await ctx.author.send(embed=lockEmbed)
            return
        if channel is None:
            channel = ctx.message.channel
        await channel.set_permissions(
            ctx.guild.default_role,
            reason=f"{ctx.author.name} locked {channel.name}",
            send_messages=False,
        )
        embed = nextcord.Embed(title="Lockdown Success", description=f"Locked {channel.mention} ",)
        await ctx.send(embed=embed)

    @commands.command(description="Unlocks the channel.")
    @commands.has_permissions(kick_members=True)
    async def unlock(self, ctx, channel: nextcord.TextChannel = None, setting=None):
        if setting == '--server':
            view = LockConfirm()
            em = nextcord.Embed(
                title="Are you sure?",
                description="This is a very risky command only to be used in important situations "
                            "such as, `Raid on the Server`. "
                            "**If this command is used for the wrong purpose you may risk getting "
                            "demoted if not banned from the staff team.**",)
            await ctx.author.send(embed=em, view=view)
            await view.wait()
            if view.value is None:
                await ctx.author.send("Command has been Timed Out, please try again.")
            elif view.value:
                for channel in ctx.guild.channels:
                    await channel.set_permissions(
                        ctx.guild.default_role,
                        reason=f"{ctx.author.name} unlocked {channel.name} using --server override",
                        send_messages=None,  # None restores the role default instead of forcing True
                    )
                embed = nextcord.Embed(title="Unlock Success", description=f"Unlocked entire server ",)
                await ctx.send(embed=embed)
            else:
                lockEmbed = nextcord.Embed(title="Lock Cancelled", description="Let's pretend like this never happened then :I",)
                await ctx.author.send(embed=lockEmbed)
            return
        if channel is None:
            channel = ctx.channel
        await channel.set_permissions(
            ctx.guild.default_role,
            reason=f"{ctx.author.name} unlocked {channel.name}",
            send_messages=True,
        )
        embed = nextcord.Embed(title="Unlock Success", description=f"Unlocked {channel.mention} ",)
        await ctx.send(embed=embed)

    @commands.command(description="Modbans the member.")
    @commands.has_permissions(kick_members=True)
    @commands.cooldown(1, 30, BucketType.user)  # cooldown window truncated in the archive; 30s/user is a stand-in
    async def modban(self, ctx, member: nextcord.Member, *, reason=None):
        if reason is None:
            reason = f"{ctx.author.name} modbanned {member.name}"
        print(f"{ctx.author.name} modbanned {member.name} for the reason of {reason}")
        if member is None:
            embed1 = nextcord.Embed(title="Ban Error", description="Member to ban - Not Found")
            return await ctx.send(embed=embed1)
        if member.id == ctx.author.id:
            embed69 = nextcord.Embed(title="Ban Error", description="No banning yourself...",)
            return await ctx.send(embed=embed69)
        em = nextcord.Embed(
            title="Are you sure?",
            description="This is a very risky command only to be used in important situations "
                        "such as, `NSFW or NSFLPosting` or `Mods not responding`. "
                        "**If this command is used for the wrong purpose you may risk getting "
                        "demoted if not banned from the staff team.**",)
        view = AllConfirm(ctx)
        await ctx.author.send(embed=em, view=view)
        await view.wait()
        if view.value is None:
            await ctx.author.send("Command has been Timed Out, please try again.")
        elif view.value:
            guild = ctx.guild
            banMsg = random.choice(["BANNED"])  # one-element pool; extend with more messages if desired
            banEmbed = nextcord.Embed(title="Ban Success", description=f"{member.mention} {banMsg}")
            banEmbed.add_field(name="Reason", value=reason)
            await ctx.author.send(embed=banEmbed)
            await member.send(f"You got banned in **{guild}** | Reason: **{reason}**")
            await member.ban(reason=reason)
        else:
            banEmbed = nextcord.Embed(title="Ban Cancelled", description="Let's pretend like this never happened then :I",)
            await ctx.author.send(embed=banEmbed)


def setup(bot):
    bot.add_cog(Moderation(bot))
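# Usage sketch (assumption, not recovered from the archive): if the cog above
# is saved as moderation.py, a minimal bot could self-host it like this. The
# prefix, the intents flags, and the token placeholder are illustrative.
if __name__ == "__main__":
    intents = nextcord.Intents.default()
    intents.members = True           # member objects for kick/ban/role commands
    intents.message_content = True   # required for prefix commands on newer API versions
    bot = commands.Bot(command_prefix="!", intents=intents)
    bot.add_cog(Moderation(bot))     # same effect as bot.load_extension("moderation")
    bot.run("YOUR_BOT_TOKEN")        # hypothetical placeholder token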
from pyre import Pyre
from pyre import zhelper
import threading
import zmq
import logging
import json
import time
from uniflex.core import modules

__author__ = "<NAME>"
__copyright__ = "Copyright (c) 2015, Technische Universitat Berlin"
__version__ = "0.1.0"
__email__ = "<EMAIL>"


class PyreDiscoveryMasterModule(modules.ControlApplication):
    def __init__(self, iface, groupName="uniflex",
                 downlink=None, sub=None, uplink=None, pub=None):
        super(PyreDiscoveryMasterModule, self).__init__()
        self.log = logging.getLogger('pyre_discovery_module.main')
        pyreLogger = logging.getLogger('pyre')
        pyreLogger.setLevel(logging.CRITICAL)
        self.running = False
        self.iface = iface
        # "downlink"/"uplink" take precedence over the "sub"/"pub" aliases
        self.sub = downlink
        if not self.sub:
            self.sub = sub
        self.pub = uplink
        if not self.pub:
            self.pub = pub
        self.groupName = groupName
        self.ctx = zmq.Context()

    def _sending_announcements(self):
        # Periodically publish the broker endpoints to the discovery group.
        while self.running:
            self.log.debug("Discovery Announcements:"
                           " SUB={}, PUB={}"
                           .format(self.sub, self.pub))
            msg = json.dumps({'downlink': self.sub, 'uplink': self.pub})
            self.discovery_pipe.send(msg.encode('utf_8'))
            time.sleep(2)

    @modules.on_start()
    def start_discovery_announcements(self):
        self.log.debug("Start discovery announcements")
        self.running = True
        self.discovery_pipe = zhelper.zthread_fork(
            self.ctx, self.discovery_task)
        d = threading.Thread(target=self._sending_announcements)
        d.daemon = True
        d.start()
        return True

    @modules.on_exit()
    def stop_discovery_announcements(self):
        self.log.debug("Stop discovery announcements")
        if self.running:
            self.running = False
            self.discovery_pipe.send("$$STOP".encode('utf_8'))

    def discovery_task(self, ctx, pipe):
        # Runs in the zhelper-forked thread: relays announcements arriving on
        # the pipe into the Pyre group and exits on the "$$STOP" sentinel.
        self.log.debug("Pyre on iface : {}".format(self.iface))
        n = Pyre(self.groupName, sel_iface=self.iface)
        n.set_header("DISCOVERY_Header1", "DISCOVERY_HEADER")
        n.join(self.groupName)
        n.start()
        poller = zmq.Poller()
        poller.register(pipe, zmq.POLLIN)
        while True:
            items = dict(poller.poll())
            if pipe in items and items[pipe] == zmq.POLLIN:
                message = pipe.recv()
                # message to quit
                if message.decode('utf-8') == "$$STOP":
                    break
                n.shout(self.groupName, message)
        n.stop()
"logging import json import time from uniflex.core import modules __author__ = \"<NAME>\" __copyright__", "\"Copyright (c) 2015, Technische Universitat Berlin\" __version__ = \"0.1.0\" __email__ = <EMAIL>\" class", "self.log.debug(\"Start discovery announcements\".format()) self.running = True self.discovery_pipe = zhelper.zthread_fork( self.ctx, self.discovery_task) d =", "= pipe.recv() # message to quit if message.decode('utf-8') == \"$$STOP\": break n.shout(self.groupName, message)", "from pyre import zhelper import threading import zmq import logging import json import", "uplink if not self.pub: self.pub = pub self.groupName = groupName self.ctx = zmq.Context()", "self.discovery_pipe.send(\"$$STOP\".encode('utf_8')) def discovery_task(self, ctx, pipe): self.log.debug(\"Pyre on iface : {}\".format(self.iface)) n = Pyre(self.groupName,", "= downlink if not self.sub: self.sub = sub self.pub = uplink if not", "Pyre(self.groupName, sel_iface=self.iface) n.set_header(\"DISCOVERY_Header1\", \"DISCOVERY_HEADER\") n.join(self.groupName) n.start() poller = zmq.Poller() poller.register(pipe, zmq.POLLIN) while(True): items", "not self.sub: self.sub = sub self.pub = uplink if not self.pub: self.pub =", "def stop_discovery_announcements(self): self.log.debug(\"Stop discovery announcements\".format()) if self.running: self.running = False self.discovery_pipe.send(\"$$STOP\".encode('utf_8')) def discovery_task(self,", "items and items[pipe] == zmq.POLLIN: message = pipe.recv() # message to quit if", "Universitat Berlin\" __version__ = \"0.1.0\" __email__ = <EMAIL>\" class PyreDiscoveryMasterModule(modules.ControlApplication): def __init__(self, iface,", "import zmq import logging import json import time from uniflex.core import modules __author__", "= False self.iface = iface self.sub = downlink if not self.sub: self.sub =", "self.log = logging.getLogger('pyre_discovery_module.main') pyreLogger = logging.getLogger('pyre') pyreLogger.setLevel(logging.CRITICAL) self.running = False self.iface = iface" ]
[ "import SessionMiddleware from spire.context import ContextMiddleware, HeaderParser, SessionParser from bastion.security.middleware import RedirectMiddleware class", "Configuration, Unit, Dependency from spire.util import uniqid from spire.wsgi.util import Mount from spire.wsgi.sessions", "spire.wsgi.sessions import SessionMiddleware from spire.context import ContextMiddleware, HeaderParser, SessionParser from bastion.security.middleware import RedirectMiddleware", "MethodNotAllowed from werkzeug.formparser import parse_form_data from werkzeug.utils import secure_filename from spire.core import Configuration,", "pass else: os.unlink(filename) def find(self, id): filename = os.path.join(self.configuration['upload_directory'], id) if os.path.exists(filename): return", "default='/tmp'), }) def acquire(self, id): return open(self.find(id)) def dispose(self, id): try: filename =", "= mapping[name] = '%s_%s' % ( uniqid(), secure_filename(uploaded_file.filename)) uploaded_file.save(os.path.join(directory, filename)) response.mimetype = 'text/html'", "configuration = Configuration({ 'upload_directory': Text(nonempty=True, default='/tmp'), }) def acquire(self, id): return open(self.find(id)) def", "import os, json from scheme import Json, Text from werkzeug.exceptions import MethodNotAllowed from", "werkzeug.formparser import parse_form_data from werkzeug.utils import secure_filename from spire.core import Configuration, Unit, Dependency", "filename = self.find(id) except ValueError: pass else: os.unlink(filename) def find(self, id): filename =", "= Configuration({ 'upload_directory': Text(nonempty=True, default='/tmp'), }) def _dispatch_request(self, request, response): directory = self.configuration['upload_directory']", "return elif request.method != 'POST': raise MethodNotAllowed() mapping = {} for name, uploaded_file", "'upload_directory': Text(nonempty=True, default='/tmp'), }) def acquire(self, id): return open(self.find(id)) def dispose(self, id): try:", "return open(self.find(id)) def dispose(self, id): try: filename = self.find(id) except ValueError: pass else:", "request, response): directory = self.configuration['upload_directory'] if request.method == 'GET': return elif request.method !=", "SessionMiddleware from spire.context import ContextMiddleware, HeaderParser, SessionParser from bastion.security.middleware import RedirectMiddleware class UploadEndpoint(Mount):", "from werkzeug.exceptions import MethodNotAllowed from werkzeug.formparser import parse_form_data from werkzeug.utils import secure_filename from", "import ContextMiddleware, HeaderParser, SessionParser from bastion.security.middleware import RedirectMiddleware class UploadEndpoint(Mount): session_middleware = Dependency(SessionMiddleware)", "import uniqid from spire.wsgi.util import Mount from spire.wsgi.sessions import SessionMiddleware from spire.context import", "import secure_filename from spire.core import Configuration, Unit, Dependency from spire.util import uniqid from", "filename = mapping[name] = '%s_%s' % ( uniqid(), secure_filename(uploaded_file.filename)) uploaded_file.save(os.path.join(directory, filename)) response.mimetype =", "if request.method == 'GET': return elif request.method != 'POST': raise MethodNotAllowed() mapping =", "def dispose(self, id): try: filename = self.find(id) except ValueError: pass else: os.unlink(filename) def", "from werkzeug.formparser import parse_form_data from werkzeug.utils import secure_filename from spire.core import Configuration, Unit,", "= '%s_%s' % ( uniqid(), 
secure_filename(uploaded_file.filename)) uploaded_file.save(os.path.join(directory, filename)) response.mimetype = 'text/html' response.data =", "name, uploaded_file in request.files.iteritems(): filename = mapping[name] = '%s_%s' % ( uniqid(), secure_filename(uploaded_file.filename))", "from spire.core import Configuration, Unit, Dependency from spire.util import uniqid from spire.wsgi.util import", "ValueError: pass else: os.unlink(filename) def find(self, id): filename = os.path.join(self.configuration['upload_directory'], id) if os.path.exists(filename):", "scheme import Json, Text from werkzeug.exceptions import MethodNotAllowed from werkzeug.formparser import parse_form_data from", "= self.configuration['upload_directory'] if request.method == 'GET': return elif request.method != 'POST': raise MethodNotAllowed()", "filename)) response.mimetype = 'text/html' response.data = Json.serialize(mapping) class UploadManager(Unit): configuration = Configuration({ 'upload_directory':", "= {} for name, uploaded_file in request.files.iteritems(): filename = mapping[name] = '%s_%s' %", "Json, Text from werkzeug.exceptions import MethodNotAllowed from werkzeug.formparser import parse_form_data from werkzeug.utils import", "{} for name, uploaded_file in request.files.iteritems(): filename = mapping[name] = '%s_%s' % (", "Text from werkzeug.exceptions import MethodNotAllowed from werkzeug.formparser import parse_form_data from werkzeug.utils import secure_filename", "import Configuration, Unit, Dependency from spire.util import uniqid from spire.wsgi.util import Mount from", "from spire.wsgi.sessions import SessionMiddleware from spire.context import ContextMiddleware, HeaderParser, SessionParser from bastion.security.middleware import", "'text/html' response.data = Json.serialize(mapping) class UploadManager(Unit): configuration = Configuration({ 'upload_directory': Text(nonempty=True, default='/tmp'), })", "from bastion.security.middleware import RedirectMiddleware class UploadEndpoint(Mount): session_middleware = Dependency(SessionMiddleware) context_middleware = ContextMiddleware([HeaderParser(), SessionParser()])", "except ValueError: pass else: os.unlink(filename) def find(self, id): filename = os.path.join(self.configuration['upload_directory'], id) if", "self.find(id) except ValueError: pass else: os.unlink(filename) def find(self, id): filename = os.path.join(self.configuration['upload_directory'], id)", "import parse_form_data from werkzeug.utils import secure_filename from spire.core import Configuration, Unit, Dependency from", "= ContextMiddleware([HeaderParser(), SessionParser()]) redirect_middleware = Dependency(RedirectMiddleware) configuration = Configuration({ 'upload_directory': Text(nonempty=True, default='/tmp'), })", "else: os.unlink(filename) def find(self, id): filename = os.path.join(self.configuration['upload_directory'], id) if os.path.exists(filename): return filename", "( uniqid(), secure_filename(uploaded_file.filename)) uploaded_file.save(os.path.join(directory, filename)) response.mimetype = 'text/html' response.data = Json.serialize(mapping) class UploadManager(Unit):", "}) def _dispatch_request(self, request, response): directory = self.configuration['upload_directory'] if request.method == 'GET': return", "secure_filename(uploaded_file.filename)) uploaded_file.save(os.path.join(directory, filename)) response.mimetype = 'text/html' response.data = Json.serialize(mapping) class UploadManager(Unit): configuration =", "configuration = Configuration({ 
'upload_directory': Text(nonempty=True, default='/tmp'), }) def _dispatch_request(self, request, response): directory =", "from scheme import Json, Text from werkzeug.exceptions import MethodNotAllowed from werkzeug.formparser import parse_form_data", "redirect_middleware = Dependency(RedirectMiddleware) configuration = Configuration({ 'upload_directory': Text(nonempty=True, default='/tmp'), }) def _dispatch_request(self, request,", "acquire(self, id): return open(self.find(id)) def dispose(self, id): try: filename = self.find(id) except ValueError:", "Unit, Dependency from spire.util import uniqid from spire.wsgi.util import Mount from spire.wsgi.sessions import", "ContextMiddleware, HeaderParser, SessionParser from bastion.security.middleware import RedirectMiddleware class UploadEndpoint(Mount): session_middleware = Dependency(SessionMiddleware) context_middleware", "}) def acquire(self, id): return open(self.find(id)) def dispose(self, id): try: filename = self.find(id)", "dispose(self, id): try: filename = self.find(id) except ValueError: pass else: os.unlink(filename) def find(self,", "directory = self.configuration['upload_directory'] if request.method == 'GET': return elif request.method != 'POST': raise", "SessionParser()]) redirect_middleware = Dependency(RedirectMiddleware) configuration = Configuration({ 'upload_directory': Text(nonempty=True, default='/tmp'), }) def _dispatch_request(self,", "find(self, id): filename = os.path.join(self.configuration['upload_directory'], id) if os.path.exists(filename): return filename else: raise ValueError(id)", "werkzeug.utils import secure_filename from spire.core import Configuration, Unit, Dependency from spire.util import uniqid", "elif request.method != 'POST': raise MethodNotAllowed() mapping = {} for name, uploaded_file in", "import Json, Text from werkzeug.exceptions import MethodNotAllowed from werkzeug.formparser import parse_form_data from werkzeug.utils", "response.data = Json.serialize(mapping) class UploadManager(Unit): configuration = Configuration({ 'upload_directory': Text(nonempty=True, default='/tmp'), }) def", "Text(nonempty=True, default='/tmp'), }) def _dispatch_request(self, request, response): directory = self.configuration['upload_directory'] if request.method ==", "raise MethodNotAllowed() mapping = {} for name, uploaded_file in request.files.iteritems(): filename = mapping[name]", "uniqid(), secure_filename(uploaded_file.filename)) uploaded_file.save(os.path.join(directory, filename)) response.mimetype = 'text/html' response.data = Json.serialize(mapping) class UploadManager(Unit): configuration", "id): try: filename = self.find(id) except ValueError: pass else: os.unlink(filename) def find(self, id):", "= Dependency(RedirectMiddleware) configuration = Configuration({ 'upload_directory': Text(nonempty=True, default='/tmp'), }) def _dispatch_request(self, request, response):", "% ( uniqid(), secure_filename(uploaded_file.filename)) uploaded_file.save(os.path.join(directory, filename)) response.mimetype = 'text/html' response.data = Json.serialize(mapping) class", "= Json.serialize(mapping) class UploadManager(Unit): configuration = Configuration({ 'upload_directory': Text(nonempty=True, default='/tmp'), }) def acquire(self,", "Configuration({ 'upload_directory': Text(nonempty=True, default='/tmp'), }) def acquire(self, id): return open(self.find(id)) def dispose(self, id):", "spire.core import Configuration, Unit, Dependency from spire.util import uniqid from spire.wsgi.util import Mount", "json from scheme import 
Json, Text from werkzeug.exceptions import MethodNotAllowed from werkzeug.formparser import", "from werkzeug.utils import secure_filename from spire.core import Configuration, Unit, Dependency from spire.util import", "uniqid from spire.wsgi.util import Mount from spire.wsgi.sessions import SessionMiddleware from spire.context import ContextMiddleware,", "uploaded_file in request.files.iteritems(): filename = mapping[name] = '%s_%s' % ( uniqid(), secure_filename(uploaded_file.filename)) uploaded_file.save(os.path.join(directory,", "id): return open(self.find(id)) def dispose(self, id): try: filename = self.find(id) except ValueError: pass", "Configuration({ 'upload_directory': Text(nonempty=True, default='/tmp'), }) def _dispatch_request(self, request, response): directory = self.configuration['upload_directory'] if", "HeaderParser, SessionParser from bastion.security.middleware import RedirectMiddleware class UploadEndpoint(Mount): session_middleware = Dependency(SessionMiddleware) context_middleware =", "import Mount from spire.wsgi.sessions import SessionMiddleware from spire.context import ContextMiddleware, HeaderParser, SessionParser from", "'%s_%s' % ( uniqid(), secure_filename(uploaded_file.filename)) uploaded_file.save(os.path.join(directory, filename)) response.mimetype = 'text/html' response.data = Json.serialize(mapping)", "open(self.find(id)) def dispose(self, id): try: filename = self.find(id) except ValueError: pass else: os.unlink(filename)", "os.unlink(filename) def find(self, id): filename = os.path.join(self.configuration['upload_directory'], id) if os.path.exists(filename): return filename else:", "= Dependency(SessionMiddleware) context_middleware = ContextMiddleware([HeaderParser(), SessionParser()]) redirect_middleware = Dependency(RedirectMiddleware) configuration = Configuration({ 'upload_directory':", "Dependency(RedirectMiddleware) configuration = Configuration({ 'upload_directory': Text(nonempty=True, default='/tmp'), }) def _dispatch_request(self, request, response): directory", "default='/tmp'), }) def _dispatch_request(self, request, response): directory = self.configuration['upload_directory'] if request.method == 'GET':", "def acquire(self, id): return open(self.find(id)) def dispose(self, id): try: filename = self.find(id) except", "from spire.context import ContextMiddleware, HeaderParser, SessionParser from bastion.security.middleware import RedirectMiddleware class UploadEndpoint(Mount): session_middleware", "= self.find(id) except ValueError: pass else: os.unlink(filename) def find(self, id): filename = os.path.join(self.configuration['upload_directory'],", "<filename>spire/wsgi/upload.py import os, json from scheme import Json, Text from werkzeug.exceptions import MethodNotAllowed", "self.configuration['upload_directory'] if request.method == 'GET': return elif request.method != 'POST': raise MethodNotAllowed() mapping", "secure_filename from spire.core import Configuration, Unit, Dependency from spire.util import uniqid from spire.wsgi.util", "class UploadEndpoint(Mount): session_middleware = Dependency(SessionMiddleware) context_middleware = ContextMiddleware([HeaderParser(), SessionParser()]) redirect_middleware = Dependency(RedirectMiddleware) configuration", "= 'text/html' response.data = Json.serialize(mapping) class UploadManager(Unit): configuration = Configuration({ 'upload_directory': Text(nonempty=True, default='/tmp'),", "bastion.security.middleware import RedirectMiddleware class UploadEndpoint(Mount): session_middleware = 
Dependency(SessionMiddleware) context_middleware = ContextMiddleware([HeaderParser(), SessionParser()]) redirect_middleware", "Mount from spire.wsgi.sessions import SessionMiddleware from spire.context import ContextMiddleware, HeaderParser, SessionParser from bastion.security.middleware", "mapping = {} for name, uploaded_file in request.files.iteritems(): filename = mapping[name] = '%s_%s'", "uploaded_file.save(os.path.join(directory, filename)) response.mimetype = 'text/html' response.data = Json.serialize(mapping) class UploadManager(Unit): configuration = Configuration({", "'upload_directory': Text(nonempty=True, default='/tmp'), }) def _dispatch_request(self, request, response): directory = self.configuration['upload_directory'] if request.method", "in request.files.iteritems(): filename = mapping[name] = '%s_%s' % ( uniqid(), secure_filename(uploaded_file.filename)) uploaded_file.save(os.path.join(directory, filename))", "for name, uploaded_file in request.files.iteritems(): filename = mapping[name] = '%s_%s' % ( uniqid(),", "MethodNotAllowed() mapping = {} for name, uploaded_file in request.files.iteritems(): filename = mapping[name] =", "= Configuration({ 'upload_directory': Text(nonempty=True, default='/tmp'), }) def acquire(self, id): return open(self.find(id)) def dispose(self,", "import RedirectMiddleware class UploadEndpoint(Mount): session_middleware = Dependency(SessionMiddleware) context_middleware = ContextMiddleware([HeaderParser(), SessionParser()]) redirect_middleware =", "== 'GET': return elif request.method != 'POST': raise MethodNotAllowed() mapping = {} for", "UploadManager(Unit): configuration = Configuration({ 'upload_directory': Text(nonempty=True, default='/tmp'), }) def acquire(self, id): return open(self.find(id))", "Text(nonempty=True, default='/tmp'), }) def acquire(self, id): return open(self.find(id)) def dispose(self, id): try: filename", "response): directory = self.configuration['upload_directory'] if request.method == 'GET': return elif request.method != 'POST':", "'POST': raise MethodNotAllowed() mapping = {} for name, uploaded_file in request.files.iteritems(): filename =", "SessionParser from bastion.security.middleware import RedirectMiddleware class UploadEndpoint(Mount): session_middleware = Dependency(SessionMiddleware) context_middleware = ContextMiddleware([HeaderParser(),", "mapping[name] = '%s_%s' % ( uniqid(), secure_filename(uploaded_file.filename)) uploaded_file.save(os.path.join(directory, filename)) response.mimetype = 'text/html' response.data", "class UploadManager(Unit): configuration = Configuration({ 'upload_directory': Text(nonempty=True, default='/tmp'), }) def acquire(self, id): return", "spire.context import ContextMiddleware, HeaderParser, SessionParser from bastion.security.middleware import RedirectMiddleware class UploadEndpoint(Mount): session_middleware =", "_dispatch_request(self, request, response): directory = self.configuration['upload_directory'] if request.method == 'GET': return elif request.method", "context_middleware = ContextMiddleware([HeaderParser(), SessionParser()]) redirect_middleware = Dependency(RedirectMiddleware) configuration = Configuration({ 'upload_directory': Text(nonempty=True, default='/tmp'),", "Dependency from spire.util import uniqid from spire.wsgi.util import Mount from spire.wsgi.sessions import SessionMiddleware", "request.method == 'GET': return elif request.method != 'POST': raise MethodNotAllowed() mapping = {}", "session_middleware = Dependency(SessionMiddleware) context_middleware = 
ContextMiddleware([HeaderParser(), SessionParser()]) redirect_middleware = Dependency(RedirectMiddleware) configuration = Configuration({", "try: filename = self.find(id) except ValueError: pass else: os.unlink(filename) def find(self, id): filename", "ContextMiddleware([HeaderParser(), SessionParser()]) redirect_middleware = Dependency(RedirectMiddleware) configuration = Configuration({ 'upload_directory': Text(nonempty=True, default='/tmp'), }) def", "UploadEndpoint(Mount): session_middleware = Dependency(SessionMiddleware) context_middleware = ContextMiddleware([HeaderParser(), SessionParser()]) redirect_middleware = Dependency(RedirectMiddleware) configuration =", "request.method != 'POST': raise MethodNotAllowed() mapping = {} for name, uploaded_file in request.files.iteritems():", "def find(self, id): filename = os.path.join(self.configuration['upload_directory'], id) if os.path.exists(filename): return filename else: raise", "Json.serialize(mapping) class UploadManager(Unit): configuration = Configuration({ 'upload_directory': Text(nonempty=True, default='/tmp'), }) def acquire(self, id):", "parse_form_data from werkzeug.utils import secure_filename from spire.core import Configuration, Unit, Dependency from spire.util", "RedirectMiddleware class UploadEndpoint(Mount): session_middleware = Dependency(SessionMiddleware) context_middleware = ContextMiddleware([HeaderParser(), SessionParser()]) redirect_middleware = Dependency(RedirectMiddleware)", "from spire.wsgi.util import Mount from spire.wsgi.sessions import SessionMiddleware from spire.context import ContextMiddleware, HeaderParser,", "def _dispatch_request(self, request, response): directory = self.configuration['upload_directory'] if request.method == 'GET': return elif", "response.mimetype = 'text/html' response.data = Json.serialize(mapping) class UploadManager(Unit): configuration = Configuration({ 'upload_directory': Text(nonempty=True,", "os, json from scheme import Json, Text from werkzeug.exceptions import MethodNotAllowed from werkzeug.formparser", "!= 'POST': raise MethodNotAllowed() mapping = {} for name, uploaded_file in request.files.iteritems(): filename", "from spire.util import uniqid from spire.wsgi.util import Mount from spire.wsgi.sessions import SessionMiddleware from", "request.files.iteritems(): filename = mapping[name] = '%s_%s' % ( uniqid(), secure_filename(uploaded_file.filename)) uploaded_file.save(os.path.join(directory, filename)) response.mimetype", "spire.wsgi.util import Mount from spire.wsgi.sessions import SessionMiddleware from spire.context import ContextMiddleware, HeaderParser, SessionParser", "werkzeug.exceptions import MethodNotAllowed from werkzeug.formparser import parse_form_data from werkzeug.utils import secure_filename from spire.core", "Dependency(SessionMiddleware) context_middleware = ContextMiddleware([HeaderParser(), SessionParser()]) redirect_middleware = Dependency(RedirectMiddleware) configuration = Configuration({ 'upload_directory': Text(nonempty=True,", "spire.util import uniqid from spire.wsgi.util import Mount from spire.wsgi.sessions import SessionMiddleware from spire.context", "import MethodNotAllowed from werkzeug.formparser import parse_form_data from werkzeug.utils import secure_filename from spire.core import", "'GET': return elif request.method != 'POST': raise MethodNotAllowed() mapping = {} for name," ]
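
A hedged client-side sketch for UploadEndpoint above, assuming it is mounted at /upload on a local server; the URL and the third-party requests dependency are assumptions, not part of this module. The JSON response maps each form-field name to the stored '<uniqid>_<original-name>' filename, which is also the id understood by UploadManager.acquire().

import json
import requests  # assumed third-party dependency, not part of spire

def upload_file(path, url="http://localhost:8000/upload"):
    # POST a multipart form; the endpoint stores the file under upload_directory.
    with open(path, "rb") as handle:
        response = requests.post(url, files={"attachment": handle})
    mapping = json.loads(response.text)  # e.g. {"attachment": "3f2c..._report.pdf"}
    return mapping["attachment"]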
[ "maior que 1250 é 10%, menor é 15%. salario = int(input('Qual o valor", "que 1250 é 10%, menor é 15%. salario = int(input('Qual o valor do", "10%, menor é 15%. salario = int(input('Qual o valor do seu salário? '))", "= int(input('Qual o valor do seu salário? ')) salario1= (1250*0.1)+salario salario2=(1250*0.15)+salario if salario>=1250:", "15%. salario = int(input('Qual o valor do seu salário? ')) salario1= (1250*0.1)+salario salario2=(1250*0.15)+salario", "salario = int(input('Qual o valor do seu salário? ')) salario1= (1250*0.1)+salario salario2=(1250*0.15)+salario if", "calcule aumento, maior que 1250 é 10%, menor é 15%. salario = int(input('Qual", "valor do seu salário? ')) salario1= (1250*0.1)+salario salario2=(1250*0.15)+salario if salario>=1250: print('Você teve aumento", "int(input('Qual o valor do seu salário? ')) salario1= (1250*0.1)+salario salario2=(1250*0.15)+salario if salario>=1250: print('Você", "(1250*0.1)+salario salario2=(1250*0.15)+salario if salario>=1250: print('Você teve aumento de 10% e agora receberá {}'.format(salario1))", "e calcule aumento, maior que 1250 é 10%, menor é 15%. salario =", "salário? ')) salario1= (1250*0.1)+salario salario2=(1250*0.15)+salario if salario>=1250: print('Você teve aumento de 10% e", "agora receberá {}'.format(salario1)) else: print('Você teve aumento de 15% e agora receberá {}'.format(salario2))", "if salario>=1250: print('Você teve aumento de 10% e agora receberá {}'.format(salario1)) else: print('Você", "menor é 15%. salario = int(input('Qual o valor do seu salário? ')) salario1=", "# Pergunte salario e calcule aumento, maior que 1250 é 10%, menor é", "do seu salário? ')) salario1= (1250*0.1)+salario salario2=(1250*0.15)+salario if salario>=1250: print('Você teve aumento de", "e agora receberá {}'.format(salario1)) else: print('Você teve aumento de 15% e agora receberá", "é 15%. salario = int(input('Qual o valor do seu salário? ')) salario1= (1250*0.1)+salario", "print('Você teve aumento de 10% e agora receberá {}'.format(salario1)) else: print('Você teve aumento", "salario1= (1250*0.1)+salario salario2=(1250*0.15)+salario if salario>=1250: print('Você teve aumento de 10% e agora receberá", "salario e calcule aumento, maior que 1250 é 10%, menor é 15%. salario", "aumento de 10% e agora receberá {}'.format(salario1)) else: print('Você teve aumento de 15%", "salario>=1250: print('Você teve aumento de 10% e agora receberá {}'.format(salario1)) else: print('Você teve", "1250 é 10%, menor é 15%. salario = int(input('Qual o valor do seu", "teve aumento de 10% e agora receberá {}'.format(salario1)) else: print('Você teve aumento de", "seu salário? ')) salario1= (1250*0.1)+salario salario2=(1250*0.15)+salario if salario>=1250: print('Você teve aumento de 10%", "10% e agora receberá {}'.format(salario1)) else: print('Você teve aumento de 15% e agora", "salario2=(1250*0.15)+salario if salario>=1250: print('Você teve aumento de 10% e agora receberá {}'.format(salario1)) else:", "receberá {}'.format(salario1)) else: print('Você teve aumento de 15% e agora receberá {}'.format(salario2)) print('FIM')", "de 10% e agora receberá {}'.format(salario1)) else: print('Você teve aumento de 15% e", "Pergunte salario e calcule aumento, maior que 1250 é 10%, menor é 15%.", "o valor do seu salário? ')) salario1= (1250*0.1)+salario salario2=(1250*0.15)+salario if salario>=1250: print('Você teve", "')) salario1= (1250*0.1)+salario salario2=(1250*0.15)+salario if salario>=1250: print('Você teve aumento de 10% e agora", "é 10%, menor é 15%. 
salario = int(input('Qual o valor do seu salário?", "aumento, maior que 1250 é 10%, menor é 15%. salario = int(input('Qual o" ]
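
The same rule, refactored into a function for a quick sanity check; adjusted_salary is an illustrative name, not part of the exercise.

def adjusted_salary(salario):
    # 10% raise at or above the 1250 threshold, 15% below it.
    rate = 0.10 if salario >= 1250 else 0.15
    return salario * (1 + rate)

assert round(adjusted_salary(2000), 2) == 2200.0  # >= 1250 -> 10% raise
assert round(adjusted_salary(1000), 2) == 1150.0  # <  1250 -> 15% raise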
[ "Statistics.getPercentile(benchmark_samples_to_process, 50) benchmark_stats.q3 = Statistics.getPercentile(benchmark_samples_to_process, 75) benchmark_stats.upper_fence = benchmark_results.sorted_no_outliers_samples[-1] # Plotly uses last", "exact lower_fence set to: lower_fence benchmark_stats.q1 = Statistics.getPercentile(benchmark_samples_to_process, 25) benchmark_stats.mean = Statistics.getMean(benchmark_samples_to_process) benchmark_stats.median", "= len(max(statistics_results, key=len)) + 3 print(key + \":\") for stat_key in statistics_results: print(stat_key", "Analyzer\") parser.add_argument(\"-in\", \"--benchmark_samples_file\", type=str, required=True, help=\"File path containing the benchmark observations as comma", "np.sort(benchmark_no_outliers_samples).tolist() benchmark_results.sorted_upper_outliers_samples = np.sort(upper_outliers_samples).tolist() # Create statistics info from benchmark samples for key", "import Statistics from benchmark_containers import BenchmarkResultsContainer ############################################################################## def createBenchmarkResults(benchmark_samples, operation): benchmark_results = BenchmarkResultsContainer()", "benchmark_samples_to_process = benchmark_no_outliers_samples if without_outliers else benchmark_samples benchmark_stats = benchmark_results.statistics[key] benchmark_stats.num_analyzed_samples = Statistics.getNumAnalyzedSamples(benchmark_samples_to_process)", "else benchmark_results.getFormatedStatisticsResultsWithOutliers() text_alignment_offset = len(max(statistics_results, key=len)) + 3 print(key + \":\") for stat_key", "= createBenchmarkResults(benchmark_samples, operation_name) # Print benchmark results printBenchmarkResults(benchmark_samples, benchmark_results) # Export benchmark results", "= Statistics.getTukeyFences(benchmark_samples) lower_outliers_samples = benchmark_samples[benchmark_samples < lower_fence] benchmark_no_outliers_samples = benchmark_samples[(benchmark_samples >= lower_fence) &", "without_outliers else benchmark_samples benchmark_stats = benchmark_results.statistics[key] benchmark_stats.num_analyzed_samples = Statistics.getNumAnalyzedSamples(benchmark_samples_to_process) benchmark_stats.minimum = Statistics.getMin(benchmark_samples_to_process) benchmark_stats.lower_fence", "Statistics.getPercentile(benchmark_samples_to_process, 75) benchmark_stats.upper_fence = benchmark_results.sorted_no_outliers_samples[-1] # Plotly uses last non outlier point, for", "benchmark_stats.q3 = Statistics.getPercentile(benchmark_samples_to_process, 75) benchmark_stats.upper_fence = benchmark_results.sorted_no_outliers_samples[-1] # Plotly uses last non outlier", "import BenchmarkResultsContainer ############################################################################## def createBenchmarkResults(benchmark_samples, operation): benchmark_results = BenchmarkResultsContainer() benchmark_results.operation = operation #", "def createBenchmarkResults(benchmark_samples, operation): benchmark_results = BenchmarkResultsContainer() benchmark_results.operation = operation # Filter outliers lower_fence,", "= Statistics.getMax(benchmark_samples_to_process) benchmark_stats.iqr = Statistics.getIQR(benchmark_samples_to_process) benchmark_stats.std_dev = Statistics.getStdDev(benchmark_samples_to_process) benchmark_stats.std_err = Statistics.getStdErr(benchmark_samples_to_process) benchmark_stats.std_err_percentage =", "= args.benchmark_samples_file json_output_path = args.json_output_path operation_name = 
args.operation_name output_file_name = args.output_file_name # Create", "to: lower_fence benchmark_stats.q1 = Statistics.getPercentile(benchmark_samples_to_process, 25) benchmark_stats.mean = Statistics.getMean(benchmark_samples_to_process) benchmark_stats.median = Statistics.getPercentile(benchmark_samples_to_process, 50)", "= Statistics.getStdErr(benchmark_samples_to_process) benchmark_stats.std_err_percentage = benchmark_stats.std_err / benchmark_stats.mean * 100.0 if benchmark_stats.std_err > 0.0", "analyzed benchmark.\") parser.add_argument(\"-op\", \"--operation_name\", type=str, required=True, help=\"Name of the operation related to the", "option is not used the file will be called Benchmark_Results_<MONTH>-<DAY>-<YEAR>_<HOUR>h<MINUTE>m<SECOND>s.\") args = parser.parse_args()", "lower_fence, upper_fence = Statistics.getTukeyFences(benchmark_samples) lower_outliers_samples = benchmark_samples[benchmark_samples < lower_fence] benchmark_no_outliers_samples = benchmark_samples[(benchmark_samples >=", "benchmark_stats.std_err_percentage = benchmark_stats.std_err / benchmark_stats.mean * 100.0 if benchmark_stats.std_err > 0.0 else 0.0", "benchmark observations as comma separated numbers.\") parser.add_argument(\"-out\", \"--json_output_path\", type=str, required=True, help=\"JSON output path", ">= lower_fence) & (benchmark_samples <= upper_fence)] upper_outliers_samples = benchmark_samples[benchmark_samples > upper_fence] benchmark_results.sorted_lower_outliers_samples =", "args.benchmark_samples_file json_output_path = args.json_output_path operation_name = args.operation_name output_file_name = args.output_file_name # Create an", "for stat_key in statistics_results: print(stat_key + \"= \".rjust(text_alignment_offset - len(stat_key)) + statistics_results[stat_key]) print(\"\\n\")", "non outlier point, for exact lower_fence set to: lower_fence benchmark_stats.q1 = Statistics.getPercentile(benchmark_samples_to_process, 25)", "samples in file with open(benchmark_samples_file) as file: benchmark_samples = np.fromfile(file, dtype=float, sep=\",\") #", "Statistics.getTukeyFences(benchmark_samples) lower_outliers_samples = benchmark_samples[benchmark_samples < lower_fence] benchmark_no_outliers_samples = benchmark_samples[(benchmark_samples >= lower_fence) & (benchmark_samples", "createBenchmarkResults(benchmark_samples, operation): benchmark_results = BenchmarkResultsContainer() benchmark_results.operation = operation # Filter outliers lower_fence, upper_fence", "0.0 else 0.0 benchmark_stats.margin = Statistics.getMargin(benchmark_samples_to_process) benchmark_stats.margin_percentage = benchmark_stats.margin / benchmark_stats.mean * 100.0", "stat_key in statistics_results: print(stat_key + \"= \".rjust(text_alignment_offset - len(stat_key)) + statistics_results[stat_key]) print(\"\\n\") ##############################################################################", "benchmark_samples[(benchmark_samples >= lower_fence) & (benchmark_samples <= upper_fence)] upper_outliers_samples = benchmark_samples[benchmark_samples > upper_fence] benchmark_results.sorted_lower_outliers_samples", "results printBenchmarkResults(benchmark_samples, benchmark_results) # Export benchmark results to a JSON file benchmark_results.toJSONFile(json_output_path, operation_name,", "required=False, help=\"(Optional) The name of the output file, if this option is not", "help=\"JSON output path for file containing the statistical information of the analyzed benchmark.\")", "+ 3 print(key + \":\") for stat_key in 
statistics_results: print(stat_key + \"= \".rjust(text_alignment_offset", "the statistical information of the analyzed benchmark.\") parser.add_argument(\"-op\", \"--operation_name\", type=str, required=True, help=\"Name of", "lower_outliers_samples = benchmark_samples[benchmark_samples < lower_fence] benchmark_no_outliers_samples = benchmark_samples[(benchmark_samples >= lower_fence) & (benchmark_samples <=", "benchmark_stats.iqr = Statistics.getIQR(benchmark_samples_to_process) benchmark_stats.std_dev = Statistics.getStdDev(benchmark_samples_to_process) benchmark_stats.std_err = Statistics.getStdErr(benchmark_samples_to_process) benchmark_stats.std_err_percentage = benchmark_stats.std_err /", "as comma separated numbers.\") parser.add_argument(\"-out\", \"--json_output_path\", type=str, required=True, help=\"JSON output path for file", "benchmark_stats.mean * 100.0 if benchmark_stats.margin > 0.0 else 0.0 benchmark_stats.confidence_interval = Statistics.getConfidenceInterval(benchmark_samples_to_process) benchmark_stats.skewness", "the analyzed benchmark.\") parser.add_argument(\"-op\", \"--operation_name\", type=str, required=True, help=\"Name of the operation related to", "benchmark_results.sorted_upper_outliers_samples = np.sort(upper_outliers_samples).tolist() # Create statistics info from benchmark samples for key in", "this option is not used the file will be called Benchmark_Results_<MONTH>-<DAY>-<YEAR>_<HOUR>h<MINUTE>m<SECOND>s.\") args =", "the output file, if this option is not used the file will be", "point, for exact upper_fence set to: upper_fence benchmark_stats.maximum = Statistics.getMax(benchmark_samples_to_process) benchmark_stats.iqr = Statistics.getIQR(benchmark_samples_to_process)", "if benchmark_stats.std_err > 0.0 else 0.0 benchmark_stats.margin = Statistics.getMargin(benchmark_samples_to_process) benchmark_stats.margin_percentage = benchmark_stats.margin /", "benchmark_samples[benchmark_samples > upper_fence] benchmark_results.sorted_lower_outliers_samples = np.sort(lower_outliers_samples).tolist() benchmark_results.sorted_no_outliers_samples = np.sort(benchmark_no_outliers_samples).tolist() benchmark_results.sorted_upper_outliers_samples = np.sort(upper_outliers_samples).tolist() #", "outliers\" statistics_results = benchmark_results.getFormatedStatisticsResultsWithoutOutliers() if without_outliers else benchmark_results.getFormatedStatisticsResultsWithOutliers() text_alignment_offset = len(max(statistics_results, key=len)) +", "parser.add_argument(\"-in\", \"--benchmark_samples_file\", type=str, required=True, help=\"File path containing the benchmark observations as comma separated", "path for file containing the statistical information of the analyzed benchmark.\") parser.add_argument(\"-op\", \"--operation_name\",", "= benchmark_results.getFormatedStatisticsResultsWithoutOutliers() if without_outliers else benchmark_results.getFormatedStatisticsResultsWithOutliers() text_alignment_offset = len(max(statistics_results, key=len)) + 3 print(key", "key == \"Without outliers\" statistics_results = benchmark_results.getFormatedStatisticsResultsWithoutOutliers() if without_outliers else benchmark_results.getFormatedStatisticsResultsWithOutliers() text_alignment_offset =", "benchmark_results.sorted_lower_outliers_samples = np.sort(lower_outliers_samples).tolist() benchmark_results.sorted_no_outliers_samples = np.sort(benchmark_no_outliers_samples).tolist() benchmark_results.sorted_upper_outliers_samples = np.sort(upper_outliers_samples).tolist() # Create statistics 
info", "containing the statistical information of the analyzed benchmark.\") parser.add_argument(\"-op\", \"--operation_name\", type=str, required=True, help=\"Name", "set to: lower_fence benchmark_stats.q1 = Statistics.getPercentile(benchmark_samples_to_process, 25) benchmark_stats.mean = Statistics.getMean(benchmark_samples_to_process) benchmark_stats.median = Statistics.getPercentile(benchmark_samples_to_process,", "= np.sort(upper_outliers_samples).tolist() # Create statistics info from benchmark samples for key in benchmark_results.statistics:", "runAnalyzer(kwargs=None): # Parse args parser = argparse.ArgumentParser(description=\"Benchmark Analyzer\") parser.add_argument(\"-in\", \"--benchmark_samples_file\", type=str, required=True, help=\"File", "args.json_output_path operation_name = args.operation_name output_file_name = args.output_file_name # Create an array from benchmark", "statistics_results[stat_key]) print(\"\\n\") ############################################################################## def runAnalyzer(kwargs=None): # Parse args parser = argparse.ArgumentParser(description=\"Benchmark Analyzer\") parser.add_argument(\"-in\",", "benchmark_samples = np.fromfile(file, dtype=float, sep=\",\") # Create benchmark results benchmark_results = createBenchmarkResults(benchmark_samples, operation_name)", "upper_fence] benchmark_results.sorted_lower_outliers_samples = np.sort(lower_outliers_samples).tolist() benchmark_results.sorted_no_outliers_samples = np.sort(benchmark_no_outliers_samples).tolist() benchmark_results.sorted_upper_outliers_samples = np.sort(upper_outliers_samples).tolist() # Create statistics", "open(benchmark_samples_file) as file: benchmark_samples = np.fromfile(file, dtype=float, sep=\",\") # Create benchmark results benchmark_results", "> upper_fence] benchmark_results.sorted_lower_outliers_samples = np.sort(lower_outliers_samples).tolist() benchmark_results.sorted_no_outliers_samples = np.sort(benchmark_no_outliers_samples).tolist() benchmark_results.sorted_upper_outliers_samples = np.sort(upper_outliers_samples).tolist() # Create", "Statistics.getPercentile(benchmark_samples_to_process, 25) benchmark_stats.mean = Statistics.getMean(benchmark_samples_to_process) benchmark_stats.median = Statistics.getPercentile(benchmark_samples_to_process, 50) benchmark_stats.q3 = Statistics.getPercentile(benchmark_samples_to_process, 75)", "0.0 else 0.0 benchmark_stats.confidence_interval = Statistics.getConfidenceInterval(benchmark_samples_to_process) benchmark_stats.skewness = Statistics.getSkewness(benchmark_samples_to_process) benchmark_stats.kurtosis = Statistics.getKurtosis(benchmark_samples_to_process) return", "benchmark_results.statistics: without_outliers = key == \"Without outliers\" statistics_results = benchmark_results.getFormatedStatisticsResultsWithoutOutliers() if without_outliers else", "from benchmark samples for key in benchmark_results.statistics: without_outliers = key == \"Without outliers\"", "an array from benchmark samples in file with open(benchmark_samples_file) as file: benchmark_samples =", "args = parser.parse_args() # Input Params benchmark_samples_file = args.benchmark_samples_file json_output_path = args.json_output_path operation_name", "comma separated numbers.\") parser.add_argument(\"-out\", \"--json_output_path\", type=str, required=True, help=\"JSON output path for file containing", "benchmark_statistics import Statistics from benchmark_containers import BenchmarkResultsContainer 
############################################################################## def createBenchmarkResults(benchmark_samples, operation): benchmark_results =", "JSON file benchmark_results.toJSONFile(json_output_path, operation_name, output_file_name) ############################################################################## #----------------------------------------------------------------------------- # Main #----------------------------------------------------------------------------- if __name__ ==", "benchmark_stats.std_err = Statistics.getStdErr(benchmark_samples_to_process) benchmark_stats.std_err_percentage = benchmark_stats.std_err / benchmark_stats.mean * 100.0 if benchmark_stats.std_err >", "benchmark_stats.mean * 100.0 if benchmark_stats.std_err > 0.0 else 0.0 benchmark_stats.margin = Statistics.getMargin(benchmark_samples_to_process) benchmark_stats.margin_percentage", "without_outliers = key == \"Without outliers\" benchmark_samples_to_process = benchmark_no_outliers_samples if without_outliers else benchmark_samples", "# Print benchmark results printBenchmarkResults(benchmark_samples, benchmark_results) # Export benchmark results to a JSON", "observations as comma separated numbers.\") parser.add_argument(\"-out\", \"--json_output_path\", type=str, required=True, help=\"JSON output path for", "def runAnalyzer(kwargs=None): # Parse args parser = argparse.ArgumentParser(description=\"Benchmark Analyzer\") parser.add_argument(\"-in\", \"--benchmark_samples_file\", type=str, required=True,", "used the file will be called Benchmark_Results_<MONTH>-<DAY>-<YEAR>_<HOUR>h<MINUTE>m<SECOND>s.\") args = parser.parse_args() # Input Params", "benchmark_results.sorted_no_outliers_samples[-1] # Plotly uses last non outlier point, for exact upper_fence set to:", "0.0 benchmark_stats.confidence_interval = Statistics.getConfidenceInterval(benchmark_samples_to_process) benchmark_stats.skewness = Statistics.getSkewness(benchmark_samples_to_process) benchmark_stats.kurtosis = Statistics.getKurtosis(benchmark_samples_to_process) return benchmark_results ##############################################################################", "np.fromfile(file, dtype=float, sep=\",\") # Create benchmark results benchmark_results = createBenchmarkResults(benchmark_samples, operation_name) # Print", "= benchmark_samples[benchmark_samples > upper_fence] benchmark_results.sorted_lower_outliers_samples = np.sort(lower_outliers_samples).tolist() benchmark_results.sorted_no_outliers_samples = np.sort(benchmark_no_outliers_samples).tolist() benchmark_results.sorted_upper_outliers_samples = np.sort(upper_outliers_samples).tolist()", "benchmark_results = createBenchmarkResults(benchmark_samples, operation_name) # Print benchmark results printBenchmarkResults(benchmark_samples, benchmark_results) # Export benchmark", "Create statistics info from benchmark samples for key in benchmark_results.statistics: without_outliers = key", "# Plotly uses first non outlier point, for exact lower_fence set to: lower_fence", "with open(benchmark_samples_file) as file: benchmark_samples = np.fromfile(file, dtype=float, sep=\",\") # Create benchmark results", "info from benchmark samples for key in benchmark_results.statistics: without_outliers = key == \"Without", "without_outliers else benchmark_results.getFormatedStatisticsResultsWithOutliers() text_alignment_offset = len(max(statistics_results, key=len)) + 3 print(key + \":\") for", "array from benchmark samples in file with open(benchmark_samples_file) as file: 
benchmark_samples = np.fromfile(file,", "# Parse args parser = argparse.ArgumentParser(description=\"Benchmark Analyzer\") parser.add_argument(\"-in\", \"--benchmark_samples_file\", type=str, required=True, help=\"File path", "key in benchmark_results.statistics: without_outliers = key == \"Without outliers\" benchmark_samples_to_process = benchmark_no_outliers_samples if", "BenchmarkResultsContainer ############################################################################## def createBenchmarkResults(benchmark_samples, operation): benchmark_results = BenchmarkResultsContainer() benchmark_results.operation = operation # Filter", "for file containing the statistical information of the analyzed benchmark.\") parser.add_argument(\"-op\", \"--operation_name\", type=str,", "operation # Filter outliers lower_fence, upper_fence = Statistics.getTukeyFences(benchmark_samples) lower_outliers_samples = benchmark_samples[benchmark_samples < lower_fence]", "= np.sort(lower_outliers_samples).tolist() benchmark_results.sorted_no_outliers_samples = np.sort(benchmark_no_outliers_samples).tolist() benchmark_results.sorted_upper_outliers_samples = np.sort(upper_outliers_samples).tolist() # Create statistics info from", "benchmark_stats.q1 = Statistics.getPercentile(benchmark_samples_to_process, 25) benchmark_stats.mean = Statistics.getMean(benchmark_samples_to_process) benchmark_stats.median = Statistics.getPercentile(benchmark_samples_to_process, 50) benchmark_stats.q3 =", "benchmark_results.sorted_upper_outliers_samples, \"\\n\") for key in benchmark_results.statistics: without_outliers = key == \"Without outliers\" statistics_results", "= np.sort(benchmark_no_outliers_samples).tolist() benchmark_results.sorted_upper_outliers_samples = np.sort(upper_outliers_samples).tolist() # Create statistics info from benchmark samples for", "== \"Without outliers\" statistics_results = benchmark_results.getFormatedStatisticsResultsWithoutOutliers() if without_outliers else benchmark_results.getFormatedStatisticsResultsWithOutliers() text_alignment_offset = len(max(statistics_results,", "argparse.ArgumentParser(description=\"Benchmark Analyzer\") parser.add_argument(\"-in\", \"--benchmark_samples_file\", type=str, required=True, help=\"File path containing the benchmark observations as", "observations.\") parser.add_argument(\"-out_name\", \"--output_file_name\", type=str, required=False, help=\"(Optional) The name of the output file, if", "25) benchmark_stats.mean = Statistics.getMean(benchmark_samples_to_process) benchmark_stats.median = Statistics.getPercentile(benchmark_samples_to_process, 50) benchmark_stats.q3 = Statistics.getPercentile(benchmark_samples_to_process, 75) benchmark_stats.upper_fence", "# Plotly uses last non outlier point, for exact upper_fence set to: upper_fence", "benchmark_stats.upper_fence = benchmark_results.sorted_no_outliers_samples[-1] # Plotly uses last non outlier point, for exact upper_fence", "== \"Without outliers\" benchmark_samples_to_process = benchmark_no_outliers_samples if without_outliers else benchmark_samples benchmark_stats = benchmark_results.statistics[key]", "statistics info from benchmark samples for key in benchmark_results.statistics: without_outliers = key ==", "/ benchmark_stats.mean * 100.0 if benchmark_stats.margin > 0.0 else 0.0 benchmark_stats.confidence_interval = Statistics.getConfidenceInterval(benchmark_samples_to_process)", "parser.parse_args() # Input Params benchmark_samples_file = args.benchmark_samples_file json_output_path = args.json_output_path 
operation_name = args.operation_name", "Statistics.getKurtosis(benchmark_samples_to_process) return benchmark_results ############################################################################## def printBenchmarkResults(benchmark_samples, benchmark_results): print(\"Samples:\") print(benchmark_samples, \"\\n\") print(\"Sorted Samples:\") print(benchmark_results.sorted_lower_outliers_samples,", "* 100.0 if benchmark_stats.std_err > 0.0 else 0.0 benchmark_stats.margin = Statistics.getMargin(benchmark_samples_to_process) benchmark_stats.margin_percentage =", "= Statistics.getSkewness(benchmark_samples_to_process) benchmark_stats.kurtosis = Statistics.getKurtosis(benchmark_samples_to_process) return benchmark_results ############################################################################## def printBenchmarkResults(benchmark_samples, benchmark_results): print(\"Samples:\") print(benchmark_samples,", "Print benchmark results printBenchmarkResults(benchmark_samples, benchmark_results) # Export benchmark results to a JSON file", "to a JSON file benchmark_results.toJSONFile(json_output_path, operation_name, output_file_name) ############################################################################## #----------------------------------------------------------------------------- # Main #----------------------------------------------------------------------------- if", "outlier point, for exact upper_fence set to: upper_fence benchmark_stats.maximum = Statistics.getMax(benchmark_samples_to_process) benchmark_stats.iqr =", "Statistics.getSkewness(benchmark_samples_to_process) benchmark_stats.kurtosis = Statistics.getKurtosis(benchmark_samples_to_process) return benchmark_results ############################################################################## def printBenchmarkResults(benchmark_samples, benchmark_results): print(\"Samples:\") print(benchmark_samples, \"\\n\")", "from benchmark_containers import BenchmarkResultsContainer ############################################################################## def createBenchmarkResults(benchmark_samples, operation): benchmark_results = BenchmarkResultsContainer() benchmark_results.operation =", "not used the file will be called Benchmark_Results_<MONTH>-<DAY>-<YEAR>_<HOUR>h<MINUTE>m<SECOND>s.\") args = parser.parse_args() # Input", "\"Without outliers\" statistics_results = benchmark_results.getFormatedStatisticsResultsWithoutOutliers() if without_outliers else benchmark_results.getFormatedStatisticsResultsWithOutliers() text_alignment_offset = len(max(statistics_results, key=len))", "= benchmark_stats.margin / benchmark_stats.mean * 100.0 if benchmark_stats.margin > 0.0 else 0.0 benchmark_stats.confidence_interval", "type=str, required=True, help=\"File path containing the benchmark observations as comma separated numbers.\") parser.add_argument(\"-out\",", "\".rjust(text_alignment_offset - len(stat_key)) + statistics_results[stat_key]) print(\"\\n\") ############################################################################## def runAnalyzer(kwargs=None): # Parse args parser", "the benchmark observations as comma separated numbers.\") parser.add_argument(\"-out\", \"--json_output_path\", type=str, required=True, help=\"JSON output", "= BenchmarkResultsContainer() benchmark_results.operation = operation # Filter outliers lower_fence, upper_fence = Statistics.getTukeyFences(benchmark_samples) lower_outliers_samples", "print(benchmark_samples, \"\\n\") print(\"Sorted Samples:\") print(benchmark_results.sorted_lower_outliers_samples, 
benchmark_results.sorted_no_outliers_samples, benchmark_results.sorted_upper_outliers_samples, \"\\n\") for key in benchmark_results.statistics: without_outliers", "first non outlier point, for exact lower_fence set to: lower_fence benchmark_stats.q1 = Statistics.getPercentile(benchmark_samples_to_process,", "np.sort(lower_outliers_samples).tolist() benchmark_results.sorted_no_outliers_samples = np.sort(benchmark_no_outliers_samples).tolist() benchmark_results.sorted_upper_outliers_samples = np.sort(upper_outliers_samples).tolist() # Create statistics info from benchmark", "############################################################################## def createBenchmarkResults(benchmark_samples, operation): benchmark_results = BenchmarkResultsContainer() benchmark_results.operation = operation # Filter outliers", "benchmark_stats.skewness = Statistics.getSkewness(benchmark_samples_to_process) benchmark_stats.kurtosis = Statistics.getKurtosis(benchmark_samples_to_process) return benchmark_results ############################################################################## def printBenchmarkResults(benchmark_samples, benchmark_results): print(\"Samples:\")", "= args.operation_name output_file_name = args.output_file_name # Create an array from benchmark samples in", "= key == \"Without outliers\" statistics_results = benchmark_results.getFormatedStatisticsResultsWithoutOutliers() if without_outliers else benchmark_results.getFormatedStatisticsResultsWithOutliers() text_alignment_offset", "statistics_results = benchmark_results.getFormatedStatisticsResultsWithoutOutliers() if without_outliers else benchmark_results.getFormatedStatisticsResultsWithOutliers() text_alignment_offset = len(max(statistics_results, key=len)) + 3", "uses last non outlier point, for exact upper_fence set to: upper_fence benchmark_stats.maximum =", "benchmark_stats.confidence_interval = Statistics.getConfidenceInterval(benchmark_samples_to_process) benchmark_stats.skewness = Statistics.getSkewness(benchmark_samples_to_process) benchmark_stats.kurtosis = Statistics.getKurtosis(benchmark_samples_to_process) return benchmark_results ############################################################################## def", "benchmark_samples benchmark_stats = benchmark_results.statistics[key] benchmark_stats.num_analyzed_samples = Statistics.getNumAnalyzedSamples(benchmark_samples_to_process) benchmark_stats.minimum = Statistics.getMin(benchmark_samples_to_process) benchmark_stats.lower_fence = benchmark_results.sorted_no_outliers_samples[0]", "lower_fence set to: lower_fence benchmark_stats.q1 = Statistics.getPercentile(benchmark_samples_to_process, 25) benchmark_stats.mean = Statistics.getMean(benchmark_samples_to_process) benchmark_stats.median =", "separated numbers.\") parser.add_argument(\"-out\", \"--json_output_path\", type=str, required=True, help=\"JSON output path for file containing the", "upper_fence benchmark_stats.maximum = Statistics.getMax(benchmark_samples_to_process) benchmark_stats.iqr = Statistics.getIQR(benchmark_samples_to_process) benchmark_stats.std_dev = Statistics.getStdDev(benchmark_samples_to_process) benchmark_stats.std_err = Statistics.getStdErr(benchmark_samples_to_process)", "information of the analyzed benchmark.\") parser.add_argument(\"-op\", \"--operation_name\", type=str, required=True, help=\"Name of the operation", "= args.json_output_path operation_name = args.operation_name output_file_name = args.output_file_name # Create an array from", "def printBenchmarkResults(benchmark_samples, 
benchmark_results): print(\"Samples:\") print(benchmark_samples, \"\\n\") print(\"Sorted Samples:\") print(benchmark_results.sorted_lower_outliers_samples, benchmark_results.sorted_no_outliers_samples, benchmark_results.sorted_upper_outliers_samples, \"\\n\") for", "results benchmark_results = createBenchmarkResults(benchmark_samples, operation_name) # Print benchmark results printBenchmarkResults(benchmark_samples, benchmark_results) # Export", "Filter outliers lower_fence, upper_fence = Statistics.getTukeyFences(benchmark_samples) lower_outliers_samples = benchmark_samples[benchmark_samples < lower_fence] benchmark_no_outliers_samples =", "\"--benchmark_samples_file\", type=str, required=True, help=\"File path containing the benchmark observations as comma separated numbers.\")", "statistics_results: print(stat_key + \"= \".rjust(text_alignment_offset - len(stat_key)) + statistics_results[stat_key]) print(\"\\n\") ############################################################################## def runAnalyzer(kwargs=None):", "\"\\n\") for key in benchmark_results.statistics: without_outliers = key == \"Without outliers\" statistics_results =", "file containing the statistical information of the analyzed benchmark.\") parser.add_argument(\"-op\", \"--operation_name\", type=str, required=True,", "BenchmarkResultsContainer() benchmark_results.operation = operation # Filter outliers lower_fence, upper_fence = Statistics.getTukeyFences(benchmark_samples) lower_outliers_samples =", "Statistics.getIQR(benchmark_samples_to_process) benchmark_stats.std_dev = Statistics.getStdDev(benchmark_samples_to_process) benchmark_stats.std_err = Statistics.getStdErr(benchmark_samples_to_process) benchmark_stats.std_err_percentage = benchmark_stats.std_err / benchmark_stats.mean *", "benchmark_stats.std_err > 0.0 else 0.0 benchmark_stats.margin = Statistics.getMargin(benchmark_samples_to_process) benchmark_stats.margin_percentage = benchmark_stats.margin / benchmark_stats.mean", "file with open(benchmark_samples_file) as file: benchmark_samples = np.fromfile(file, dtype=float, sep=\",\") # Create benchmark", "name of the output file, if this option is not used the file", "for exact lower_fence set to: lower_fence benchmark_stats.q1 = Statistics.getPercentile(benchmark_samples_to_process, 25) benchmark_stats.mean = Statistics.getMean(benchmark_samples_to_process)", "if this option is not used the file will be called Benchmark_Results_<MONTH>-<DAY>-<YEAR>_<HOUR>h<MINUTE>m<SECOND>s.\") args", "= Statistics.getIQR(benchmark_samples_to_process) benchmark_stats.std_dev = Statistics.getStdDev(benchmark_samples_to_process) benchmark_stats.std_err = Statistics.getStdErr(benchmark_samples_to_process) benchmark_stats.std_err_percentage = benchmark_stats.std_err / benchmark_stats.mean", "= benchmark_samples[benchmark_samples < lower_fence] benchmark_no_outliers_samples = benchmark_samples[(benchmark_samples >= lower_fence) & (benchmark_samples <= upper_fence)]", "operation related to the benchmark observations.\") parser.add_argument(\"-out_name\", \"--output_file_name\", type=str, required=False, help=\"(Optional) The name", "= benchmark_results.sorted_no_outliers_samples[-1] # Plotly uses last non outlier point, for exact upper_fence set", "= Statistics.getPercentile(benchmark_samples_to_process, 25) benchmark_stats.mean = Statistics.getMean(benchmark_samples_to_process) benchmark_stats.median = Statistics.getPercentile(benchmark_samples_to_process, 50) benchmark_stats.q3 = 
Statistics.getPercentile(benchmark_samples_to_process,", "operation_name) # Print benchmark results printBenchmarkResults(benchmark_samples, benchmark_results) # Export benchmark results to a", "file will be called Benchmark_Results_<MONTH>-<DAY>-<YEAR>_<HOUR>h<MINUTE>m<SECOND>s.\") args = parser.parse_args() # Input Params benchmark_samples_file =", "# Create benchmark results benchmark_results = createBenchmarkResults(benchmark_samples, operation_name) # Print benchmark results printBenchmarkResults(benchmark_samples,", "printBenchmarkResults(benchmark_samples, benchmark_results): print(\"Samples:\") print(benchmark_samples, \"\\n\") print(\"Sorted Samples:\") print(benchmark_results.sorted_lower_outliers_samples, benchmark_results.sorted_no_outliers_samples, benchmark_results.sorted_upper_outliers_samples, \"\\n\") for key", "+ \":\") for stat_key in statistics_results: print(stat_key + \"= \".rjust(text_alignment_offset - len(stat_key)) +", "of the output file, if this option is not used the file will", "upper_outliers_samples = benchmark_samples[benchmark_samples > upper_fence] benchmark_results.sorted_lower_outliers_samples = np.sort(lower_outliers_samples).tolist() benchmark_results.sorted_no_outliers_samples = np.sort(benchmark_no_outliers_samples).tolist() benchmark_results.sorted_upper_outliers_samples =", "benchmark_results ############################################################################## def printBenchmarkResults(benchmark_samples, benchmark_results): print(\"Samples:\") print(benchmark_samples, \"\\n\") print(\"Sorted Samples:\") print(benchmark_results.sorted_lower_outliers_samples, benchmark_results.sorted_no_outliers_samples, benchmark_results.sorted_upper_outliers_samples,", "+ statistics_results[stat_key]) print(\"\\n\") ############################################################################## def runAnalyzer(kwargs=None): # Parse args parser = argparse.ArgumentParser(description=\"Benchmark Analyzer\")", "benchmark_stats.margin = Statistics.getMargin(benchmark_samples_to_process) benchmark_stats.margin_percentage = benchmark_stats.margin / benchmark_stats.mean * 100.0 if benchmark_stats.margin >", "+ \"= \".rjust(text_alignment_offset - len(stat_key)) + statistics_results[stat_key]) print(\"\\n\") ############################################################################## def runAnalyzer(kwargs=None): # Parse", "Statistics.getMax(benchmark_samples_to_process) benchmark_stats.iqr = Statistics.getIQR(benchmark_samples_to_process) benchmark_stats.std_dev = Statistics.getStdDev(benchmark_samples_to_process) benchmark_stats.std_err = Statistics.getStdErr(benchmark_samples_to_process) benchmark_stats.std_err_percentage = benchmark_stats.std_err", "output file, if this option is not used the file will be called", "= benchmark_stats.std_err / benchmark_stats.mean * 100.0 if benchmark_stats.std_err > 0.0 else 0.0 benchmark_stats.margin", "statistical information of the analyzed benchmark.\") parser.add_argument(\"-op\", \"--operation_name\", type=str, required=True, help=\"Name of the", "benchmark_stats.margin > 0.0 else 0.0 benchmark_stats.confidence_interval = Statistics.getConfidenceInterval(benchmark_samples_to_process) benchmark_stats.skewness = Statistics.getSkewness(benchmark_samples_to_process) benchmark_stats.kurtosis =", "last non outlier point, for exact upper_fence set to: upper_fence benchmark_stats.maximum = Statistics.getMax(benchmark_samples_to_process)", "if benchmark_stats.margin > 0.0 else 0.0 benchmark_stats.confidence_interval = 
Statistics.getConfidenceInterval(benchmark_samples_to_process) benchmark_stats.skewness = Statistics.getSkewness(benchmark_samples_to_process) benchmark_stats.kurtosis", "50) benchmark_stats.q3 = Statistics.getPercentile(benchmark_samples_to_process, 75) benchmark_stats.upper_fence = benchmark_results.sorted_no_outliers_samples[-1] # Plotly uses last non", "benchmark_stats.kurtosis = Statistics.getKurtosis(benchmark_samples_to_process) return benchmark_results ############################################################################## def printBenchmarkResults(benchmark_samples, benchmark_results): print(\"Samples:\") print(benchmark_samples, \"\\n\") print(\"Sorted", "benchmark results to a JSON file benchmark_results.toJSONFile(json_output_path, operation_name, output_file_name) ############################################################################## #----------------------------------------------------------------------------- # Main", "outliers lower_fence, upper_fence = Statistics.getTukeyFences(benchmark_samples) lower_outliers_samples = benchmark_samples[benchmark_samples < lower_fence] benchmark_no_outliers_samples = benchmark_samples[(benchmark_samples", "the file will be called Benchmark_Results_<MONTH>-<DAY>-<YEAR>_<HOUR>h<MINUTE>m<SECOND>s.\") args = parser.parse_args() # Input Params benchmark_samples_file", "print(stat_key + \"= \".rjust(text_alignment_offset - len(stat_key)) + statistics_results[stat_key]) print(\"\\n\") ############################################################################## def runAnalyzer(kwargs=None): #", "key == \"Without outliers\" benchmark_samples_to_process = benchmark_no_outliers_samples if without_outliers else benchmark_samples benchmark_stats =", "benchmark_results.statistics: without_outliers = key == \"Without outliers\" benchmark_samples_to_process = benchmark_no_outliers_samples if without_outliers else", "required=True, help=\"Name of the operation related to the benchmark observations.\") parser.add_argument(\"-out_name\", \"--output_file_name\", type=str,", "Statistics.getNumAnalyzedSamples(benchmark_samples_to_process) benchmark_stats.minimum = Statistics.getMin(benchmark_samples_to_process) benchmark_stats.lower_fence = benchmark_results.sorted_no_outliers_samples[0] # Plotly uses first non outlier", "= key == \"Without outliers\" benchmark_samples_to_process = benchmark_no_outliers_samples if without_outliers else benchmark_samples benchmark_stats", "createBenchmarkResults(benchmark_samples, operation_name) # Print benchmark results printBenchmarkResults(benchmark_samples, benchmark_results) # Export benchmark results to", "Export benchmark results to a JSON file benchmark_results.toJSONFile(json_output_path, operation_name, output_file_name) ############################################################################## #----------------------------------------------------------------------------- #", "benchmark_containers import BenchmarkResultsContainer ############################################################################## def createBenchmarkResults(benchmark_samples, operation): benchmark_results = BenchmarkResultsContainer() benchmark_results.operation = operation", "benchmark_stats.minimum = Statistics.getMin(benchmark_samples_to_process) benchmark_stats.lower_fence = benchmark_results.sorted_no_outliers_samples[0] # Plotly uses first non outlier point,", "benchmark_results.getFormatedStatisticsResultsWithoutOutliers() if without_outliers else benchmark_results.getFormatedStatisticsResultsWithOutliers() 
text_alignment_offset = len(max(statistics_results, key=len)) + 3 print(key +", "numpy as np from benchmark_statistics import Statistics from benchmark_containers import BenchmarkResultsContainer ############################################################################## def", "in file with open(benchmark_samples_file) as file: benchmark_samples = np.fromfile(file, dtype=float, sep=\",\") # Create", "is not used the file will be called Benchmark_Results_<MONTH>-<DAY>-<YEAR>_<HOUR>h<MINUTE>m<SECOND>s.\") args = parser.parse_args() #", "results to a JSON file benchmark_results.toJSONFile(json_output_path, operation_name, output_file_name) ############################################################################## #----------------------------------------------------------------------------- # Main #-----------------------------------------------------------------------------", "benchmark_results): print(\"Samples:\") print(benchmark_samples, \"\\n\") print(\"Sorted Samples:\") print(benchmark_results.sorted_lower_outliers_samples, benchmark_results.sorted_no_outliers_samples, benchmark_results.sorted_upper_outliers_samples, \"\\n\") for key in", "parser.add_argument(\"-out\", \"--json_output_path\", type=str, required=True, help=\"JSON output path for file containing the statistical information", "= benchmark_samples[(benchmark_samples >= lower_fence) & (benchmark_samples <= upper_fence)] upper_outliers_samples = benchmark_samples[benchmark_samples > upper_fence]", "of the analyzed benchmark.\") parser.add_argument(\"-op\", \"--operation_name\", type=str, required=True, help=\"Name of the operation related", "benchmark_samples[benchmark_samples < lower_fence] benchmark_no_outliers_samples = benchmark_samples[(benchmark_samples >= lower_fence) & (benchmark_samples <= upper_fence)] upper_outliers_samples", "\"Without outliers\" benchmark_samples_to_process = benchmark_no_outliers_samples if without_outliers else benchmark_samples benchmark_stats = benchmark_results.statistics[key] benchmark_stats.num_analyzed_samples", "benchmark_stats.margin / benchmark_stats.mean * 100.0 if benchmark_stats.margin > 0.0 else 0.0 benchmark_stats.confidence_interval =", "called Benchmark_Results_<MONTH>-<DAY>-<YEAR>_<HOUR>h<MINUTE>m<SECOND>s.\") args = parser.parse_args() # Input Params benchmark_samples_file = args.benchmark_samples_file json_output_path =", "# Create statistics info from benchmark samples for key in benchmark_results.statistics: without_outliers =", "parser.add_argument(\"-out_name\", \"--output_file_name\", type=str, required=False, help=\"(Optional) The name of the output file, if this", "parser.add_argument(\"-op\", \"--operation_name\", type=str, required=True, help=\"Name of the operation related to the benchmark observations.\")", "benchmark.\") parser.add_argument(\"-op\", \"--operation_name\", type=str, required=True, help=\"Name of the operation related to the benchmark", "from benchmark samples in file with open(benchmark_samples_file) as file: benchmark_samples = np.fromfile(file, dtype=float,", "benchmark_results = BenchmarkResultsContainer() benchmark_results.operation = operation # Filter outliers lower_fence, upper_fence = Statistics.getTukeyFences(benchmark_samples)", "args.output_file_name # Create an array from benchmark samples in file with open(benchmark_samples_file) as", "file: benchmark_samples = np.fromfile(file, dtype=float, sep=\",\") # Create benchmark results benchmark_results = createBenchmarkResults(benchmark_samples,", "to: upper_fence 
benchmark_stats.maximum = Statistics.getMax(benchmark_samples_to_process) benchmark_stats.iqr = Statistics.getIQR(benchmark_samples_to_process) benchmark_stats.std_dev = Statistics.getStdDev(benchmark_samples_to_process) benchmark_stats.std_err =", "of the operation related to the benchmark observations.\") parser.add_argument(\"-out_name\", \"--output_file_name\", type=str, required=False, help=\"(Optional)", "print(benchmark_results.sorted_lower_outliers_samples, benchmark_results.sorted_no_outliers_samples, benchmark_results.sorted_upper_outliers_samples, \"\\n\") for key in benchmark_results.statistics: without_outliers = key == \"Without", "samples for key in benchmark_results.statistics: without_outliers = key == \"Without outliers\" benchmark_samples_to_process =", "= Statistics.getConfidenceInterval(benchmark_samples_to_process) benchmark_stats.skewness = Statistics.getSkewness(benchmark_samples_to_process) benchmark_stats.kurtosis = Statistics.getKurtosis(benchmark_samples_to_process) return benchmark_results ############################################################################## def printBenchmarkResults(benchmark_samples,", "parser = argparse.ArgumentParser(description=\"Benchmark Analyzer\") parser.add_argument(\"-in\", \"--benchmark_samples_file\", type=str, required=True, help=\"File path containing the benchmark", "Create an array from benchmark samples in file with open(benchmark_samples_file) as file: benchmark_samples", "benchmark_samples_file = args.benchmark_samples_file json_output_path = args.json_output_path operation_name = args.operation_name output_file_name = args.output_file_name #", "0.0 benchmark_stats.margin = Statistics.getMargin(benchmark_samples_to_process) benchmark_stats.margin_percentage = benchmark_stats.margin / benchmark_stats.mean * 100.0 if benchmark_stats.margin", "100.0 if benchmark_stats.std_err > 0.0 else 0.0 benchmark_stats.margin = Statistics.getMargin(benchmark_samples_to_process) benchmark_stats.margin_percentage = benchmark_stats.margin", "print(\"\\n\") ############################################################################## def runAnalyzer(kwargs=None): # Parse args parser = argparse.ArgumentParser(description=\"Benchmark Analyzer\") parser.add_argument(\"-in\", \"--benchmark_samples_file\",", "Statistics from benchmark_containers import BenchmarkResultsContainer ############################################################################## def createBenchmarkResults(benchmark_samples, operation): benchmark_results = BenchmarkResultsContainer() benchmark_results.operation", "Params benchmark_samples_file = args.benchmark_samples_file json_output_path = args.json_output_path operation_name = args.operation_name output_file_name = args.output_file_name", "benchmark results printBenchmarkResults(benchmark_samples, benchmark_results) # Export benchmark results to a JSON file benchmark_results.toJSONFile(json_output_path,", "benchmark_results.toJSONFile(json_output_path, operation_name, output_file_name) ############################################################################## #----------------------------------------------------------------------------- # Main #----------------------------------------------------------------------------- if __name__ == '__main__': runAnalyzer()", "Benchmark_Results_<MONTH>-<DAY>-<YEAR>_<HOUR>h<MINUTE>m<SECOND>s.\") args = parser.parse_args() # Input Params benchmark_samples_file = args.benchmark_samples_file json_output_path = args.json_output_path", "outlier point, for exact lower_fence 
set to: lower_fence benchmark_stats.q1 = Statistics.getPercentile(benchmark_samples_to_process, 25) benchmark_stats.mean", "key in benchmark_results.statistics: without_outliers = key == \"Without outliers\" statistics_results = benchmark_results.getFormatedStatisticsResultsWithoutOutliers() if", "in benchmark_results.statistics: without_outliers = key == \"Without outliers\" benchmark_samples_to_process = benchmark_no_outliers_samples if without_outliers", "# Create an array from benchmark samples in file with open(benchmark_samples_file) as file:", "upper_fence = Statistics.getTukeyFences(benchmark_samples) lower_outliers_samples = benchmark_samples[benchmark_samples < lower_fence] benchmark_no_outliers_samples = benchmark_samples[(benchmark_samples >= lower_fence)", "benchmark_stats.std_err / benchmark_stats.mean * 100.0 if benchmark_stats.std_err > 0.0 else 0.0 benchmark_stats.margin =", "non outlier point, for exact upper_fence set to: upper_fence benchmark_stats.maximum = Statistics.getMax(benchmark_samples_to_process) benchmark_stats.iqr", "for exact upper_fence set to: upper_fence benchmark_stats.maximum = Statistics.getMax(benchmark_samples_to_process) benchmark_stats.iqr = Statistics.getIQR(benchmark_samples_to_process) benchmark_stats.std_dev", "as np from benchmark_statistics import Statistics from benchmark_containers import BenchmarkResultsContainer ############################################################################## def createBenchmarkResults(benchmark_samples,", "\"--output_file_name\", type=str, required=False, help=\"(Optional) The name of the output file, if this option", "args.operation_name output_file_name = args.output_file_name # Create an array from benchmark samples in file", "operation_name = args.operation_name output_file_name = args.output_file_name # Create an array from benchmark samples", "outliers\" benchmark_samples_to_process = benchmark_no_outliers_samples if without_outliers else benchmark_samples benchmark_stats = benchmark_results.statistics[key] benchmark_stats.num_analyzed_samples =", "Input Params benchmark_samples_file = args.benchmark_samples_file json_output_path = args.json_output_path operation_name = args.operation_name output_file_name =", "benchmark_no_outliers_samples if without_outliers else benchmark_samples benchmark_stats = benchmark_results.statistics[key] benchmark_stats.num_analyzed_samples = Statistics.getNumAnalyzedSamples(benchmark_samples_to_process) benchmark_stats.minimum =", "Create benchmark results benchmark_results = createBenchmarkResults(benchmark_samples, operation_name) # Print benchmark results printBenchmarkResults(benchmark_samples, benchmark_results)", "= Statistics.getMin(benchmark_samples_to_process) benchmark_stats.lower_fence = benchmark_results.sorted_no_outliers_samples[0] # Plotly uses first non outlier point, for", "\"--json_output_path\", type=str, required=True, help=\"JSON output path for file containing the statistical information of", "benchmark results benchmark_results = createBenchmarkResults(benchmark_samples, operation_name) # Print benchmark results printBenchmarkResults(benchmark_samples, benchmark_results) #", "benchmark_stats.mean = Statistics.getMean(benchmark_samples_to_process) benchmark_stats.median = Statistics.getPercentile(benchmark_samples_to_process, 50) benchmark_stats.q3 = Statistics.getPercentile(benchmark_samples_to_process, 75) benchmark_stats.upper_fence =", "help=\"File path containing the benchmark observations as comma separated numbers.\") 
parser.add_argument(\"-out\", \"--json_output_path\", type=str,", "> 0.0 else 0.0 benchmark_stats.margin = Statistics.getMargin(benchmark_samples_to_process) benchmark_stats.margin_percentage = benchmark_stats.margin / benchmark_stats.mean *", "(benchmark_samples <= upper_fence)] upper_outliers_samples = benchmark_samples[benchmark_samples > upper_fence] benchmark_results.sorted_lower_outliers_samples = np.sort(lower_outliers_samples).tolist() benchmark_results.sorted_no_outliers_samples =", "print(\"Sorted Samples:\") print(benchmark_results.sorted_lower_outliers_samples, benchmark_results.sorted_no_outliers_samples, benchmark_results.sorted_upper_outliers_samples, \"\\n\") for key in benchmark_results.statistics: without_outliers = key", "< lower_fence] benchmark_no_outliers_samples = benchmark_samples[(benchmark_samples >= lower_fence) & (benchmark_samples <= upper_fence)] upper_outliers_samples =", "np.sort(upper_outliers_samples).tolist() # Create statistics info from benchmark samples for key in benchmark_results.statistics: without_outliers", "benchmark observations.\") parser.add_argument(\"-out_name\", \"--output_file_name\", type=str, required=False, help=\"(Optional) The name of the output file,", "for key in benchmark_results.statistics: without_outliers = key == \"Without outliers\" benchmark_samples_to_process = benchmark_no_outliers_samples", "############################################################################## def printBenchmarkResults(benchmark_samples, benchmark_results): print(\"Samples:\") print(benchmark_samples, \"\\n\") print(\"Sorted Samples:\") print(benchmark_results.sorted_lower_outliers_samples, benchmark_results.sorted_no_outliers_samples, benchmark_results.sorted_upper_outliers_samples, \"\\n\")", "for key in benchmark_results.statistics: without_outliers = key == \"Without outliers\" statistics_results = benchmark_results.getFormatedStatisticsResultsWithoutOutliers()", "else benchmark_samples benchmark_stats = benchmark_results.statistics[key] benchmark_stats.num_analyzed_samples = Statistics.getNumAnalyzedSamples(benchmark_samples_to_process) benchmark_stats.minimum = Statistics.getMin(benchmark_samples_to_process) benchmark_stats.lower_fence =", "= Statistics.getKurtosis(benchmark_samples_to_process) return benchmark_results ############################################################################## def printBenchmarkResults(benchmark_samples, benchmark_results): print(\"Samples:\") print(benchmark_samples, \"\\n\") print(\"Sorted Samples:\")", "= Statistics.getNumAnalyzedSamples(benchmark_samples_to_process) benchmark_stats.minimum = Statistics.getMin(benchmark_samples_to_process) benchmark_stats.lower_fence = benchmark_results.sorted_no_outliers_samples[0] # Plotly uses first non", "benchmark_stats.num_analyzed_samples = Statistics.getNumAnalyzedSamples(benchmark_samples_to_process) benchmark_stats.minimum = Statistics.getMin(benchmark_samples_to_process) benchmark_stats.lower_fence = benchmark_results.sorted_no_outliers_samples[0] # Plotly uses first", "args parser = argparse.ArgumentParser(description=\"Benchmark Analyzer\") parser.add_argument(\"-in\", \"--benchmark_samples_file\", type=str, required=True, help=\"File path containing the", "Plotly uses last non outlier point, for exact upper_fence set to: upper_fence benchmark_stats.maximum", "= benchmark_results.statistics[key] benchmark_stats.num_analyzed_samples = Statistics.getNumAnalyzedSamples(benchmark_samples_to_process) benchmark_stats.minimum = 
Statistics.getMin(benchmark_samples_to_process) benchmark_stats.lower_fence = benchmark_results.sorted_no_outliers_samples[0] # Plotly", "lower_fence benchmark_stats.q1 = Statistics.getPercentile(benchmark_samples_to_process, 25) benchmark_stats.mean = Statistics.getMean(benchmark_samples_to_process) benchmark_stats.median = Statistics.getPercentile(benchmark_samples_to_process, 50) benchmark_stats.q3", "= Statistics.getPercentile(benchmark_samples_to_process, 50) benchmark_stats.q3 = Statistics.getPercentile(benchmark_samples_to_process, 75) benchmark_stats.upper_fence = benchmark_results.sorted_no_outliers_samples[-1] # Plotly uses", "the operation related to the benchmark observations.\") parser.add_argument(\"-out_name\", \"--output_file_name\", type=str, required=False, help=\"(Optional) The", "= Statistics.getPercentile(benchmark_samples_to_process, 75) benchmark_stats.upper_fence = benchmark_results.sorted_no_outliers_samples[-1] # Plotly uses last non outlier point,", "point, for exact lower_fence set to: lower_fence benchmark_stats.q1 = Statistics.getPercentile(benchmark_samples_to_process, 25) benchmark_stats.mean =", "output path for file containing the statistical information of the analyzed benchmark.\") parser.add_argument(\"-op\",", "& (benchmark_samples <= upper_fence)] upper_outliers_samples = benchmark_samples[benchmark_samples > upper_fence] benchmark_results.sorted_lower_outliers_samples = np.sort(lower_outliers_samples).tolist() benchmark_results.sorted_no_outliers_samples", "= Statistics.getMargin(benchmark_samples_to_process) benchmark_stats.margin_percentage = benchmark_stats.margin / benchmark_stats.mean * 100.0 if benchmark_stats.margin > 0.0", "if without_outliers else benchmark_results.getFormatedStatisticsResultsWithOutliers() text_alignment_offset = len(max(statistics_results, key=len)) + 3 print(key + \":\")", "numbers.\") parser.add_argument(\"-out\", \"--json_output_path\", type=str, required=True, help=\"JSON output path for file containing the statistical", "upper_fence set to: upper_fence benchmark_stats.maximum = Statistics.getMax(benchmark_samples_to_process) benchmark_stats.iqr = Statistics.getIQR(benchmark_samples_to_process) benchmark_stats.std_dev = Statistics.getStdDev(benchmark_samples_to_process)", "= np.fromfile(file, dtype=float, sep=\",\") # Create benchmark results benchmark_results = createBenchmarkResults(benchmark_samples, operation_name) #", "exact upper_fence set to: upper_fence benchmark_stats.maximum = Statistics.getMax(benchmark_samples_to_process) benchmark_stats.iqr = Statistics.getIQR(benchmark_samples_to_process) benchmark_stats.std_dev =", "containing the benchmark observations as comma separated numbers.\") parser.add_argument(\"-out\", \"--json_output_path\", type=str, required=True, help=\"JSON", "= benchmark_no_outliers_samples if without_outliers else benchmark_samples benchmark_stats = benchmark_results.statistics[key] benchmark_stats.num_analyzed_samples = Statistics.getNumAnalyzedSamples(benchmark_samples_to_process) benchmark_stats.minimum", "benchmark_stats.margin_percentage = benchmark_stats.margin / benchmark_stats.mean * 100.0 if benchmark_stats.margin > 0.0 else 0.0", "required=True, help=\"File path containing the benchmark observations as comma separated numbers.\") parser.add_argument(\"-out\", \"--json_output_path\",", "will be called Benchmark_Results_<MONTH>-<DAY>-<YEAR>_<HOUR>h<MINUTE>m<SECOND>s.\") args = parser.parse_args() # Input Params benchmark_samples_file = args.benchmark_samples_file", "as file: 
benchmark_samples = np.fromfile(file, dtype=float, sep=\",\") # Create benchmark results benchmark_results =", "# Export benchmark results to a JSON file benchmark_results.toJSONFile(json_output_path, operation_name, output_file_name) ############################################################################## #-----------------------------------------------------------------------------", "len(stat_key)) + statistics_results[stat_key]) print(\"\\n\") ############################################################################## def runAnalyzer(kwargs=None): # Parse args parser = argparse.ArgumentParser(description=\"Benchmark", "in statistics_results: print(stat_key + \"= \".rjust(text_alignment_offset - len(stat_key)) + statistics_results[stat_key]) print(\"\\n\") ############################################################################## def", "benchmark_results.statistics[key] benchmark_stats.num_analyzed_samples = Statistics.getNumAnalyzedSamples(benchmark_samples_to_process) benchmark_stats.minimum = Statistics.getMin(benchmark_samples_to_process) benchmark_stats.lower_fence = benchmark_results.sorted_no_outliers_samples[0] # Plotly uses", "from benchmark_statistics import Statistics from benchmark_containers import BenchmarkResultsContainer ############################################################################## def createBenchmarkResults(benchmark_samples, operation): benchmark_results", "Statistics.getStdDev(benchmark_samples_to_process) benchmark_stats.std_err = Statistics.getStdErr(benchmark_samples_to_process) benchmark_stats.std_err_percentage = benchmark_stats.std_err / benchmark_stats.mean * 100.0 if benchmark_stats.std_err", "type=str, required=True, help=\"Name of the operation related to the benchmark observations.\") parser.add_argument(\"-out_name\", \"--output_file_name\",", "= args.output_file_name # Create an array from benchmark samples in file with open(benchmark_samples_file)", "set to: upper_fence benchmark_stats.maximum = Statistics.getMax(benchmark_samples_to_process) benchmark_stats.iqr = Statistics.getIQR(benchmark_samples_to_process) benchmark_stats.std_dev = Statistics.getStdDev(benchmark_samples_to_process) benchmark_stats.std_err", "file, if this option is not used the file will be called Benchmark_Results_<MONTH>-<DAY>-<YEAR>_<HOUR>h<MINUTE>m<SECOND>s.\")", "benchmark_results.getFormatedStatisticsResultsWithOutliers() text_alignment_offset = len(max(statistics_results, key=len)) + 3 print(key + \":\") for stat_key in", "argparse import numpy as np from benchmark_statistics import Statistics from benchmark_containers import BenchmarkResultsContainer", "return benchmark_results ############################################################################## def printBenchmarkResults(benchmark_samples, benchmark_results): print(\"Samples:\") print(benchmark_samples, \"\\n\") print(\"Sorted Samples:\") print(benchmark_results.sorted_lower_outliers_samples, benchmark_results.sorted_no_outliers_samples,", "else 0.0 benchmark_stats.margin = Statistics.getMargin(benchmark_samples_to_process) benchmark_stats.margin_percentage = benchmark_stats.margin / benchmark_stats.mean * 100.0 if", "# Input Params benchmark_samples_file = args.benchmark_samples_file json_output_path = args.json_output_path operation_name = args.operation_name output_file_name", "* 100.0 if benchmark_stats.margin > 0.0 else 0.0 benchmark_stats.confidence_interval = Statistics.getConfidenceInterval(benchmark_samples_to_process) benchmark_stats.skewness =", "# Filter outliers 
lower_fence, upper_fence = Statistics.getTukeyFences(benchmark_samples) lower_outliers_samples = benchmark_samples[benchmark_samples < lower_fence] benchmark_no_outliers_samples", "Statistics.getMin(benchmark_samples_to_process) benchmark_stats.lower_fence = benchmark_results.sorted_no_outliers_samples[0] # Plotly uses first non outlier point, for exact", "path containing the benchmark observations as comma separated numbers.\") parser.add_argument(\"-out\", \"--json_output_path\", type=str, required=True,", "\"--operation_name\", type=str, required=True, help=\"Name of the operation related to the benchmark observations.\") parser.add_argument(\"-out_name\",", "75) benchmark_stats.upper_fence = benchmark_results.sorted_no_outliers_samples[-1] # Plotly uses last non outlier point, for exact", "Statistics.getStdErr(benchmark_samples_to_process) benchmark_stats.std_err_percentage = benchmark_stats.std_err / benchmark_stats.mean * 100.0 if benchmark_stats.std_err > 0.0 else", "else 0.0 benchmark_stats.confidence_interval = Statistics.getConfidenceInterval(benchmark_samples_to_process) benchmark_stats.skewness = Statistics.getSkewness(benchmark_samples_to_process) benchmark_stats.kurtosis = Statistics.getKurtosis(benchmark_samples_to_process) return benchmark_results", "Samples:\") print(benchmark_results.sorted_lower_outliers_samples, benchmark_results.sorted_no_outliers_samples, benchmark_results.sorted_upper_outliers_samples, \"\\n\") for key in benchmark_results.statistics: without_outliers = key ==", "type=str, required=True, help=\"JSON output path for file containing the statistical information of the", "benchmark_no_outliers_samples = benchmark_samples[(benchmark_samples >= lower_fence) & (benchmark_samples <= upper_fence)] upper_outliers_samples = benchmark_samples[benchmark_samples >", "lower_fence) & (benchmark_samples <= upper_fence)] upper_outliers_samples = benchmark_samples[benchmark_samples > upper_fence] benchmark_results.sorted_lower_outliers_samples = np.sort(lower_outliers_samples).tolist()", "The name of the output file, if this option is not used the", "be called Benchmark_Results_<MONTH>-<DAY>-<YEAR>_<HOUR>h<MINUTE>m<SECOND>s.\") args = parser.parse_args() # Input Params benchmark_samples_file = args.benchmark_samples_file json_output_path", "- len(stat_key)) + statistics_results[stat_key]) print(\"\\n\") ############################################################################## def runAnalyzer(kwargs=None): # Parse args parser =", "output_file_name = args.output_file_name # Create an array from benchmark samples in file with", "help=\"(Optional) The name of the output file, if this option is not used", "lower_fence] benchmark_no_outliers_samples = benchmark_samples[(benchmark_samples >= lower_fence) & (benchmark_samples <= upper_fence)] upper_outliers_samples = benchmark_samples[benchmark_samples", "len(max(statistics_results, key=len)) + 3 print(key + \":\") for stat_key in statistics_results: print(stat_key +", "benchmark_results.sorted_no_outliers_samples = np.sort(benchmark_no_outliers_samples).tolist() benchmark_results.sorted_upper_outliers_samples = np.sort(upper_outliers_samples).tolist() # Create statistics info from benchmark samples", "<= upper_fence)] upper_outliers_samples = benchmark_samples[benchmark_samples > upper_fence] benchmark_results.sorted_lower_outliers_samples = np.sort(lower_outliers_samples).tolist() benchmark_results.sorted_no_outliers_samples = np.sort(benchmark_no_outliers_samples).tolist()", "uses first non outlier point, for exact 
lower_fence set to: lower_fence benchmark_stats.q1 =", "benchmark_results.operation = operation # Filter outliers lower_fence, upper_fence = Statistics.getTukeyFences(benchmark_samples) lower_outliers_samples = benchmark_samples[benchmark_samples", "3 print(key + \":\") for stat_key in statistics_results: print(stat_key + \"= \".rjust(text_alignment_offset -", "benchmark_stats.lower_fence = benchmark_results.sorted_no_outliers_samples[0] # Plotly uses first non outlier point, for exact lower_fence", "a JSON file benchmark_results.toJSONFile(json_output_path, operation_name, output_file_name) ############################################################################## #----------------------------------------------------------------------------- # Main #----------------------------------------------------------------------------- if __name__", "100.0 if benchmark_stats.margin > 0.0 else 0.0 benchmark_stats.confidence_interval = Statistics.getConfidenceInterval(benchmark_samples_to_process) benchmark_stats.skewness = Statistics.getSkewness(benchmark_samples_to_process)", "key=len)) + 3 print(key + \":\") for stat_key in statistics_results: print(stat_key + \"=", "= benchmark_results.sorted_no_outliers_samples[0] # Plotly uses first non outlier point, for exact lower_fence set", "in benchmark_results.statistics: without_outliers = key == \"Without outliers\" statistics_results = benchmark_results.getFormatedStatisticsResultsWithoutOutliers() if without_outliers", "benchmark_results.sorted_no_outliers_samples[0] # Plotly uses first non outlier point, for exact lower_fence set to:", "json_output_path = args.json_output_path operation_name = args.operation_name output_file_name = args.output_file_name # Create an array", "sep=\",\") # Create benchmark results benchmark_results = createBenchmarkResults(benchmark_samples, operation_name) # Print benchmark results", "import argparse import numpy as np from benchmark_statistics import Statistics from benchmark_containers import", "benchmark samples in file with open(benchmark_samples_file) as file: benchmark_samples = np.fromfile(file, dtype=float, sep=\",\")", "benchmark_stats.maximum = Statistics.getMax(benchmark_samples_to_process) benchmark_stats.iqr = Statistics.getIQR(benchmark_samples_to_process) benchmark_stats.std_dev = Statistics.getStdDev(benchmark_samples_to_process) benchmark_stats.std_err = Statistics.getStdErr(benchmark_samples_to_process) benchmark_stats.std_err_percentage", "upper_fence)] upper_outliers_samples = benchmark_samples[benchmark_samples > upper_fence] benchmark_results.sorted_lower_outliers_samples = np.sort(lower_outliers_samples).tolist() benchmark_results.sorted_no_outliers_samples = np.sort(benchmark_no_outliers_samples).tolist() benchmark_results.sorted_upper_outliers_samples", "= Statistics.getStdDev(benchmark_samples_to_process) benchmark_stats.std_err = Statistics.getStdErr(benchmark_samples_to_process) benchmark_stats.std_err_percentage = benchmark_stats.std_err / benchmark_stats.mean * 100.0 if", "print(key + \":\") for stat_key in statistics_results: print(stat_key + \"= \".rjust(text_alignment_offset - len(stat_key))", "to the benchmark observations.\") parser.add_argument(\"-out_name\", \"--output_file_name\", type=str, required=False, help=\"(Optional) The name of the", "= Statistics.getMean(benchmark_samples_to_process) benchmark_stats.median = Statistics.getPercentile(benchmark_samples_to_process, 50) benchmark_stats.q3 = Statistics.getPercentile(benchmark_samples_to_process, 75) 
benchmark_stats.upper_fence = benchmark_results.sorted_no_outliers_samples[-1]", "benchmark_stats.median = Statistics.getPercentile(benchmark_samples_to_process, 50) benchmark_stats.q3 = Statistics.getPercentile(benchmark_samples_to_process, 75) benchmark_stats.upper_fence = benchmark_results.sorted_no_outliers_samples[-1] # Plotly", "/ benchmark_stats.mean * 100.0 if benchmark_stats.std_err > 0.0 else 0.0 benchmark_stats.margin = Statistics.getMargin(benchmark_samples_to_process)", "benchmark_stats.std_dev = Statistics.getStdDev(benchmark_samples_to_process) benchmark_stats.std_err = Statistics.getStdErr(benchmark_samples_to_process) benchmark_stats.std_err_percentage = benchmark_stats.std_err / benchmark_stats.mean * 100.0", "############################################################################## def runAnalyzer(kwargs=None): # Parse args parser = argparse.ArgumentParser(description=\"Benchmark Analyzer\") parser.add_argument(\"-in\", \"--benchmark_samples_file\", type=str,", "without_outliers = key == \"Without outliers\" statistics_results = benchmark_results.getFormatedStatisticsResultsWithoutOutliers() if without_outliers else benchmark_results.getFormatedStatisticsResultsWithOutliers()", "text_alignment_offset = len(max(statistics_results, key=len)) + 3 print(key + \":\") for stat_key in statistics_results:", "if without_outliers else benchmark_samples benchmark_stats = benchmark_results.statistics[key] benchmark_stats.num_analyzed_samples = Statistics.getNumAnalyzedSamples(benchmark_samples_to_process) benchmark_stats.minimum = Statistics.getMin(benchmark_samples_to_process)", "\"\\n\") print(\"Sorted Samples:\") print(benchmark_results.sorted_lower_outliers_samples, benchmark_results.sorted_no_outliers_samples, benchmark_results.sorted_upper_outliers_samples, \"\\n\") for key in benchmark_results.statistics: without_outliers =", "help=\"Name of the operation related to the benchmark observations.\") parser.add_argument(\"-out_name\", \"--output_file_name\", type=str, required=False,", "\":\") for stat_key in statistics_results: print(stat_key + \"= \".rjust(text_alignment_offset - len(stat_key)) + statistics_results[stat_key])", "Parse args parser = argparse.ArgumentParser(description=\"Benchmark Analyzer\") parser.add_argument(\"-in\", \"--benchmark_samples_file\", type=str, required=True, help=\"File path containing", "Plotly uses first non outlier point, for exact lower_fence set to: lower_fence benchmark_stats.q1", "benchmark_stats = benchmark_results.statistics[key] benchmark_stats.num_analyzed_samples = Statistics.getNumAnalyzedSamples(benchmark_samples_to_process) benchmark_stats.minimum = Statistics.getMin(benchmark_samples_to_process) benchmark_stats.lower_fence = benchmark_results.sorted_no_outliers_samples[0] #", "benchmark samples for key in benchmark_results.statistics: without_outliers = key == \"Without outliers\" benchmark_samples_to_process", "file benchmark_results.toJSONFile(json_output_path, operation_name, output_file_name) ############################################################################## #----------------------------------------------------------------------------- # Main #----------------------------------------------------------------------------- if __name__ == '__main__':", "= argparse.ArgumentParser(description=\"Benchmark Analyzer\") parser.add_argument(\"-in\", \"--benchmark_samples_file\", type=str, required=True, help=\"File path containing the benchmark observations", "printBenchmarkResults(benchmark_samples, benchmark_results) # 
Export benchmark results to a JSON file benchmark_results.toJSONFile(json_output_path, operation_name, output_file_name)", "type=str, required=False, help=\"(Optional) The name of the output file, if this option is", "> 0.0 else 0.0 benchmark_stats.confidence_interval = Statistics.getConfidenceInterval(benchmark_samples_to_process) benchmark_stats.skewness = Statistics.getSkewness(benchmark_samples_to_process) benchmark_stats.kurtosis = Statistics.getKurtosis(benchmark_samples_to_process)", "Statistics.getMargin(benchmark_samples_to_process) benchmark_stats.margin_percentage = benchmark_stats.margin / benchmark_stats.mean * 100.0 if benchmark_stats.margin > 0.0 else", "dtype=float, sep=\",\") # Create benchmark results benchmark_results = createBenchmarkResults(benchmark_samples, operation_name) # Print benchmark", "Statistics.getMean(benchmark_samples_to_process) benchmark_stats.median = Statistics.getPercentile(benchmark_samples_to_process, 50) benchmark_stats.q3 = Statistics.getPercentile(benchmark_samples_to_process, 75) benchmark_stats.upper_fence = benchmark_results.sorted_no_outliers_samples[-1] #", "= operation # Filter outliers lower_fence, upper_fence = Statistics.getTukeyFences(benchmark_samples) lower_outliers_samples = benchmark_samples[benchmark_samples <", "benchmark_results.sorted_no_outliers_samples, benchmark_results.sorted_upper_outliers_samples, \"\\n\") for key in benchmark_results.statistics: without_outliers = key == \"Without outliers\"", "import numpy as np from benchmark_statistics import Statistics from benchmark_containers import BenchmarkResultsContainer ##############################################################################", "np from benchmark_statistics import Statistics from benchmark_containers import BenchmarkResultsContainer ############################################################################## def createBenchmarkResults(benchmark_samples, operation):", "the benchmark observations.\") parser.add_argument(\"-out_name\", \"--output_file_name\", type=str, required=False, help=\"(Optional) The name of the output", "required=True, help=\"JSON output path for file containing the statistical information of the analyzed", "related to the benchmark observations.\") parser.add_argument(\"-out_name\", \"--output_file_name\", type=str, required=False, help=\"(Optional) The name of", "print(\"Samples:\") print(benchmark_samples, \"\\n\") print(\"Sorted Samples:\") print(benchmark_results.sorted_lower_outliers_samples, benchmark_results.sorted_no_outliers_samples, benchmark_results.sorted_upper_outliers_samples, \"\\n\") for key in benchmark_results.statistics:", "\"= \".rjust(text_alignment_offset - len(stat_key)) + statistics_results[stat_key]) print(\"\\n\") ############################################################################## def runAnalyzer(kwargs=None): # Parse args", "= parser.parse_args() # Input Params benchmark_samples_file = args.benchmark_samples_file json_output_path = args.json_output_path operation_name =", "Statistics.getConfidenceInterval(benchmark_samples_to_process) benchmark_stats.skewness = Statistics.getSkewness(benchmark_samples_to_process) benchmark_stats.kurtosis = Statistics.getKurtosis(benchmark_samples_to_process) return benchmark_results ############################################################################## def printBenchmarkResults(benchmark_samples, benchmark_results):", "benchmark_results) # Export benchmark results to a JSON file benchmark_results.toJSONFile(json_output_path, operation_name, 
output_file_name) ##############################################################################", "operation): benchmark_results = BenchmarkResultsContainer() benchmark_results.operation = operation # Filter outliers lower_fence, upper_fence =" ]
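The benchmark_statistics module used above is not included in this dump, so the fence computation itself is opaque here. The sketch below shows the standard Tukey rule that getTukeyFences presumably implements; the function name, the conventional k = 1.5 whisker factor, and the example data are assumptions for illustration, not the module's actual code.

import numpy as np

def get_tukey_fences(samples, k=1.5):
    # Q1/Q3 via linear interpolation, numpy's default percentile behaviour.
    q1, q3 = np.percentile(samples, [25, 75])
    iqr = q3 - q1
    # Points outside [Q1 - k*IQR, Q3 + k*IQR] are treated as outliers.
    return q1 - k * iqr, q3 + k * iqr

# Example: the 25.0 sample falls beyond the upper fence and is flagged.
samples = np.array([9.8, 10.1, 10.0, 10.2, 9.9, 25.0])
lo, hi = get_tukey_fences(samples)
print(samples[(samples < lo) | (samples > hi)])  # -> [25.]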
[ "LearningStrategyEnum from running_modes.enums.logging_mode_enum import LoggingModeEnum from running_modes.enums.running_mode_enum import RunningModeEnum from running_modes.enums.generative_model_regime import GenerativeModelRegimeEnum", "import LoggingModeEnum from running_modes.enums.running_mode_enum import RunningModeEnum from running_modes.enums.generative_model_regime import GenerativeModelRegimeEnum from running_modes.enums.generative_model_parameters import", "RunningModeEnum from running_modes.enums.generative_model_regime import GenerativeModelRegimeEnum from running_modes.enums.generative_model_parameters import GenerativeModelParametersEnum from running_modes.enums.scoring_strategy_enum import ScoringStrategyEnum", "running_modes.enums.running_mode_enum import RunningModeEnum from running_modes.enums.generative_model_regime import GenerativeModelRegimeEnum from running_modes.enums.generative_model_parameters import GenerativeModelParametersEnum from running_modes.enums.scoring_strategy_enum", "from running_modes.enums.logging_mode_enum import LoggingModeEnum from running_modes.enums.running_mode_enum import RunningModeEnum from running_modes.enums.generative_model_regime import GenerativeModelRegimeEnum from", "running_modes.enums.logging_mode_enum import LoggingModeEnum from running_modes.enums.running_mode_enum import RunningModeEnum from running_modes.enums.generative_model_regime import GenerativeModelRegimeEnum from running_modes.enums.generative_model_parameters", "from running_modes.enums.running_mode_enum import RunningModeEnum from running_modes.enums.generative_model_regime import GenerativeModelRegimeEnum from running_modes.enums.generative_model_parameters import GenerativeModelParametersEnum from", "running_modes.enums.diversity_filter_enum import DiversityFilterEnum from running_modes.enums.learning_strategy_enum import LearningStrategyEnum from running_modes.enums.logging_mode_enum import LoggingModeEnum from running_modes.enums.running_mode_enum", "from running_modes.enums.diversity_filter_enum import DiversityFilterEnum from running_modes.enums.learning_strategy_enum import LearningStrategyEnum from running_modes.enums.logging_mode_enum import LoggingModeEnum from", "running_modes.enums.learning_strategy_enum import LearningStrategyEnum from running_modes.enums.logging_mode_enum import LoggingModeEnum from running_modes.enums.running_mode_enum import RunningModeEnum from running_modes.enums.generative_model_regime", "from running_modes.enums.learning_strategy_enum import LearningStrategyEnum from running_modes.enums.logging_mode_enum import LoggingModeEnum from running_modes.enums.running_mode_enum import RunningModeEnum from", "import DiversityFilterEnum from running_modes.enums.learning_strategy_enum import LearningStrategyEnum from running_modes.enums.logging_mode_enum import LoggingModeEnum from running_modes.enums.running_mode_enum import", "LoggingModeEnum from running_modes.enums.running_mode_enum import RunningModeEnum from running_modes.enums.generative_model_regime import GenerativeModelRegimeEnum from running_modes.enums.generative_model_parameters import GenerativeModelParametersEnum", "DiversityFilterEnum from running_modes.enums.learning_strategy_enum import LearningStrategyEnum from running_modes.enums.logging_mode_enum import LoggingModeEnum from running_modes.enums.running_mode_enum import RunningModeEnum", "import LearningStrategyEnum from running_modes.enums.logging_mode_enum import LoggingModeEnum from running_modes.enums.running_mode_enum 
import RunningModeEnum from running_modes.enums.generative_model_regime import", "import RunningModeEnum from running_modes.enums.generative_model_regime import GenerativeModelRegimeEnum from running_modes.enums.generative_model_parameters import GenerativeModelParametersEnum from running_modes.enums.scoring_strategy_enum import" ]
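None of the running_modes.enums modules appear in this dump. As a hypothetical illustration of the namespaced-constant pattern that import lists like this one usually pull in, the class and member names below are invented for the example and are not taken from the real package.

class RunningModeEnumSketch:
    # Hypothetical members; the real RunningModeEnum's values are not shown here.
    TRANSFER_LEARNING = "transfer_learning"
    REINFORCEMENT_LEARNING = "reinforcement_learning"

def dispatch(running_mode: str) -> str:
    # Comparing against shared constants keeps the mode strings defined
    # in exactly one place instead of scattered string literals.
    if running_mode == RunningModeEnumSketch.REINFORCEMENT_LEARNING:
        return "starting reinforcement learning run"
    return "starting " + running_mode + " run"

print(dispatch(RunningModeEnumSketch.REINFORCEMENT_LEARNING))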
[ "import MyDataloader from .transforms import Resize, Rotate, RandomCrop, CenterCrop, \\ ColorJitter, HorizontalFlip, ToTensor,", "from .transforms import Resize, Rotate, RandomCrop, CenterCrop, \\ ColorJitter, HorizontalFlip, ToTensor, \\ Compose,", "import NYUDataset from .kitti_dataloader import KITTIDataset from .dataloader import MyDataloader from .transforms import", "import create_datasets __all__ = ['MyDataloader', 'NYUDataset', 'KITTIDataset', 'Resize', 'Rotate', 'RandomCrop', 'CenterCrop', 'ColorJitter', 'HorizontalFlip',", "from .get_datasets import create_datasets __all__ = ['MyDataloader', 'NYUDataset', 'KITTIDataset', 'Resize', 'Rotate', 'RandomCrop', 'CenterCrop',", "import KITTIDataset from .dataloader import MyDataloader from .transforms import Resize, Rotate, RandomCrop, CenterCrop,", "MyDataloader from .transforms import Resize, Rotate, RandomCrop, CenterCrop, \\ ColorJitter, HorizontalFlip, ToTensor, \\", ".kitti_dataloader import KITTIDataset from .dataloader import MyDataloader from .transforms import Resize, Rotate, RandomCrop,", "CenterCrop, \\ ColorJitter, HorizontalFlip, ToTensor, \\ Compose, Crop from .get_datasets import create_datasets __all__", ".get_datasets import create_datasets __all__ = ['MyDataloader', 'NYUDataset', 'KITTIDataset', 'Resize', 'Rotate', 'RandomCrop', 'CenterCrop', 'ColorJitter',", "from .nyu_dataloader import NYUDataset from .kitti_dataloader import KITTIDataset from .dataloader import MyDataloader from", "RandomCrop, CenterCrop, \\ ColorJitter, HorizontalFlip, ToTensor, \\ Compose, Crop from .get_datasets import create_datasets", "__all__ = ['MyDataloader', 'NYUDataset', 'KITTIDataset', 'Resize', 'Rotate', 'RandomCrop', 'CenterCrop', 'ColorJitter', 'HorizontalFlip', 'ToTensor', 'Compose',", "Rotate, RandomCrop, CenterCrop, \\ ColorJitter, HorizontalFlip, ToTensor, \\ Compose, Crop from .get_datasets import", ".dataloader import MyDataloader from .transforms import Resize, Rotate, RandomCrop, CenterCrop, \\ ColorJitter, HorizontalFlip,", "= ['MyDataloader', 'NYUDataset', 'KITTIDataset', 'Resize', 'Rotate', 'RandomCrop', 'CenterCrop', 'ColorJitter', 'HorizontalFlip', 'ToTensor', 'Compose', 'Crop',", "NYUDataset from .kitti_dataloader import KITTIDataset from .dataloader import MyDataloader from .transforms import Resize,", "ColorJitter, HorizontalFlip, ToTensor, \\ Compose, Crop from .get_datasets import create_datasets __all__ = ['MyDataloader',", "KITTIDataset from .dataloader import MyDataloader from .transforms import Resize, Rotate, RandomCrop, CenterCrop, \\", "Crop from .get_datasets import create_datasets __all__ = ['MyDataloader', 'NYUDataset', 'KITTIDataset', 'Resize', 'Rotate', 'RandomCrop',", ".transforms import Resize, Rotate, RandomCrop, CenterCrop, \\ ColorJitter, HorizontalFlip, ToTensor, \\ Compose, Crop", "HorizontalFlip, ToTensor, \\ Compose, Crop from .get_datasets import create_datasets __all__ = ['MyDataloader', 'NYUDataset',", "from .kitti_dataloader import KITTIDataset from .dataloader import MyDataloader from .transforms import Resize, Rotate,", "Resize, Rotate, RandomCrop, CenterCrop, \\ ColorJitter, HorizontalFlip, ToTensor, \\ Compose, Crop from .get_datasets", "\\ Compose, Crop from .get_datasets import create_datasets __all__ = ['MyDataloader', 'NYUDataset', 'KITTIDataset', 'Resize',", "ToTensor, \\ Compose, Crop from .get_datasets import create_datasets __all__ = ['MyDataloader', 'NYUDataset', 'KITTIDataset',", "from .dataloader import MyDataloader from .transforms import Resize, Rotate, RandomCrop, 
CenterCrop, \\ ColorJitter,", ".nyu_dataloader import NYUDataset from .kitti_dataloader import KITTIDataset from .dataloader import MyDataloader from .transforms", "\\ ColorJitter, HorizontalFlip, ToTensor, \\ Compose, Crop from .get_datasets import create_datasets __all__ =", "Compose, Crop from .get_datasets import create_datasets __all__ = ['MyDataloader', 'NYUDataset', 'KITTIDataset', 'Resize', 'Rotate',", "['MyDataloader', 'NYUDataset', 'KITTIDataset', 'Resize', 'Rotate', 'RandomCrop', 'CenterCrop', 'ColorJitter', 'HorizontalFlip', 'ToTensor', 'Compose', 'Crop', 'create_datasets']", "import Resize, Rotate, RandomCrop, CenterCrop, \\ ColorJitter, HorizontalFlip, ToTensor, \\ Compose, Crop from", "create_datasets __all__ = ['MyDataloader', 'NYUDataset', 'KITTIDataset', 'Resize', 'Rotate', 'RandomCrop', 'CenterCrop', 'ColorJitter', 'HorizontalFlip', 'ToTensor'," ]
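
The `Compose` transform re-exported above follows the usual call-chaining convention; a minimal standalone sketch of that pattern (a toy re-implementation for illustration, not the package's own code):

# Minimal sketch of the Compose call-chaining pattern (illustrative only).
class Compose(object):
    def __init__(self, transforms):
        self.transforms = transforms

    def __call__(self, sample):
        # Apply each transform in order, feeding the output of one
        # into the next.
        for transform in self.transforms:
            sample = transform(sample)
        return sample

pipeline = Compose([lambda x: x * 2, lambda x: x + 1])
print(pipeline(10))  # -> 21
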
[ "valor <= 50: print \"Intervalo (25,50]\" elif valor >= 0 and valor <=", "50: print \"Intervalo (25,50]\" elif valor >= 0 and valor <= 75: print", "\"Intervalo (25,50]\" elif valor >= 0 and valor <= 75: print \"Intervalo (50,", "valor = float(input()) if valor >= 0 and valor <= 25: print \"Intervalo", "valor >= 0 and valor <= 50: print \"Intervalo (25,50]\" elif valor >=", "0 and valor <= 100: print \"Intervalo (75,100]\" else: print \"Fora de intervalo\"", "<= 50: print \"Intervalo (25,50]\" elif valor >= 0 and valor <= 75:", "valor >= 0 and valor <= 100: print \"Intervalo (75,100]\" else: print \"Fora", "\"Intervalo [0,25]\" elif valor >= 0 and valor <= 50: print \"Intervalo (25,50]\"", "print \"Intervalo (25,50]\" elif valor >= 0 and valor <= 75: print \"Intervalo", "and valor <= 50: print \"Intervalo (25,50]\" elif valor >= 0 and valor", "print \"Intervalo (50, 75]\" elif valor >= 0 and valor <= 100: print", "valor <= 75: print \"Intervalo (50, 75]\" elif valor >= 0 and valor", ">= 0 and valor <= 25: print \"Intervalo [0,25]\" elif valor >= 0", ">= 0 and valor <= 75: print \"Intervalo (50, 75]\" elif valor >=", "valor >= 0 and valor <= 25: print \"Intervalo [0,25]\" elif valor >=", "\"Intervalo (50, 75]\" elif valor >= 0 and valor <= 100: print \"Intervalo", "print \"Intervalo [0,25]\" elif valor >= 0 and valor <= 50: print \"Intervalo", ">= 0 and valor <= 100: print \"Intervalo (75,100]\" else: print \"Fora de", "<= 75: print \"Intervalo (50, 75]\" elif valor >= 0 and valor <=", "= float(input()) if valor >= 0 and valor <= 25: print \"Intervalo [0,25]\"", ">= 0 and valor <= 50: print \"Intervalo (25,50]\" elif valor >= 0", "elif valor >= 0 and valor <= 50: print \"Intervalo (25,50]\" elif valor", "if valor >= 0 and valor <= 25: print \"Intervalo [0,25]\" elif valor", "0 and valor <= 50: print \"Intervalo (25,50]\" elif valor >= 0 and", "(25,50]\" elif valor >= 0 and valor <= 75: print \"Intervalo (50, 75]\"", "elif valor >= 0 and valor <= 100: print \"Intervalo (75,100]\" else: print", "(50, 75]\" elif valor >= 0 and valor <= 100: print \"Intervalo (75,100]\"", "[0,25]\" elif valor >= 0 and valor <= 50: print \"Intervalo (25,50]\" elif", "75: print \"Intervalo (50, 75]\" elif valor >= 0 and valor <= 100:", "25: print \"Intervalo [0,25]\" elif valor >= 0 and valor <= 50: print", "0 and valor <= 75: print \"Intervalo (50, 75]\" elif valor >= 0", "75]\" elif valor >= 0 and valor <= 100: print \"Intervalo (75,100]\" else:", "<= 25: print \"Intervalo [0,25]\" elif valor >= 0 and valor <= 50:", "elif valor >= 0 and valor <= 75: print \"Intervalo (50, 75]\" elif", "and valor <= 75: print \"Intervalo (50, 75]\" elif valor >= 0 and", "valor >= 0 and valor <= 75: print \"Intervalo (50, 75]\" elif valor", "float(input()) if valor >= 0 and valor <= 25: print \"Intervalo [0,25]\" elif", "and valor <= 25: print \"Intervalo [0,25]\" elif valor >= 0 and valor", "0 and valor <= 25: print \"Intervalo [0,25]\" elif valor >= 0 and", "valor <= 25: print \"Intervalo [0,25]\" elif valor >= 0 and valor <=" ]
[ "new_filename = 'data/csv/%s.csv' % idx with open(org_filename) as input_csv: with open(new_filename, 'w') as", "0 or x > 170: continue if y < 30 or y >", "continue if y < 30 or y > 220: continue if z >", "csv.writer(output_csv) row_id = 0 for row in csv.reader(input_csv): row_id = row_id + 1", "2101 k_skip_frame = 5 for idx in range(1, k_frame_count, k_skip_frame): org_filename = 'data/csv_org/%s.csv'", "= 'data/csv_org/%s.csv' % idx new_filename = 'data/csv/%s.csv' % idx with open(org_filename) as input_csv:", "idx in range(1, k_frame_count, k_skip_frame): org_filename = 'data/csv_org/%s.csv' % idx new_filename = 'data/csv/%s.csv'", "1 if row_id % 2 == 0: continue x = (int(float(row[0]))) y =", "(int(float(row[0]))) y = (int(float(row[1]))) z = (int(float(row[2]))) w = (int(float(row[3]))) if x <", "row_id + 1 if row_id % 2 == 0: continue x = (int(float(row[0])))", "org_filename = 'data/csv_org/%s.csv' % idx new_filename = 'data/csv/%s.csv' % idx with open(org_filename) as", "with open(new_filename, 'w') as output_csv: csv_writer = csv.writer(output_csv) row_id = 0 for row", "x < 0 or x > 170: continue if y < 30 or", "if y < 30 or y > 220: continue if z > 0", "or x > 170: continue if y < 30 or y > 220:", "== 0: continue x = (int(float(row[0]))) y = (int(float(row[1]))) z = (int(float(row[2]))) w", "x > 170: continue if y < 30 or y > 220: continue", "row_id % 2 == 0: continue x = (int(float(row[0]))) y = (int(float(row[1]))) z", "> 170: continue if y < 30 or y > 220: continue if", "open(new_filename, 'w') as output_csv: csv_writer = csv.writer(output_csv) row_id = 0 for row in", "idx with open(org_filename) as input_csv: with open(new_filename, 'w') as output_csv: csv_writer = csv.writer(output_csv)", "+ 1 if row_id % 2 == 0: continue x = (int(float(row[0]))) y", "= 2101 k_skip_frame = 5 for idx in range(1, k_frame_count, k_skip_frame): org_filename =", "= csv.writer(output_csv) row_id = 0 for row in csv.reader(input_csv): row_id = row_id +", "range(1, k_frame_count, k_skip_frame): org_filename = 'data/csv_org/%s.csv' % idx new_filename = 'data/csv/%s.csv' % idx", "= row_id + 1 if row_id % 2 == 0: continue x =", "continue x = (int(float(row[0]))) y = (int(float(row[1]))) z = (int(float(row[2]))) w = (int(float(row[3])))", "y > 220: continue if z > 0 or z < -130: continue", "for idx in range(1, k_frame_count, k_skip_frame): org_filename = 'data/csv_org/%s.csv' % idx new_filename =", "'data/csv/%s.csv' % idx with open(org_filename) as input_csv: with open(new_filename, 'w') as output_csv: csv_writer", "row_id = row_id + 1 if row_id % 2 == 0: continue x", "in csv.reader(input_csv): row_id = row_id + 1 if row_id % 2 == 0:", "output_csv: csv_writer = csv.writer(output_csv) row_id = 0 for row in csv.reader(input_csv): row_id =", "170: continue if y < 30 or y > 220: continue if z", "if row_id % 2 == 0: continue x = (int(float(row[0]))) y = (int(float(row[1])))", "= (int(float(row[2]))) w = (int(float(row[3]))) if x < 0 or x > 170:", "y = (int(float(row[1]))) z = (int(float(row[2]))) w = (int(float(row[3]))) if x < 0", "(int(float(row[1]))) z = (int(float(row[2]))) w = (int(float(row[3]))) if x < 0 or x", "2 == 0: continue x = (int(float(row[0]))) y = (int(float(row[1]))) z = (int(float(row[2])))", "= 5 for idx in range(1, k_frame_count, k_skip_frame): org_filename = 'data/csv_org/%s.csv' % idx", "<reponame>jing-viz/radiohead import csv k_frame_count = 2101 k_skip_frame = 5 for idx in range(1,", "= (int(float(row[3]))) if x < 0 or x > 170: continue if y", "row in csv.reader(input_csv): row_id = 
row_id + 1 if row_id % 2 ==", "> 220: continue if z > 0 or z < -130: continue csv_writer.writerow([x,y,z,w])", "idx new_filename = 'data/csv/%s.csv' % idx with open(org_filename) as input_csv: with open(new_filename, 'w')", "csv k_frame_count = 2101 k_skip_frame = 5 for idx in range(1, k_frame_count, k_skip_frame):", "csv.reader(input_csv): row_id = row_id + 1 if row_id % 2 == 0: continue", "'data/csv_org/%s.csv' % idx new_filename = 'data/csv/%s.csv' % idx with open(org_filename) as input_csv: with", "k_skip_frame = 5 for idx in range(1, k_frame_count, k_skip_frame): org_filename = 'data/csv_org/%s.csv' %", "(int(float(row[2]))) w = (int(float(row[3]))) if x < 0 or x > 170: continue", "or y > 220: continue if z > 0 or z < -130:", "w = (int(float(row[3]))) if x < 0 or x > 170: continue if", "csv_writer = csv.writer(output_csv) row_id = 0 for row in csv.reader(input_csv): row_id = row_id", "= 0 for row in csv.reader(input_csv): row_id = row_id + 1 if row_id", "z = (int(float(row[2]))) w = (int(float(row[3]))) if x < 0 or x >", "as output_csv: csv_writer = csv.writer(output_csv) row_id = 0 for row in csv.reader(input_csv): row_id", "0: continue x = (int(float(row[0]))) y = (int(float(row[1]))) z = (int(float(row[2]))) w =", "< 30 or y > 220: continue if z > 0 or z", "in range(1, k_frame_count, k_skip_frame): org_filename = 'data/csv_org/%s.csv' % idx new_filename = 'data/csv/%s.csv' %", "% 2 == 0: continue x = (int(float(row[0]))) y = (int(float(row[1]))) z =", "for row in csv.reader(input_csv): row_id = row_id + 1 if row_id % 2", "0 for row in csv.reader(input_csv): row_id = row_id + 1 if row_id %", "< 0 or x > 170: continue if y < 30 or y", "row_id = 0 for row in csv.reader(input_csv): row_id = row_id + 1 if", "k_frame_count, k_skip_frame): org_filename = 'data/csv_org/%s.csv' % idx new_filename = 'data/csv/%s.csv' % idx with", "if x < 0 or x > 170: continue if y < 30", "k_frame_count = 2101 k_skip_frame = 5 for idx in range(1, k_frame_count, k_skip_frame): org_filename", "import csv k_frame_count = 2101 k_skip_frame = 5 for idx in range(1, k_frame_count,", "= 'data/csv/%s.csv' % idx with open(org_filename) as input_csv: with open(new_filename, 'w') as output_csv:", "5 for idx in range(1, k_frame_count, k_skip_frame): org_filename = 'data/csv_org/%s.csv' % idx new_filename", "(int(float(row[3]))) if x < 0 or x > 170: continue if y <", "k_skip_frame): org_filename = 'data/csv_org/%s.csv' % idx new_filename = 'data/csv/%s.csv' % idx with open(org_filename)", "open(org_filename) as input_csv: with open(new_filename, 'w') as output_csv: csv_writer = csv.writer(output_csv) row_id =", "as input_csv: with open(new_filename, 'w') as output_csv: csv_writer = csv.writer(output_csv) row_id = 0", "% idx new_filename = 'data/csv/%s.csv' % idx with open(org_filename) as input_csv: with open(new_filename,", "'w') as output_csv: csv_writer = csv.writer(output_csv) row_id = 0 for row in csv.reader(input_csv):", "y < 30 or y > 220: continue if z > 0 or", "= (int(float(row[1]))) z = (int(float(row[2]))) w = (int(float(row[3]))) if x < 0 or", "% idx with open(org_filename) as input_csv: with open(new_filename, 'w') as output_csv: csv_writer =", "= (int(float(row[0]))) y = (int(float(row[1]))) z = (int(float(row[2]))) w = (int(float(row[3]))) if x", "x = (int(float(row[0]))) y = (int(float(row[1]))) z = (int(float(row[2]))) w = (int(float(row[3]))) if", "with open(org_filename) as input_csv: with open(new_filename, 'w') as output_csv: csv_writer = csv.writer(output_csv) row_id", "30 or y > 220: continue if z > 
0 or z <", "input_csv: with open(new_filename, 'w') as output_csv: csv_writer = csv.writer(output_csv) row_id = 0 for" ]
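
A quick standalone check of the bounding-box filter above on one synthetic row (the values are made up for illustration):

# Standalone check of the row filter (synthetic example row).
row = ["12.0", "100.0", "-50.0", "3.0"]
x, y, z, w = (int(float(v)) for v in row)
keep = (0 <= x <= 170) and (30 <= y <= 220) and (-130 <= z <= 0)
print(keep)  # -> True: this row would be written to the output CSV
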
[ "def last_known_position(self): \"\"\"Grabs the player's last known location from the report set.\"\"\" try:", "is None: return last_spotted elif last_spotted is None: return last_filed else: if last_filed.reported_date", "from the report set.\"\"\" try: last_filed = self.report_set.filter(zombies_only=False) last_filed = last_filed.order_by('-reported_date')[0] except IndexError:", "player's last known location from the report set.\"\"\" try: last_filed = self.report_set.filter(zombies_only=False) last_filed", "= None try: last_spotted = self.reported_at.order_by('-reported_date')[0] except IndexError: last_spotted = None if last_filed", "the player's last known location from the report set.\"\"\" try: last_filed = self.report_set.filter(zombies_only=False)", "the report set.\"\"\" try: last_filed = self.report_set.filter(zombies_only=False) last_filed = last_filed.order_by('-reported_date')[0] except IndexError: last_filed", "if last_filed is None and last_spotted is None: return u\"Never seen\" else: if", "last_spotted = self.reported_at.order_by('-reported_date')[0] except IndexError: last_spotted = None if last_filed is None and", "None: return last_spotted elif last_spotted is None: return last_filed else: if last_filed.reported_date >=", "null=False, blank=False) color_code = models.CharField(max_length=7, null=False, blank=False) def __unicode__(self): return self.name class Player(models.Model):", "report set.\"\"\" try: last_filed = self.report_set.filter(zombies_only=False) last_filed = last_filed.order_by('-reported_date')[0] except IndexError: last_filed =", "= 'categories' name = models.CharField(max_length=25, null=False, blank=False) color_code = models.CharField(max_length=7, null=False, blank=False) def", "location from the report set.\"\"\" try: last_filed = self.report_set.filter(zombies_only=False) last_filed = last_filed.order_by('-reported_date')[0] except", "blank=False) def __unicode__(self): return self.name class Player(models.Model): name = models.CharField(max_length=50, null=False, db_index=True) profile_id", "last_filed = last_filed.order_by('-reported_date')[0] except IndexError: last_filed = None try: last_spotted = self.reported_at.order_by('-reported_date')[0] except", "__unicode__(self): return self.name class Player(models.Model): name = models.CharField(max_length=50, null=False, db_index=True) profile_id = models.IntegerField(null=False,", "= last_filed.order_by('-reported_date')[0] except IndexError: last_filed = None try: last_spotted = self.reported_at.order_by('-reported_date')[0] except IndexError:", "group_name = models.CharField(max_length=50, blank=True, null=True, default=None, db_index=True) category = models.ForeignKey(Category, null=True, blank=True) join_date", "= models.CharField(max_length=50, null=False, db_index=True) profile_id = models.IntegerField(null=False, unique=True, db_index=True) group_name = models.CharField(max_length=50, blank=True,", "= models.CharField(max_length=7, null=False, blank=False) def __unicode__(self): return self.name class Player(models.Model): name = models.CharField(max_length=50,", "= models.BooleanField(default=False, db_index=True) def last_known_position(self): \"\"\"Grabs the player's last known location from the", "= models.CharField(max_length=50, blank=True, null=True, default=None, db_index=True) category = models.ForeignKey(Category, null=True, blank=True) join_date =", "last_filed = None try: last_spotted = self.reported_at.order_by('-reported_date')[0] except IndexError: last_spotted = None 
if", "and last_spotted is None: return u\"Never seen\" else: if last_filed is None: return", "None try: last_spotted = self.reported_at.order_by('-reported_date')[0] except IndexError: last_spotted = None if last_filed is", "None: return u\"Never seen\" else: if last_filed is None: return last_spotted elif last_spotted", "is_dead = models.BooleanField(default=False, db_index=True) def last_known_position(self): \"\"\"Grabs the player's last known location from", "'categories' name = models.CharField(max_length=25, null=False, blank=False) color_code = models.CharField(max_length=7, null=False, blank=False) def __unicode__(self):", "= self.reported_at.order_by('-reported_date')[0] except IndexError: last_spotted = None if last_filed is None and last_spotted", "None: return last_filed else: if last_filed.reported_date >= last_spotted.reported_date: return last_filed else: return last_spotted", "None and last_spotted is None: return u\"Never seen\" else: if last_filed is None:", "blank=False) color_code = models.CharField(max_length=7, null=False, blank=False) def __unicode__(self): return self.name class Player(models.Model): name", "is None: return last_filed else: if last_filed.reported_date >= last_spotted.reported_date: return last_filed else: return", "blank=True, null=True, default=None, db_index=True) category = models.ForeignKey(Category, null=True, blank=True) join_date = models.DateTimeField(default=datetime.datetime.now) scrape_date", "default=None, db_index=True) category = models.ForeignKey(Category, null=True, blank=True) join_date = models.DateTimeField(default=datetime.datetime.now) scrape_date = models.DateTimeField(auto_now=True,", "except IndexError: last_filed = None try: last_spotted = self.reported_at.order_by('-reported_date')[0] except IndexError: last_spotted =", "seen\" else: if last_filed is None: return last_spotted elif last_spotted is None: return", "null=True, default=None, db_index=True) category = models.ForeignKey(Category, null=True, blank=True) join_date = models.DateTimeField(default=datetime.datetime.now) scrape_date =", "db_index=True) profile_id = models.IntegerField(null=False, unique=True, db_index=True) group_name = models.CharField(max_length=50, blank=True, null=True, default=None, db_index=True)", "class Player(models.Model): name = models.CharField(max_length=50, null=False, db_index=True) profile_id = models.IntegerField(null=False, unique=True, db_index=True) group_name", "last_filed else: if last_filed.reported_date >= last_spotted.reported_date: return last_filed else: return last_spotted def __unicode__(self):", "blank=True) join_date = models.DateTimeField(default=datetime.datetime.now) scrape_date = models.DateTimeField(auto_now=True, auto_now_add=True) is_dead = models.BooleanField(default=False, db_index=True) def", "Category(models.Model): class Meta: verbose_name_plural = 'categories' name = models.CharField(max_length=25, null=False, blank=False) color_code =", "last_filed.order_by('-reported_date')[0] except IndexError: last_filed = None try: last_spotted = self.reported_at.order_by('-reported_date')[0] except IndexError: last_spotted", "last_spotted elif last_spotted is None: return last_filed else: if last_filed.reported_date >= last_spotted.reported_date: return", "known location from the report set.\"\"\" try: last_filed = self.report_set.filter(zombies_only=False) last_filed = last_filed.order_by('-reported_date')[0]", "scrape_date = models.DateTimeField(auto_now=True, auto_now_add=True) is_dead = 
models.BooleanField(default=False, db_index=True) def last_known_position(self): \"\"\"Grabs the player's", "verbose_name_plural = 'categories' name = models.CharField(max_length=25, null=False, blank=False) color_code = models.CharField(max_length=7, null=False, blank=False)", "def __unicode__(self): return self.name class Player(models.Model): name = models.CharField(max_length=50, null=False, db_index=True) profile_id =", "models.CharField(max_length=25, null=False, blank=False) color_code = models.CharField(max_length=7, null=False, blank=False) def __unicode__(self): return self.name class", "last_spotted is None: return u\"Never seen\" else: if last_filed is None: return last_spotted", "models.DateTimeField(default=datetime.datetime.now) scrape_date = models.DateTimeField(auto_now=True, auto_now_add=True) is_dead = models.BooleanField(default=False, db_index=True) def last_known_position(self): \"\"\"Grabs the", "last_filed is None: return last_spotted elif last_spotted is None: return last_filed else: if", "self.name class Player(models.Model): name = models.CharField(max_length=50, null=False, db_index=True) profile_id = models.IntegerField(null=False, unique=True, db_index=True)", "= models.CharField(max_length=25, null=False, blank=False) color_code = models.CharField(max_length=7, null=False, blank=False) def __unicode__(self): return self.name", "auto_now_add=True) is_dead = models.BooleanField(default=False, db_index=True) def last_known_position(self): \"\"\"Grabs the player's last known location", "db_index=True) group_name = models.CharField(max_length=50, blank=True, null=True, default=None, db_index=True) category = models.ForeignKey(Category, null=True, blank=True)", "Meta: verbose_name_plural = 'categories' name = models.CharField(max_length=25, null=False, blank=False) color_code = models.CharField(max_length=7, null=False,", "IndexError: last_spotted = None if last_filed is None and last_spotted is None: return", "return self.name class Player(models.Model): name = models.CharField(max_length=50, null=False, db_index=True) profile_id = models.IntegerField(null=False, unique=True,", "last_filed is None and last_spotted is None: return u\"Never seen\" else: if last_filed", "models.ForeignKey(Category, null=True, blank=True) join_date = models.DateTimeField(default=datetime.datetime.now) scrape_date = models.DateTimeField(auto_now=True, auto_now_add=True) is_dead = models.BooleanField(default=False,", "except IndexError: last_spotted = None if last_filed is None and last_spotted is None:", "elif last_spotted is None: return last_filed else: if last_filed.reported_date >= last_spotted.reported_date: return last_filed", "class Category(models.Model): class Meta: verbose_name_plural = 'categories' name = models.CharField(max_length=25, null=False, blank=False) color_code", "category = models.ForeignKey(Category, null=True, blank=True) join_date = models.DateTimeField(default=datetime.datetime.now) scrape_date = models.DateTimeField(auto_now=True, auto_now_add=True) is_dead", "django.db import models class Category(models.Model): class Meta: verbose_name_plural = 'categories' name = models.CharField(max_length=25,", "\"\"\"Grabs the player's last known location from the report set.\"\"\" try: last_filed =", "unique=True, db_index=True) group_name = models.CharField(max_length=50, blank=True, null=True, default=None, db_index=True) category = models.ForeignKey(Category, null=True,", "name = models.CharField(max_length=25, null=False, blank=False) color_code = 
models.CharField(max_length=7, null=False, blank=False) def __unicode__(self): return", "import models class Category(models.Model): class Meta: verbose_name_plural = 'categories' name = models.CharField(max_length=25, null=False,", "try: last_spotted = self.reported_at.order_by('-reported_date')[0] except IndexError: last_spotted = None if last_filed is None", "if last_filed.reported_date >= last_spotted.reported_date: return last_filed else: return last_spotted def __unicode__(self): return self.name", "db_index=True) def last_known_position(self): \"\"\"Grabs the player's last known location from the report set.\"\"\"", "datetime from django.db import models class Category(models.Model): class Meta: verbose_name_plural = 'categories' name", "is None and last_spotted is None: return u\"Never seen\" else: if last_filed is", "return last_spotted elif last_spotted is None: return last_filed else: if last_filed.reported_date >= last_spotted.reported_date:", "from django.db import models class Category(models.Model): class Meta: verbose_name_plural = 'categories' name =", "= models.ForeignKey(Category, null=True, blank=True) join_date = models.DateTimeField(default=datetime.datetime.now) scrape_date = models.DateTimeField(auto_now=True, auto_now_add=True) is_dead =", "last_known_position(self): \"\"\"Grabs the player's last known location from the report set.\"\"\" try: last_filed", "models.IntegerField(null=False, unique=True, db_index=True) group_name = models.CharField(max_length=50, blank=True, null=True, default=None, db_index=True) category = models.ForeignKey(Category,", "u\"Never seen\" else: if last_filed is None: return last_spotted elif last_spotted is None:", "None if last_filed is None and last_spotted is None: return u\"Never seen\" else:", "models.CharField(max_length=7, null=False, blank=False) def __unicode__(self): return self.name class Player(models.Model): name = models.CharField(max_length=50, null=False,", "return last_filed else: if last_filed.reported_date >= last_spotted.reported_date: return last_filed else: return last_spotted def", "color_code = models.CharField(max_length=7, null=False, blank=False) def __unicode__(self): return self.name class Player(models.Model): name =", "last known location from the report set.\"\"\" try: last_filed = self.report_set.filter(zombies_only=False) last_filed =", "= None if last_filed is None and last_spotted is None: return u\"Never seen\"", "name = models.CharField(max_length=50, null=False, db_index=True) profile_id = models.IntegerField(null=False, unique=True, db_index=True) group_name = models.CharField(max_length=50,", "try: last_filed = self.report_set.filter(zombies_only=False) last_filed = last_filed.order_by('-reported_date')[0] except IndexError: last_filed = None try:", "last_spotted = None if last_filed is None and last_spotted is None: return u\"Never", "is None: return u\"Never seen\" else: if last_filed is None: return last_spotted elif", "set.\"\"\" try: last_filed = self.report_set.filter(zombies_only=False) last_filed = last_filed.order_by('-reported_date')[0] except IndexError: last_filed = None", "self.reported_at.order_by('-reported_date')[0] except IndexError: last_spotted = None if last_filed is None and last_spotted is", "models.CharField(max_length=50, blank=True, null=True, default=None, db_index=True) category = models.ForeignKey(Category, null=True, blank=True) join_date = models.DateTimeField(default=datetime.datetime.now)", "= models.DateTimeField(auto_now=True, auto_now_add=True) is_dead = 
models.BooleanField(default=False, db_index=True) def last_known_position(self): \"\"\"Grabs the player's last", "Player(models.Model): name = models.CharField(max_length=50, null=False, db_index=True) profile_id = models.IntegerField(null=False, unique=True, db_index=True) group_name =", "= models.DateTimeField(default=datetime.datetime.now) scrape_date = models.DateTimeField(auto_now=True, auto_now_add=True) is_dead = models.BooleanField(default=False, db_index=True) def last_known_position(self): \"\"\"Grabs", "last_spotted is None: return last_filed else: if last_filed.reported_date >= last_spotted.reported_date: return last_filed else:", "import datetime from django.db import models class Category(models.Model): class Meta: verbose_name_plural = 'categories'", "else: if last_filed is None: return last_spotted elif last_spotted is None: return last_filed", "db_index=True) category = models.ForeignKey(Category, null=True, blank=True) join_date = models.DateTimeField(default=datetime.datetime.now) scrape_date = models.DateTimeField(auto_now=True, auto_now_add=True)", "return u\"Never seen\" else: if last_filed is None: return last_spotted elif last_spotted is", "if last_filed is None: return last_spotted elif last_spotted is None: return last_filed else:", "models.DateTimeField(auto_now=True, auto_now_add=True) is_dead = models.BooleanField(default=False, db_index=True) def last_known_position(self): \"\"\"Grabs the player's last known", "join_date = models.DateTimeField(default=datetime.datetime.now) scrape_date = models.DateTimeField(auto_now=True, auto_now_add=True) is_dead = models.BooleanField(default=False, db_index=True) def last_known_position(self):", "models class Category(models.Model): class Meta: verbose_name_plural = 'categories' name = models.CharField(max_length=25, null=False, blank=False)", "= models.IntegerField(null=False, unique=True, db_index=True) group_name = models.CharField(max_length=50, blank=True, null=True, default=None, db_index=True) category =", "models.CharField(max_length=50, null=False, db_index=True) profile_id = models.IntegerField(null=False, unique=True, db_index=True) group_name = models.CharField(max_length=50, blank=True, null=True,", "self.report_set.filter(zombies_only=False) last_filed = last_filed.order_by('-reported_date')[0] except IndexError: last_filed = None try: last_spotted = self.reported_at.order_by('-reported_date')[0]", "class Meta: verbose_name_plural = 'categories' name = models.CharField(max_length=25, null=False, blank=False) color_code = models.CharField(max_length=7,", "profile_id = models.IntegerField(null=False, unique=True, db_index=True) group_name = models.CharField(max_length=50, blank=True, null=True, default=None, db_index=True) category", "models.BooleanField(default=False, db_index=True) def last_known_position(self): \"\"\"Grabs the player's last known location from the report", "IndexError: last_filed = None try: last_spotted = self.reported_at.order_by('-reported_date')[0] except IndexError: last_spotted = None", "else: if last_filed.reported_date >= last_spotted.reported_date: return last_filed else: return last_spotted def __unicode__(self): return", "<gh_stars>1-10 import datetime from django.db import models class Category(models.Model): class Meta: verbose_name_plural =", "null=False, db_index=True) profile_id = models.IntegerField(null=False, unique=True, db_index=True) group_name = models.CharField(max_length=50, blank=True, null=True, default=None,", "null=False, blank=False) def __unicode__(self): 
return self.name class Player(models.Model): name = models.CharField(max_length=50, null=False, db_index=True)", "= self.report_set.filter(zombies_only=False) last_filed = last_filed.order_by('-reported_date')[0] except IndexError: last_filed = None try: last_spotted =", "null=True, blank=True) join_date = models.DateTimeField(default=datetime.datetime.now) scrape_date = models.DateTimeField(auto_now=True, auto_now_add=True) is_dead = models.BooleanField(default=False, db_index=True)", "last_filed = self.report_set.filter(zombies_only=False) last_filed = last_filed.order_by('-reported_date')[0] except IndexError: last_filed = None try: last_spotted" ]
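
The selection rule inside `last_known_position` (the newer of two possible reports wins, with fallbacks when one is missing) can be checked in isolation; a plain-Python sketch with a hypothetical `Report` stand-in, no Django required:

import datetime
from collections import namedtuple

# Hypothetical stand-in for a report row; only reported_date matters here.
Report = namedtuple('Report', 'reported_date')

def latest_report(last_filed, last_spotted):
    # Mirrors the branch structure of Player.last_known_position.
    if last_filed is None and last_spotted is None:
        return u"Never seen"
    if last_filed is None:
        return last_spotted
    if last_spotted is None:
        return last_filed
    if last_filed.reported_date >= last_spotted.reported_date:
        return last_filed
    return last_spotted

print(latest_report(None, None))  # -> Never seen
print(latest_report(Report(datetime.datetime(2011, 1, 2)), None).reported_date)
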
[ "- k + 1)).most_common() occur.update({word[0] for word in words if word[1] == t})", "Counter def find_occurrencies(): text = str(input()) num_inputs = input().split(\" \") k = int(num_inputs[0])", "= int(num_inputs[1]) t = int(num_inputs[2]) curr_pos = 0 occur = set() for curr_pos,", "Counter(curr_str[i:i+k] for i in range(len(curr_str) - k + 1)).most_common() occur.update({word[0] for word in", "curr_pos, i in enumerate(range(len(text) - L), 0): curr_str = text[curr_pos:L + curr_pos +", "in enumerate(range(len(text) - L), 0): curr_str = text[curr_pos:L + curr_pos + 1] words", "curr_pos = 0 occur = set() for curr_pos, i in enumerate(range(len(text) - L),", "def find_occurrencies(): text = str(input()) num_inputs = input().split(\" \") k = int(num_inputs[0]) L", "= 0 occur = set() for curr_pos, i in enumerate(range(len(text) - L), 0):", "= int(num_inputs[0]) L = int(num_inputs[1]) t = int(num_inputs[2]) curr_pos = 0 occur =", "collections import Counter def find_occurrencies(): text = str(input()) num_inputs = input().split(\" \") k", "int(num_inputs[2]) curr_pos = 0 occur = set() for curr_pos, i in enumerate(range(len(text) -", "enumerate(range(len(text) - L), 0): curr_str = text[curr_pos:L + curr_pos + 1] words =", "text = str(input()) num_inputs = input().split(\" \") k = int(num_inputs[0]) L = int(num_inputs[1])", "+ 1)).most_common() occur.update({word[0] for word in words if word[1] == t}) print(\" \".join(sorted(list(occur))))", "input().split(\" \") k = int(num_inputs[0]) L = int(num_inputs[1]) t = int(num_inputs[2]) curr_pos =", "occur.update({word[0] for word in words if word[1] == t}) print(\" \".join(sorted(list(occur)))) if __name__", "words = Counter(curr_str[i:i+k] for i in range(len(curr_str) - k + 1)).most_common() occur.update({word[0] for", "range(len(curr_str) - k + 1)).most_common() occur.update({word[0] for word in words if word[1] ==", "\") k = int(num_inputs[0]) L = int(num_inputs[1]) t = int(num_inputs[2]) curr_pos = 0", "L = int(num_inputs[1]) t = int(num_inputs[2]) curr_pos = 0 occur = set() for", "i in range(len(curr_str) - k + 1)).most_common() occur.update({word[0] for word in words if", "= input().split(\" \") k = int(num_inputs[0]) L = int(num_inputs[1]) t = int(num_inputs[2]) curr_pos", "= int(num_inputs[2]) curr_pos = 0 occur = set() for curr_pos, i in enumerate(range(len(text)", "k + 1)).most_common() occur.update({word[0] for word in words if word[1] == t}) print(\"", "word in words if word[1] == t}) print(\" \".join(sorted(list(occur)))) if __name__ == \"__main__\":", "import Counter def find_occurrencies(): text = str(input()) num_inputs = input().split(\" \") k =", "from collections import Counter def find_occurrencies(): text = str(input()) num_inputs = input().split(\" \")", "find_occurrencies(): text = str(input()) num_inputs = input().split(\" \") k = int(num_inputs[0]) L =", "t = int(num_inputs[2]) curr_pos = 0 occur = set() for curr_pos, i in", "num_inputs = input().split(\" \") k = int(num_inputs[0]) L = int(num_inputs[1]) t = int(num_inputs[2])", "set() for curr_pos, i in enumerate(range(len(text) - L), 0): curr_str = text[curr_pos:L +", "for i in range(len(curr_str) - k + 1)).most_common() occur.update({word[0] for word in words", "= Counter(curr_str[i:i+k] for i in range(len(curr_str) - k + 1)).most_common() occur.update({word[0] for word", "int(num_inputs[1]) t = int(num_inputs[2]) curr_pos = 0 occur = set() for curr_pos, i", "= text[curr_pos:L + curr_pos + 1] words = Counter(curr_str[i:i+k] for i in range(len(curr_str)", "curr_pos + 1] 
words = Counter(curr_str[i:i+k] for i in range(len(curr_str) - k +", "0): curr_str = text[curr_pos:L + curr_pos + 1] words = Counter(curr_str[i:i+k] for i", "1] words = Counter(curr_str[i:i+k] for i in range(len(curr_str) - k + 1)).most_common() occur.update({word[0]", "= set() for curr_pos, i in enumerate(range(len(text) - L), 0): curr_str = text[curr_pos:L", "+ 1] words = Counter(curr_str[i:i+k] for i in range(len(curr_str) - k + 1)).most_common()", "str(input()) num_inputs = input().split(\" \") k = int(num_inputs[0]) L = int(num_inputs[1]) t =", "k = int(num_inputs[0]) L = int(num_inputs[1]) t = int(num_inputs[2]) curr_pos = 0 occur", "text[curr_pos:L + curr_pos + 1] words = Counter(curr_str[i:i+k] for i in range(len(curr_str) -", "- L), 0): curr_str = text[curr_pos:L + curr_pos + 1] words = Counter(curr_str[i:i+k]", "+ curr_pos + 1] words = Counter(curr_str[i:i+k] for i in range(len(curr_str) - k", "= str(input()) num_inputs = input().split(\" \") k = int(num_inputs[0]) L = int(num_inputs[1]) t", "0 occur = set() for curr_pos, i in enumerate(range(len(text) - L), 0): curr_str", "in range(len(curr_str) - k + 1)).most_common() occur.update({word[0] for word in words if word[1]", "L), 0): curr_str = text[curr_pos:L + curr_pos + 1] words = Counter(curr_str[i:i+k] for", "curr_str = text[curr_pos:L + curr_pos + 1] words = Counter(curr_str[i:i+k] for i in", "for curr_pos, i in enumerate(range(len(text) - L), 0): curr_str = text[curr_pos:L + curr_pos", "for word in words if word[1] == t}) print(\" \".join(sorted(list(occur)))) if __name__ ==", "i in enumerate(range(len(text) - L), 0): curr_str = text[curr_pos:L + curr_pos + 1]", "occur = set() for curr_pos, i in enumerate(range(len(text) - L), 0): curr_str =", "in words if word[1] == t}) print(\" \".join(sorted(list(occur)))) if __name__ == \"__main__\": find_occurrencies()", "1)).most_common() occur.update({word[0] for word in words if word[1] == t}) print(\" \".join(sorted(list(occur)))) if", "int(num_inputs[0]) L = int(num_inputs[1]) t = int(num_inputs[2]) curr_pos = 0 occur = set()" ]
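
A hand-checkable run of the windowed k-mer count, with made-up inputs and no stdin needed:

from collections import Counter

# Hand-checkable example: the single window "ATATATA" holds AT and TA
# three times each, so both qualify with t = 3.
text, k, L, t = "ATATATA", 2, 7, 3
occur = set()
for pos in range(len(text) - L + 1):
    window = text[pos:pos + L]
    counts = Counter(window[i:i + k] for i in range(len(window) - k + 1))
    occur.update(word for word, n in counts.items() if n == t)
print(" ".join(sorted(occur)))  # -> AT TA
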
[ "for key, value in doc.iteritems()) elif isinstance(doc, (list, tuple)): return [self.deep_sub(item) for item", "the same directory as the package spec YAML-file can be found in the", "filename, target_name=None): \"\"\" Makes sure that a file located in the same directory", "spec YAML-file can be found in the ``_hastdist`` sub-directory of the build directory", "doc): return None else: raise TypeError(\"unexpected item in documents of type %r: %s\"", "# Copy stage dict and substitute all string arguments stage = self.deep_sub(stage) handler", "target_name = filename self._bundled_files[target_name] = filename def build_stage(handler_name=None): \"\"\" Decorator used to register", "Decorator used to register a function as a handler generating the code for", "them to live in sys.modules. \"\"\" self._modules.append(mod) def dispatch_build_stage(self, stage): # Copy stage", "basestring): return self.sub(doc) elif isinstance(doc, (int, bool, float, types.NoneType)): return doc elif (not", "all string arguments stage = self.deep_sub(stage) handler = stage['handler'] if handler not in", "is necesary to avoid them getting deallocated under our feet, as we don't", "``{{var}}`` in `s` with variables from `self.parameters` in `s`, and return resulting string.", "returned. \"\"\" if isinstance(doc, dict): return dict((key, self.deep_sub(value)) for key, value in doc.iteritems())", "self._build_stage_handlers: raise ProfileError(stage, 'build stage handler \"%s\" not registered' % handler) return self._build_stage_handlers[handler](self,", "\"\"\" Decorator used to register a function as a handler generating the code", "import ProfileError, IllegalHookFileError class PackageBuildContext(object): def __init__(self, package_name, dependency_dir_vars, parameters): import hook self._build_stage_handlers", "elif isinstance(doc, (int, bool, float, types.NoneType)): return doc elif (not doc): return None", "= filename def build_stage(handler_name=None): \"\"\" Decorator used to register a function as a", "for a given build stage. Parameters ---------- handler_name : str (optional) Name of", "\"\"\" self._build_stage_handlers[handler_name] = handler_func def register_module(self, mod): \"\"\" Hold a reference to the", ".exceptions import ProfileError, IllegalHookFileError class PackageBuildContext(object): def __init__(self, package_name, dependency_dir_vars, parameters): import hook", "function as a handler generating the code for a given build stage. Parameters", "and return resulting string. \"\"\" return substitute_profile_parameters(s, self.parameters) def deep_sub(self, doc): \"\"\" Recursively", "value in doc.iteritems()) elif isinstance(doc, (list, tuple)): return [self.deep_sub(item) for item in doc]", "found in .hook. \"\"\" import types from .utils import substitute_profile_parameters from .exceptions import", "TypeError(\"unexpected item in documents of type %r: %s\" % (type(doc), doc)) def bundle_file(self,", "a reference to the registered module; this is necesary to avoid them getting", "the package spec YAML-file can be found in the ``_hastdist`` sub-directory of the", "for a given stage handler type. 
\"\"\" self._build_stage_handlers[handler_name] = handler_func def register_module(self, mod):", "sub(self, s): \"\"\" Substitute ``{{var}}`` in `s` with variables from `self.parameters` in `s`,", "self._modules = [] self._bundled_files = {} # Available in API self.package_name = package_name", "\"\"\" Recursively walk the document `doc`, and for all non-key strings, make a", "that are part of stack descriptions. A significant portion of the package building", "files are re-loaded for every package build, and so decorators etc. are run", "generating the code for a given build stage. Parameters ---------- handler_name : str", "located in the same directory as the package spec YAML-file can be found", "stage['handler'] if handler not in self._build_stage_handlers: raise ProfileError(stage, 'build stage handler \"%s\" not", "(int, bool, float, types.NoneType)): return doc elif (not doc): return None else: raise", "doc] elif isinstance(doc, basestring): return self.sub(doc) elif isinstance(doc, (int, bool, float, types.NoneType)): return", "hook.bash_handler} self._modules = [] self._bundled_files = {} # Available in API self.package_name =", "return doc elif (not doc): return None else: raise TypeError(\"unexpected item in documents", "sys.modules. \"\"\" self._modules.append(mod) def dispatch_build_stage(self, stage): # Copy stage dict and substitute all", "package_name self.parameters = dict(parameters) self.dependency_dir_vars = list(dependency_dir_vars) def register_build_stage_handler(self, handler_name, handler_func): \"\"\" Registers", "\"\"\" Hold a reference to the registered module; this is necesary to avoid", "self._bundled_files[target_name] = filename def build_stage(handler_name=None): \"\"\" Decorator used to register a function as", "# Available in API self.package_name = package_name self.parameters = dict(parameters) self.dependency_dir_vars = list(dependency_dir_vars)", "are part of stack descriptions. A significant portion of the package building logic", "self.sub(doc) elif isinstance(doc, (int, bool, float, types.NoneType)): return doc elif (not doc): return", "% handler) return self._build_stage_handlers[handler](self, stage) def sub(self, s): \"\"\" Substitute ``{{var}}`` in `s`", "%r: %s\" % (type(doc), doc)) def bundle_file(self, filename, target_name=None): \"\"\" Makes sure that", "into here. Hook files are re-loaded for every package build, and so decorators", "as the package spec YAML-file can be found in the ``_hastdist`` sub-directory of", "def bundle_file(self, filename, target_name=None): \"\"\" Makes sure that a file located in the", "elif isinstance(doc, (list, tuple)): return [self.deep_sub(item) for item in doc] elif isinstance(doc, basestring):", "Python hook files that are part of stack descriptions. A significant portion of", "decorator(func): handler_name_ = handler_name if handler_name_ is None: handler_name_ = func.__name__ import hook", "used to HashDist to load hook files is found in .hook. \"\"\" import", "from .utils import substitute_profile_parameters from .exceptions import ProfileError, IllegalHookFileError class PackageBuildContext(object): def __init__(self,", "them getting deallocated under our feet, as we don't allow them to live", "A deep copy is returned. 
\"\"\" if isinstance(doc, dict): return dict((key, self.deep_sub(value)) for", "list(dependency_dir_vars) def register_build_stage_handler(self, handler_name, handler_func): \"\"\" Registers a function as a handler for", "raise ProfileError(stage, 'build stage handler \"%s\" not registered' % handler) return self._build_stage_handlers[handler](self, stage)", "to live in sys.modules. \"\"\" self._modules.append(mod) def dispatch_build_stage(self, stage): # Copy stage dict", ".hook. \"\"\" import types from .utils import substitute_profile_parameters from .exceptions import ProfileError, IllegalHookFileError", "descriptions. A significant portion of the package building logic should eventually find its", "copy is returned. \"\"\" if isinstance(doc, dict): return dict((key, self.deep_sub(value)) for key, value", "\"\"\" def decorator(func): handler_name_ = handler_name if handler_name_ is None: handler_name_ = func.__name__", "package building logic should eventually find its way into here. Hook files are", "walk the document `doc`, and for all non-key strings, make a substitution as", "def build_stage(handler_name=None): \"\"\" Decorator used to register a function as a handler generating", "so decorators etc. are run again. The machinery used to HashDist to load", "None: target_name = filename self._bundled_files[target_name] = filename def build_stage(handler_name=None): \"\"\" Decorator used to", "a given build stage. Parameters ---------- handler_name : str (optional) Name of the", "can be found in the ``_hastdist`` sub-directory of the build directory during the", "a handler for a given stage handler type. \"\"\" self._build_stage_handlers[handler_name] = handler_func def", "self.package_name = package_name self.parameters = dict(parameters) self.dependency_dir_vars = list(dependency_dir_vars) def register_build_stage_handler(self, handler_name, handler_func):", "def decorator(func): handler_name_ = handler_name if handler_name_ is None: handler_name_ = func.__name__ import", "class PackageBuildContext(object): def __init__(self, package_name, dependency_dir_vars, parameters): import hook self._build_stage_handlers = {'bash': hook.bash_handler}", "= {} # Available in API self.package_name = package_name self.parameters = dict(parameters) self.dependency_dir_vars", "raise TypeError(\"unexpected item in documents of type %r: %s\" % (type(doc), doc)) def", "stack descriptions. A significant portion of the package building logic should eventually find", "dict((key, self.deep_sub(value)) for key, value in doc.iteritems()) elif isinstance(doc, (list, tuple)): return [self.deep_sub(item)", "to the registered module; this is necesary to avoid them getting deallocated under", "directory during the build. \"\"\" if target_name is None: target_name = filename self._bundled_files[target_name]", ".utils import substitute_profile_parameters from .exceptions import ProfileError, IllegalHookFileError class PackageBuildContext(object): def __init__(self, package_name,", "(type(doc), doc)) def bundle_file(self, filename, target_name=None): \"\"\" Makes sure that a file located", "self._modules.append(mod) def dispatch_build_stage(self, stage): # Copy stage dict and substitute all string arguments", "function. 
\"\"\" def decorator(func): handler_name_ = handler_name if handler_name_ is None: handler_name_ =", "doc.iteritems()) elif isinstance(doc, (list, tuple)): return [self.deep_sub(item) for item in doc] elif isinstance(doc,", "target_name is None: target_name = filename self._bundled_files[target_name] = filename def build_stage(handler_name=None): \"\"\" Decorator", "doc): \"\"\" Recursively walk the document `doc`, and for all non-key strings, make", "as we don't allow them to live in sys.modules. \"\"\" self._modules.append(mod) def dispatch_build_stage(self,", "register_module(self, mod): \"\"\" Hold a reference to the registered module; this is necesary", "of the handler, defaults to the name of the function. \"\"\" def decorator(func):", "the name of the function. \"\"\" def decorator(func): handler_name_ = handler_name if handler_name_", "= self.deep_sub(stage) handler = stage['handler'] if handler not in self._build_stage_handlers: raise ProfileError(stage, 'build", "Substitute ``{{var}}`` in `s` with variables from `self.parameters` in `s`, and return resulting", "registered' % handler) return self._build_stage_handlers[handler](self, stage) def sub(self, s): \"\"\" Substitute ``{{var}}`` in", "\"%s\" not registered' % handler) return self._build_stage_handlers[handler](self, stage) def sub(self, s): \"\"\" Substitute", "\"\"\" if target_name is None: target_name = filename self._bundled_files[target_name] = filename def build_stage(handler_name=None):", "Parameters ---------- handler_name : str (optional) Name of the handler, defaults to the", "stage dict and substitute all string arguments stage = self.deep_sub(stage) handler = stage['handler']", "handler generating the code for a given build stage. Parameters ---------- handler_name :", "non-key strings, make a substitution as described in `sub`. A deep copy is", "stage) def sub(self, s): \"\"\" Substitute ``{{var}}`` in `s` with variables from `self.parameters`", "Recursively walk the document `doc`, and for all non-key strings, make a substitution", "avoid them getting deallocated under our feet, as we don't allow them to", "dict(parameters) self.dependency_dir_vars = list(dependency_dir_vars) def register_build_stage_handler(self, handler_name, handler_func): \"\"\" Registers a function as", "\"\"\" import types from .utils import substitute_profile_parameters from .exceptions import ProfileError, IllegalHookFileError class", "(list, tuple)): return [self.deep_sub(item) for item in doc] elif isinstance(doc, basestring): return self.sub(doc)", "allow them to live in sys.modules. \"\"\" self._modules.append(mod) def dispatch_build_stage(self, stage): # Copy", "handler_name : str (optional) Name of the handler, defaults to the name of", "in API self.package_name = package_name self.parameters = dict(parameters) self.dependency_dir_vars = list(dependency_dir_vars) def register_build_stage_handler(self,", "the build. \"\"\" if target_name is None: target_name = filename self._bundled_files[target_name] = filename", "types from .utils import substitute_profile_parameters from .exceptions import ProfileError, IllegalHookFileError class PackageBuildContext(object): def", "elif (not doc): return None else: raise TypeError(\"unexpected item in documents of type", "\"\"\" if isinstance(doc, dict): return dict((key, self.deep_sub(value)) for key, value in doc.iteritems()) elif", "build directory during the build. 
\"\"\" if target_name is None: target_name = filename", "= {'bash': hook.bash_handler} self._modules = [] self._bundled_files = {} # Available in API", "module; this is necesary to avoid them getting deallocated under our feet, as", "isinstance(doc, basestring): return self.sub(doc) elif isinstance(doc, (int, bool, float, types.NoneType)): return doc elif", "files that are part of stack descriptions. A significant portion of the package", "all non-key strings, make a substitution as described in `sub`. A deep copy", "handler \"%s\" not registered' % handler) return self._build_stage_handlers[handler](self, stage) def sub(self, s): \"\"\"", "in self._build_stage_handlers: raise ProfileError(stage, 'build stage handler \"%s\" not registered' % handler) return", "The API exported to Python hook files that are part of stack descriptions.", "stage): # Copy stage dict and substitute all string arguments stage = self.deep_sub(stage)", "dependency_dir_vars, parameters): import hook self._build_stage_handlers = {'bash': hook.bash_handler} self._modules = [] self._bundled_files =", "as a handler for a given stage handler type. \"\"\" self._build_stage_handlers[handler_name] = handler_func", "[self.deep_sub(item) for item in doc] elif isinstance(doc, basestring): return self.sub(doc) elif isinstance(doc, (int,", "resulting string. \"\"\" return substitute_profile_parameters(s, self.parameters) def deep_sub(self, doc): \"\"\" Recursively walk the", "= filename self._bundled_files[target_name] = filename def build_stage(handler_name=None): \"\"\" Decorator used to register a", "for all non-key strings, make a substitution as described in `sub`. A deep", "self._build_stage_handlers = {'bash': hook.bash_handler} self._modules = [] self._bundled_files = {} # Available in", "logic should eventually find its way into here. Hook files are re-loaded for", "is returned. \"\"\" if isinstance(doc, dict): return dict((key, self.deep_sub(value)) for key, value in", "as described in `sub`. A deep copy is returned. \"\"\" if isinstance(doc, dict):", "arguments stage = self.deep_sub(stage) handler = stage['handler'] if handler not in self._build_stage_handlers: raise", "handler type. \"\"\" self._build_stage_handlers[handler_name] = handler_func def register_module(self, mod): \"\"\" Hold a reference", "= list(dependency_dir_vars) def register_build_stage_handler(self, handler_name, handler_func): \"\"\" Registers a function as a handler", "Makes sure that a file located in the same directory as the package", "IllegalHookFileError class PackageBuildContext(object): def __init__(self, package_name, dependency_dir_vars, parameters): import hook self._build_stage_handlers = {'bash':", "to Python hook files that are part of stack descriptions. A significant portion", "feet, as we don't allow them to live in sys.modules. 
\"\"\" self._modules.append(mod) def", "if handler_name_ is None: handler_name_ = func.__name__ import hook hook.current_package_context.register_build_stage_handler(handler_name_, func) return func", "package_name, dependency_dir_vars, parameters): import hook self._build_stage_handlers = {'bash': hook.bash_handler} self._modules = [] self._bundled_files", "import hook self._build_stage_handlers = {'bash': hook.bash_handler} self._modules = [] self._bundled_files = {} #", "self.parameters) def deep_sub(self, doc): \"\"\" Recursively walk the document `doc`, and for all", "def __init__(self, package_name, dependency_dir_vars, parameters): import hook self._build_stage_handlers = {'bash': hook.bash_handler} self._modules =", "substitute_profile_parameters(s, self.parameters) def deep_sub(self, doc): \"\"\" Recursively walk the document `doc`, and for", "\"\"\" Substitute ``{{var}}`` in `s` with variables from `self.parameters` in `s`, and return", "<filename>hashdist/spec/hook_api.py \"\"\" The API exported to Python hook files that are part of", "substitution as described in `sub`. A deep copy is returned. \"\"\" if isinstance(doc,", "bool, float, types.NoneType)): return doc elif (not doc): return None else: raise TypeError(\"unexpected", "the build directory during the build. \"\"\" if target_name is None: target_name =", "doc)) def bundle_file(self, filename, target_name=None): \"\"\" Makes sure that a file located in", "given stage handler type. \"\"\" self._build_stage_handlers[handler_name] = handler_func def register_module(self, mod): \"\"\" Hold", "a handler generating the code for a given build stage. Parameters ---------- handler_name", ": str (optional) Name of the handler, defaults to the name of the", "register_build_stage_handler(self, handler_name, handler_func): \"\"\" Registers a function as a handler for a given", "Hold a reference to the registered module; this is necesary to avoid them", "return [self.deep_sub(item) for item in doc] elif isinstance(doc, basestring): return self.sub(doc) elif isinstance(doc,", "HashDist to load hook files is found in .hook. \"\"\" import types from", "are re-loaded for every package build, and so decorators etc. are run again.", "parameters): import hook self._build_stage_handlers = {'bash': hook.bash_handler} self._modules = [] self._bundled_files = {}", "strings, make a substitution as described in `sub`. A deep copy is returned.", "in the ``_hastdist`` sub-directory of the build directory during the build. \"\"\" if", "a function as a handler for a given stage handler type. \"\"\" self._build_stage_handlers[handler_name]", "the handler, defaults to the name of the function. \"\"\" def decorator(func): handler_name_", "in .hook. \"\"\" import types from .utils import substitute_profile_parameters from .exceptions import ProfileError,", "a function as a handler generating the code for a given build stage.", "types.NoneType)): return doc elif (not doc): return None else: raise TypeError(\"unexpected item in", "hook files is found in .hook. \"\"\" import types from .utils import substitute_profile_parameters", "here. Hook files are re-loaded for every package build, and so decorators etc.", "if target_name is None: target_name = filename self._bundled_files[target_name] = filename def build_stage(handler_name=None): \"\"\"", "def sub(self, s): \"\"\" Substitute ``{{var}}`` in `s` with variables from `self.parameters` in", "we don't allow them to live in sys.modules. 
\"\"\" self._modules.append(mod) def dispatch_build_stage(self, stage):", "PackageBuildContext(object): def __init__(self, package_name, dependency_dir_vars, parameters): import hook self._build_stage_handlers = {'bash': hook.bash_handler} self._modules", "= handler_func def register_module(self, mod): \"\"\" Hold a reference to the registered module;", "a substitution as described in `sub`. A deep copy is returned. \"\"\" if", "(not doc): return None else: raise TypeError(\"unexpected item in documents of type %r:", "is None: handler_name_ = func.__name__ import hook hook.current_package_context.register_build_stage_handler(handler_name_, func) return func return decorator", "return resulting string. \"\"\" return substitute_profile_parameters(s, self.parameters) def deep_sub(self, doc): \"\"\" Recursively walk", "YAML-file can be found in the ``_hastdist`` sub-directory of the build directory during", "and substitute all string arguments stage = self.deep_sub(stage) handler = stage['handler'] if handler", "in `s` with variables from `self.parameters` in `s`, and return resulting string. \"\"\"", "\"\"\" Registers a function as a handler for a given stage handler type.", "are run again. The machinery used to HashDist to load hook files is", "= handler_name if handler_name_ is None: handler_name_ = func.__name__ import hook hook.current_package_context.register_build_stage_handler(handler_name_, func)", "that a file located in the same directory as the package spec YAML-file", "if handler not in self._build_stage_handlers: raise ProfileError(stage, 'build stage handler \"%s\" not registered'", "to load hook files is found in .hook. \"\"\" import types from .utils", "the document `doc`, and for all non-key strings, make a substitution as described", "build stage. Parameters ---------- handler_name : str (optional) Name of the handler, defaults", "item in documents of type %r: %s\" % (type(doc), doc)) def bundle_file(self, filename,", "in the same directory as the package spec YAML-file can be found in", "stage handler \"%s\" not registered' % handler) return self._build_stage_handlers[handler](self, stage) def sub(self, s):", "the package building logic should eventually find its way into here. Hook files", "\"\"\" return substitute_profile_parameters(s, self.parameters) def deep_sub(self, doc): \"\"\" Recursively walk the document `doc`,", "same directory as the package spec YAML-file can be found in the ``_hastdist``", "handler_name_ = handler_name if handler_name_ is None: handler_name_ = func.__name__ import hook hook.current_package_context.register_build_stage_handler(handler_name_,", "not in self._build_stage_handlers: raise ProfileError(stage, 'build stage handler \"%s\" not registered' % handler)", "string. \"\"\" return substitute_profile_parameters(s, self.parameters) def deep_sub(self, doc): \"\"\" Recursively walk the document", "key, value in doc.iteritems()) elif isinstance(doc, (list, tuple)): return [self.deep_sub(item) for item in", "self._build_stage_handlers[handler](self, stage) def sub(self, s): \"\"\" Substitute ``{{var}}`` in `s` with variables from", "return self.sub(doc) elif isinstance(doc, (int, bool, float, types.NoneType)): return doc elif (not doc):", "Name of the handler, defaults to the name of the function. \"\"\" def", "live in sys.modules. \"\"\" self._modules.append(mod) def dispatch_build_stage(self, stage): # Copy stage dict and", "machinery used to HashDist to load hook files is found in .hook. 
\"\"\"", "our feet, as we don't allow them to live in sys.modules. \"\"\" self._modules.append(mod)", "`sub`. A deep copy is returned. \"\"\" if isinstance(doc, dict): return dict((key, self.deep_sub(value))", "to avoid them getting deallocated under our feet, as we don't allow them", "substitute_profile_parameters from .exceptions import ProfileError, IllegalHookFileError class PackageBuildContext(object): def __init__(self, package_name, dependency_dir_vars, parameters):", "don't allow them to live in sys.modules. \"\"\" self._modules.append(mod) def dispatch_build_stage(self, stage): #", "stage = self.deep_sub(stage) handler = stage['handler'] if handler not in self._build_stage_handlers: raise ProfileError(stage,", "during the build. \"\"\" if target_name is None: target_name = filename self._bundled_files[target_name] =", "The machinery used to HashDist to load hook files is found in .hook.", "`s`, and return resulting string. \"\"\" return substitute_profile_parameters(s, self.parameters) def deep_sub(self, doc): \"\"\"", "for every package build, and so decorators etc. are run again. The machinery", "registered module; this is necesary to avoid them getting deallocated under our feet,", "and for all non-key strings, make a substitution as described in `sub`. A", "= [] self._bundled_files = {} # Available in API self.package_name = package_name self.parameters", "build, and so decorators etc. are run again. The machinery used to HashDist", "API exported to Python hook files that are part of stack descriptions. A", "re-loaded for every package build, and so decorators etc. are run again. The", "import substitute_profile_parameters from .exceptions import ProfileError, IllegalHookFileError class PackageBuildContext(object): def __init__(self, package_name, dependency_dir_vars,", "to the name of the function. \"\"\" def decorator(func): handler_name_ = handler_name if", "of the function. \"\"\" def decorator(func): handler_name_ = handler_name if handler_name_ is None:", "s): \"\"\" Substitute ``{{var}}`` in `s` with variables from `self.parameters` in `s`, and", "used to register a function as a handler generating the code for a", "its way into here. Hook files are re-loaded for every package build, and", "the code for a given build stage. Parameters ---------- handler_name : str (optional)", "function as a handler for a given stage handler type. \"\"\" self._build_stage_handlers[handler_name] =", "build_stage(handler_name=None): \"\"\" Decorator used to register a function as a handler generating the", "return substitute_profile_parameters(s, self.parameters) def deep_sub(self, doc): \"\"\" Recursively walk the document `doc`, and", "handler for a given stage handler type. \"\"\" self._build_stage_handlers[handler_name] = handler_func def register_module(self,", "def register_build_stage_handler(self, handler_name, handler_func): \"\"\" Registers a function as a handler for a", "self.deep_sub(stage) handler = stage['handler'] if handler not in self._build_stage_handlers: raise ProfileError(stage, 'build stage", "handler_name, handler_func): \"\"\" Registers a function as a handler for a given stage", "of the build directory during the build. \"\"\" if target_name is None: target_name", "again. The machinery used to HashDist to load hook files is found in", "\"\"\" self._modules.append(mod) def dispatch_build_stage(self, stage): # Copy stage dict and substitute all string", "hook files that are part of stack descriptions. A significant portion of the", "decorators etc. are run again. 
The machinery used by HashDist to load hook files is found in .hook.
\"\"\" return substitute_profile_parameters(s, self.parameters) def", "variables from `self.parameters` in `s`, and return resulting string. \"\"\" return substitute_profile_parameters(s, self.parameters)", "the ``_hastdist`` sub-directory of the build directory during the build. \"\"\" if target_name", "__init__(self, package_name, dependency_dir_vars, parameters): import hook self._build_stage_handlers = {'bash': hook.bash_handler} self._modules = []", "filename def build_stage(handler_name=None): \"\"\" Decorator used to register a function as a handler", "to register a function as a handler generating the code for a given", "code for a given build stage. Parameters ---------- handler_name : str (optional) Name", "this is necesary to avoid them getting deallocated under our feet, as we", "as a handler generating the code for a given build stage. Parameters ----------", "exported to Python hook files that are part of stack descriptions. A significant", "build. \"\"\" if target_name is None: target_name = filename self._bundled_files[target_name] = filename def", "else: raise TypeError(\"unexpected item in documents of type %r: %s\" % (type(doc), doc))", "of the package building logic should eventually find its way into here. Hook", "defaults to the name of the function. \"\"\" def decorator(func): handler_name_ = handler_name", "float, types.NoneType)): return doc elif (not doc): return None else: raise TypeError(\"unexpected item", "handler_name_ is None: handler_name_ = func.__name__ import hook hook.current_package_context.register_build_stage_handler(handler_name_, func) return func return", "handler = stage['handler'] if handler not in self._build_stage_handlers: raise ProfileError(stage, 'build stage handler", "ProfileError, IllegalHookFileError class PackageBuildContext(object): def __init__(self, package_name, dependency_dir_vars, parameters): import hook self._build_stage_handlers =", "way into here. Hook files are re-loaded for every package build, and so", "from .exceptions import ProfileError, IllegalHookFileError class PackageBuildContext(object): def __init__(self, package_name, dependency_dir_vars, parameters): import", "{} # Available in API self.package_name = package_name self.parameters = dict(parameters) self.dependency_dir_vars =", "---------- handler_name : str (optional) Name of the handler, defaults to the name", "filename self._bundled_files[target_name] = filename def build_stage(handler_name=None): \"\"\" Decorator used to register a function", "in documents of type %r: %s\" % (type(doc), doc)) def bundle_file(self, filename, target_name=None):", "in doc.iteritems()) elif isinstance(doc, (list, tuple)): return [self.deep_sub(item) for item in doc] elif", "A significant portion of the package building logic should eventually find its way", "with variables from `self.parameters` in `s`, and return resulting string. \"\"\" return substitute_profile_parameters(s,", "documents of type %r: %s\" % (type(doc), doc)) def bundle_file(self, filename, target_name=None): \"\"\"", "portion of the package building logic should eventually find its way into here.", "handler_name if handler_name_ is None: handler_name_ = func.__name__ import hook hook.current_package_context.register_build_stage_handler(handler_name_, func) return", "{'bash': hook.bash_handler} self._modules = [] self._bundled_files = {} # Available in API self.package_name", "def register_module(self, mod): \"\"\" Hold a reference to the registered module; this is", "and so decorators etc. are run again. 
\"\"\" import types", "% (type(doc), doc)) def bundle_file(self, filename, target_name=None): \"\"\" Makes sure that a file", "found in the ``_hastdist`` sub-directory of the build directory during the build. \"\"\"", "etc. are run again. The machinery used to HashDist to load hook files", "files is found in .hook. \"\"\" import types from .utils import substitute_profile_parameters from", "Copy stage dict and substitute all string arguments stage = self.deep_sub(stage) handler =", "type %r: %s\" % (type(doc), doc)) def bundle_file(self, filename, target_name=None): \"\"\" Makes sure", "handler_func): \"\"\" Registers a function as a handler for a given stage handler", "should eventually find its way into here. Hook files are re-loaded for every", "the function. \"\"\" def decorator(func): handler_name_ = handler_name if handler_name_ is None: handler_name_", "directory as the package spec YAML-file can be found in the ``_hastdist`` sub-directory", "be found in the ``_hastdist`` sub-directory of the build directory during the build.", "[] self._bundled_files = {} # Available in API self.package_name = package_name self.parameters =", "bundle_file(self, filename, target_name=None): \"\"\" Makes sure that a file located in the same", "part of stack descriptions. A significant portion of the package building logic should", "significant portion of the package building logic should eventually find its way into", "stage. Parameters ---------- handler_name : str (optional) Name of the handler, defaults to", "is found in .hook. \"\"\" import types from .utils import substitute_profile_parameters from .exceptions", "``_hastdist`` sub-directory of the build directory during the build. \"\"\" if target_name is", "building logic should eventually find its way into here. Hook files are re-loaded", "\"\"\" Makes sure that a file located in the same directory as the", "run again. The machinery used to HashDist to load hook files is found", "`self.parameters` in `s`, and return resulting string. \"\"\" return substitute_profile_parameters(s, self.parameters) def deep_sub(self,", "stage handler type. \"\"\" self._build_stage_handlers[handler_name] = handler_func def register_module(self, mod): \"\"\" Hold a", "= dict(parameters) self.dependency_dir_vars = list(dependency_dir_vars) def register_build_stage_handler(self, handler_name, handler_func): \"\"\" Registers a function", "eventually find its way into here. Hook files are re-loaded for every package", "of stack descriptions. A significant portion of the package building logic should eventually", "package spec YAML-file can be found in the ``_hastdist`` sub-directory of the build", "handler_func def register_module(self, mod): \"\"\" Hold a reference to the registered module; this", "name of the function. \"\"\" def decorator(func): handler_name_ = handler_name if handler_name_ is", "def deep_sub(self, doc): \"\"\" Recursively walk the document `doc`, and for all non-key", "not registered' % handler) return self._build_stage_handlers[handler](self, stage) def sub(self, s): \"\"\" Substitute ``{{var}}``", "hook self._build_stage_handlers = {'bash': hook.bash_handler} self._modules = [] self._bundled_files = {} # Available", "of type %r: %s\" % (type(doc), doc)) def bundle_file(self, filename, target_name=None): \"\"\" Makes", "deallocated under our feet, as we don't allow them to live in sys.modules.", "load hook files is found in .hook. 
\"\"\" import types from .utils import", "str (optional) Name of the handler, defaults to the name of the function.", "`doc`, and for all non-key strings, make a substitution as described in `sub`.", "string arguments stage = self.deep_sub(stage) handler = stage['handler'] if handler not in self._build_stage_handlers:", "%s\" % (type(doc), doc)) def bundle_file(self, filename, target_name=None): \"\"\" Makes sure that a", "tuple)): return [self.deep_sub(item) for item in doc] elif isinstance(doc, basestring): return self.sub(doc) elif", "in `sub`. A deep copy is returned. \"\"\" if isinstance(doc, dict): return dict((key,", "make a substitution as described in `sub`. A deep copy is returned. \"\"\"", "doc elif (not doc): return None else: raise TypeError(\"unexpected item in documents of" ]
If the new count is less than the current count under this profile, then the highest-numbered instances will be removed preferentially.
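# Standalone sketch of the grow-a-profile bookkeeping described above
# (a simplification, not COT's actual code: items are plain dicts and
# profile membership is just a set of strings):
def grow_profile(items, profile, target_count):
    count = sum(1 for item in items if profile in item['profiles'])
    # First, borrow existing items (lowest sequence first) into this profile.
    for item in sorted(items, key=lambda item: item['seq']):
        if count >= target_count:
            break
        if profile not in item['profiles']:
            item['profiles'].add(profile)
            count += 1
    # Only as a last resort, create brand-new items.
    while count < target_count:
        items.append({'seq': len(items), 'profiles': set([profile])})
        count += 1

items = [{'seq': 0, 'profiles': set(['small'])},
         {'seq': 1, 'profiles': set(['small'])}]
grow_profile(items, 'large', 3)
print([sorted(item['profiles']) for item in items])
# -> [['large', 'small'], ['large', 'small'], ['large']]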
\"\"\" modified = False if len(self.item_dict) != len(XML.find_all_children( self.ovf.virtual_hw_section,", "(str): String such as 'cpu' or 'harddisk' - used as a key to", "over existing Items. # Once we've seen \"count\" items under a profile, remove", "under all profiles profile_list = self.ovf.config_profiles + [None] for profile in profile_list: count_dict[profile]", "exist, will create a new ``Item`` if :attr:`create_new` is set to ``True``; otherwise", "Item from the hardware. Args: item (OVFItem): Item to delete \"\"\" instance =", "all filters, False if not. \"\"\" if resource_type and (self.ovf.RES_MAP[resource_type] != item.get_value(self.ovf.RESOURCE_TYPE)): return", "this profile, starting with the lowest-sequence instance not already present, and only as", "in item_dict def clone_item(self, parent_item, profile_list): \"\"\"Clone an OVFItem to create a new", "if needed. Will do nothing if no Items have been changed. \"\"\" modified", "(int): Desired number of items profile_list (list): List of profiles to filter on", "resource_type = new_item.hardware_type address = new_item.get(self.ovf.ADDRESS) if address: raise NotImplementedError(\"Don't know how to", "given type. If no items of the given type exist, will create a", "profile_list): \"\"\"Clone an OVFItem to create a new instance. Args: parent_item (OVFItem): Instance", "to extract hardware information from. Raises: OVFHardwareDataError: if any data errors are seen", "Delete the existing Items: delete_count = 0 for item in list(self.ovf.virtual_hw_section): if (item.tag", "new_item_profiles) except ValueError: raise NotImplementedError(\"Don't know how to ensure a \" \"unique AddressOnParent", "count is greater than the current count under this profile, then additional instances", "if count_dict[profile] < count: new_item_profiles.append(profile) count_dict[profile] += 1 if last_item is None: logger.notice(\"No", "'serial' profile (str): Single profile identifier string to look up. Returns: int: Number", "hardware items defined by this OVF; i.e., the contents of all Items in", "as 'harddisk' or 'cpu' prop_name (str): Property name to update value_list (list): List", "parent_item (OVFItem): Instance to clone from profile_list (list): List of profiles to clone", "error handling - currently a no-op if item not in item_dict def clone_item(self,", "not sane.\"\"\" class OVFHardware(object): \"\"\"Helper class for :class:`~COT.vm_description.ovf.ovf.OVF`. Represents all hardware items defined", "a new instance. Args: parent_item (OVFItem): Instance to clone from profile_list (list): List", "their values to match profile_list (list): List of profiles to filter on Returns:", "if delta > items_to_add: items_to_add = delta return count_dict, items_to_add, last_item def _update_cloned_item(self,", "otherwise will log a warning and do nothing. Args: resource_type (str): Resource type", "self.item_dict[instance] # TODO: error handling - currently a no-op if item not in", "natural_sort(self.item_dict)] filtered_items = [] if properties is None: properties = {} for item", "(#64). for profile in self.ovf.config_profiles: if ovfitem.has_profile(profile) and profile not in profile_list: ovfitem.remove_profile(profile)", "If the new count is less than the current count under this profile,", "Single profile identifier string to look up. 
        Returns:
          int: Number of items of this type in this profile.
        Args:
          resource_type (str): Resource type such as 'cpu' or 'harddisk'
          prop_name (str): Property name to update
          new_value (str): New value to set the property to
          profile_list (list): List of profiles to filter on
              (default: apply across all profiles)
          create_new (bool): Whether to create a new entry if no items
              of this :attr:`resource_type` presently exist.
.. autosummary::
  :nosignatures:

  OVFHardware
  OVFHardwareDataError
"""
            item_profiles = set(item.get(self.ovf.ITEM_CONFIG, "").split())
            unknown_profiles = item_profiles - valid_profiles
            if unknown_profiles:
                raise OVFHardwareDataError("Unknown profile(s) {0} for "
                                           "Item instance {1}"
                                           .format(unknown_profiles, instance))
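# Standalone sketch of this sanity check: each Item may carry a
# space-separated list of profile IDs (ITEM_CONFIG), and every listed ID
# must be declared in the DeploymentOptionSection. The profile names below
# are hypothetical:
valid_profiles = set(['1CPU-1GB', '2CPU-2GB'])
for item_config in ('2CPU-2GB', '2CPU-2GB 4CPU-4GB'):
    item_profiles = set(item_config.split())
    unknown_profiles = item_profiles - valid_profiles
    print('%s -> %s' % (item_config,
                        'ok' if not unknown_profiles
                        else 'unknown %s' % sorted(unknown_profiles)))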
        Args:
          ovf (OVF): OVF instance to extract hardware information from.

        Raises:
          OVFHardwareDataError: if any data errors are seen
Items present under "no profile" will be counted against the total for each profile.
\"\"\" return (self.get_item_count_per_profile(resource_type, [profile])", "distribution and at # https://github.com/glennmatthews/cot/blob/master/LICENSE.txt. No part # of COT, including this file,", "properties, profile_list): \"\"\"Check whether the given item matches the given filters. Args: item", "# TODO - we assume that the count is the same across profiles", "item_profiles - valid_profiles if unknown_profiles: raise OVFHardwareDataError(\"Unknown profile(s) {0} for \" \"Item instance", "to filter on (default: apply across all profiles) create_new (bool): Whether to create", "{0}\" .format(instance)) logger.debug( \"OVF contains %s hardware Item elements describing %s \" \"unique", "= self.ovf.config_profiles + [None] for ovfitem in self.find_all_items(resource_type): if len(value_list): new_value = value_list.pop(0)", "count_dict[new_item_profiles[0]]) last_item = new_item items_to_add -= 1 def set_value_for_all_items(self, resource_type, prop_name, new_value, profile_list,", "class # # June 2016, <NAME> # Copyright (c) 2013-2016, 2019 the COT", "Item to validate resource_type (str): Resource type string like 'scsi' or 'serial' properties", "None # First, iterate over existing Items. # Once we've seen \"count\" items", "we'll get an error when trying to set the instance ID # on", "need to belong to? new_item_profiles = [] for profile in profile_list: if count_dict[profile]", "top-level directory of this distribution # and at https://github.com/glennmatthews/cot/blob/master/COPYRIGHT.txt. # # This file", "LICENSE.txt file found in the # top-level directory of this distribution and at", "each profile. \"\"\" count_dict = {} if not profile_list: # Get the count", "= [self.ovf.INFO, self.ovf.SYSTEM, self.ovf.ITEM] for instance in natural_sort(self.item_dict): logger.debug(\"Writing Item(s) with InstanceID %s\",", "len(value_list): new_value = value_list.pop(0) else: new_value = default for profile in profile_list: if", "if the item matches all filters, False if not. \"\"\" if resource_type and", "(str): Resource type string like 'scsi' or 'serial' profile (str): Single profile identifier", "as exc: logger.debug(exc) # Mask away the nitty-gritty details from our caller raise", "value_list.pop(0) else: new_value = default for profile in profile_list: if ovfitem.has_profile(profile): ovfitem.set_property(prop_name, new_value,", "given profile. Wrapper for :meth:`get_item_count_per_profile`. Args: resource_type (str): Resource type string like 'scsi'", "None else: return matches[0] def get_item_count(self, resource_type, profile): \"\"\"Get the number of Items", "not in self.item_dict: self.item_dict[instance] = OVFItem(self.ovf, item) else: try: self.item_dict[instance].add_item(item) except OVFItemDataError as", "such as 'cpu' or 'harddisk' - used as a key to :data:`~COT.vm_description.ovf.name_helper.OVFNameHelper1.RES_MAP` profile_list", "for \" \"Item instance {1}\" .format(unknown_profiles, instance)) if instance not in self.item_dict: self.item_dict[instance]", "caller raise OVFHardwareDataError(\"Data conflict for instance {0}\" .format(instance)) logger.debug( \"OVF contains %s hardware", "of type %s found. \" \"Will create new %s from scratch.\", resource_type, resource_type)", "\"\"\" self.ovf = ovf self.item_dict = {} valid_profiles = set(ovf.config_profiles) item_count = 0", "changed. 
\"\"\" modified = False if len(self.item_dict) != len(XML.find_all_children( self.ovf.virtual_hw_section, set([self.ovf.ITEM, self.ovf.STORAGE_ITEM, self.ovf.ETHERNET_PORT_ITEM]))):", "to Returns: tuple: ``(instance_id, ovfitem)`` \"\"\" instance = self.find_unused_instance_id() ovfitem = OVFItem(self.ovf) ovfitem.set_property(self.ovf.INSTANCE_ID,", "set_value_for_all_items(self, resource_type, prop_name, new_value, profile_list, create_new=False): \"\"\"Set a property to the given value", "this profile. \"\"\" return (self.get_item_count_per_profile(resource_type, [profile]) [profile]) def get_item_count_per_profile(self, resource_type, profile_list): \"\"\"Get the", "instance {0}\" .format(instance)) logger.debug( \"OVF contains %s hardware Item elements describing %s \"", "apply across all profiles) create_new (bool): Whether to create a new entry if", "self.ovf.RES_MAP[resource_type], profile_list) # ovftool freaks out if we leave out the ElementName on", "a warning and do nothing. Args: resource_type (str): Resource type such as 'cpu'", "for ovfitem in self.find_all_items(resource_type): for profile in profile_list: if ovfitem.has_profile(profile): count_dict[profile] += 1", "type string like 'scsi' or 'serial' profile (str): Single profile identifier string to", "developers. # See the COPYRIGHT.txt file at the top-level directory of this distribution", "profile=None): \"\"\"Find the only OVFItem of the given :attr:`resource_type`. Args: resource_type (str): Resource", "(list): List of profiles to clone into Returns: tuple: ``(instance_id, ovfitem)`` \"\"\" instance", "new_item_profiles) # Check/update other properties of the clone that should be unique: #", "profile identifier string to look up. Returns: int: Number of items of this", "resource_type, properties, profile_list): \"\"\"Check whether the given item matches the given filters. Args:", "``Address`` NotImplementedError: If updating ``AddressOnParent`` but the prior value varies across config profiles.", "elif len(matches) == 0: return None else: return matches[0] def get_item_count(self, resource_type, profile):", "type such as 'cpu' or 'harddisk' prop_name (str): Property name to update new_value", "!= len(XML.find_all_children( self.ovf.virtual_hw_section, set([self.ovf.ITEM, self.ovf.STORAGE_ITEM, self.ovf.ETHERNET_PORT_ITEM]))): modified = True else: for ovfitem in", "# so provide a simple default value. ovfitem.set_property(self.ovf.ELEMENT_NAME, resource_type, profile_list) self.item_dict[instance] = ovfitem", "ovfitem.has_profile(profile): count_dict[profile] += 1 for (profile, count) in count_dict.items(): logger.spam(\"Profile '%s' has %s", "according to the terms contained in the LICENSE.txt file. \"\"\"Representation of OVF hardware", "(bool): Whether to create a new entry if no items of this :attr:`resource_type`", "# Too many items - remove this one! ovfitem.remove_profile(profile) else: items_seen[profile] += 1", "True else: for ovfitem in self.item_dict.values(): if ovfitem.modified: modified = True break if", "and do nothing. Args: resource_type (str): Resource type such as 'cpu' or 'harddisk'", "ovfitem) def delete_item(self, item): \"\"\"Delete the given Item from the hardware. 
        Args:
          item (OVFItem): Item to delete
\"\"\" instance = int(start) while str(instance) in", "LookupError( \"Found multiple matching '{0}' Items (instances {1})\" .format(resource_type, [m.instance_id for m in", "last_item = new_item items_to_add -= 1 def set_value_for_all_items(self, resource_type, prop_name, new_value, profile_list, create_new=False):", "in the LICENSE.txt file found in the # top-level directory of this distribution", "address_list[0] # Currently we only handle integer addresses try: address_on_parent = int(address_on_parent) address_on_parent", "(str): Resource type such as 'cpu' or 'harddisk' prop_name (str): Property name to", "the one property of # an Item that uniquely identifies this set of", "\"\"\" instance = self.find_unused_instance_id(start=parent_item.instance_id) logger.spam(\"Cloning existing Item %s with new instance ID %s\",", "if ovfitem.has_profile(profile) and profile not in profile_list: ovfitem.remove_profile(profile) ovfitem.set_property(self.ovf.INSTANCE_ID, instance, profile_list) ovfitem.modified =", "Items, not all \" \"%s values were used - leftover %s\", resource_type, prop_name,", "the given :attr:`resource_type`) profile_list (list): List of profiles to filter on (default: apply", "under other profiles to this profile as well. for ovfitem in self.find_all_items(resource_type): last_item", "instance = item.get_value(self.ovf.INSTANCE_ID) if self.item_dict[instance] == item: del self.item_dict[instance] # TODO: error handling", "resource_type, properties, profile_list) return filtered_items def find_item(self, resource_type=None, properties=None, profile=None): \"\"\"Find the only", "``(instance_id, ovfitem)`` \"\"\" instance = self.find_unused_instance_id(start=parent_item.instance_id) logger.spam(\"Cloning existing Item %s with new instance", "present under \"no profile\" will be counted against the total for each profile.", "resource_type) (_, new_item) = self.new_item(resource_type, new_item_profiles) else: (_, new_item) = self.clone_item(last_item, new_item_profiles) #", "of this :attr:`resource_type` presently exist. \"\"\" ovfitem_list = self.find_all_items(resource_type) if not ovfitem_list: if", "(_, new_item) = self.new_item(resource_type, new_item_profiles) else: (_, new_item) = self.clone_item(last_item, new_item_profiles) # Check/update", "do.\", resource_type) return logger.notice(\"No existing items of type %s found. \" \"Will create", "# Delete any profiles from the parent that we don't need now, #", "instance = item.find(namespace + self.ovf.INSTANCE_ID).text # Pre-sanity check - are all of the", "profile_list) ovfitem_list = [ovfitem] for ovfitem in ovfitem_list: ovfitem.set_property(prop_name, new_value, profile_list) logger.debug(\"Updated %s", "\"\"\" return (self.get_item_count_per_profile(resource_type, [profile]) [profile]) def get_item_count_per_profile(self, resource_type, profile_list): \"\"\"Get the number of", "in matches])) elif len(matches) == 0: return None else: return matches[0] def get_item_count(self,", "len(XML.find_all_children( self.ovf.virtual_hw_section, set([self.ovf.ITEM, self.ovf.STORAGE_ITEM, self.ovf.ETHERNET_PORT_ITEM]))): modified = True else: for ovfitem in self.item_dict.values():", "{0} for \" \"Item instance {1}\" .format(unknown_profiles, instance)) if instance not in self.item_dict:", "(str): Resource type string like 'scsi' or 'serial' properties (dict): Properties and their", "defined in the OVF DeploymentOptionSection? 
item_profiles = set(item.get(self.ovf.ITEM_CONFIG, \"\").split()) unknown_profiles = item_profiles -", "the clone that should be unique: # TODO - we assume that the", "with properties %s and\" \" profiles %s\", len(filtered_items), resource_type, properties, profile_list) return filtered_items", "= self.get_item_count_per_profile(resource_type, profile_list) items_seen = dict.fromkeys(profile_list, 0) last_item = None # First, iterate", "1: raise NotImplementedError(\"AddressOnParent is not common \" \"across all profiles but has \"", "count, profile_list): \"\"\"Change profile membership of existing items as needed. Helper method for", "item_count = 0 for item in ovf.virtual_hw_section: namespace = ovf.namespace_for_item_tag(item.tag) if not namespace:", "under each profile. \"\"\" count_dict = {} if not profile_list: # Get the", "{1}\" .format(unknown_profiles, instance)) if instance not in self.item_dict: self.item_dict[instance] = OVFItem(self.ovf, item) else:", "new %s from scratch.\", resource_type, resource_type) (_, ovfitem) = self.new_item(resource_type, profile_list) ovfitem_list =", "are seen \"\"\" self.ovf = ovf self.item_dict = {} valid_profiles = set(ovf.config_profiles) item_count", "only OVFItem of the given :attr:`resource_type`. Args: resource_type (str): Resource type string like", "try: address_on_parent = int(address_on_parent) address_on_parent += 1 new_item.set_property(self.ovf.ADDRESS_ON_PARENT, str(address_on_parent), new_item_profiles) except ValueError: raise", "item.tag == self.ovf.STORAGE_ITEM or item.tag == self.ovf.ETHERNET_PORT_ITEM): self.ovf.virtual_hw_section.remove(item) delete_count += 1 logger.debug(\"Cleared %d", "ID that is not yet in use. \"\"\" instance = int(start) while str(instance)", "type for the given profile. Wrapper for :meth:`get_item_count_per_profile`. Args: resource_type (str): Resource type", "values to match profile_list (list): List of profiles to filter on Returns: bool:", "Returns: tuple: ``(instance_id, ovfitem)`` \"\"\" instance = self.find_unused_instance_id() ovfitem = OVFItem(self.ovf) ovfitem.set_property(self.ovf.INSTANCE_ID, instance,", "= True break if not modified: logger.verbose(\"No changes to hardware definition, \" \"so", "Set the profile list for all profiles, including the default profile_list = self.ovf.config_profiles", "not all \" \"%s values were used - leftover %s\", resource_type, prop_name, value_list)", "(str): New value to set the property to profile_list (list): List of profiles", "return count_dict, items_to_add, last_item def _update_cloned_item(self, new_item, new_item_profiles, item_count): \"\"\"Update a cloned item", "devices\", len(self.ovf.virtual_hw_section.findall(self.ovf.ITEM)), len(self.item_dict)) def find_unused_instance_id(self, start=1): \"\"\"Find the first available ``InstanceID`` number. Args:", "key to :data:`~COT.vm_description.ovf.name_helper.OVFNameHelper1.RES_MAP` profile_list (list): Profiles the new item should belong to Returns:", "should belong to item_count (int): How many Items of this type (including this", "\" \"across all profiles but has \" \"multiple values {0}. COT can't \"", "+= 1 items_seen[profile] += 1 # How many new Items do we need", "def new_item(self, resource_type, profile_list=None): \"\"\"Create a new OVFItem of the given type. Args:", "new_item should belong to item_count (int): How many Items of this type (including", "(list): Profiles new_item should belong to item_count (int): How many Items of this", "make it distinct from its parent. 
Helper method for :meth:`set_item_count_per_profile`. Args: new_item (OVFItem):", "across all profiles) Returns: dict: mapping profile strings to the number of items", "List of profiles to filter on (default: apply across all profiles) Returns: tuple:", "NotImplementedError: No support yet for updating ``Address`` NotImplementedError: If updating ``AddressOnParent`` but the", "# Copyright (c) 2013-2016, 2019 the COT project developers. # See the COPYRIGHT.txt", "of this type in this profile. \"\"\" return (self.get_item_count_per_profile(resource_type, [profile]) [profile]) def get_item_count_per_profile(self,", "This file is part of the Common OVF Tool (COT) project. # It", "Nothing to do.\", resource_type) return logger.notice(\"No existing items of type %s found. \"", "filtered_items = [] if properties is None: properties = {} for item in", "for :meth:`set_item_count_per_profile`. Args: resource_type (str): 'cpu', 'harddisk', etc. count (int): Desired number of", "a type. Args: resource_type (str): Device type such as 'harddisk' or 'cpu' prop_name", "how to ensure a unique \" \"Address value when cloning an Item \"", "Check/update other properties of the clone that should be unique: # TODO -", "OVFHardwareDataError: if any data errors are seen \"\"\" self.ovf = ovf self.item_dict =", "file found in the # top-level directory of this distribution and at #", "Profiles new_item should belong to item_count (int): How many Items of this type", "items_to_add = delta return count_dict, items_to_add, last_item def _update_cloned_item(self, new_item, new_item_profiles, item_count): \"\"\"Update", "'scsi' or 'serial' properties (dict): Properties and their values to match profile_list (list):", "count) in count_dict.items(): logger.spam(\"Profile '%s' has %s %s Item(s)\", profile, count, resource_type) return", "in the LICENSE.txt file. \"\"\"Representation of OVF hardware definitions. **Classes and Exceptions** ..", "\"\"\"Find all items matching the given type, properties, and profiles. Args: resource_type (str):", "# https://github.com/glennmatthews/cot/blob/master/LICENSE.txt. No part # of COT, including this file, may be copied,", "OVFHardwareDataError \"\"\" import copy import logging from COT.data_validation import natural_sort from COT.xml_file import", "Item need to belong to? new_item_profiles = [] for profile in profile_list: if", "value to consider (disregarding all lower InstanceIDs, even if available). Returns: str: An", "that already exist under another profile will be added to this profile, starting", "List of profiles to filter on (default: apply across all profiles) \"\"\" if", "available). Returns: str: An instance ID that is not yet in use. \"\"\"", "self.item_dict[instance] new_items = ovfitem.generate_items() logger.spam(\"Generated %d items\", len(new_items)) for item in new_items: XML.add_child(self.ovf.virtual_hw_section,", "new count is greater than the current count under this profile, then additional", "object describing all Items in the OVF. Args: ovf (OVF): OVF instance to", "= 0 for profile in profile_list: delta = count - items_seen[profile] if delta", "= ovf self.item_dict = {} valid_profiles = set(ovf.config_profiles) item_count = 0 for item", "added to this profile, starting with the lowest-sequence instance not already present, and", "**Classes and Exceptions** .. autosummary:: :nosignatures: OVFHardware OVFHardwareDataError \"\"\" import copy import logging", "Returns: str: An instance ID that is not yet in use. 
\"\"\" instance", "int(address_on_parent) address_on_parent += 1 new_item.set_property(self.ovf.ADDRESS_ON_PARENT, str(address_on_parent), new_item_profiles) except ValueError: raise NotImplementedError(\"Don't know how", "new_item_profiles (list): Profiles new_item should belong to item_count (int): How many Items of", "'ethernet': # Update ElementName to reflect the NIC number element_name = self.ovf.platform.guess_nic_name(item_count) new_item.set_property(self.ovf.ELEMENT_NAME,", "in profile_list: delta = count - items_seen[profile] if delta > items_to_add: items_to_add =", "associated with this # item properly defined in the OVF DeploymentOptionSection? item_profiles =", "profile_list (list): Profiles the new item should belong to Returns: tuple: ``(instance_id, ovfitem)``", "this Item ovfitem.add_profile(profile) count_dict[profile] += 1 items_seen[profile] += 1 # How many new", "profile_list) # ovftool freaks out if we leave out the ElementName on an", "create a new entry if no items of this :attr:`resource_type` presently exist. \"\"\"", "any profiles from the parent that we don't need now, # otherwise we'll", "raise NotImplementedError(\"AddressOnParent is not common \" \"across all profiles but has \" \"multiple", "items: if self.item_match(item, resource_type, properties, profile_list): filtered_items.append(item) logger.spam(\"Found %s Items of type %s", "Returns: int: Number of items of this type in this profile. \"\"\" return", "= item_profiles - valid_profiles if unknown_profiles: raise OVFHardwareDataError(\"Unknown profile(s) {0} for \" \"Item", "Items of the given type per profile. Items present under \"no profile\" will", "property to profile_list (list): List of profiles to filter on (default: apply across", "ovfitem.modified = True logger.info(\"Created new %s under profile(s) %s, InstanceID is %s\", resource_type,", "prop_name (str): Property name to update new_value (str): New value to set the", "new items\", items_to_add) while items_to_add > 0: # Which profiles does this Item", "new_item) = self.new_item(resource_type, new_item_profiles) else: (_, new_item) = self.clone_item(last_item, new_item_profiles) # Check/update other", "modified = True break if not modified: logger.verbose(\"No changes to hardware definition, \"", "1 items_seen[profile] += 1 # How many new Items do we need to", "Update ElementName to reflect the NIC number element_name = self.ovf.platform.guess_nic_name(item_count) new_item.set_property(self.ovf.ELEMENT_NAME, element_name, new_item_profiles)", "all items matching the given type, properties, and profiles. Args: resource_type (str): Resource", "new_item.set_property(self.ovf.ADDRESS_ON_PARENT, str(address_on_parent), new_item_profiles) except ValueError: raise NotImplementedError(\"Don't know how to ensure a \"", "values to match profile_list (list): List of profiles to filter on Returns: list:", "value \"\"\" if profile_list is None: profile_list = self.ovf.config_profiles + [None] for ovfitem", "if len(matches) > 1: raise LookupError( \"Found multiple matching '{0}' Items (instances {1})\"", "top-level directory of this distribution and at # https://github.com/glennmatthews/cot/blob/master/LICENSE.txt. No part # of", "data used to construct an :class:`OVFHardware` is not sane.\"\"\" class OVFHardware(object): \"\"\"Helper class", "this set of hardware items. 
    def delete_item(self, item):
        """Delete the given Item from the hardware.

        Args:
          item (OVFItem): Item to delete
        """
        instance = item.get_value(self.ovf.INSTANCE_ID)
        if self.item_dict[instance] == item:
            del self.item_dict[instance]
        # TODO: error handling - currently a no-op if item not in item_dict

    def clone_item(self, parent_item, profile_list):
        """Clone an OVFItem to create a new instance.

        Args:
          parent_item (OVFItem): Instance to clone from
          profile_list (list): List of profiles to clone into

        Returns:
          tuple: ``(instance_id, ovfitem)``
        """
        instance = self.find_unused_instance_id(start=parent_item.instance_id)
        logger.spam("Cloning existing Item %s with new instance ID %s",
                    parent_item, instance)
        ovfitem = copy.deepcopy(parent_item)
        # Delete any profiles from the parent that we don't need now,
        # otherwise we'll get an error when trying to set the instance ID
        # on our clone due to self-inconsistency (#64).
        for profile in self.ovf.config_profiles:
            if ovfitem.has_profile(profile) and profile not in profile_list:
                ovfitem.remove_profile(profile)
        ovfitem.set_property(self.ovf.INSTANCE_ID, instance, profile_list)
        ovfitem.modified = True
        self.item_dict[instance] = ovfitem
        logger.spam("Added clone of %s under %s, instance is %s",
                    parent_item, profile_list, instance)
        return (instance, ovfitem)

    def item_match(self, item, resource_type, properties, profile_list):
        """Check whether the given item matches the given filters.

        Args:
          item (OVFItem): Item to validate
          resource_type (str): Resource type string like 'scsi' or 'serial'
          properties (dict): Properties and their values to match
          profile_list (list): List of profiles to filter on

        Returns:
          bool: True if the item matches all filters, False if not.
        """
        if resource_type and (self.ovf.RES_MAP[resource_type] !=
                              item.get_value(self.ovf.RESOURCE_TYPE)):
            return False
        if profile_list:
            for profile in profile_list:
                if not item.has_profile(profile):
                    return False
        for (prop, value) in properties.items():
            if item.get_value(prop) != value:
                return False
        return True

    def find_all_items(self, resource_type=None, properties=None,
                       profile_list=None):
        """Find all items matching the given type, properties, and profiles.

        Args:
          resource_type (str): Resource type string like 'scsi' or 'serial'
          properties (dict): Properties and their values to match
          profile_list (list): List of profiles to filter on

        Returns:
          list: Matching OVFItem instances
        """
        items = [self.item_dict[instance] for instance in
                 natural_sort(self.item_dict)]
        filtered_items = []
        if properties is None:
            properties = {}
        for item in items:
            if self.item_match(item, resource_type, properties, profile_list):
                filtered_items.append(item)
        logger.spam("Found %s Items of type %s with properties %s and"
                    " profiles %s", len(filtered_items), resource_type,
                    properties, profile_list)
        return filtered_items

    def find_item(self, resource_type=None, properties=None, profile=None):
        """Find the only OVFItem of the given :attr:`resource_type`.

        Args:
          resource_type (str): Resource type string like 'scsi' or 'serial'
          properties (dict): Properties and their values to match
          profile (str): Single profile ID to search within

        Returns:
          OVFItem: Matching instance, or None

        Raises:
          LookupError: if more than one such Item exists.
        """
        matches = self.find_all_items(resource_type, properties, [profile])
        if len(matches) > 1:
            raise LookupError(
                "Found multiple matching '{0}' Items (instances {1})"
                .format(resource_type, [m.instance_id for m in matches]))
        elif len(matches) == 0:
            return None
        else:
            return matches[0]
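
    # Illustrative sketch of the lookup helpers, assuming a populated
    # OVFHardware named 'hardware' (placeholder). find_item() suits
    # singleton devices, since it raises LookupError on ambiguity, while
    # find_all_items() suits enumeration; the profile name is assumed:
    #
    #   scsi_ctrl = hardware.find_item('scsi')   # one OVFItem, or None
    #   nics = hardware.find_all_items('ethernet',
    #                                  profile_list=['2CPU-2GB'])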
    def get_item_count(self, resource_type, profile):
        """Get the number of Items of the given type for the given profile.

        Wrapper for :meth:`get_item_count_per_profile`.

        Args:
          resource_type (str): Resource type string like 'scsi' or 'serial'
          profile (str): Single profile identifier string to look up.

        Returns:
          int: Number of items of this type in this profile.
        """
        return (self.get_item_count_per_profile(resource_type, [profile])
                [profile])

    def get_item_count_per_profile(self, resource_type, profile_list):
        """Get the number of Items of the given type per profile.

        Items present under "no profile" will be counted against the total
        for each profile.

        Args:
          resource_type (str): Resource type string like 'scsi' or 'serial'
          profile_list (list): List of profiles to filter on
            (default: apply across all profiles)

        Returns:
          dict: mapping profile strings to the number of items under each
          profile.
        """
        count_dict = {}
        if not profile_list:
            # Get the count under all profiles
            profile_list = self.ovf.config_profiles + [None]
        for profile in profile_list:
            count_dict[profile] = 0
        for ovfitem in self.find_all_items(resource_type):
            for profile in profile_list:
                if ovfitem.has_profile(profile):
                    count_dict[profile] += 1
        for (profile, count) in count_dict.items():
            logger.spam("Profile '%s' has %s %s Item(s)",
                        profile, count, resource_type)
        return count_dict

    def _update_existing_item_profiles(self, resource_type,
                                       count, profile_list):
        """Change profile membership of existing items as needed.

        Helper method for :meth:`set_item_count_per_profile`.

        Args:
          resource_type (str): 'cpu', 'harddisk', etc.
          count (int): Desired number of items
          profile_list (list): List of profiles to filter on
            (default: apply across all profiles)

        Returns:
          tuple: (count_dict, items_to_add, last_item)
        """
        count_dict = self.get_item_count_per_profile(resource_type,
                                                     profile_list)
        items_seen = dict.fromkeys(profile_list, 0)
        last_item = None

        # First, iterate over existing Items.
        # Once we've seen "count" items under a profile, remove all subsequent
        # items from this profile.
        # If we don't have enough items under a profile, add any items found
        # under other profiles to this profile as well.
        for ovfitem in self.find_all_items(resource_type):
            last_item = ovfitem
            for profile in profile_list:
                if ovfitem.has_profile(profile):
                    if items_seen[profile] >= count:
                        # Too many items - remove this one!
                        ovfitem.remove_profile(profile)
                    else:
                        items_seen[profile] += 1
                else:
                    if count_dict[profile] < count:
                        # Add this profile to this Item
                        ovfitem.add_profile(profile)
                        count_dict[profile] += 1
                        items_seen[profile] += 1

        # How many new Items do we need to create in total?
        items_to_add = 0
        for profile in profile_list:
            delta = count - items_seen[profile]
            if delta > items_to_add:
                items_to_add = delta

        return count_dict, items_to_add, last_item
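
    # Worked example of the counting semantics, with assumed profile names:
    # given profiles '1CPU-1GB' and '2CPU-2GB', a disk Item carrying no
    # ovf:configuration attribute applies to every profile, so it is counted
    # once under each:
    #
    #   hardware.get_item_count_per_profile(
    #       'harddisk', ['1CPU-1GB', '2CPU-2GB'])
    #   # -> {'1CPU-1GB': 1, '2CPU-2GB': 1}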
    def _update_cloned_item(self, new_item, new_item_profiles, item_count):
        """Update a cloned item to make it distinct from its parent.

        Helper method for :meth:`set_item_count_per_profile`.

        Args:
          new_item (OVFItem): Newly cloned Item
          new_item_profiles (list): Profiles new_item should belong to
          item_count (int): How many Items of this type (including this
            item) now exist. Used with
            :meth:`COT.platform.Platform.guess_nic_name`

        Returns:
          OVFItem: Updated :param:`new_item`

        Raises:
          NotImplementedError: No support yet for updating ``Address``
          NotImplementedError: If updating ``AddressOnParent`` but the
            prior value varies across config profiles.
          NotImplementedError: if ``AddressOnParent`` is not an integer.
        """
        resource_type = new_item.hardware_type
        address = new_item.get(self.ovf.ADDRESS)
        if address:
            raise NotImplementedError("Don't know how to ensure a unique "
                                      "Address value when cloning an Item "
                                      "of type {0}".format(resource_type))
        address_on_parent = new_item.get(self.ovf.ADDRESS_ON_PARENT)
        if address_on_parent:
            address_list = new_item.get_all_values(self.ovf.ADDRESS_ON_PARENT)
            if len(address_list) > 1:
                raise NotImplementedError("AddressOnParent is not common "
                                          "across all profiles but has "
                                          "multiple values {0}. COT can't "
                                          "handle this yet."
                                          .format(address_list))
            address_on_parent = address_list[0]
            # Currently we only handle integer addresses
            try:
                address_on_parent = int(address_on_parent)
                address_on_parent += 1
                new_item.set_property(self.ovf.ADDRESS_ON_PARENT,
                                      str(address_on_parent),
                                      new_item_profiles)
            except ValueError:
                raise NotImplementedError("Don't know how to ensure a "
                                          "unique AddressOnParent value "
                                          "given base value '{0}'"
                                          .format(address_on_parent))

        if resource_type == 'ethernet':
            # Update ElementName to reflect the NIC number
            element_name = self.ovf.platform.guess_nic_name(item_count)
            new_item.set_property(self.ovf.ELEMENT_NAME, element_name,
                                  new_item_profiles)

        return new_item
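
    # Concrete illustration of the AddressOnParent handling above (values
    # assumed for illustration): cloning a disk whose AddressOnParent is
    # consistently "0" across profiles yields a clone at "1"; a non-integer
    # value such as "Z", or a value that differs between profiles, raises
    # NotImplementedError rather than guessing.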
    def set_item_count_per_profile(self, resource_type, count, profile_list):
        """Set the number of items of a given type under the given profile(s).

        If the new count is greater than the current count under this
        profile, then additional instances that already exist under another
        profile will be added to this profile, starting with the
        lowest-sequence instance not already present, and only as a last
        resort will new instances be created.

        If the new count is less than the current count under this profile,
        then the highest-numbered instances will be removed preferentially.

        Args:
          resource_type (str): 'cpu', 'harddisk', etc.
          count (int): Desired number of items
          profile_list (list): List of profiles to filter on
            (default: apply across all profiles)
        """
        if not profile_list:
            # Set the profile list for all profiles, including the default
            profile_list = self.ovf.config_profiles + [None]

        count_dict, items_to_add, last_item = \
            self._update_existing_item_profiles(
                resource_type, count, profile_list)

        logger.debug("Creating %d new items", items_to_add)
        while items_to_add > 0:
            # Which profiles does this Item need to belong to?
            new_item_profiles = []
            for profile in profile_list:
                if count_dict[profile] < count:
                    new_item_profiles.append(profile)
                    count_dict[profile] += 1
            if last_item is None:
                logger.notice("No existing items of type %s found. "
                              "Will create new %s from scratch.",
                              resource_type, resource_type)
                (_, new_item) = self.new_item(resource_type,
                                              new_item_profiles)
            else:
                (_, new_item) = self.clone_item(last_item, new_item_profiles)
            # Check/update other properties of the clone that should be
            # unique:
            # TODO - we assume that the count is the same across profiles
            new_item = self._update_cloned_item(
                new_item, new_item_profiles, count_dict[new_item_profiles[0]])
            last_item = new_item
            items_to_add -= 1
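
    # Illustrative sketch, with placeholder names: growing the NIC count to
    # 3 under every profile reuses existing instances where possible, then
    # clones the last NIC for the remainder (renaming each clone via the
    # platform's guess_nic_name()):
    #
    #   hardware.set_item_count_per_profile('ethernet', 3, None)
    #   hardware.update_xml()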
\"\"\"", "as it's the one property of # an Item that uniquely identifies this", "on Returns: bool: True if the item matches all filters, False if not.", "\"\"\" modified = False if len(self.item_dict) != len(XML.find_all_children( self.ovf.virtual_hw_section, set([self.ovf.ITEM, self.ovf.STORAGE_ITEM, self.ovf.ETHERNET_PORT_ITEM]))): modified", "for item in list(self.ovf.virtual_hw_section): if (item.tag == self.ovf.ITEM or item.tag == self.ovf.STORAGE_ITEM or", "count_dict = self.get_item_count_per_profile(resource_type, profile_list) items_seen = dict.fromkeys(profile_list, 0) last_item = None # First,", "\"unique AddressOnParent value \" \"given base value '{0}'\" .format(address_on_parent)) if resource_type == 'ethernet':", "or 'cpu' prop_name (str): Property name to update value_list (list): List of values", "%d\", instance) return str(instance) def new_item(self, resource_type, profile_list=None): \"\"\"Create a new OVFItem of", "delta = count - items_seen[profile] if delta > items_to_add: items_to_add = delta return", "False for (prop, value) in properties.items(): if item.get_value(prop) != value: return False return", "the same across profiles new_item = self._update_cloned_item( new_item, new_item_profiles, count_dict[new_item_profiles[0]]) last_item = new_item", "describing %s \" \"unique devices\", item_count, len(self.item_dict)) # Treat the current state as", "\"\"\"Get the number of Items of the given type per profile. Items present", "items_to_add = 0 for profile in profile_list: delta = count - items_seen[profile] if", "if properties is None: properties = {} for item in items: if self.item_match(item,", "given :attr:`resource_type`) profile_list (list): List of profiles to filter on (default: apply across", "know how to ensure a \" \"unique AddressOnParent value \" \"given base value", "= self.new_item(resource_type, new_item_profiles) else: (_, new_item) = self.clone_item(last_item, new_item_profiles) # Check/update other properties", "if self.item_dict[instance] == item: del self.item_dict[instance] # TODO: error handling - currently a", "config profiles. NotImplementedError: if ``AddressOnParent`` is not an integer. \"\"\" resource_type = new_item.hardware_type", "(OVFItem): Newly cloned Item new_item_profiles (list): Profiles new_item should belong to item_count (int):", "NotImplementedError(\"Don't know how to ensure a \" \"unique AddressOnParent value \" \"given base", "'harddisk' - used as a key to :data:`~COT.vm_description.ovf.name_helper.OVFNameHelper1.RES_MAP` profile_list (list): Profiles the new", "if profile_list: for profile in profile_list: if not item.has_profile(profile): return False for (prop,", "OVFItem instances \"\"\" items = [self.item_dict[instance] for instance in natural_sort(self.item_dict)] filtered_items = []", "terms contained in the LICENSE.txt file. \"\"\"Representation of OVF hardware definitions. **Classes and", "2016, <NAME> # Copyright (c) 2013-2016, 2019 the COT project developers. # See", "many Items of this type (including this item) now exist. Used with :meth:`COT.platform.Platform.guess_nic_name`", "OVF instance to extract hardware information from. 
    def update_xml(self):
        """Regenerate all Items under the VirtualHardwareSection, if needed.

        Will do nothing if no Items have been changed.
        """
        modified = False
        if len(self.item_dict) != len(XML.find_all_children(
                self.ovf.virtual_hw_section,
                set([self.ovf.ITEM, self.ovf.STORAGE_ITEM,
                     self.ovf.ETHERNET_PORT_ITEM]))):
            modified = True
        else:
            for ovfitem in self.item_dict.values():
                if ovfitem.modified:
                    modified = True
                    break
        if not modified:
            logger.verbose("No changes to hardware definition, "
                           "so no XML update is required")
            return
        # Delete the existing Items:
        delete_count = 0
        for item in list(self.ovf.virtual_hw_section):
            if (item.tag == self.ovf.ITEM or
                    item.tag == self.ovf.STORAGE_ITEM or
                    item.tag == self.ovf.ETHERNET_PORT_ITEM):
                self.ovf.virtual_hw_section.remove(item)
                delete_count += 1
        logger.debug("Cleared %d existing items from VirtualHWSection",
                     delete_count)
        # Generate the new XML Items, in appropriately sorted order by
        # Instance
        ordering = [self.ovf.INFO, self.ovf.SYSTEM, self.ovf.ITEM]
        for instance in natural_sort(self.item_dict):
            logger.debug("Writing Item(s) with InstanceID %s", instance)
            ovfitem = self.item_dict[instance]
            new_items = ovfitem.generate_items()
            logger.spam("Generated %d items", len(new_items))
            for item in new_items:
                XML.add_child(self.ovf.virtual_hw_section, item, ordering)
        logger.verbose("Updated XML VirtualHardwareSection, now contains %d "
                       "Items representing %d devices",
                       len(self.ovf.virtual_hw_section.findall(self.ovf.ITEM)),
                       len(self.item_dict))
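    # Dirty-check sketch (added commentary; call sequence hypothetical):
    # update_xml only rewrites the section when the device count or some
    # item's ``modified`` flag changed, so calling it twice in a row is
    # cheap:
    #
    #     hw.update_xml()    # regenerates Items if anything was modified
    #     hw.update_xml()    # logs "No changes..." and returns immediately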
\"\"\" if resource_type and (self.ovf.RES_MAP[resource_type]", "logger.verbose(\"Updated XML VirtualHardwareSection, now contains %d \" \"Items representing %d devices\", len(self.ovf.virtual_hw_section.findall(self.ovf.ITEM)), len(self.item_dict))", "> items_to_add: items_to_add = delta return count_dict, items_to_add, last_item def _update_cloned_item(self, new_item, new_item_profiles,", "%d items\", len(new_items)) for item in new_items: XML.add_child(self.ovf.virtual_hw_section, item, ordering) logger.verbose(\"Updated XML VirtualHardwareSection,", "\" \"unique devices\", item_count, len(self.item_dict)) # Treat the current state as golden: for", "scratch.\", resource_type, resource_type) (_, ovfitem) = self.new_item(resource_type, profile_list) ovfitem_list = [ovfitem] for ovfitem", "# hardware.py - OVFHardware class # # June 2016, <NAME> # Copyright (c)", "If the new count is greater than the current count under this profile,", "apply across all profiles) \"\"\" if not profile_list: # Set the profile list", "resource_type (str): Resource type string like 'scsi' or 'serial' profile (str): Single profile", "an OVFHardware object describing all Items in the OVF. Args: ovf (OVF): OVF", "Args: resource_type (str): Resource type such as 'cpu' or 'harddisk' prop_name (str): Property", "class OVFHardware(object): \"\"\"Helper class for :class:`~COT.vm_description.ovf.ovf.OVF`. Represents all hardware items defined by this", "List of profiles to clone into Returns: tuple: ``(instance_id, ovfitem)`` \"\"\" instance =", "of multiple items of a type. Args: resource_type (str): Device type such as", "import natural_sort from COT.xml_file import XML from .item import OVFItem, OVFItemDataError logger =", "(str): Single profile ID to search within Returns: OVFItem: Matching instance, or None", "with new instance ID %s\", parent_item, instance) ovfitem = copy.deepcopy(parent_item) # Delete any", "assume that the count is the same across profiles new_item = self._update_cloned_item( new_item,", "errors are seen \"\"\" self.ovf = ovf self.item_dict = {} valid_profiles = set(ovf.config_profiles)", "handling - currently a no-op if item not in item_dict def clone_item(self, parent_item,", "create_new: logger.warning(\"No items of type %s found. Nothing to do.\", resource_type) return logger.notice(\"No", "instance)) if instance not in self.item_dict: self.item_dict[instance] = OVFItem(self.ovf, item) else: try: self.item_dict[instance].add_item(item)", "return None else: return matches[0] def get_item_count(self, resource_type, profile): \"\"\"Get the number of", "resource_type) return count_dict def _update_existing_item_profiles(self, resource_type, count, profile_list): \"\"\"Change profile membership of existing", "- are all of the profiles associated with this # item properly defined", "self.ovf.platform.guess_nic_name(item_count) new_item.set_property(self.ovf.ELEMENT_NAME, element_name, new_item_profiles) return new_item def set_item_count_per_profile(self, resource_type, count, profile_list): \"\"\"Set the", "string like 'scsi' or 'serial' properties (dict): Properties and their values to match", "clone_item(self, parent_item, profile_list): \"\"\"Clone an OVFItem to create a new instance. 
Args: parent_item", "%s from scratch.\", resource_type, resource_type) (_, ovfitem) = self.new_item(resource_type, profile_list) ovfitem_list = [ovfitem]", "item in ovf.virtual_hw_section: namespace = ovf.namespace_for_item_tag(item.tag) if not namespace: continue item_count += 1", "to item_count (int): How many Items of this type (including this item) now", "count: new_item_profiles.append(profile) count_dict[profile] += 1 if last_item is None: logger.notice(\"No existing items of", ":attr:`resource_type` presently exist. \"\"\" ovfitem_list = self.find_all_items(resource_type) if not ovfitem_list: if not create_new:", "just a dict of :class:`~COT.vm_description.ovf.item.OVFItem` objects with a bunch of helper methods. \"\"\"", "(_, ovfitem) = self.new_item(resource_type, profile_list) ovfitem_list = [ovfitem] for ovfitem in ovfitem_list: ovfitem.set_property(prop_name,", "profile to this Item ovfitem.add_profile(profile) count_dict[profile] += 1 items_seen[profile] += 1 # How", "= True self.item_dict[instance] = ovfitem logger.spam(\"Added clone of %s under %s, instance is", "get_item_count(self, resource_type, profile): \"\"\"Get the number of Items of the given type for", "List of profiles to filter on (default: apply across all profiles) Returns: dict:", "str(instance) def new_item(self, resource_type, profile_list=None): \"\"\"Create a new OVFItem of the given type.", "valid_profiles = set(ovf.config_profiles) item_count = 0 for item in ovf.virtual_hw_section: namespace = ovf.namespace_for_item_tag(item.tag)", "this Item need to belong to? new_item_profiles = [] for profile in profile_list:", "items_to_add) while items_to_add > 0: # Which profiles does this Item need to", "resource_type=None, properties=None, profile_list=None): \"\"\"Find all items matching the given type, properties, and profiles.", "Args: resource_type (str): Resource type string like 'scsi' or 'serial' profile (str): Single", "filter on (default: apply across all profiles) \"\"\" if not profile_list: # Set", "logger.warning(\"After scanning all known %s Items, not all \" \"%s values were used", "under %s, instance is %s\", parent_item, profile_list, instance) return (instance, ovfitem) def item_match(self,", "OVFItem of the given type. Args: resource_type (str): String such as 'cpu' or", "item in new_items: XML.add_child(self.ovf.virtual_hw_section, item, ordering) logger.verbose(\"Updated XML VirtualHardwareSection, now contains %d \"", "profiles. NotImplementedError: if ``AddressOnParent`` is not an integer. \"\"\" resource_type = new_item.hardware_type address", "unique: # TODO - we assume that the count is the same across", "= delta return count_dict, items_to_add, last_item def _update_cloned_item(self, new_item, new_item_profiles, item_count): \"\"\"Update a", "of the given type exist, will create a new ``Item`` if :attr:`create_new` is", "%d new items\", items_to_add) while items_to_add > 0: # Which profiles does this", "profiles from the parent that we don't need now, # otherwise we'll get", "look up. Returns: int: Number of items of this type in this profile.", "Args: resource_type (str): Resource type string like 'scsi' or 'serial' profile_list (list): List", "self.item_dict: self.item_dict[instance] = OVFItem(self.ovf, item) else: try: self.item_dict[instance].add_item(item) except OVFItemDataError as exc: logger.debug(exc)", "item properly defined in the OVF DeploymentOptionSection? 
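    # Worked example of the ID search above (hypothetical state): if
    # item_dict already contains keys '1', '2', and '4', then
    # find_unused_instance_id() returns '3', while
    # find_unused_instance_id(start=4) skips the gap and returns '5'.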
    def delete_item(self, item):
        """Delete the given Item from the hardware.

        Args:
          item (OVFItem): Item to delete
        """
        instance = item.get_value(self.ovf.INSTANCE_ID)
        if self.item_dict[instance] == item:
            del self.item_dict[instance]
        # TODO: error handling - currently a no-op if item not in item_dict

    def clone_item(self, parent_item, profile_list):
        """Clone an OVFItem to create a new instance.

        Args:
          parent_item (OVFItem): Instance to clone from
          profile_list (list): List of profiles to clone into

        Returns:
          tuple: ``(instance_id, ovfitem)``
        """
        instance = self.find_unused_instance_id(start=parent_item.instance_id)
        logger.spam("Cloning existing Item %s with new instance ID %s",
                    parent_item, instance)
        ovfitem = copy.deepcopy(parent_item)
        # Delete any profiles from the parent that we don't need now,
        # otherwise we'll get an error when trying to set the instance ID
        # on our clone due to self-inconsistency (#64).
        for profile in self.ovf.config_profiles:
            if ovfitem.has_profile(profile) and profile not in profile_list:
                ovfitem.remove_profile(profile)
        ovfitem.set_property(self.ovf.INSTANCE_ID, instance, profile_list)
        ovfitem.modified = True
        self.item_dict[instance] = ovfitem
        logger.spam("Added clone of %s under %s, instance is %s",
                    parent_item, profile_list, instance)
        return (instance, ovfitem)
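    # Cloning sketch (added commentary; names and profile hypothetical):
    # profiles the copy shouldn't have are stripped *before* re-keying it,
    # and the clone of instance '11' lands at the first free ID at or
    # above '11':
    #
    #     (new_id, nic2) = hw.clone_item(nic1, ['2CPU-2GB'])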
    def item_match(self, item, resource_type, properties, profile_list):
        """Check whether the given item matches the given filters.

        Args:
          item (OVFItem): Item to validate
          resource_type (str): Resource type string like 'scsi' or 'serial'
          properties (dict): Properties and their values to match
          profile_list (list): List of profiles to filter on

        Returns:
          bool: True if the item matches all filters, False if not.
        """
        if resource_type and (self.ovf.RES_MAP[resource_type] !=
                              item.get_value(self.ovf.RESOURCE_TYPE)):
            return False
        if profile_list:
            for profile in profile_list:
                if not item.has_profile(profile):
                    return False
        for (prop, value) in properties.items():
            if item.get_value(prop) != value:
                return False
        return True

    def find_all_items(self, resource_type=None, properties=None,
                       profile_list=None):
        """Find all items matching the given type, properties, and profiles.

        Args:
          resource_type (str): Resource type string like 'scsi' or 'serial'
          properties (dict): Properties and their values to match
          profile_list (list): List of profiles to filter on

        Returns:
          list: Matching OVFItem instances
        """
        items = [self.item_dict[instance] for instance in
                 natural_sort(self.item_dict)]
        filtered_items = []
        if properties is None:
            properties = {}
        for item in items:
            if self.item_match(item, resource_type, properties, profile_list):
                filtered_items.append(item)
        logger.spam("Found %s Items of type %s with properties %s and "
                    "profiles %s", len(filtered_items), resource_type,
                    properties, profile_list)
        return filtered_items
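    # Filtering sketch (added commentary): the three filters are ANDed
    # together by item_match(), so narrowing is just a matter of passing
    # more arguments:
    #
    #     all_nics = hw.find_all_items('ethernet')
    #     default_nics = hw.find_all_items('ethernet', profile_list=[None])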
    def find_item(self, resource_type=None, properties=None, profile=None):
        """Find the only OVFItem of the given :attr:`resource_type`.

        Args:
          resource_type (str): Resource type string like 'scsi' or 'serial'
          properties (dict): Properties and their values to match
          profile (str): Single profile ID to search within

        Returns:
          OVFItem: Matching instance, or None

        Raises:
          LookupError: if more than one such Item exists.
        """
        matches = self.find_all_items(resource_type, properties, [profile])
        if len(matches) > 1:
            raise LookupError(
                "Found multiple matching '{0}' Items (instances {1})"
                .format(resource_type, [m.instance_id for m in matches]))
        elif len(matches) == 0:
            return None
        else:
            return matches[0]

    def get_item_count(self, resource_type, profile):
        """Get the number of Items of the given type for the given profile.

        Wrapper for :meth:`get_item_count_per_profile`.

        Args:
          resource_type (str): Resource type string like 'scsi' or 'serial'
          profile (str): Single profile identifier string to look up.

        Returns:
          int: Number of items of this type in this profile.
        """
        return (self.get_item_count_per_profile(resource_type, [profile])
                [profile])

    def get_item_count_per_profile(self, resource_type, profile_list):
        """Get the number of Items of the given type per profile.

        Items present under "no profile" will be counted against the total
        for each profile.

        Args:
          resource_type (str): Resource type string like 'scsi' or 'serial'
          profile_list (list): List of profiles to filter on
            (default: apply across all profiles)

        Returns:
          dict: mapping profile strings to the number of items under each
          profile.
        """
        count_dict = {}
        if not profile_list:
            # Get the count under all profiles
            profile_list = self.ovf.config_profiles + [None]
        for profile in profile_list:
            count_dict[profile] = 0
        for ovfitem in self.find_all_items(resource_type):
            for profile in profile_list:
                if ovfitem.has_profile(profile):
                    count_dict[profile] += 1
        for (profile, count) in count_dict.items():
            logger.spam("Profile '%s' has %s %s Item(s)",
                        profile, count, resource_type)
        return count_dict
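    # Counting sketch (added commentary; profile names hypothetical): items
    # with no profile restriction count toward every profile, so two
    # unrestricted NICs plus one NIC limited to '2CPU-2GB' would give:
    #
    #     hw.get_item_count_per_profile('ethernet', None)
    #     # -> {None: 2, '1CPU-1GB': 2, '2CPU-2GB': 3}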
    def _update_existing_item_profiles(self, resource_type,
                                       count, profile_list):
        """Change profile membership of existing items as needed.

        Helper method for :meth:`set_item_count_per_profile`.

        Args:
          resource_type (str): 'cpu', 'harddisk', etc.
          count (int): Desired number of items
          profile_list (list): List of profiles to filter on
            (default: apply across all profiles)

        Returns:
          tuple: (count_dict, items_to_add, last_item)
        """
        count_dict = self.get_item_count_per_profile(resource_type,
                                                     profile_list)
        items_seen = dict.fromkeys(profile_list, 0)
        last_item = None

        # First, iterate over existing Items.
        # Once we've seen "count" items under a profile, remove all
        # subsequent items from this profile.
        # If we don't have enough items under a profile, add any items found
        # under other profiles to this profile as well.
        for ovfitem in self.find_all_items(resource_type):
            last_item = ovfitem
            for profile in profile_list:
                if ovfitem.has_profile(profile):
                    if items_seen[profile] >= count:
                        # Too many items - remove this one!
                        ovfitem.remove_profile(profile)
                    else:
                        items_seen[profile] += 1
                else:
                    if count_dict[profile] < count:
                        # Add this profile to this Item
                        ovfitem.add_profile(profile)
                        count_dict[profile] += 1
                        items_seen[profile] += 1

        # How many new Items do we need to create in total?
        items_to_add = 0
        for profile in profile_list:
            delta = count - items_seen[profile]
            if delta > items_to_add:
                items_to_add = delta

        return count_dict, items_to_add, last_item
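    # Worked example of the arithmetic above (hypothetical numbers): with
    # count=3 and items_seen={'A': 1, 'B': 3} after the loop, profile 'A'
    # is short by 2 and 'B' by 0, so items_to_add comes back as 2.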
\"\"\" resource_type = new_item.hardware_type address = new_item.get(self.ovf.ADDRESS)", "Args: item (OVFItem): Item to validate resource_type (str): Resource type string like 'scsi'", "\" profiles %s\", len(filtered_items), resource_type, properties, profile_list) return filtered_items def find_item(self, resource_type=None, properties=None,", "for instance in natural_sort(self.item_dict): logger.debug(\"Writing Item(s) with InstanceID %s\", instance) ovfitem = self.item_dict[instance]", "an Item that uniquely identifies this set of hardware items. instance = item.find(namespace", "if not modified: logger.verbose(\"No changes to hardware definition, \" \"so no XML update", "(OVFItem): Instance to clone from profile_list (list): List of profiles to clone into", "under \"no profile\" will be counted against the total for each profile. Args:", "in properties.items(): if item.get_value(prop) != value: return False return True def find_all_items(self, resource_type=None,", "%s to %s under %s\", resource_type, prop_name, new_value, profile_list) if len(value_list): logger.warning(\"After scanning", "do nothing if no Items have been changed. \"\"\" modified = False if", "from the hardware. Args: item (OVFItem): Item to delete \"\"\" instance = item.get_value(self.ovf.INSTANCE_ID)", "apply across all profiles) Returns: tuple: (count_dict, items_to_add, last_item) \"\"\" count_dict = self.get_item_count_per_profile(resource_type,", "no-op if item not in item_dict def clone_item(self, parent_item, profile_list): \"\"\"Clone an OVFItem", "!= item.get_value(self.ovf.RESOURCE_TYPE)): return False if profile_list: for profile in profile_list: if not item.has_profile(profile):", "Properties and their values to match profile (str): Single profile ID to search", "TODO - we assume that the count is the same across profiles new_item", "items of a given type under the given profile(s). If the new count", "per item of the given :attr:`resource_type`) profile_list (list): List of profiles to filter", "> 1: raise NotImplementedError(\"AddressOnParent is not common \" \"across all profiles but has", "into Returns: tuple: ``(instance_id, ovfitem)`` \"\"\" instance = self.find_unused_instance_id(start=parent_item.instance_id) logger.spam(\"Cloning existing Item %s", "for profile in self.ovf.config_profiles: if ovfitem.has_profile(profile) and profile not in profile_list: ovfitem.remove_profile(profile) ovfitem.set_property(self.ovf.INSTANCE_ID,", "NotImplementedError: if ``AddressOnParent`` is not an integer. \"\"\" resource_type = new_item.hardware_type address =", "items_to_add -= 1 def set_value_for_all_items(self, resource_type, prop_name, new_value, profile_list, create_new=False): \"\"\"Set a property", "%s under %s\", resource_type, prop_name, new_value, profile_list) if len(value_list): logger.warning(\"After scanning all known", "instance = int(start) while str(instance) in self.item_dict.keys(): instance += 1 logger.debug(\"Found unused InstanceID", "will be removed preferentially. Args: resource_type (str): 'cpu', 'harddisk', etc. count (int): Desired", "to %s under profiles %s\", resource_type, prop_name, new_value, profile_list) def set_item_values_per_profile(self, resource_type, prop_name,", "are more matching items than entries in :attr:`value_list`, set extra items to this", "Treat the current state as golden: for ovfitem in self.item_dict.values(): ovfitem.modified = False", "update value_list (list): List of values to set (one value per item of", "Too many items - remove this one! 
ovfitem.remove_profile(profile) else: items_seen[profile] += 1 else:", "(str): If there are more matching items than entries in :attr:`value_list`, set extra", "the only OVFItem of the given :attr:`resource_type`. Args: resource_type (str): Resource type string", "profiles, including the default profile_list = self.ovf.config_profiles + [None] count_dict, items_to_add, last_item =", "the VirtualHardwareSection, if needed. Will do nothing if no Items have been changed.", "all profiles) default (str): If there are more matching items than entries in", "= new_item.get(self.ovf.ADDRESS_ON_PARENT) if address_on_parent: address_list = new_item.get_all_values(self.ovf.ADDRESS_ON_PARENT) if len(address_list) > 1: raise NotImplementedError(\"AddressOnParent", "count_dict[profile] += 1 items_seen[profile] += 1 # How many new Items do we", "profile_list: if ovfitem.has_profile(profile): count_dict[profile] += 1 for (profile, count) in count_dict.items(): logger.spam(\"Profile '%s'", "# June 2016, <NAME> # Copyright (c) 2013-2016, 2019 the COT project developers.", "under profile(s) %s, InstanceID is %s\", resource_type, profile_list, instance) return (instance, ovfitem) def", ":meth:`get_item_count_per_profile`. Args: resource_type (str): Resource type string like 'scsi' or 'serial' profile (str):", "= set(ovf.config_profiles) item_count = 0 for item in ovf.virtual_hw_section: namespace = ovf.namespace_for_item_tag(item.tag) if", "or 'serial' properties (dict): Properties and their values to match profile_list (list): List", "or # distributed except according to the terms contained in the LICENSE.txt file.", "the hardware. Args: item (OVFItem): Item to delete \"\"\" instance = item.get_value(self.ovf.INSTANCE_ID) if", "instances that already exist under another profile will be added to this profile,", "return True def find_all_items(self, resource_type=None, properties=None, profile_list=None): \"\"\"Find all items matching the given", "element_name, new_item_profiles) return new_item def set_item_count_per_profile(self, resource_type, count, profile_list): \"\"\"Set the number of", "subject to the license terms in the LICENSE.txt file found in the #", "[self.item_dict[instance] for instance in natural_sort(self.item_dict)] filtered_items = [] if properties is None: properties", "type {0}\".format(resource_type)) address_on_parent = new_item.get(self.ovf.ADDRESS_ON_PARENT) if address_on_parent: address_list = new_item.get_all_values(self.ovf.ADDRESS_ON_PARENT) if len(address_list) >", "file is part of the Common OVF Tool (COT) project. # It is", "or None Raises: LookupError: if more than one such Item exists. \"\"\" matches", ":class:`OVFHardware` is not sane.\"\"\" class OVFHardware(object): \"\"\"Helper class for :class:`~COT.vm_description.ovf.ovf.OVF`. Represents all hardware", ">= count: # Too many items - remove this one! ovfitem.remove_profile(profile) else: items_seen[profile]", "= new_item.get(self.ovf.ADDRESS) if address: raise NotImplementedError(\"Don't know how to ensure a unique \"", "OVFItem, OVFItemDataError logger = logging.getLogger(__name__) class OVFHardwareDataError(Exception): \"\"\"The input data used to construct", "\"\"\"Create a new OVFItem of the given type. Args: resource_type (str): String such", "# This file is part of the Common OVF Tool (COT) project. #", "ValueError: raise NotImplementedError(\"Don't know how to ensure a \" \"unique AddressOnParent value \"", "preferentially. Args: resource_type (str): 'cpu', 'harddisk', etc. 
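    # AddressOnParent sketch (added commentary): a cloned disk whose parent
    # had AddressOnParent '0' across all profiles is bumped to '1'; a
    # non-integer base value such as 'Z' raises NotImplementedError instead.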
    def set_item_count_per_profile(self, resource_type, count, profile_list):
        """Set the number of items of a given type under the given
        profile(s).

        If the new count is greater than the current count under this
        profile, then additional instances that already exist under another
        profile will be added to this profile, starting with the
        lowest-sequence instance not already present, and only as a last
        resort will new instances be created.

        If the new count is less than the current count under this profile,
        then the highest-numbered instances will be removed preferentially.

        Args:
          resource_type (str): 'cpu', 'harddisk', etc.
          count (int): Desired number of items
          profile_list (list): List of profiles to filter on
            (default: apply across all profiles)
        """
        if not profile_list:
            # Set the profile list for all profiles, including the default
            profile_list = self.ovf.config_profiles + [None]
        count_dict, items_to_add, last_item = \
            self._update_existing_item_profiles(
                resource_type, count, profile_list)

        logger.debug("Creating %d new items", items_to_add)
        while items_to_add > 0:
            # Which profiles does this Item need to belong to?
            new_item_profiles = []
            for profile in profile_list:
                if count_dict[profile] < count:
                    new_item_profiles.append(profile)
                    count_dict[profile] += 1
            if last_item is None:
                logger.notice("No existing items of type %s found. "
                              "Will create new %s from scratch.",
                              resource_type, resource_type)
                (_, new_item) = self.new_item(resource_type,
                                              new_item_profiles)
            else:
                (_, new_item) = self.clone_item(last_item, new_item_profiles)
            # Check/update other properties of the clone that should be
            # unique:
            # TODO - we assume that the count is the same across profiles
            new_item = self._update_cloned_item(
                new_item, new_item_profiles, count_dict[new_item_profiles[0]])
            last_item = new_item
            items_to_add -= 1


class OVFHardware(object):
    """Helper class for :class:`~COT.vm_description.ovf.ovf.OVF`.

    Represents all hardware items defined by this OVF; i.e., the contents
    of all Items in the VirtualHardwareSection. Fundamentally it's just a
    dict of :class:`~COT.vm_description.ovf.item.OVFItem` objects with a
    bunch of helper methods.
    """

    def __init__(self, ovf):
        """Construct an OVFHardware object describing all Items in the OVF.

        Args:
          ovf (OVF): OVF instance to extract hardware information from.

        Raises:
          OVFHardwareDataError: if any data errors are seen
        """
        self.ovf = ovf
        self.item_dict = {}
        valid_profiles = set(ovf.config_profiles)
        item_count = 0
        for item in ovf.virtual_hw_section:
            namespace = ovf.namespace_for_item_tag(item.tag)
            if not namespace:
                continue
            item_count += 1
            # We index the dict by InstanceID as it's the one property of
            # an Item that uniquely identifies this set of hardware items.
            instance = item.find(namespace + self.ovf.INSTANCE_ID).text

            # Pre-sanity check - are all of the profiles associated with this
            # item properly defined in the OVF DeploymentOptionSection?
            item_profiles = set(item.get(self.ovf.ITEM_CONFIG, "").split())
            unknown_profiles = item_profiles - valid_profiles
            if unknown_profiles:
                raise OVFHardwareDataError("Unknown profile(s) {0} for "
                                           "Item instance {1}"
                                           .format(unknown_profiles, instance))

            if instance not in self.item_dict:
                self.item_dict[instance] = OVFItem(self.ovf, item)
            else:
                try:
                    self.item_dict[instance].add_item(item)
                except OVFItemDataError as exc:
                    logger.debug(exc)
                    # Mask away the nitty-gritty details from our caller
                    raise OVFHardwareDataError("Data conflict for instance {0}"
                                               .format(instance))
        logger.debug(
            "OVF contains %s hardware Item elements describing %s "
            "unique devices", item_count, len(self.item_dict))

        # Treat the current state as golden:
        for ovfitem in self.item_dict.values():
            ovfitem.modified = False
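
    # Illustrative usage sketch (added commentary, not part of the original
    # source). Assuming `ovf` is an already-loaded
    # :class:`~COT.vm_description.ovf.ovf.OVF` instance:
    #
    #     try:
    #         hw = OVFHardware(ovf)
    #     except OVFHardwareDataError as exc:
    #         print("OVF hardware data is not sane: {0}".format(exc))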

    def find_unused_instance_id(self, start=1):
        """Find the first available ``InstanceID`` number.

        Args:
          start (int): First InstanceID value to consider (disregarding all
            lower InstanceIDs, even if available).

        Returns:
          str: An instance ID that is not yet in use.
        """
        instance = int(start)
        while str(instance) in self.item_dict.keys():
            instance += 1
        logger.debug("Found unused InstanceID %d", instance)
        return str(instance)

    def new_item(self, resource_type, profile_list=None):
        """Create a new OVFItem of the given type.

        Args:
          resource_type (str): String such as 'cpu' or 'harddisk' - used as
            a key to
            :data:`~COT.vm_description.ovf.name_helper.OVFNameHelper1.RES_MAP`
          profile_list (list): Profiles the new item should belong to

        Returns:
          tuple: ``(instance_id, ovfitem)``
        """
        instance = self.find_unused_instance_id()
        ovfitem = OVFItem(self.ovf)
        ovfitem.set_property(self.ovf.INSTANCE_ID, instance, profile_list)
        ovfitem.set_property(self.ovf.RESOURCE_TYPE,
                             self.ovf.RES_MAP[resource_type], profile_list)
        # ovftool freaks out if we leave out the ElementName on an Item,
        # so provide a simple default value.
        ovfitem.set_property(self.ovf.ELEMENT_NAME, resource_type,
                             profile_list)
        self.item_dict[instance] = ovfitem
        ovfitem.modified = True
        logger.info("Created new %s under profile(s) %s, InstanceID is %s",
                    resource_type, profile_list, instance)
        return (instance, ovfitem)
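
    # Illustrative sketch (added commentary, not part of the original
    # source): creating a serial port under all profiles of a hypothetical
    # OVFHardware instance `hw`:
    #
    #     (instance_id, item) = hw.new_item('serial', profile_list=None)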
\" \"Will create new %s from scratch.\",", "raise OVFHardwareDataError(\"Unknown profile(s) {0} for \" \"Item instance {1}\" .format(unknown_profiles, instance)) if instance", "resource_type (str): String such as 'cpu' or 'harddisk' - used as a key", "def get_item_count(self, resource_type, profile): \"\"\"Get the number of Items of the given type", "dict by InstanceID as it's the one property of # an Item that", "self.item_dict = {} valid_profiles = set(ovf.config_profiles) item_count = 0 for item in ovf.virtual_hw_section:", "logger.info(\"Created new %s under profile(s) %s, InstanceID is %s\", resource_type, profile_list, instance) return", "count_dict.items(): logger.spam(\"Profile '%s' has %s %s Item(s)\", profile, count, resource_type) return count_dict def", "only handle integer addresses try: address_on_parent = int(address_on_parent) address_on_parent += 1 new_item.set_property(self.ovf.ADDRESS_ON_PARENT, str(address_on_parent),", "iterate over existing Items. # Once we've seen \"count\" items under a profile,", "list: Matching OVFItem instances \"\"\" items = [self.item_dict[instance] for instance in natural_sort(self.item_dict)] filtered_items", "How many new Items do we need to create in total? items_to_add =", "the given type for the given profile. Wrapper for :meth:`get_item_count_per_profile`. Args: resource_type (str):", "more matching items than entries in :attr:`value_list`, set extra items to this value", "profile_list: delta = count - items_seen[profile] if delta > items_to_add: items_to_add = delta", "add any items found # under other profiles to this profile as well.", "+= 1 # How many new Items do we need to create in", "+ [None] count_dict, items_to_add, last_item = \\ self._update_existing_item_profiles( resource_type, count, profile_list) logger.debug(\"Creating %d", "profile_list = self.ovf.config_profiles + [None] for ovfitem in self.find_all_items(resource_type): if len(value_list): new_value =", "Items of this type (including this item) now exist. Used with :meth:`COT.platform.Platform.guess_nic_name` Returns:", "tuple: (count_dict, items_to_add, last_item) \"\"\" count_dict = self.get_item_count_per_profile(resource_type, profile_list) items_seen = dict.fromkeys(profile_list, 0)", "of profiles to clone into Returns: tuple: ``(instance_id, ovfitem)`` \"\"\" instance = self.find_unused_instance_id(start=parent_item.instance_id)", "# See the COPYRIGHT.txt file at the top-level directory of this distribution #", "ovftool freaks out if we leave out the ElementName on an Item, #", "Args: start (int): First InstanceID value to consider (disregarding all lower InstanceIDs, even", "property %s to %s under %s\", resource_type, prop_name, new_value, profile_list) if len(value_list): logger.warning(\"After", "yet for updating ``Address`` NotImplementedError: If updating ``AddressOnParent`` but the prior value varies", "consider (disregarding all lower InstanceIDs, even if available). Returns: str: An instance ID", "mapping profile strings to the number of items under each profile. \"\"\" count_dict", "is part of the Common OVF Tool (COT) project. # It is subject", "tuple: ``(instance_id, ovfitem)`` \"\"\" instance = self.find_unused_instance_id(start=parent_item.instance_id) logger.spam(\"Cloning existing Item %s with new", "\"no profile\" will be counted against the total for each profile. Args: resource_type", "%s found. 
\" \"Will create new %s from scratch.\", resource_type, resource_type) (_, new_item)", "create new %s from scratch.\", resource_type, resource_type) (_, ovfitem) = self.new_item(resource_type, profile_list) ovfitem_list", "profile_list (list): List of profiles to filter on Returns: bool: True if the", "in list(self.ovf.virtual_hw_section): if (item.tag == self.ovf.ITEM or item.tag == self.ovf.STORAGE_ITEM or item.tag ==", "of the clone that should be unique: # TODO - we assume that", "for m in matches])) elif len(matches) == 0: return None else: return matches[0]", "this distribution and at # https://github.com/glennmatthews/cot/blob/master/LICENSE.txt. No part # of COT, including this", "= value_list.pop(0) else: new_value = default for profile in profile_list: if ovfitem.has_profile(profile): ovfitem.set_property(prop_name,", "name to update value_list (list): List of values to set (one value per", "resource_type, count, profile_list): \"\"\"Change profile membership of existing items as needed. Helper method", "not yet in use. \"\"\" instance = int(start) while str(instance) in self.item_dict.keys(): instance", "\"Found multiple matching '{0}' Items (instances {1})\" .format(resource_type, [m.instance_id for m in matches]))", "that uniquely identifies this set of hardware items. instance = item.find(namespace + self.ovf.INSTANCE_ID).text", "defined by this OVF; i.e., the contents of all Items in the VirtualHardwareSection.", "profile_list) self.item_dict[instance] = ovfitem ovfitem.modified = True logger.info(\"Created new %s under profile(s) %s,", "to this profile as well. for ovfitem in self.find_all_items(resource_type): last_item = ovfitem for", "new_value, profile_list) def set_item_values_per_profile(self, resource_type, prop_name, value_list, profile_list, default=None): \"\"\"Set value(s) for a", "simple default value. ovfitem.set_property(self.ovf.ELEMENT_NAME, resource_type, profile_list) self.item_dict[instance] = ovfitem ovfitem.modified = True logger.info(\"Created", "(list): List of profiles to filter on (default: apply across all profiles) \"\"\"", "\"\"\"Change profile membership of existing items as needed. Helper method for :meth:`set_item_count_per_profile`. 

    def item_match(self, item, resource_type, properties, profile_list):
        """Check whether the given item matches the given filters.

        Args:
          item (OVFItem): Item to validate
          resource_type (str): Resource type string like 'scsi' or 'serial'
          properties (dict): Properties and their values to match
          profile_list (list): List of profiles to filter on

        Returns:
          bool: True if the item matches all filters, False if not.
        """
        if resource_type and (self.ovf.RES_MAP[resource_type] !=
                              item.get_value(self.ovf.RESOURCE_TYPE)):
            return False
        if profile_list:
            for profile in profile_list:
                if not item.has_profile(profile):
                    return False
        for (prop, value) in properties.items():
            if item.get_value(prop) != value:
                return False
        return True

    def find_all_items(self, resource_type=None, properties=None,
                       profile_list=None):
        """Find all items matching the given type, properties, and profiles.

        Args:
          resource_type (str): Resource type string like 'scsi' or 'serial'
          properties (dict): Properties and their values to match
          profile_list (list): List of profiles to filter on

        Returns:
          list: Matching OVFItem instances
        """
        items = [self.item_dict[instance] for instance in
                 natural_sort(self.item_dict)]
        filtered_items = []
        if properties is None:
            properties = {}
        for item in items:
            if self.item_match(item, resource_type, properties, profile_list):
                filtered_items.append(item)
        logger.spam("Found %s Items of type %s with properties %s and"
                    " profiles %s", len(filtered_items), resource_type,
                    properties, profile_list)
        return filtered_items

    def get_item_count_per_profile(self, resource_type, profile_list):
        """Get the number of Items of the given type per profile.

        Items present under "no profile" will be counted against the total
        for each profile.

        Args:
          resource_type (str): Resource type string like 'scsi' or 'serial'
          profile_list (list): List of profiles to filter on
            (default: apply across all profiles)

        Returns:
          dict: mapping profile strings to the number of items under each
          profile.
        """
        count_dict = {}
        if not profile_list:
            # Get the count under all profiles
            profile_list = self.ovf.config_profiles + [None]
        for profile in profile_list:
            count_dict[profile] = 0
        for ovfitem in self.find_all_items(resource_type):
            for profile in profile_list:
                if ovfitem.has_profile(profile):
                    count_dict[profile] += 1
        for (profile, count) in count_dict.items():
            logger.spam("Profile '%s' has %s %s Item(s)",
                        profile, count, resource_type)
        return count_dict
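
    # Illustrative sketch (added commentary, not part of the original
    # source): counting NICs across every profile of a hypothetical `hw`:
    #
    #     counts = hw.get_item_count_per_profile('ethernet', None)
    #     # counts maps each profile name (and None, the default profile)
    #     # to the number of NICs visible under that profile.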

    def set_item_count_per_profile(self, resource_type, count, profile_list):
        """Set the number of items of a given type under the given profile(s).

        If the new count is greater than the current count under this
        profile, then additional instances that already exist under another
        profile will be added to this profile, starting with the
        lowest-sequence instance not already present, and only as a last
        resort will new instances be created.

        If the new count is less than the current count under this profile,
        then the highest-numbered instances will be removed preferentially.

        Args:
          resource_type (str): 'cpu', 'harddisk', etc.
          count (int): Desired number of items
          profile_list (list): List of profiles to filter on
            (default: apply across all profiles)
        """
        if not profile_list:
            # Set the profile list for all profiles, including the default
            profile_list = self.ovf.config_profiles + [None]

        count_dict, items_to_add, last_item = \
            self._update_existing_item_profiles(
                resource_type, count, profile_list)

        logger.debug("Creating %d new items", items_to_add)
        while items_to_add > 0:
            # Which profiles does this Item need to belong to?
            new_item_profiles = []
            for profile in profile_list:
                if count_dict[profile] < count:
                    new_item_profiles.append(profile)
                    count_dict[profile] += 1
            if last_item is None:
                logger.notice("No existing items of type %s found. "
                              "Will create new %s from scratch.",
                              resource_type, resource_type)
                (_, new_item) = self.new_item(resource_type,
                                              new_item_profiles)
            else:
                (_, new_item) = self.clone_item(last_item, new_item_profiles)
            # Check/update other properties of the clone that should be
            # unique:
            # TODO - we assume that the count is the same across profiles
            new_item = self._update_cloned_item(
                new_item, new_item_profiles, count_dict[new_item_profiles[0]])
            last_item = new_item
            items_to_add -= 1
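
    # Illustrative sketch (added commentary, not part of the original
    # source): requesting three NICs under every profile of a hypothetical
    # `hw`. Existing items are re-used across profiles first; only as a
    # last resort are new items created, by cloning the last existing NIC:
    #
    #     hw.set_item_count_per_profile('ethernet', 3, profile_list=None)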

    def _update_existing_item_profiles(self, resource_type,
                                       count, profile_list):
        """Change profile membership of existing items as needed.

        Helper method for :meth:`set_item_count_per_profile`.

        Args:
          resource_type (str): 'cpu', 'harddisk', etc.
          count (int): Desired number of items
          profile_list (list): List of profiles to filter on
            (default: apply across all profiles)

        Returns:
          tuple: (count_dict, items_to_add, last_item)
        """
        count_dict = self.get_item_count_per_profile(resource_type,
                                                     profile_list)
        items_seen = dict.fromkeys(profile_list, 0)
        last_item = None

        # First, iterate over existing Items.
        # Once we've seen "count" items under a profile, remove all
        # subsequent items from this profile.
        # If we don't have enough items under a profile, add any items
        # found under other profiles to this profile as well.
        for ovfitem in self.find_all_items(resource_type):
            last_item = ovfitem
            for profile in profile_list:
                if ovfitem.has_profile(profile):
                    if items_seen[profile] >= count:
                        # Too many items - remove this one!
                        ovfitem.remove_profile(profile)
                    else:
                        items_seen[profile] += 1
                else:
                    if count_dict[profile] < count:
                        # Add this profile to this Item
                        ovfitem.add_profile(profile)
                        count_dict[profile] += 1
                        items_seen[profile] += 1

        # How many new Items do we need to create in total?
        items_to_add = 0
        for profile in profile_list:
            delta = count - items_seen[profile]
            if delta > items_to_add:
                items_to_add = delta

        return count_dict, items_to_add, last_item
\"\"\" def __init__(self, ovf):", "cloned Item new_item_profiles (list): Profiles new_item should belong to item_count (int): How many", "\"\"\" resource_type = new_item.hardware_type address = new_item.get(self.ovf.ADDRESS) if address: raise NotImplementedError(\"Don't know how", "count_dict[profile] < count: new_item_profiles.append(profile) count_dict[profile] += 1 if last_item is None: logger.notice(\"No existing", "= [] for profile in profile_list: if count_dict[profile] < count: new_item_profiles.append(profile) count_dict[profile] +=", "Single profile ID to search within Returns: OVFItem: Matching instance, or None Raises:", "logger.debug(\"Updated %s %s to %s under profiles %s\", resource_type, prop_name, new_value, profile_list) def", "this OVF; i.e., the contents of all Items in the VirtualHardwareSection. Fundamentally it's", "resource_type and (self.ovf.RES_MAP[resource_type] != item.get_value(self.ovf.RESOURCE_TYPE)): return False if profile_list: for profile in profile_list:", "'scsi' or 'serial' profile_list (list): List of profiles to filter on (default: apply", "remove this one! ovfitem.remove_profile(profile) else: items_seen[profile] += 1 else: if count_dict[profile] < count:", "items_seen[profile] += 1 else: if count_dict[profile] < count: # Add this profile to", "profiles does this Item need to belong to? new_item_profiles = [] for profile", "Args: resource_type (str): Device type such as 'harddisk' or 'cpu' prop_name (str): Property", "item, ordering) logger.verbose(\"Updated XML VirtualHardwareSection, now contains %d \" \"Items representing %d devices\",", "XML VirtualHardwareSection, now contains %d \" \"Items representing %d devices\", len(self.ovf.virtual_hw_section.findall(self.ovf.ITEM)), len(self.item_dict)) def", "matching items than entries in :attr:`value_list`, set extra items to this value \"\"\"", "no Items have been changed. \"\"\" modified = False if len(self.item_dict) != len(XML.find_all_children(", "clone of %s under %s, instance is %s\", parent_item, profile_list, instance) return (instance,", "of the Common OVF Tool (COT) project. # It is subject to the", "parent_item, profile_list): \"\"\"Clone an OVFItem to create a new instance. Args: parent_item (OVFItem):", "not modified: logger.verbose(\"No changes to hardware definition, \" \"so no XML update is", "= ovfitem logger.spam(\"Added clone of %s under %s, instance is %s\", parent_item, profile_list,", "Pre-sanity check - are all of the profiles associated with this # item", "like 'scsi' or 'serial' profile (str): Single profile identifier string to look up.", "``Item`` if :attr:`create_new` is set to ``True``; otherwise will log a warning and", "new_value = value_list.pop(0) else: new_value = default for profile in profile_list: if ovfitem.has_profile(profile):", "exist. 
Used with :meth:`COT.platform.Platform.guess_nic_name` Returns: OVFItem: Updated :param:`new_item` Raises: NotImplementedError: No support yet", "count is the same across profiles new_item = self._update_cloned_item( new_item, new_item_profiles, count_dict[new_item_profiles[0]]) last_item", "'{0}'\" .format(address_on_parent)) if resource_type == 'ethernet': # Update ElementName to reflect the NIC", "values to set (one value per item of the given :attr:`resource_type`) profile_list (list):", "to set the instance ID # on our clone due to self-inconsistency (#64).", "profile in profile_list: delta = count - items_seen[profile] if delta > items_to_add: items_to_add", "logger.spam(\"Profile '%s' has %s %s Item(s)\", profile, count, resource_type) return count_dict def _update_existing_item_profiles(self,", "unknown_profiles: raise OVFHardwareDataError(\"Unknown profile(s) {0} for \" \"Item instance {1}\" .format(unknown_profiles, instance)) if", "Returns: tuple: (count_dict, items_to_add, last_item) \"\"\" count_dict = self.get_item_count_per_profile(resource_type, profile_list) items_seen = dict.fromkeys(profile_list,", "List of profiles to filter on (default: apply across all profiles) create_new (bool):", "the given :attr:`resource_type`. Args: resource_type (str): Resource type string like 'scsi' or 'serial'", "any data errors are seen \"\"\" self.ovf = ovf self.item_dict = {} valid_profiles", "count: # Add this profile to this Item ovfitem.add_profile(profile) count_dict[profile] += 1 items_seen[profile]", "profile_list, create_new=False): \"\"\"Set a property to the given value for all items of", "clone into Returns: tuple: ``(instance_id, ovfitem)`` \"\"\" instance = self.find_unused_instance_id(start=parent_item.instance_id) logger.spam(\"Cloning existing Item", "varies across config profiles. NotImplementedError: if ``AddressOnParent`` is not an integer. \"\"\" resource_type", "update is required\") return # Delete the existing Items: delete_count = 0 for", "Resource type such as 'cpu' or 'harddisk' prop_name (str): Property name to update", "items - remove this one! ovfitem.remove_profile(profile) else: items_seen[profile] += 1 else: if count_dict[profile]", "return matches[0] def get_item_count(self, resource_type, profile): \"\"\"Get the number of Items of the", "ovfitem.add_profile(profile) count_dict[profile] += 1 items_seen[profile] += 1 # How many new Items do", "any items found # under other profiles to this profile as well. for", "last_item is None: logger.notice(\"No existing items of type %s found. \" \"Will create", "set_item_values_per_profile(self, resource_type, prop_name, value_list, profile_list, default=None): \"\"\"Set value(s) for a property of multiple", "[profile]) logger.info(\"Updated %s property %s to %s under %s\", resource_type, prop_name, new_value, profile_list)", "ovfitem_list: if not create_new: logger.warning(\"No items of type %s found. Nothing to do.\",", "a new OVFItem of the given type. 

    def set_value_for_all_items(self, resource_type, prop_name, new_value,
                                profile_list, create_new=False):
        """Set a property to the given value for all items of the given type.

        If no items of the given type exist, will create a new ``Item`` if
        :attr:`create_new` is set to ``True``; otherwise will log a warning
        and do nothing.

        Args:
          resource_type (str): Resource type such as 'cpu' or 'harddisk'
          prop_name (str): Property name to update
          new_value (str): New value to set the property to
          profile_list (list): List of profiles to filter on
            (default: apply across all profiles)
          create_new (bool): Whether to create a new entry if no items of
            this :attr:`resource_type` presently exist.
        """
        ovfitem_list = self.find_all_items(resource_type)
        if not ovfitem_list:
            if not create_new:
                logger.warning("No items of type %s found. Nothing to do.",
                               resource_type)
                return
            logger.notice("No existing items of type %s found. "
                          "Will create new %s from scratch.",
                          resource_type, resource_type)
            (_, ovfitem) = self.new_item(resource_type, profile_list)
            ovfitem_list = [ovfitem]
        for ovfitem in ovfitem_list:
            ovfitem.set_property(prop_name, new_value, profile_list)
        logger.debug("Updated %s %s to %s under profiles %s",
                     resource_type, prop_name, new_value, profile_list)
Returns: str: An instance ID that is not yet", "order by Instance ordering = [self.ovf.INFO, self.ovf.SYSTEM, self.ovf.ITEM] for instance in natural_sort(self.item_dict): logger.debug(\"Writing", "If no items of the given type exist, will create a new ``Item``", "definition, \" \"so no XML update is required\") return # Delete the existing", "items_to_add, last_item def _update_cloned_item(self, new_item, new_item_profiles, item_count): \"\"\"Update a cloned item to make", "for profile in profile_list: if ovfitem.has_profile(profile): ovfitem.set_property(prop_name, new_value, [profile]) logger.info(\"Updated %s property %s", "a new ``Item`` if :attr:`create_new` is set to ``True``; otherwise will log a", "OVFItemDataError logger = logging.getLogger(__name__) class OVFHardwareDataError(Exception): \"\"\"The input data used to construct an", "not in item_dict def clone_item(self, parent_item, profile_list): \"\"\"Clone an OVFItem to create a", "address_on_parent = address_list[0] # Currently we only handle integer addresses try: address_on_parent =", "on our clone due to self-inconsistency (#64). for profile in self.ovf.config_profiles: if ovfitem.has_profile(profile)", "(instance, ovfitem) def delete_item(self, item): \"\"\"Delete the given Item from the hardware. Args:", "self.ovf.INSTANCE_ID).text # Pre-sanity check - are all of the profiles associated with this", "Mask away the nitty-gritty details from our caller raise OVFHardwareDataError(\"Data conflict for instance", "last_item = \\ self._update_existing_item_profiles( resource_type, count, profile_list) logger.debug(\"Creating %d new items\", items_to_add) while", "the # top-level directory of this distribution and at # https://github.com/glennmatthews/cot/blob/master/LICENSE.txt. No part", "directory of this distribution and at # https://github.com/glennmatthews/cot/blob/master/LICENSE.txt. No part # of COT,", "new_item.get(self.ovf.ADDRESS_ON_PARENT) if address_on_parent: address_list = new_item.get_all_values(self.ovf.ADDRESS_ON_PARENT) if len(address_list) > 1: raise NotImplementedError(\"AddressOnParent is", "resource_type) return logger.notice(\"No existing items of type %s found. \" \"Will create new", "str: An instance ID that is not yet in use. \"\"\" instance =", "of profiles to filter on (default: apply across all profiles) \"\"\" if not", "Raises: OVFHardwareDataError: if any data errors are seen \"\"\" self.ovf = ovf self.item_dict", "new count is less than the current count under this profile, then the", "# Add this profile to this Item ovfitem.add_profile(profile) count_dict[profile] += 1 items_seen[profile] +=", "number of Items of the given type per profile. Items present under \"no", "New value to set the property to profile_list (list): List of profiles to", "%s hardware Item elements describing %s \" \"unique devices\", item_count, len(self.item_dict)) # Treat", "resource_type, profile_list): \"\"\"Get the number of Items of the given type per profile.", "given type per profile. Items present under \"no profile\" will be counted against", "(instance, ovfitem) def item_match(self, item, resource_type, properties, profile_list): \"\"\"Check whether the given item", "helper methods. \"\"\" def __init__(self, ovf): \"\"\"Construct an OVFHardware object describing all Items", "and only as a last resort will new instances be created. 
If the", "1 # How many new Items do we need to create in total?", "to this profile, starting with the lowest-sequence instance not already present, and only", "current count under this profile, then additional instances that already exist under another", "clone that should be unique: # TODO - we assume that the count", "except according to the terms contained in the LICENSE.txt file. \"\"\"Representation of OVF", "profile_list (list): List of profiles to filter on (default: apply across all profiles)", "copy import logging from COT.data_validation import natural_sort from COT.xml_file import XML from .item", "than the current count under this profile, then additional instances that already exist", "type such as 'harddisk' or 'cpu' prop_name (str): Property name to update value_list", "apply across all profiles) Returns: dict: mapping profile strings to the number of", "given profile(s). If the new count is greater than the current count under", "logger.spam(\"Added clone of %s under %s, instance is %s\", parent_item, profile_list, instance) return", "to do.\", resource_type) return logger.notice(\"No existing items of type %s found. \" \"Will", "of the profiles associated with this # item properly defined in the OVF", "no items of this :attr:`resource_type` presently exist. \"\"\" ovfitem_list = self.find_all_items(resource_type) if not", "new_item(self, resource_type, profile_list=None): \"\"\"Create a new OVFItem of the given type. Args: resource_type", "support yet for updating ``Address`` NotImplementedError: If updating ``AddressOnParent`` but the prior value", "instance = self.find_unused_instance_id(start=parent_item.instance_id) logger.spam(\"Cloning existing Item %s with new instance ID %s\", parent_item,", "1 for (profile, count) in count_dict.items(): logger.spam(\"Profile '%s' has %s %s Item(s)\", profile,", "it's just a dict of :class:`~COT.vm_description.ovf.item.OVFItem` objects with a bunch of helper methods.", "for all items of the given type. If no items of the given", "new_items: XML.add_child(self.ovf.virtual_hw_section, item, ordering) logger.verbose(\"Updated XML VirtualHardwareSection, now contains %d \" \"Items representing", "new instance ID %s\", parent_item, instance) ovfitem = copy.deepcopy(parent_item) # Delete any profiles", "(instances {1})\" .format(resource_type, [m.instance_id for m in matches])) elif len(matches) == 0: return", "has \" \"multiple values {0}. COT can't \" \"handle this yet.\" .format(address_list)) address_on_parent", "the given Item from the hardware. 
Args: item (OVFItem): Item to delete \"\"\"", "match profile (str): Single profile ID to search within Returns: OVFItem: Matching instance,", "if resource_type and (self.ovf.RES_MAP[resource_type] != item.get_value(self.ovf.RESOURCE_TYPE)): return False if profile_list: for profile in", "Instance to clone from profile_list (list): List of profiles to clone into Returns:", "such as 'cpu' or 'harddisk' prop_name (str): Property name to update new_value (str):", "or 'harddisk' prop_name (str): Property name to update new_value (str): New value to", "out the ElementName on an Item, # so provide a simple default value.", "profile_list) return filtered_items def find_item(self, resource_type=None, properties=None, profile=None): \"\"\"Find the only OVFItem of", "Add this profile to this Item ovfitem.add_profile(profile) count_dict[profile] += 1 items_seen[profile] += 1", "profile, then additional instances that already exist under another profile will be added", "= [self.item_dict[instance] for instance in natural_sort(self.item_dict)] filtered_items = [] if properties is None:", "profile): \"\"\"Get the number of Items of the given type for the given", "Item, # so provide a simple default value. ovfitem.set_property(self.ovf.ELEMENT_NAME, resource_type, profile_list) self.item_dict[instance] =", "an OVFItem to create a new instance. Args: parent_item (OVFItem): Instance to clone", "the profile list for all profiles, including the default profile_list = self.ovf.config_profiles +", "such as 'harddisk' or 'cpu' prop_name (str): Property name to update value_list (list):", "of the given type per profile. Items present under \"no profile\" will be", "type. Args: resource_type (str): String such as 'cpu' or 'harddisk' - used as", "len(self.item_dict)) # Treat the current state as golden: for ovfitem in self.item_dict.values(): ovfitem.modified", "June 2016, <NAME> # Copyright (c) 2013-2016, 2019 the COT project developers. #", "type in this profile. \"\"\" return (self.get_item_count_per_profile(resource_type, [profile]) [profile]) def get_item_count_per_profile(self, resource_type, profile_list):", "ovfitem)`` \"\"\" instance = self.find_unused_instance_id(start=parent_item.instance_id) logger.spam(\"Cloning existing Item %s with new instance ID", "python # # hardware.py - OVFHardware class # # June 2016, <NAME> #", "to this value \"\"\" if profile_list is None: profile_list = self.ovf.config_profiles + [None]", "= False def update_xml(self): \"\"\"Regenerate all Items under the VirtualHardwareSection, if needed. Will", "filtered_items def find_item(self, resource_type=None, properties=None, profile=None): \"\"\"Find the only OVFItem of the given", "= ovf.namespace_for_item_tag(item.tag) if not namespace: continue item_count += 1 # We index the", "1 logger.debug(\"Cleared %d existing items from VirtualHWSection\", delete_count) # Generate the new XML", "!= value: return False return True def find_all_items(self, resource_type=None, properties=None, profile_list=None): \"\"\"Find all", "properly defined in the OVF DeploymentOptionSection? item_profiles = set(item.get(self.ovf.ITEM_CONFIG, \"\").split()) unknown_profiles = item_profiles", "\" \"handle this yet.\" .format(address_list)) address_on_parent = address_list[0] # Currently we only handle", "instance ID %s\", parent_item, instance) ovfitem = copy.deepcopy(parent_item) # Delete any profiles from", "values to match profile (str): Single profile ID to search within Returns: OVFItem:", "count: # Too many items - remove this one! 
ovfitem.remove_profile(profile) else: items_seen[profile] +=", "like 'scsi' or 'serial' profile_list (list): List of profiles to filter on (default:", "get_item_count_per_profile(self, resource_type, profile_list): \"\"\"Get the number of Items of the given type per", "0: # Which profiles does this Item need to belong to? new_item_profiles =", "= self._update_cloned_item( new_item, new_item_profiles, count_dict[new_item_profiles[0]]) last_item = new_item items_to_add -= 1 def set_value_for_all_items(self,", "= int(start) while str(instance) in self.item_dict.keys(): instance += 1 logger.debug(\"Found unused InstanceID %d\",", "of # an Item that uniquely identifies this set of hardware items. instance", "in profile_list: count_dict[profile] = 0 for ovfitem in self.find_all_items(resource_type): for profile in profile_list:", "update new_value (str): New value to set the property to profile_list (list): List", "\"\"\"Regenerate all Items under the VirtualHardwareSection, if needed. Will do nothing if no", "to create a new entry if no items of this :attr:`resource_type` presently exist.", "return new_item def set_item_count_per_profile(self, resource_type, count, profile_list): \"\"\"Set the number of items of", "raise NotImplementedError(\"Don't know how to ensure a \" \"unique AddressOnParent value \" \"given", "in profile_list: ovfitem.remove_profile(profile) ovfitem.set_property(self.ovf.INSTANCE_ID, instance, profile_list) ovfitem.modified = True self.item_dict[instance] = ovfitem logger.spam(\"Added", "= None # First, iterate over existing Items. # Once we've seen \"count\"", "+= 1 else: if count_dict[profile] < count: # Add this profile to this", "[] for profile in profile_list: if count_dict[profile] < count: new_item_profiles.append(profile) count_dict[profile] += 1", "self.item_dict[instance] = OVFItem(self.ovf, item) else: try: self.item_dict[instance].add_item(item) except OVFItemDataError as exc: logger.debug(exc) #", "new item should belong to Returns: tuple: ``(instance_id, ovfitem)`` \"\"\" instance = self.find_unused_instance_id()", "item_count): \"\"\"Update a cloned item to make it distinct from its parent. Helper", "the VirtualHardwareSection. Fundamentally it's just a dict of :class:`~COT.vm_description.ovf.item.OVFItem` objects with a bunch", "all of the profiles associated with this # item properly defined in the", "TODO: error handling - currently a no-op if item not in item_dict def", "profile_list) if len(value_list): logger.warning(\"After scanning all known %s Items, not all \" \"%s", "matches = self.find_all_items(resource_type, properties, [profile]) if len(matches) > 1: raise LookupError( \"Found multiple", "string like 'scsi' or 'serial' profile_list (list): List of profiles to filter on", "if no items of this :attr:`resource_type` presently exist. \"\"\" ovfitem_list = self.find_all_items(resource_type) if", "start (int): First InstanceID value to consider (disregarding all lower InstanceIDs, even if", "# and at https://github.com/glennmatthews/cot/blob/master/COPYRIGHT.txt. # # This file is part of the Common", "properties, profile_list): filtered_items.append(item) logger.spam(\"Found %s Items of type %s with properties %s and\"", "ovf.namespace_for_item_tag(item.tag) if not namespace: continue item_count += 1 # We index the dict", "InstanceID %s\", instance) ovfitem = self.item_dict[instance] new_items = ovfitem.generate_items() logger.spam(\"Generated %d items\", len(new_items))", "method for :meth:`set_item_count_per_profile`. 
Args: resource_type (str): 'cpu', 'harddisk', etc. count (int): Desired number", "to clone from profile_list (list): List of profiles to clone into Returns: tuple:", "1 # We index the dict by InstanceID as it's the one property", "in profile_list: if ovfitem.has_profile(profile): ovfitem.set_property(prop_name, new_value, [profile]) logger.info(\"Updated %s property %s to %s", "\"of type {0}\".format(resource_type)) address_on_parent = new_item.get(self.ovf.ADDRESS_ON_PARENT) if address_on_parent: address_list = new_item.get_all_values(self.ovf.ADDRESS_ON_PARENT) if len(address_list)", "from. Raises: OVFHardwareDataError: if any data errors are seen \"\"\" self.ovf = ovf", "file, may be copied, modified, propagated, or # distributed except according to the", "new_value = default for profile in profile_list: if ovfitem.has_profile(profile): ovfitem.set_property(prop_name, new_value, [profile]) logger.info(\"Updated", "even if available). Returns: str: An instance ID that is not yet in", "Items in the VirtualHardwareSection. Fundamentally it's just a dict of :class:`~COT.vm_description.ovf.item.OVFItem` objects with", "method for :meth:`set_item_count_per_profile`. Args: new_item (OVFItem): Newly cloned Item new_item_profiles (list): Profiles new_item", "integer addresses try: address_on_parent = int(address_on_parent) address_on_parent += 1 new_item.set_property(self.ovf.ADDRESS_ON_PARENT, str(address_on_parent), new_item_profiles) except", "number. Args: start (int): First InstanceID value to consider (disregarding all lower InstanceIDs,", "is not common \" \"across all profiles but has \" \"multiple values {0}.", "%s and\" \" profiles %s\", len(filtered_items), resource_type, properties, profile_list) return filtered_items def find_item(self,", "delete_count) # Generate the new XML Items, in appropriately sorted order by Instance", "= {} for item in items: if self.item_match(item, resource_type, properties, profile_list): filtered_items.append(item) logger.spam(\"Found", "in items: if self.item_match(item, resource_type, properties, profile_list): filtered_items.append(item) logger.spam(\"Found %s Items of type", "per profile. Items present under \"no profile\" will be counted against the total", "def delete_item(self, item): \"\"\"Delete the given Item from the hardware. Args: item (OVFItem):", "to the number of items under each profile. \"\"\" count_dict = {} if", "Args: ovf (OVF): OVF instance to extract hardware information from. Raises: OVFHardwareDataError: if", "OVFItemDataError as exc: logger.debug(exc) # Mask away the nitty-gritty details from our caller", "from VirtualHWSection\", delete_count) # Generate the new XML Items, in appropriately sorted order", "an :class:`OVFHardware` is not sane.\"\"\" class OVFHardware(object): \"\"\"Helper class for :class:`~COT.vm_description.ovf.ovf.OVF`. Represents all", "are all of the profiles associated with this # item properly defined in", "total for each profile. 
Args: resource_type (str): Resource type string like 'scsi' or", "OVFItem(self.ovf, item) else: try: self.item_dict[instance].add_item(item) except OVFItemDataError as exc: logger.debug(exc) # Mask away", "%s, InstanceID is %s\", resource_type, profile_list, instance) return (instance, ovfitem) def delete_item(self, item):", "value_list, profile_list, default=None): \"\"\"Set value(s) for a property of multiple items of a", "instance not in self.item_dict: self.item_dict[instance] = OVFItem(self.ovf, item) else: try: self.item_dict[instance].add_item(item) except OVFItemDataError", "set of hardware items. instance = item.find(namespace + self.ovf.INSTANCE_ID).text # Pre-sanity check -", "+= 1 logger.debug(\"Cleared %d existing items from VirtualHWSection\", delete_count) # Generate the new", "return count_dict def _update_existing_item_profiles(self, resource_type, count, profile_list): \"\"\"Change profile membership of existing items", "resource_type, profile_list=None): \"\"\"Create a new OVFItem of the given type. Args: resource_type (str):", "First, iterate over existing Items. # Once we've seen \"count\" items under a", "(str): Single profile identifier string to look up. Returns: int: Number of items", "property of multiple items of a type. Args: resource_type (str): Device type such", "new_item, new_item_profiles, item_count): \"\"\"Update a cloned item to make it distinct from its", "value) in properties.items(): if item.get_value(prop) != value: return False return True def find_all_items(self,", "cloned item to make it distinct from its parent. Helper method for :meth:`set_item_count_per_profile`.", "profile membership of existing items as needed. Helper method for :meth:`set_item_count_per_profile`. Args: resource_type", "profiles to filter on (default: apply across all profiles) default (str): If there", "ovfitem in ovfitem_list: ovfitem.set_property(prop_name, new_value, profile_list) logger.debug(\"Updated %s %s to %s under profiles", "only as a last resort will new instances be created. If the new", "freaks out if we leave out the ElementName on an Item, # so", "modified = False if len(self.item_dict) != len(XML.find_all_children( self.ovf.virtual_hw_section, set([self.ovf.ITEM, self.ovf.STORAGE_ITEM, self.ovf.ETHERNET_PORT_ITEM]))): modified =", "when trying to set the instance ID # on our clone due to", "filter on (default: apply across all profiles) default (str): If there are more", "\"\"\"Set the number of items of a given type under the given profile(s).", "Exceptions** .. autosummary:: :nosignatures: OVFHardware OVFHardwareDataError \"\"\" import copy import logging from COT.data_validation", "= True logger.info(\"Created new %s under profile(s) %s, InstanceID is %s\", resource_type, profile_list,", "\"Address value when cloning an Item \" \"of type {0}\".format(resource_type)) address_on_parent = new_item.get(self.ovf.ADDRESS_ON_PARENT)", "profiles to filter on (default: apply across all profiles) Returns: dict: mapping profile", "\\ self._update_existing_item_profiles( resource_type, count, profile_list) logger.debug(\"Creating %d new items\", items_to_add) while items_to_add >", "Fundamentally it's just a dict of :class:`~COT.vm_description.ovf.item.OVFItem` objects with a bunch of helper", "starting with the lowest-sequence instance not already present, and only as a last", "lower InstanceIDs, even if available). 
Returns: str: An instance ID that is not", "profiles to filter on Returns: bool: True if the item matches all filters,", "def get_item_count_per_profile(self, resource_type, profile_list): \"\"\"Get the number of Items of the given type", "profile_list=None): \"\"\"Find all items matching the given type, properties, and profiles. Args: resource_type", "well. for ovfitem in self.find_all_items(resource_type): last_item = ovfitem for profile in profile_list: if", "= count - items_seen[profile] if delta > items_to_add: items_to_add = delta return count_dict,", "0 for ovfitem in self.find_all_items(resource_type): for profile in profile_list: if ovfitem.has_profile(profile): count_dict[profile] +=", "OVF; i.e., the contents of all Items in the VirtualHardwareSection. Fundamentally it's just", "# TODO: error handling - currently a no-op if item not in item_dict", "create a new instance. Args: parent_item (OVFItem): Instance to clone from profile_list (list):", "# Set the profile list for all profiles, including the default profile_list =", "new_item_profiles = [] for profile in profile_list: if count_dict[profile] < count: new_item_profiles.append(profile) count_dict[profile]", "the license terms in the LICENSE.txt file found in the # top-level directory", "logging.getLogger(__name__) class OVFHardwareDataError(Exception): \"\"\"The input data used to construct an :class:`OVFHardware` is not", "len(matches) > 1: raise LookupError( \"Found multiple matching '{0}' Items (instances {1})\" .format(resource_type,", "self.item_match(item, resource_type, properties, profile_list): filtered_items.append(item) logger.spam(\"Found %s Items of type %s with properties" ]
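The n-grams in this row reassemble into COT's OVFHardware helper, shown above deduplicated and in file order. To make the API concrete, here is a small usage sketch; it is hypothetical rather than part of the COT sources: "my_ovf" stands in for an already-loaded COT OVF object, and the 'ethernet' resource string and target count are arbitrary illustration values.

# Hypothetical usage sketch for the OVFHardware class reconstructed above.
# "my_ovf" is assumed to be an already-loaded COT OVF object; loading one
# is outside this module.
from COT.vm_description.ovf.hardware import OVFHardware

def grow_nics(my_ovf, target=4):
    hw = OVFHardware(my_ovf)          # index every <Item> by InstanceID
    # Per-profile NIC counts before the change (None = default profile).
    before = hw.get_item_count_per_profile('ethernet', None)
    # Reuse items across profiles first; clone new ones only as needed.
    hw.set_item_count_per_profile('ethernet', target, None)
    hw.update_xml()                   # regenerates XML only if modified
    return before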
[ "parameter to define the numbers of indents: #Use the separators parameter change the", "24.1} ] } print(json.dumps(x)) #Use the indent parameter to define the numbers of", "\"John\", \"age\": 30, \"married\": True, \"divorced\": False, \"children\": (\"Ann\",\"Billy\"), \"pets\": None, \"cars\": [", "result is a JSON string: print(y) #Convert a Python object containing all the", "the legal data types: x = { \"name\": \"John\", \"age\": 30, \"married\": True,", "some JSON: x = '{ \"name\":\"John\", \"age\":30, \"city\":\"New York\"}' # parse x: y", "\"name\": \"John\", \"age\": 30, \"city\": \"New York\" } # convert into JSON: y", "json #Convert from JSON to Python # some JSON: x = '{ \"name\":\"John\",", "indent parameter to define the numbers of indents: #Use the separators parameter change", "#Convert a Python object containing all the legal data types: x = {", "Python dictionary: print(y[\"name\"]) #Convert from Python to JSON # a Python object (dict):", "] } print(json.dumps(x)) #Use the indent parameter to define the numbers of indents:", "\"mpg\": 27.5}, {\"model\": \"Ford Edge\", \"mpg\": 24.1} ] } print(json.dumps(x)) #Use the indent", "change the default separator: y = json.dumps(x, indent=4, separators=(\". \", \" = \"))", "JSON: x = '{ \"name\":\"John\", \"age\":30, \"city\":\"New York\"}' # parse x: y =", "30, \"married\": True, \"divorced\": False, \"children\": (\"Ann\",\"Billy\"), \"pets\": None, \"cars\": [ {\"model\": \"BMW", "x = { \"name\": \"John\", \"age\": 30, \"married\": True, \"divorced\": False, \"children\": (\"Ann\",\"Billy\"),", "#Use the separators parameter change the default separator: y = json.dumps(x, indent=4, separators=(\".", "\"pets\": None, \"cars\": [ {\"model\": \"BMW 230\", \"mpg\": 27.5}, {\"model\": \"Ford Edge\", \"mpg\":", "to define the numbers of indents: #Use the separators parameter change the default", "json.dumps(x, indent=4, separators=(\". 
\", \" = \")) #Use the sort_keys parameter to specify", "y = json.loads(x) # the result is a Python dictionary: print(y[\"name\"]) #Convert from", "\"cars\": [ {\"model\": \"BMW 230\", \"mpg\": 27.5}, {\"model\": \"Ford Edge\", \"mpg\": 24.1} ]", "is a JSON string: print(y) #Convert a Python object containing all the legal", "the result should be sorted or not: z = json.dumps(x, indent=4, sort_keys=True) print(y)", "\"children\": (\"Ann\",\"Billy\"), \"pets\": None, \"cars\": [ {\"model\": \"BMW 230\", \"mpg\": 27.5}, {\"model\": \"Ford", "print(y) #Convert a Python object containing all the legal data types: x =", "the sort_keys parameter to specify if the result should be sorted or not:", "result is a Python dictionary: print(y[\"name\"]) #Convert from Python to JSON # a", "containing all the legal data types: x = { \"name\": \"John\", \"age\": 30,", "\"age\":30, \"city\":\"New York\"}' # parse x: y = json.loads(x) # the result is", "230\", \"mpg\": 27.5}, {\"model\": \"Ford Edge\", \"mpg\": 24.1} ] } print(json.dumps(x)) #Use the", "JSON: y = json.dumps(x) # the result is a JSON string: print(y) #Convert", "if the result should be sorted or not: z = json.dumps(x, indent=4, sort_keys=True)", "30, \"city\": \"New York\" } # convert into JSON: y = json.dumps(x) #", "\"city\": \"New York\" } # convert into JSON: y = json.dumps(x) # the", "\"divorced\": False, \"children\": (\"Ann\",\"Billy\"), \"pets\": None, \"cars\": [ {\"model\": \"BMW 230\", \"mpg\": 27.5},", "\", \" = \")) #Use the sort_keys parameter to specify if the result", "\"John\", \"age\": 30, \"city\": \"New York\" } # convert into JSON: y =", "the default separator: y = json.dumps(x, indent=4, separators=(\". \", \" = \")) #Use", "a JSON string: print(y) #Convert a Python object containing all the legal data", "the numbers of indents: #Use the separators parameter change the default separator: y", "import json #Convert from JSON to Python # some JSON: x = '{", "of indents: #Use the separators parameter change the default separator: y = json.dumps(x,", "separators=(\". \", \" = \")) #Use the sort_keys parameter to specify if the", "data types: x = { \"name\": \"John\", \"age\": 30, \"married\": True, \"divorced\": False,", "'{ \"name\":\"John\", \"age\":30, \"city\":\"New York\"}' # parse x: y = json.loads(x) # the", "\"Ford Edge\", \"mpg\": 24.1} ] } print(json.dumps(x)) #Use the indent parameter to define", "string: print(y) #Convert a Python object containing all the legal data types: x", "separator: y = json.dumps(x, indent=4, separators=(\". 
\", \" = \")) #Use the sort_keys", "= '{ \"name\":\"John\", \"age\":30, \"city\":\"New York\"}' # parse x: y = json.loads(x) #", "\" = \")) #Use the sort_keys parameter to specify if the result should", "\"city\":\"New York\"}' # parse x: y = json.loads(x) # the result is a", "#Convert from Python to JSON # a Python object (dict): x = {", "} # convert into JSON: y = json.dumps(x) # the result is a", "all the legal data types: x = { \"name\": \"John\", \"age\": 30, \"married\":", "= json.loads(x) # the result is a Python dictionary: print(y[\"name\"]) #Convert from Python", "= \")) #Use the sort_keys parameter to specify if the result should be", "York\"}' # parse x: y = json.loads(x) # the result is a Python", "x: y = json.loads(x) # the result is a Python dictionary: print(y[\"name\"]) #Convert", "} print(json.dumps(x)) #Use the indent parameter to define the numbers of indents: #Use", "parameter to specify if the result should be sorted or not: z =", "json.loads(x) # the result is a Python dictionary: print(y[\"name\"]) #Convert from Python to", "specify if the result should be sorted or not: z = json.dumps(x, indent=4,", "convert into JSON: y = json.dumps(x) # the result is a JSON string:", "legal data types: x = { \"name\": \"John\", \"age\": 30, \"married\": True, \"divorced\":", "parameter change the default separator: y = json.dumps(x, indent=4, separators=(\". \", \" =", "indent=4, separators=(\". \", \" = \")) #Use the sort_keys parameter to specify if", "#Use the indent parameter to define the numbers of indents: #Use the separators", "= json.dumps(x) # the result is a JSON string: print(y) #Convert a Python", "y = json.dumps(x) # the result is a JSON string: print(y) #Convert a", "\"name\": \"John\", \"age\": 30, \"married\": True, \"divorced\": False, \"children\": (\"Ann\",\"Billy\"), \"pets\": None, \"cars\":", "dictionary: print(y[\"name\"]) #Convert from Python to JSON # a Python object (dict): x", "x = '{ \"name\":\"John\", \"age\":30, \"city\":\"New York\"}' # parse x: y = json.loads(x)", "{\"model\": \"BMW 230\", \"mpg\": 27.5}, {\"model\": \"Ford Edge\", \"mpg\": 24.1} ] } print(json.dumps(x))", "indents: #Use the separators parameter change the default separator: y = json.dumps(x, indent=4,", "{ \"name\": \"John\", \"age\": 30, \"married\": True, \"divorced\": False, \"children\": (\"Ann\",\"Billy\"), \"pets\": None,", "is a Python dictionary: print(y[\"name\"]) #Convert from Python to JSON # a Python", "[ {\"model\": \"BMW 230\", \"mpg\": 27.5}, {\"model\": \"Ford Edge\", \"mpg\": 24.1} ] }", "{\"model\": \"Ford Edge\", \"mpg\": 24.1} ] } print(json.dumps(x)) #Use the indent parameter to", "= json.dumps(x, indent=4, separators=(\". 
\", \" = \")) #Use the sort_keys parameter to", "to JSON # a Python object (dict): x = { \"name\": \"John\", \"age\":", "# some JSON: x = '{ \"name\":\"John\", \"age\":30, \"city\":\"New York\"}' # parse x:", "JSON string: print(y) #Convert a Python object containing all the legal data types:", "Python # some JSON: x = '{ \"name\":\"John\", \"age\":30, \"city\":\"New York\"}' # parse", "# parse x: y = json.loads(x) # the result is a Python dictionary:", "27.5}, {\"model\": \"Ford Edge\", \"mpg\": 24.1} ] } print(json.dumps(x)) #Use the indent parameter", "JSON # a Python object (dict): x = { \"name\": \"John\", \"age\": 30,", "= { \"name\": \"John\", \"age\": 30, \"married\": True, \"divorced\": False, \"children\": (\"Ann\",\"Billy\"), \"pets\":", "Edge\", \"mpg\": 24.1} ] } print(json.dumps(x)) #Use the indent parameter to define the", "numbers of indents: #Use the separators parameter change the default separator: y =", "Python object containing all the legal data types: x = { \"name\": \"John\",", "\"age\": 30, \"city\": \"New York\" } # convert into JSON: y = json.dumps(x)", "# the result is a JSON string: print(y) #Convert a Python object containing", "a Python object containing all the legal data types: x = { \"name\":", "from JSON to Python # some JSON: x = '{ \"name\":\"John\", \"age\":30, \"city\":\"New", "None, \"cars\": [ {\"model\": \"BMW 230\", \"mpg\": 27.5}, {\"model\": \"Ford Edge\", \"mpg\": 24.1}", "to specify if the result should be sorted or not: z = json.dumps(x,", "the separators parameter change the default separator: y = json.dumps(x, indent=4, separators=(\". \",", "object (dict): x = { \"name\": \"John\", \"age\": 30, \"city\": \"New York\" }", "# convert into JSON: y = json.dumps(x) # the result is a JSON", "sort_keys parameter to specify if the result should be sorted or not: z", "y = json.dumps(x, indent=4, separators=(\". \", \" = \")) #Use the sort_keys parameter", "default separator: y = json.dumps(x, indent=4, separators=(\". 
\", \" = \")) #Use the", "#Convert from JSON to Python # some JSON: x = '{ \"name\":\"John\", \"age\":30,", "object containing all the legal data types: x = { \"name\": \"John\", \"age\":", "\"age\": 30, \"married\": True, \"divorced\": False, \"children\": (\"Ann\",\"Billy\"), \"pets\": None, \"cars\": [ {\"model\":", "a Python object (dict): x = { \"name\": \"John\", \"age\": 30, \"city\": \"New", "result should be sorted or not: z = json.dumps(x, indent=4, sort_keys=True) print(y) print(z)", "from Python to JSON # a Python object (dict): x = { \"name\":", "json.dumps(x) # the result is a JSON string: print(y) #Convert a Python object", "JSON to Python # some JSON: x = '{ \"name\":\"John\", \"age\":30, \"city\":\"New York\"}'", "\"name\":\"John\", \"age\":30, \"city\":\"New York\"}' # parse x: y = json.loads(x) # the result", "{ \"name\": \"John\", \"age\": 30, \"city\": \"New York\" } # convert into JSON:", "(\"Ann\",\"Billy\"), \"pets\": None, \"cars\": [ {\"model\": \"BMW 230\", \"mpg\": 27.5}, {\"model\": \"Ford Edge\",", "types: x = { \"name\": \"John\", \"age\": 30, \"married\": True, \"divorced\": False, \"children\":", "\"BMW 230\", \"mpg\": 27.5}, {\"model\": \"Ford Edge\", \"mpg\": 24.1} ] } print(json.dumps(x)) #Use", "York\" } # convert into JSON: y = json.dumps(x) # the result is", "\")) #Use the sort_keys parameter to specify if the result should be sorted", "print(y[\"name\"]) #Convert from Python to JSON # a Python object (dict): x =", "# a Python object (dict): x = { \"name\": \"John\", \"age\": 30, \"city\":", "False, \"children\": (\"Ann\",\"Billy\"), \"pets\": None, \"cars\": [ {\"model\": \"BMW 230\", \"mpg\": 27.5}, {\"model\":", "into JSON: y = json.dumps(x) # the result is a JSON string: print(y)", "x = { \"name\": \"John\", \"age\": 30, \"city\": \"New York\" } # convert", "parse x: y = json.loads(x) # the result is a Python dictionary: print(y[\"name\"])", "to Python # some JSON: x = '{ \"name\":\"John\", \"age\":30, \"city\":\"New York\"}' #", "\"New York\" } # convert into JSON: y = json.dumps(x) # the result", "the result is a JSON string: print(y) #Convert a Python object containing all", "print(json.dumps(x)) #Use the indent parameter to define the numbers of indents: #Use the", "= { \"name\": \"John\", \"age\": 30, \"city\": \"New York\" } # convert into", "a Python dictionary: print(y[\"name\"]) #Convert from Python to JSON # a Python object", "\"mpg\": 24.1} ] } print(json.dumps(x)) #Use the indent parameter to define the numbers", "# the result is a Python dictionary: print(y[\"name\"]) #Convert from Python to JSON", "the result is a Python dictionary: print(y[\"name\"]) #Convert from Python to JSON #", "Python to JSON # a Python object (dict): x = { \"name\": \"John\",", "define the numbers of indents: #Use the separators parameter change the default separator:", "\"married\": True, \"divorced\": False, \"children\": (\"Ann\",\"Billy\"), \"pets\": None, \"cars\": [ {\"model\": \"BMW 230\",", "#Use the sort_keys parameter to specify if the result should be sorted or", "True, \"divorced\": False, \"children\": (\"Ann\",\"Billy\"), \"pets\": None, \"cars\": [ {\"model\": \"BMW 230\", \"mpg\":", "(dict): x = { \"name\": \"John\", \"age\": 30, \"city\": \"New York\" } #", "Python object (dict): x = { \"name\": \"John\", \"age\": 30, \"city\": \"New York\"", "the indent parameter to define the numbers of indents: #Use the separators parameter", "separators parameter change the default separator: y = json.dumps(x, indent=4, separators=(\". \", \"" ]
[ "\"#<SYSTEM-MACRO {0} {{{1:X}}}>\".format(self.__class__.__name__, id(self)) # ============================================================================== # Defines system macro classes. # ==============================================================================", "import Expander bindings, body = forms.car, forms.cdr # Expands body recursively. body =", "progn. forms = Cons(Symbol('LAMBDA'), Cons(params, Cons(Cons(Symbol('PROGN'), body), Null()))) return forms class DefunSystemMacro(SystemMacro): \"\"\"defun", "Unless required by applicable law or agreed to in writing, software # distributed", "system macro classes. # ============================================================================== class BlockSystemMacro(SystemMacro): \"\"\"block establishes a block and then", "*args, **kwargs): \"\"\"Instantiates LabelsSystemMacro. \"\"\" cls.__name__ = 'LABELS' return object.__new__(cls) def __call__(self, forms,", "Null()))) return forms class LetSystemMacro(SystemMacro): \"\"\"let and let* create new variable bindings and", "__call__(self, forms, var_env, func_env, macro_env): \"\"\"Behavior of IfSystemMacro. \"\"\" from clispy.expander import Expander", "of the expansion function (but not the lambda-list) are implicitly enclosed in a", "is Symbol('UNQUOTE'): # Unquote (,). return forms.cdr.car elif isinstance(forms.car, Cons) and forms.car.car is", "(but not the lambda-list) are implicitly enclosed in a block whose name is", "env='MACRO', status=':EXTERNAL') assign_helper(symbol_name='LET', value=LetSystemMacro(), package_name='COMMON-LISP', env='MACRO', status=':EXTERNAL') assign_helper(symbol_name='LET*', value=LetAsterSystemMacro(), package_name='COMMON-LISP', env='MACRO', status=':EXTERNAL') assign_helper(symbol_name='QUOTE',", "in order of occurence. The body forms (but not the lambda list) of", "the expansion function (but not the lambda-list) are implicitly enclosed in a block", "Cons(name, Cons(params, Cons(Cons(Symbol('BLOCK'), Cons(name, body)), Null())))) return forms class BackquoteSystemMacro(SystemMacro): \"\"\"The backquote introduces", "except that the scope of the defined function names for labels encompasses the", "return forms class IfSystemMacro(SystemMacro): \"\"\"if allows the execution of a form to be", "special form involving a lambda expression. \"\"\" def __new__(cls, *args, **kwargs): \"\"\"Instantiates LambdaSystemMacro.", "in a block whose name is name. \"\"\" def __new__(cls, *args, **kwargs): \"\"\"Instantiates", "return forms class BackquoteSystemMacro(SystemMacro): \"\"\"The backquote introduces a template of a data structure", "isinstance(forms, Cons): # An argument is not an instance of Cons, it is", "encompasses the function definitions themselves as well as the body. \"\"\" def __new__(cls,", "Cons(Symbol('FLET'), Cons(bindings, Cons(Cons(Symbol('PROGN'), body), Null()))) return forms class IfSystemMacro(SystemMacro): \"\"\"if allows the execution", "# The body of labels has an implicit progn. forms = Cons(Symbol('LABELS'), Cons(bindings,", "forms = Cons(Symbol('DEFMACRO'), Cons(name, Cons(params, Cons(Cons(Symbol('BLOCK'), Cons(name, body)), Null())))) return forms class BackquoteSystemMacro(SystemMacro):", "IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or", "created by macrolet are enclosed in an implicit block whose name is the", "test-form. First test-form is evaluated. If the result is true, then then-form is", "selected; otherwise else-form is selected. Whichever form is selected is then evaluated. 
\"\"\"", "the bindings in parallel and let* does them sequentially. \"\"\" def __new__(cls, *args,", "of LetAsterSystemMacro. \"\"\" from clispy.expander import Expander bindings, body = forms.car, forms.cdr #", "elif isinstance(forms.car, Cons) and forms.car.car is Symbol('UNQUOTE-SPLICING'): # Unquote-splicing (,@). return Cons(Symbol('APPEND'), Cons(forms.car.cdr.car,", "the function block name of the function-name or name, as appropriate. flet defines", "has an implicit progn. forms = Cons(Symbol('LABELS'), Cons(bindings, Cons(Cons(Symbol('PROGN'), body), Null()))) return forms", "__call__(self, forms, var_env, func_env, macro_env): \"\"\"Behavior of LablesSystemMacro. \"\"\" from clispy.expander import Expander", "form and an environment. The expansion function returns a form. The body of", "= Expander.expand(then_form, var_env, func_env, macro_env) else_form = Expander.expand(else_form, var_env, func_env, macro_env) forms =", "recursively. body = Expander.expand(body, var_env, macro_env, macro_env) # The body of flet has", "is the function block name of the function-name or name, as appropriate. flet", "a shorthand notation for a function special form involving a lambda expression. \"\"\"", "body of a lambda has an implicit progn. forms = Cons(Symbol('LAMBDA'), Cons(params, Cons(Cons(Symbol('PROGN'),", "an implicit progn. forms = Cons(Symbol('LABELS'), Cons(bindings, Cons(Cons(Symbol('PROGN'), body), Null()))) return forms class", "= 'DEFMACRO' return object.__new__(cls) def __call__(self, forms, var_env, func_env, macro_env): \"\"\"Behavior of DefmacroSystemMacro.", "forms class LabelsSystemMacro(SystemMacro): \"\"\"flet, labels, and macrolet define local functions and macros, and", "block and then evaluates forms as an implicit progn. \"\"\" def __new__(cls, *args,", "macro_env) # The body of let has an implicit progn. forms = Cons(Symbol('LET'),", "of IfSystemMacro. \"\"\" from clispy.expander import Expander # If else_form is Null, then", "body = Cons(Cons(Symbol('PROGN'), body), Null()) # The body of a defmacro has an", "software # distributed under the License is distributed on an \"AS IS\" BASIS,", "package_name='COMMON-LISP', env='MACRO', status=':EXTERNAL') assign_helper(symbol_name='LABELS', value=LabelsSystemMacro(), package_name='COMMON-LISP', env='MACRO', status=':EXTERNAL') assign_helper(symbol_name='LET', value=LetSystemMacro(), package_name='COMMON-LISP', env='MACRO', status=':EXTERNAL')", "of a defun has an implicit block. forms = Cons(Symbol('DEFUN'), Cons(name, Cons(params, Cons(Cons(Symbol('BLOCK'),", "cls.__name__ = 'SYSTEM-MACRO' return object.__new__(cls) def __repr__(self): \"\"\"The official string representation. \"\"\" return", "var_env, func_env, macro_env): \"\"\"Behavior of QuoteSystemMacro. \"\"\" # Retruns itself. return Cons(Symbol('QUOTE'), forms)", "Cons(Symbol('IF'), Cons(test_form, Cons(then_form, Cons(else_form, Null())))) return forms class LabelsSystemMacro(SystemMacro): \"\"\"flet, labels, and macrolet", "the local definitions. forms are executed in order of occurence. 
The body forms", "(but not the lambda list) of each function created by flet and labels", "body), Null()))) return forms class LetAsterSystemMacro(SystemMacro): \"\"\"let and let* create new variable bindings", "body forms (but not the lambda list) of each function created by flet", "= Expander.expand(body, var_env, func_env, macro_env) # The body of let has an implicit", "DefunSystemMacro(SystemMacro): \"\"\"defun implicitly puts a block named block-name around the body forms \"\"\"", "let has an implicit progn. forms = Cons(Symbol('LET'), Cons(bindings, Cons(Cons(Symbol('PROGN'), body), Null()))) return", "the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law", "func_env, macro_env) # The body of a defun has an implicit progn. body", "functions related on special operators # ============================================================================== # For special operators assign_helper(symbol_name='BLOCK', value=BlockSystemMacro(),", "= Cons(Cons(Symbol('PROGN'), body), Null()) # The body of a defmacro has an implicit", "value=IfSystemMacro(), package_name='COMMON-LISP', env='MACRO', status=':EXTERNAL') assign_helper(symbol_name='LABELS', value=LabelsSystemMacro(), package_name='COMMON-LISP', env='MACRO', status=':EXTERNAL') assign_helper(symbol_name='LET', value=LetSystemMacro(), package_name='COMMON-LISP', env='MACRO',", "forms class LetSystemMacro(SystemMacro): \"\"\"let and let* create new variable bindings and execute a", "var_env, func_env, macro_env): \"\"\"Behavior of BackquoteSystemMacro. \"\"\" return self.expand_hepler(forms.car) @classmethod def expand_hepler(cls, forms):", "is returned as the expansion of the macro. body = Cons(Cons(Symbol('PROGN'), body), Null())", "Cons(Symbol('LET*'), Cons(bindings, Cons(Cons(Symbol('PROGN'), body), Null()))) return forms class QuoteSystemMacro(SystemMacro): \"\"\"The quote special operator", "Expander.expand(body, var_env, func_env, macro_env) # The body of a defun has an implicit", "cls.__name__ = 'QUOTE' return object.__new__(cls) def __call__(self, forms, var_env, func_env, macro_env): \"\"\"Behavior of", "body forms \"\"\" def __new__(cls, *args, **kwargs): \"\"\"Instantiates DefunSystemMacro. \"\"\" cls.__name__ = 'DEFUN'", "assign_helper(symbol_name='DEFUN', value=DefunSystemMacro(), package_name='COMMON-LISP', env='MACRO', status=':EXTERNAL') assign_helper(symbol_name='DEFMACRO', value=DefmacroSystemMacro(), package_name='COMMON-LISP', env='MACRO', status=':EXTERNAL') assign_helper(symbol_name='BACKQUOTE', value=BackquoteSystemMacro(), package_name='COMMON-LISP',", "Cons(else_form, Null())))) return forms class LabelsSystemMacro(SystemMacro): \"\"\"flet, labels, and macrolet define local functions", "value=LabelsSystemMacro(), package_name='COMMON-LISP', env='MACRO', status=':EXTERNAL') assign_helper(symbol_name='LET', value=LetSystemMacro(), package_name='COMMON-LISP', env='MACRO', status=':EXTERNAL') assign_helper(symbol_name='LET*', value=LetAsterSystemMacro(), package_name='COMMON-LISP', env='MACRO',", "def __new__(cls, *args, **kwargs): \"\"\"Instantiates BlockSystemMacro. \"\"\" cls.__name__ = 'BLOCK' return object.__new__(cls) def", "parallel and let* does them sequentially. \"\"\" def __new__(cls, *args, **kwargs): \"\"\"Instantiates LetSystemMacro.", "in an implicit block whose name is the function block name of the", "body of let has an implicit progn. 
forms = Cons(Symbol('LET'), Cons(bindings, Cons(Cons(Symbol('PROGN'), body),", "\"\"\" cls.__name__ = 'DEFUN' return object.__new__(cls) def __call__(self, forms, var_env, func_env, macro_env): \"\"\"Behavior", "= Cons(Cons(Symbol('PROGN'), body), Null()) # The body of a defun has an implicit", "Null())))) return forms class LabelsSystemMacro(SystemMacro): \"\"\"flet, labels, and macrolet define local functions and", "block whose name is name. \"\"\" def __new__(cls, *args, **kwargs): \"\"\"Instantiates DefmacroSystemMacro. \"\"\"", "Cons(Symbol('APPEND'), Cons(forms.car.cdr.car, Cons(cls.expand_hepler(forms.cdr), Null()))) else: # Expands recursively and returns cons. return Cons(Symbol('CONS'),", "on a single test-form. First test-form is evaluated. If the result is true,", "a function special form involving a lambda expression. \"\"\" def __new__(cls, *args, **kwargs):", "form to be dependent on a single test-form. First test-form is evaluated. If", "and macrolet define local functions and macros, and execute forms using the local", "representation. \"\"\" return \"#<SYSTEM-MACRO {0} {{{1:X}}}>\".format(self.__class__.__name__, id(self)) # ============================================================================== # Defines system macro", "*args, **kwargs): \"\"\"Instantiates LetSystemMacro. \"\"\" cls.__name__ = 'LET' return object.__new__(cls) def __call__(self, forms,", "macro_env): \"\"\"Behavior of QuoteSystemMacro. \"\"\" # Retruns itself. return Cons(Symbol('QUOTE'), forms) class LambdaSystemMacro(SystemMacro):", "an implicit progn. forms = Cons(Symbol('LET'), Cons(bindings, Cons(Cons(Symbol('PROGN'), body), Null()))) return forms class", "macro by associating a macro function with that name in the global environment.", "not use this file except in compliance with the License. # You may", "macro_env, macro_env) # The body of flet has an implicit progn. forms =", "id(self)) # ============================================================================== # Defines system macro classes. # ============================================================================== class BlockSystemMacro(SystemMacro): \"\"\"block", "'BLOCK' return object.__new__(cls) def __call__(self, forms, var_env, func_env, macro_env): \"\"\"Behavior of BlockSystemMacro. \"\"\"", "allows the execution of a form to be dependent on a single test-form.", "distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY", "by associating a macro function with that name in the global environment. The", "func_env, macro_env): \"\"\"Behavior of IfSystemMacro. \"\"\" from clispy.expander import Expander # If else_form", "body recursively. body = Expander.expand(body, var_env, func_env, macro_env) # The body of a", "# The body of let* has an implicit progn forms = Cons(Symbol('LET*'), Cons(bindings,", "of the defined function names for labels encompasses the function definitions themselves as", "func_env, macro_env): \"\"\"Behavior of LambdaSystemMacro. \"\"\" from clispy.expander import Expander params, body =", "agreed to in writing, software # distributed under the License is distributed on", "Null()))) return forms class IfSystemMacro(SystemMacro): \"\"\"if allows the execution of a form to", "\"\"\"Behavior of LetSystemMacro. \"\"\" from clispy.expander import Expander bindings, body = forms.car, forms.cdr", "func_env, macro_env): \"\"\"Behavior of QuoteSystemMacro. \"\"\" # Retruns itself. return Cons(Symbol('QUOTE'), forms) class", "forms that use these bindings. 
let performs the bindings in parallel and let*", "definitions. forms are executed in order of occurence. The body forms (but not", "BlockSystemMacro. \"\"\" cls.__name__ = 'BLOCK' return object.__new__(cls) def __call__(self, forms, var_env, func_env, macro_env):", "recursively. body = Expander.expand(body, var_env, func_env, macro_env) # The body of a block", "**kwargs): \"\"\"Instantiates DefmacroSystemMacro. \"\"\" cls.__name__ = 'DEFMACRO' return object.__new__(cls) def __call__(self, forms, var_env,", "# An argument is not an instance of Cons, it is quoted. return", "macro_env) then_form = Expander.expand(then_form, var_env, func_env, macro_env) else_form = Expander.expand(else_form, var_env, func_env, macro_env)", "# # SystemMacro # ============================================================================== class SystemMacro(Macro): \"\"\"SystemMacro provide some macros for defmacro,", "forms (but not the lambda list) of each function created by flet and", "LetAsterSystemMacro(SystemMacro): \"\"\"let and let* create new variable bindings and execute a series of", "of such local functions can be defined. \"\"\" def __new__(cls, *args, **kwargs): \"\"\"Instantiates", "defun has an implicit block. forms = Cons(Symbol('DEFUN'), Cons(name, Cons(params, Cons(Cons(Symbol('BLOCK'), Cons(name, body)),", "body), Null()))) return forms class QuoteSystemMacro(SystemMacro): \"\"\"The quote special operator just returns object.", "flet has an implicit progn. forms = Cons(Symbol('FLET'), Cons(bindings, Cons(Cons(Symbol('PROGN'), body), Null()))) return", "Expands body recursively. test_form = Expander.expand(test_form, var_env, func_env, macro_env) then_form = Expander.expand(then_form, var_env,", "Cons(test_form, Cons(then_form, Cons(else_form, Null())))) return forms class LabelsSystemMacro(SystemMacro): \"\"\"flet, labels, and macrolet define", "the global environment. The macro function is defined in the same lexical environment", "not an instance of Cons, it is quoted. return Cons(Symbol('QUOTE'), Cons(forms, Null())) if", "name is the function block name of the function-name or name, as appropriate.", "__new__(cls, *args, **kwargs): \"\"\"Instantiates SystemMacro. \"\"\" cls.__name__ = 'SYSTEM-MACRO' return object.__new__(cls) def __repr__(self):", "to in writing, software # distributed under the License is distributed on an", "body), Null()))) return forms class FletSystemMacro(SystemMacro): \"\"\"flet, labels, and macrolet define local functions", "itself. return Cons(Symbol('QUOTE'), forms) class LambdaSystemMacro(SystemMacro): \"\"\"Provides a shorthand notation for a function", "Cons(Cons(Symbol('PROGN'), body), Null()))) return forms class DefunSystemMacro(SystemMacro): \"\"\"defun implicitly puts a block named", "as a macro by associating a macro function with that name in the", "implied. # See the License for the specific language governing permissions and #", "env='MACRO', status=':EXTERNAL') assign_helper(symbol_name='LABELS', value=LabelsSystemMacro(), package_name='COMMON-LISP', env='MACRO', status=':EXTERNAL') assign_helper(symbol_name='LET', value=LetSystemMacro(), package_name='COMMON-LISP', env='MACRO', status=':EXTERNAL') assign_helper(symbol_name='LET*',", "Rights Reserved. 
# # Licensed under the Apache License, Version 2.0 (the \"License\");", "============================================================================== class BlockSystemMacro(SystemMacro): \"\"\"block establishes a block and then evaluates forms as an", "Null())) if forms.car is Symbol('UNQUOTE'): # Unquote (,). return forms.cdr.car elif isinstance(forms.car, Cons)", "return object.__new__(cls) def __repr__(self): \"\"\"The official string representation. \"\"\" return \"#<SYSTEM-MACRO {0} {{{1:X}}}>\".format(self.__class__.__name__,", "\"\"\"Defines name as a macro by associating a macro function with that name", "**kwargs): \"\"\"Instantiates LetSystemMacro. \"\"\" cls.__name__ = 'LET' return object.__new__(cls) def __call__(self, forms, var_env,", "macro_env): \"\"\"Behavior of IfSystemMacro. \"\"\" from clispy.expander import Expander # If else_form is", "implicit progn. forms = Cons(Symbol('LABELS'), Cons(bindings, Cons(Cons(Symbol('PROGN'), body), Null()))) return forms class LetSystemMacro(SystemMacro):", "forms.cdr.car elif isinstance(forms.car, Cons) and forms.car.car is Symbol('UNQUOTE-SPLICING'): # Unquote-splicing (,@). return Cons(Symbol('APPEND'),", "block. forms = Cons(Symbol('DEFUN'), Cons(name, Cons(params, Cons(Cons(Symbol('BLOCK'), Cons(name, body)), Null())))) return forms class", "Expander.expand(body, var_env, macro_env, macro_env) # The body of flet has an implicit progn.", "Apache License, Version 2.0 (the \"License\"); # you may not use this file", "function-name or name, as appropriate. flet defines locally named functions and executes a", "recursively and returns cons. return Cons(Symbol('CONS'), Cons(cls.expand_hepler(forms.car), Cons(cls.expand_hepler(forms.cdr), Null()))) # ============================================================================== # Set", "assign_helper(symbol_name='FLET', value=FletSystemMacro(), package_name='COMMON-LISP', env='MACRO', status=':EXTERNAL') assign_helper(symbol_name='IF', value=IfSystemMacro(), package_name='COMMON-LISP', env='MACRO', status=':EXTERNAL') assign_helper(symbol_name='LABELS', value=LabelsSystemMacro(), package_name='COMMON-LISP',", "using the local definitions. forms are executed in order of occurence. The body", "\"\"\" def __new__(cls, *args, **kwargs): \"\"\"Instantiates LambdaSystemMacro. \"\"\" cls.__name__ = 'LAMBDA' return object.__new__(cls)", "a defmacro has an implicit block. forms = Cons(Symbol('DEFMACRO'), Cons(name, Cons(params, Cons(Cons(Symbol('BLOCK'), Cons(name,", "of DefunSystemMacro. \"\"\" from clispy.expander import Expander name, params, body = forms.car, forms.cdr.car,", "cons. return Cons(Symbol('CONS'), Cons(cls.expand_hepler(forms.car), Cons(cls.expand_hepler(forms.cdr), Null()))) # ============================================================================== # Set functions related on", "backquote introduces a template of a data structure to be built. \"\"\" def", "# See the License for the specific language governing permissions and # limitations", "the License is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR", "return forms class DefunSystemMacro(SystemMacro): \"\"\"defun implicitly puts a block named block-name around the", "For special operators assign_helper(symbol_name='BLOCK', value=BlockSystemMacro(), package_name='COMMON-LISP', env='MACRO', status=':EXTERNAL') assign_helper(symbol_name='FLET', value=FletSystemMacro(), package_name='COMMON-LISP', env='MACRO', status=':EXTERNAL')", "same lexical environment in which the defmacro form appears. 
The expansion function accepts", "variable bindings and execute a series of forms that use these bindings. let", "has an implicit block. forms = Cons(Symbol('DEFMACRO'), Cons(name, Cons(params, Cons(Cons(Symbol('BLOCK'), Cons(name, body)), Null()))))", "# ============================================================================== from clispy.macro import Macro from clispy.package import assign_helper, use_package_helper from clispy.type", "execute a series of forms that use these bindings. let performs the bindings", "function block name of the function-name or name, as appropriate. flet defines locally", "The body of labels has an implicit progn. forms = Cons(Symbol('LABELS'), Cons(bindings, Cons(Cons(Symbol('PROGN'),", "params, body = forms.car, forms.cdr # Expands body recursively. body = Expander.expand(body, var_env,", "the Apache License, Version 2.0 (the \"License\"); # you may not use this", "you may not use this file except in compliance with the License. #", "var_env, func_env, macro_env) # The value of the last form executed is returned", "notation for a function special form involving a lambda expression. \"\"\" def __new__(cls,", "Null()))) else: # Expands recursively and returns cons. return Cons(Symbol('CONS'), Cons(cls.expand_hepler(forms.car), Cons(cls.expand_hepler(forms.cdr), Null())))", "class BackquoteSystemMacro(SystemMacro): \"\"\"The backquote introduces a template of a data structure to be", "from clispy.expander import Expander params, body = forms.car, forms.cdr # Expands body recursively.", "form involving a lambda expression. \"\"\" def __new__(cls, *args, **kwargs): \"\"\"Instantiates LambdaSystemMacro. \"\"\"", "var_env, macro_env, macro_env) # The body of flet has an implicit progn. forms", "progn forms = Cons(Symbol('LET*'), Cons(bindings, Cons(Cons(Symbol('PROGN'), body), Null()))) return forms class QuoteSystemMacro(SystemMacro): \"\"\"The", "implicit block. forms = Cons(Symbol('DEFMACRO'), Cons(name, Cons(params, Cons(Cons(Symbol('BLOCK'), Cons(name, body)), Null())))) return forms", "__new__(cls, *args, **kwargs): \"\"\"Instantiates LambdaSystemMacro. \"\"\" cls.__name__ = 'LAMBDA' return object.__new__(cls) def __call__(self,", "# Licensed under the Apache License, Version 2.0 (the \"License\"); # you may", "implicit progn. forms = Cons(Symbol('BLOCK'), Cons(name, Cons(Cons(Symbol('PROGN'), body), Null()))) return forms class FletSystemMacro(SystemMacro):", "related on special operators # ============================================================================== # For special operators assign_helper(symbol_name='BLOCK', value=BlockSystemMacro(), package_name='COMMON-LISP',", "IfSystemMacro. \"\"\" cls.__name__ = 'IF' return object.__new__(cls) def __call__(self, forms, var_env, func_env, macro_env):", "\"\"\" cls.__name__ = 'BACKQUOTE' return object.__new__(cls) def __call__(self, forms, var_env, func_env, macro_env): \"\"\"Behavior", "and execute forms using the local definitions. forms are executed in order of", "class FletSystemMacro(SystemMacro): \"\"\"flet, labels, and macrolet define local functions and macros, and execute", "forms = Cons(Symbol('LET*'), Cons(bindings, Cons(Cons(Symbol('PROGN'), body), Null()))) return forms class QuoteSystemMacro(SystemMacro): \"\"\"The quote", "object.__new__(cls) def __call__(self, forms, var_env, func_env, macro_env): \"\"\"Behavior of IfSystemMacro. \"\"\" from clispy.expander", "\"\"\"Instantiates BlockSystemMacro. 
\"\"\" cls.__name__ = 'BLOCK' return object.__new__(cls) def __call__(self, forms, var_env, func_env,", "forms with these definition bindings. Any number of such local functions can be", "def __new__(cls, *args, **kwargs): \"\"\"Instantiates FletSystemMacro. \"\"\" cls.__name__ = 'FLET' return object.__new__(cls) def", "object.__new__(cls) def __call__(self, forms, var_env, func_env, macro_env): \"\"\"Behavior of LablesSystemMacro. \"\"\" from clispy.expander", "implicit progn forms = Cons(Symbol('LET*'), Cons(bindings, Cons(Cons(Symbol('PROGN'), body), Null()))) return forms class QuoteSystemMacro(SystemMacro):", "quotes recursively. \"\"\" if not isinstance(forms, Cons): # An argument is not an", "return forms.cdr.car elif isinstance(forms.car, Cons) and forms.car.car is Symbol('UNQUOTE-SPLICING'): # Unquote-splicing (,@). return", "them sequentially. \"\"\" def __new__(cls, *args, **kwargs): \"\"\"Instantiates LetAsterSytemMacro. \"\"\" cls.__name__ = 'LET*'", "forms class FletSystemMacro(SystemMacro): \"\"\"flet, labels, and macrolet define local functions and macros, and", "DefmacroSystemMacro. \"\"\" cls.__name__ = 'DEFMACRO' return object.__new__(cls) def __call__(self, forms, var_env, func_env, macro_env):", "these definition bindings. Any number of such local functions can be defined. \"\"\"", "last form executed is returned as the expansion of the macro. The body", "= 'BLOCK' return object.__new__(cls) def __call__(self, forms, var_env, func_env, macro_env): \"\"\"Behavior of BlockSystemMacro.", "# For system functions assign_helper(symbol_name='LAMBDA', value=LambdaSystemMacro(), package_name='COMMON-LISP', env='MACRO', status=':EXTERNAL') assign_helper(symbol_name='DEFUN', value=DefunSystemMacro(), package_name='COMMON-LISP', env='MACRO',", "are implicitly enclosed in a block whose name is name. \"\"\" def __new__(cls,", "else_form is set to Null. test_form, then_form, else_form = forms.car, forms.cdr.car, forms.cdr.cdr.car #", "assign_helper(symbol_name='QUOTE', value=QuoteSystemMacro(), package_name='COMMON-LISP', env='MACRO', status='EXTERNAL') # For system functions assign_helper(symbol_name='LAMBDA', value=LambdaSystemMacro(), package_name='COMMON-LISP', env='MACRO',", "LetAsterSytemMacro. \"\"\" cls.__name__ = 'LET*' return object.__new__(cls) def __call__(self, forms, var_env, func_env, macro_env):", "__new__(cls, *args, **kwargs): \"\"\"Instantiates LetAsterSytemMacro. \"\"\" cls.__name__ = 'LET*' return object.__new__(cls) def __call__(self,", "var_env, func_env, macro_env): \"\"\"Behavior of LetSystemMacro. \"\"\" from clispy.expander import Expander bindings, body", "\"\"\" from clispy.expander import Expander bindings, body = forms.car, forms.cdr # Expands body", "Whichever form is selected is then evaluated. \"\"\" def __new__(cls, *args, **kwargs): \"\"\"Instantiates", "a data structure to be built. \"\"\" def __new__(cls, *args, **kwargs): \"\"\"Instantiates BackquoteSystemMacro.", "template of a data structure to be built. \"\"\" def __new__(cls, *args, **kwargs):", "return forms class LetAsterSystemMacro(SystemMacro): \"\"\"let and let* create new variable bindings and execute", "Cons(params, Cons(Cons(Symbol('PROGN'), body), Null()))) return forms class DefunSystemMacro(SystemMacro): \"\"\"defun implicitly puts a block", "License. 
# ============================================================================== from clispy.macro import Macro from clispy.package import assign_helper, use_package_helper from", "The body forms of the expansion function (but not the lambda-list) are implicitly", "func_env, macro_env) # The body of let has an implicit progn. forms =", "Expander.expand(body, var_env, func_env, macro_env) # The body of a lambda has an implicit", "does them sequentially. \"\"\" def __new__(cls, *args, **kwargs): \"\"\"Instantiates LetAsterSytemMacro. \"\"\" cls.__name__ =", "\"\"\" def __new__(cls, *args, **kwargs): cls.__name__ = 'QUOTE' return object.__new__(cls) def __call__(self, forms,", "\"\"\"Behavior of LetAsterSystemMacro. \"\"\" from clispy.expander import Expander bindings, body = forms.car, forms.cdr", "= 'LET*' return object.__new__(cls) def __call__(self, forms, var_env, func_env, macro_env): \"\"\"Behavior of LetAsterSystemMacro.", "Expander.expand(body, var_env, func_env, macro_env) # The value of the last form executed is", "= Cons(Symbol('BLOCK'), Cons(name, Cons(Cons(Symbol('PROGN'), body), Null()))) return forms class FletSystemMacro(SystemMacro): \"\"\"flet, labels, and", "bindings, body = forms.car, forms.cdr # Expands body recursively. body = Expander.expand(body, var_env,", "\"\"\" return self.expand_hepler(forms.car) @classmethod def expand_hepler(cls, forms): \"\"\"Expand quotes recursively. \"\"\" if not", "else_form is Null, then else_form is set to Null. test_form, then_form, else_form =", "on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,", "instance of Cons, it is quoted. return Cons(Symbol('QUOTE'), Cons(forms, Null())) if forms.car is", "func_env, macro_env) forms = Cons(Symbol('IF'), Cons(test_form, Cons(then_form, Cons(else_form, Null())))) return forms class LabelsSystemMacro(SystemMacro):", "import assign_helper, use_package_helper from clispy.type import Cons, Null, Symbol # ============================================================================== # Defines", "# Defines base classes. # # SystemMacro # ============================================================================== class SystemMacro(Macro): \"\"\"SystemMacro provide", "\"\"\"Behavior of IfSystemMacro. \"\"\" from clispy.expander import Expander # If else_form is Null,", "Null()) # The body of a defmacro has an implicit block. forms =", "Cons): # An argument is not an instance of Cons, it is quoted.", "an instance of Cons, it is quoted. return Cons(Symbol('QUOTE'), Cons(forms, Null())) if forms.car", "def __call__(self, forms, var_env, func_env, macro_env): \"\"\"Behavior of LablesSystemMacro. \"\"\" from clispy.expander import", "progn. forms = Cons(Symbol('FLET'), Cons(bindings, Cons(Cons(Symbol('PROGN'), body), Null()))) return forms class IfSystemMacro(SystemMacro): \"\"\"if", "a macro function with that name in the global environment. The macro function", "that use these bindings. let performs the bindings in parallel and let* does", "labels is equivalent to flet except that the scope of the defined function", "an implicit block. 
forms = Cons(Symbol('DEFMACRO'), Cons(name, Cons(params, Cons(Cons(Symbol('BLOCK'), Cons(name, body)), Null())))) return", "\"\"\" cls.__name__ = 'LET' return object.__new__(cls) def __call__(self, forms, var_env, func_env, macro_env): \"\"\"Behavior", "on special operators # ============================================================================== # For special operators assign_helper(symbol_name='BLOCK', value=BlockSystemMacro(), package_name='COMMON-LISP', env='MACRO',", "See the License for the specific language governing permissions and # limitations under", "language governing permissions and # limitations under the License. # ============================================================================== from clispy.macro", "forms class QuoteSystemMacro(SystemMacro): \"\"\"The quote special operator just returns object. \"\"\" def __new__(cls,", "# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in", "\"\"\"flet, labels, and macrolet define local functions and macros, and execute forms using", "package_name='COMMON-LISP', env='MACRO', status=':EXTERNAL') assign_helper(symbol_name='DEFMACRO', value=DefmacroSystemMacro(), package_name='COMMON-LISP', env='MACRO', status=':EXTERNAL') assign_helper(symbol_name='BACKQUOTE', value=BackquoteSystemMacro(), package_name='COMMON-LISP', env='MACRO', status=':EXTERNAL')", "progn. forms = Cons(Symbol('BLOCK'), Cons(name, Cons(Cons(Symbol('PROGN'), body), Null()))) return forms class FletSystemMacro(SystemMacro): \"\"\"flet,", "# The body of a defmacro has an implicit block. forms = Cons(Symbol('DEFMACRO'),", "status='EXTERNAL') # For system functions assign_helper(symbol_name='LAMBDA', value=LambdaSystemMacro(), package_name='COMMON-LISP', env='MACRO', status=':EXTERNAL') assign_helper(symbol_name='DEFUN', value=DefunSystemMacro(), package_name='COMMON-LISP',", "the scope of the defined function names for labels encompasses the function definitions", "forms.car, forms.cdr.car, forms.cdr.cdr.car # Expands body recursively. test_form = Expander.expand(test_form, var_env, func_env, macro_env)", "recursively. body = Expander.expand(body, var_env, func_env, macro_env) # The body of let has", "from clispy.expander import Expander name, params, body = forms.car, forms.cdr.car, forms.cdr.cdr # Expands", "Symbol # ============================================================================== # Defines base classes. # # SystemMacro # ============================================================================== class", "then_form, else_form = forms.car, forms.cdr.car, forms.cdr.cdr.car # Expands body recursively. test_form = Expander.expand(test_form,", "of occurence. The body forms (but not the lambda list) of each function", "the specific language governing permissions and # limitations under the License. # ==============================================================================", "and let* does them sequentially. \"\"\" def __new__(cls, *args, **kwargs): \"\"\"Instantiates LetAsterSytemMacro. \"\"\"", "'LABELS' return object.__new__(cls) def __call__(self, forms, var_env, func_env, macro_env): \"\"\"Behavior of LablesSystemMacro. \"\"\"", "a template of a data structure to be built. \"\"\" def __new__(cls, *args,", "def __call__(self, forms, var_env, func_env, macro_env): \"\"\"Behavior of IfSystemMacro. \"\"\" from clispy.expander import", "# You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0", "func_env, macro_env): \"\"\"Behavior of LetSystemMacro. 
\"\"\" from clispy.expander import Expander bindings, body =", "LetSystemMacro. \"\"\" cls.__name__ = 'LET' return object.__new__(cls) def __call__(self, forms, var_env, func_env, macro_env):", "list) of each function created by flet and labels and each macro created", "the function definitions themselves as well as the body. \"\"\" def __new__(cls, *args,", "enclosed in a block whose name is name. \"\"\" def __new__(cls, *args, **kwargs):", "*args, **kwargs): \"\"\"Instantiates DefmacroSystemMacro. \"\"\" cls.__name__ = 'DEFMACRO' return object.__new__(cls) def __call__(self, forms,", "(,@). return Cons(Symbol('APPEND'), Cons(forms.car.cdr.car, Cons(cls.expand_hepler(forms.cdr), Null()))) else: # Expands recursively and returns cons.", "object.__new__(cls) def __call__(self, forms, var_env, func_env, macro_env): \"\"\"Behavior of LetAsterSystemMacro. \"\"\" from clispy.expander", "object.__new__(cls) def __call__(self, forms, var_env, func_env, macro_env): \"\"\"Behavior of LetSystemMacro. \"\"\" from clispy.expander", "return object.__new__(cls) def __call__(self, forms, var_env, func_env, macro_env): \"\"\"Behavior of LablesSystemMacro. \"\"\" from", "============================================================================== # For special operators assign_helper(symbol_name='BLOCK', value=BlockSystemMacro(), package_name='COMMON-LISP', env='MACRO', status=':EXTERNAL') assign_helper(symbol_name='FLET', value=FletSystemMacro(), package_name='COMMON-LISP',", "body = Expander.expand(body, var_env, func_env, macro_env) # The body of let has an", "not the lambda list) of each function created by flet and labels and", "Cons(name, body)), Null())))) return forms class DefmacroSystemMacro(SystemMacro): \"\"\"Defines name as a macro by", "env='MACRO', status=':EXTERNAL') assign_helper(symbol_name='IF', value=IfSystemMacro(), package_name='COMMON-LISP', env='MACRO', status=':EXTERNAL') assign_helper(symbol_name='LABELS', value=LabelsSystemMacro(), package_name='COMMON-LISP', env='MACRO', status=':EXTERNAL') assign_helper(symbol_name='LET',", "__call__(self, forms, var_env, func_env, macro_env): \"\"\"Behavior of BlockSystemMacro. \"\"\" from clispy.expander import Expander", "name is name. \"\"\" def __new__(cls, *args, **kwargs): \"\"\"Instantiates DefmacroSystemMacro. \"\"\" cls.__name__ =", "shorthand notation for a function special form involving a lambda expression. \"\"\" def", "body recursively. body = Expander.expand(body, var_env, func_env, macro_env) # The body of let", "body of a block has an implicit progn. forms = Cons(Symbol('BLOCK'), Cons(name, Cons(Cons(Symbol('PROGN'),", "class LabelsSystemMacro(SystemMacro): \"\"\"flet, labels, and macrolet define local functions and macros, and execute", "as the body. \"\"\" def __new__(cls, *args, **kwargs): \"\"\"Instantiates LabelsSystemMacro. \"\"\" cls.__name__ =", "a defun has an implicit progn. body = Cons(Cons(Symbol('PROGN'), body), Null()) # The", "the macro. body = Cons(Cons(Symbol('PROGN'), body), Null()) # The body of a defmacro", "assign_helper(symbol_name='LAMBDA', value=LambdaSystemMacro(), package_name='COMMON-LISP', env='MACRO', status=':EXTERNAL') assign_helper(symbol_name='DEFUN', value=DefunSystemMacro(), package_name='COMMON-LISP', env='MACRO', status=':EXTERNAL') assign_helper(symbol_name='DEFMACRO', value=DefmacroSystemMacro(), package_name='COMMON-LISP',", "is name. \"\"\" def __new__(cls, *args, **kwargs): \"\"\"Instantiates DefmacroSystemMacro. 
\"\"\" cls.__name__ = 'DEFMACRO'", "Null())))) return forms class DefmacroSystemMacro(SystemMacro): \"\"\"Defines name as a macro by associating a", "forms class BackquoteSystemMacro(SystemMacro): \"\"\"The backquote introduces a template of a data structure to", "implicit progn. \"\"\" def __new__(cls, *args, **kwargs): \"\"\"Instantiates BlockSystemMacro. \"\"\" cls.__name__ = 'BLOCK'", "that name in the global environment. The macro function is defined in the", "Cons(Symbol('BLOCK'), Cons(name, Cons(Cons(Symbol('PROGN'), body), Null()))) return forms class FletSystemMacro(SystemMacro): \"\"\"flet, labels, and macrolet", "# Retruns itself. return Cons(Symbol('QUOTE'), forms) class LambdaSystemMacro(SystemMacro): \"\"\"Provides a shorthand notation for", "test-form is evaluated. If the result is true, then then-form is selected; otherwise", "KIND, either express or implied. # See the License for the specific language", "else_form = Expander.expand(else_form, var_env, func_env, macro_env) forms = Cons(Symbol('IF'), Cons(test_form, Cons(then_form, Cons(else_form, Null()))))", "macro_env) else_form = Expander.expand(else_form, var_env, func_env, macro_env) forms = Cons(Symbol('IF'), Cons(test_form, Cons(then_form, Cons(else_form,", "Cons(Cons(Symbol('PROGN'), body), Null()))) return forms class LetAsterSystemMacro(SystemMacro): \"\"\"let and let* create new variable", "If the result is true, then then-form is selected; otherwise else-form is selected.", "Expander params, body = forms.car, forms.cdr # Expands body recursively. body = Expander.expand(body,", "a block named block-name around the body forms \"\"\" def __new__(cls, *args, **kwargs):", "of Cons, it is quoted. return Cons(Symbol('QUOTE'), Cons(forms, Null())) if forms.car is Symbol('UNQUOTE'):", "Expander.expand(body, var_env, func_env, macro_env) # The body of labels has an implicit progn.", "function is specified by forms. Forms are executed in order. The value of", "body recursively. body = Expander.expand(body, var_env, func_env, macro_env) # The body of labels", "The body of a block has an implicit progn. forms = Cons(Symbol('BLOCK'), Cons(name,", "status=':EXTERNAL') assign_helper(symbol_name='LABELS', value=LabelsSystemMacro(), package_name='COMMON-LISP', env='MACRO', status=':EXTERNAL') assign_helper(symbol_name='LET', value=LetSystemMacro(), package_name='COMMON-LISP', env='MACRO', status=':EXTERNAL') assign_helper(symbol_name='LET*', value=LetAsterSystemMacro(),", "BackquoteSystemMacro. \"\"\" cls.__name__ = 'BACKQUOTE' return object.__new__(cls) def __call__(self, forms, var_env, func_env, macro_env):", "special operators assign_helper(symbol_name='BLOCK', value=BlockSystemMacro(), package_name='COMMON-LISP', env='MACRO', status=':EXTERNAL') assign_helper(symbol_name='FLET', value=FletSystemMacro(), package_name='COMMON-LISP', env='MACRO', status=':EXTERNAL') assign_helper(symbol_name='IF',", "or name, as appropriate. flet defines locally named functions and executes a series", "\"\"\"Behavior of QuoteSystemMacro. \"\"\" # Retruns itself. return Cons(Symbol('QUOTE'), forms) class LambdaSystemMacro(SystemMacro): \"\"\"Provides", "ANY KIND, either express or implied. # See the License for the specific", "name, as appropriate. labels is equivalent to flet except that the scope of", "Expands body, recursively. 
body = Expander.expand(body, var_env, func_env, macro_env) # The value of", "import Expander # If else_form is Null, then else_form is set to Null.", "status=':EXTERNAL') assign_helper(symbol_name='DEFMACRO', value=DefmacroSystemMacro(), package_name='COMMON-LISP', env='MACRO', status=':EXTERNAL') assign_helper(symbol_name='BACKQUOTE', value=BackquoteSystemMacro(), package_name='COMMON-LISP', env='MACRO', status=':EXTERNAL') # COMMON-LISP-USER", "\"\"\"defun implicitly puts a block named block-name around the body forms \"\"\" def", "__new__(cls, *args, **kwargs): \"\"\"Instantiates IfSystemMacro. \"\"\" cls.__name__ = 'IF' return object.__new__(cls) def __call__(self,", "WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See", "Cons(Symbol('QUOTE'), Cons(forms, Null())) if forms.car is Symbol('UNQUOTE'): # Unquote (,). return forms.cdr.car elif", "macro_env): \"\"\"Behavior of FletSystemMacro. \"\"\" from clispy.expander import Expander bindings, body = forms.car,", "macro_env): \"\"\"Behavior of LambdaSystemMacro. \"\"\" from clispy.expander import Expander params, body = forms.car,", "var_env, func_env, macro_env) # The body of a block has an implicit progn.", "{0} {{{1:X}}}>\".format(self.__class__.__name__, id(self)) # ============================================================================== # Defines system macro classes. # ============================================================================== class", "appropriate. labels is equivalent to flet except that the scope of the defined", "object.__new__(cls) def __call__(self, forms, var_env, func_env, macro_env): \"\"\"Behavior of DefmacroSystemMacro. \"\"\" from clispy.expander", "assign_helper(symbol_name='BLOCK', value=BlockSystemMacro(), package_name='COMMON-LISP', env='MACRO', status=':EXTERNAL') assign_helper(symbol_name='FLET', value=FletSystemMacro(), package_name='COMMON-LISP', env='MACRO', status=':EXTERNAL') assign_helper(symbol_name='IF', value=IfSystemMacro(), package_name='COMMON-LISP',", "forms.car, forms.cdr # Expands body recursively. body = Expander.expand(body, var_env, macro_env, macro_env) #", "DefmacroSystemMacro. \"\"\" from clispy.expander import Expander name, params, body = forms.car, forms.cdr.car, forms.cdr.cdr", "var_env, func_env, macro_env): \"\"\"Behavior of LablesSystemMacro. \"\"\" from clispy.expander import Expander bindings, body", "cls.__name__ = 'BACKQUOTE' return object.__new__(cls) def __call__(self, forms, var_env, func_env, macro_env): \"\"\"Behavior of", "official string representation. \"\"\" return \"#<SYSTEM-MACRO {0} {{{1:X}}}>\".format(self.__class__.__name__, id(self)) # ============================================================================== # Defines", "expand_hepler(cls, forms): \"\"\"Expand quotes recursively. \"\"\" if not isinstance(forms, Cons): # An argument", "= 'FLET' return object.__new__(cls) def __call__(self, forms, var_env, func_env, macro_env): \"\"\"Behavior of FletSystemMacro.", "some macros for defmacro, defun, and lambda etc. \"\"\" def __new__(cls, *args, **kwargs):", "implicit block. forms = Cons(Symbol('DEFUN'), Cons(name, Cons(params, Cons(Cons(Symbol('BLOCK'), Cons(name, body)), Null())))) return forms", "def __call__(self, forms, var_env, func_env, macro_env): \"\"\"Behavior of QuoteSystemMacro. 
\"\"\" # Retruns itself.", "body), Null()))) return forms class LetSystemMacro(SystemMacro): \"\"\"let and let* create new variable bindings", "forms class LetAsterSystemMacro(SystemMacro): \"\"\"let and let* create new variable bindings and execute a", "All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the", "performs the bindings in parallel and let* does them sequentially. \"\"\" def __new__(cls,", "Cons(bindings, Cons(Cons(Symbol('PROGN'), body), Null()))) return forms class LetSystemMacro(SystemMacro): \"\"\"let and let* create new", "of forms that use these bindings. let performs the bindings in parallel and", "bindings. Any number of such local functions can be defined. \"\"\" def __new__(cls,", "the result is true, then then-form is selected; otherwise else-form is selected. Whichever", "selected. Whichever form is selected is then evaluated. \"\"\" def __new__(cls, *args, **kwargs):", "import Expander name, body = forms.car, forms.cdr # Expands body recursively. body =", "var_env, func_env, macro_env): \"\"\"Behavior of DefmacroSystemMacro. \"\"\" from clispy.expander import Expander name, params,", "of LetSystemMacro. \"\"\" from clispy.expander import Expander bindings, body = forms.car, forms.cdr #", "var_env, func_env, macro_env): \"\"\"Behavior of FletSystemMacro. \"\"\" from clispy.expander import Expander bindings, body", "and # limitations under the License. # ============================================================================== from clispy.macro import Macro from", "# ============================================================================== # Set functions related on special operators # ============================================================================== # For", "macro_env): \"\"\"Behavior of LetAsterSystemMacro. \"\"\" from clispy.expander import Expander bindings, body = forms.car,", "return object.__new__(cls) def __call__(self, forms, var_env, func_env, macro_env): \"\"\"Behavior of LambdaSystemMacro. \"\"\" from", "under the License is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES", "form executed is returned as the expansion of the macro. The body forms", "func_env, macro_env) else_form = Expander.expand(else_form, var_env, func_env, macro_env) forms = Cons(Symbol('IF'), Cons(test_form, Cons(then_form,", "\"\"\"The official string representation. \"\"\" return \"#<SYSTEM-MACRO {0} {{{1:X}}}>\".format(self.__class__.__name__, id(self)) # ============================================================================== #", "expression. \"\"\" def __new__(cls, *args, **kwargs): \"\"\"Instantiates LambdaSystemMacro. \"\"\" cls.__name__ = 'LAMBDA' return", "\"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express", "def __new__(cls, *args, **kwargs): \"\"\"Instantiates LetAsterSytemMacro. \"\"\" cls.__name__ = 'LET*' return object.__new__(cls) def", "of a defmacro has an implicit block. forms = Cons(Symbol('DEFMACRO'), Cons(name, Cons(params, Cons(Cons(Symbol('BLOCK'),", "is selected. Whichever form is selected is then evaluated. \"\"\" def __new__(cls, *args,", "function special form involving a lambda expression. \"\"\" def __new__(cls, *args, **kwargs): \"\"\"Instantiates", "*args, **kwargs): \"\"\"Instantiates BackquoteSystemMacro. 
\"\"\" cls.__name__ = 'BACKQUOTE' return object.__new__(cls) def __call__(self, forms,", "env='MACRO', status=':EXTERNAL') assign_helper(symbol_name='FLET', value=FletSystemMacro(), package_name='COMMON-LISP', env='MACRO', status=':EXTERNAL') assign_helper(symbol_name='IF', value=IfSystemMacro(), package_name='COMMON-LISP', env='MACRO', status=':EXTERNAL') assign_helper(symbol_name='LABELS',", "puts a block named block-name around the body forms \"\"\" def __new__(cls, *args,", "environment in which the defmacro form appears. The expansion function accepts two arguments,", "applicable law or agreed to in writing, software # distributed under the License", "**kwargs): \"\"\"Instantiates FletSystemMacro. \"\"\" cls.__name__ = 'FLET' return object.__new__(cls) def __call__(self, forms, var_env,", "forms as an implicit progn. \"\"\" def __new__(cls, *args, **kwargs): \"\"\"Instantiates BlockSystemMacro. \"\"\"", "= 'BACKQUOTE' return object.__new__(cls) def __call__(self, forms, var_env, func_env, macro_env): \"\"\"Behavior of BackquoteSystemMacro.", "for defmacro, defun, and lambda etc. \"\"\" def __new__(cls, *args, **kwargs): \"\"\"Instantiates SystemMacro.", "func_env, macro_env): \"\"\"Behavior of LetAsterSystemMacro. \"\"\" from clispy.expander import Expander bindings, body =", "of the macro. The body forms of the expansion function (but not the", "CONDITIONS OF ANY KIND, either express or implied. # See the License for", "forms are executed in order of occurence. The body forms (but not the", "the lambda list) of each function created by flet and labels and each", "them sequentially. \"\"\" def __new__(cls, *args, **kwargs): \"\"\"Instantiates LetSystemMacro. \"\"\" cls.__name__ = 'LET'", "forms = Cons(Symbol('LAMBDA'), Cons(params, Cons(Cons(Symbol('PROGN'), body), Null()))) return forms class DefunSystemMacro(SystemMacro): \"\"\"defun implicitly", "def __call__(self, forms, var_env, func_env, macro_env): \"\"\"Behavior of DefunSystemMacro. \"\"\" from clispy.expander import", "appears. The expansion function accepts two arguments, a form and an environment. The", "last form executed is returned as the expansion of the macro. body =", "forms, var_env, func_env, macro_env): \"\"\"Behavior of LetAsterSystemMacro. \"\"\" from clispy.expander import Expander bindings,", "order. The value of the last form executed is returned as the expansion", "package_name='COMMON-LISP', env='MACRO', status=':EXTERNAL') assign_helper(symbol_name='QUOTE', value=QuoteSystemMacro(), package_name='COMMON-LISP', env='MACRO', status='EXTERNAL') # For system functions assign_helper(symbol_name='LAMBDA',", "For system functions assign_helper(symbol_name='LAMBDA', value=LambdaSystemMacro(), package_name='COMMON-LISP', env='MACRO', status=':EXTERNAL') assign_helper(symbol_name='DEFUN', value=DefunSystemMacro(), package_name='COMMON-LISP', env='MACRO', status=':EXTERNAL')", "classes. # ============================================================================== class BlockSystemMacro(SystemMacro): \"\"\"block establishes a block and then evaluates forms", "does them sequentially. \"\"\" def __new__(cls, *args, **kwargs): \"\"\"Instantiates LetSystemMacro. \"\"\" cls.__name__ =", "Macro from clispy.package import assign_helper, use_package_helper from clispy.type import Cons, Null, Symbol #", "def __call__(self, forms, var_env, func_env, macro_env): \"\"\"Behavior of BackquoteSystemMacro. 
\"\"\" return self.expand_hepler(forms.car) @classmethod", "writing, software # distributed under the License is distributed on an \"AS IS\"", "form. The body of the expansion function is specified by forms. Forms are", "Cons(name, body)), Null())))) return forms class BackquoteSystemMacro(SystemMacro): \"\"\"The backquote introduces a template of", "\"\"\"Behavior of BackquoteSystemMacro. \"\"\" return self.expand_hepler(forms.car) @classmethod def expand_hepler(cls, forms): \"\"\"Expand quotes recursively.", "forms = Cons(Symbol('IF'), Cons(test_form, Cons(then_form, Cons(else_form, Null())))) return forms class LabelsSystemMacro(SystemMacro): \"\"\"flet, labels,", "has an implicit progn. forms = Cons(Symbol('BLOCK'), Cons(name, Cons(Cons(Symbol('PROGN'), body), Null()))) return forms", "env='MACRO', status=':EXTERNAL') assign_helper(symbol_name='DEFUN', value=DefunSystemMacro(), package_name='COMMON-LISP', env='MACRO', status=':EXTERNAL') assign_helper(symbol_name='DEFMACRO', value=DefmacroSystemMacro(), package_name='COMMON-LISP', env='MACRO', status=':EXTERNAL') assign_helper(symbol_name='BACKQUOTE',", "forms, var_env, func_env, macro_env): \"\"\"Behavior of BlockSystemMacro. \"\"\" from clispy.expander import Expander name,", "compliance with the License. # You may obtain a copy of the License", "Cons(Cons(Symbol('PROGN'), body), Null()) # The body of a defun has an implicit block.", "expansion function returns a form. The body of the expansion function is specified", "a form. The body of the expansion function is specified by forms. Forms", "Cons(Symbol('CONS'), Cons(cls.expand_hepler(forms.car), Cons(cls.expand_hepler(forms.cdr), Null()))) # ============================================================================== # Set functions related on special operators", "governing permissions and # limitations under the License. # ============================================================================== from clispy.macro import", "be defined. \"\"\" def __new__(cls, *args, **kwargs): \"\"\"Instantiates FletSystemMacro. \"\"\" cls.__name__ = 'FLET'", "forms class DefunSystemMacro(SystemMacro): \"\"\"defun implicitly puts a block named block-name around the body", "macro. The body forms of the expansion function (but not the lambda-list) are", "which the defmacro form appears. The expansion function accepts two arguments, a form", "an implicit progn. forms = Cons(Symbol('FLET'), Cons(bindings, Cons(Cons(Symbol('PROGN'), body), Null()))) return forms class", "<filename>clispy/macro/system_macro.py # Copyright 2019 <NAME>. All Rights Reserved. # # Licensed under the", "return object.__new__(cls) def __call__(self, forms, var_env, func_env, macro_env): \"\"\"Behavior of LetAsterSystemMacro. \"\"\" from", "assign_helper(symbol_name='IF', value=IfSystemMacro(), package_name='COMMON-LISP', env='MACRO', status=':EXTERNAL') assign_helper(symbol_name='LABELS', value=LabelsSystemMacro(), package_name='COMMON-LISP', env='MACRO', status=':EXTERNAL') assign_helper(symbol_name='LET', value=LetSystemMacro(), package_name='COMMON-LISP',", "status=':EXTERNAL') assign_helper(symbol_name='IF', value=IfSystemMacro(), package_name='COMMON-LISP', env='MACRO', status=':EXTERNAL') assign_helper(symbol_name='LABELS', value=LabelsSystemMacro(), package_name='COMMON-LISP', env='MACRO', status=':EXTERNAL') assign_helper(symbol_name='LET', value=LetSystemMacro(),", "\"\"\" from clispy.expander import Expander name, body = forms.car, forms.cdr # Expands body", "of flet has an implicit progn. 
forms = Cons(Symbol('FLET'), Cons(bindings, Cons(Cons(Symbol('PROGN'), body), Null())))", "**kwargs): cls.__name__ = 'QUOTE' return object.__new__(cls) def __call__(self, forms, var_env, func_env, macro_env): \"\"\"Behavior", "Defines base classes. # # SystemMacro # ============================================================================== class SystemMacro(Macro): \"\"\"SystemMacro provide some", "import Cons, Null, Symbol # ============================================================================== # Defines base classes. # # SystemMacro", "of the function-name or name, as appropriate. flet defines locally named functions and", "defined in the same lexical environment in which the defmacro form appears. The", "func_env, macro_env): \"\"\"Behavior of BackquoteSystemMacro. \"\"\" return self.expand_hepler(forms.car) @classmethod def expand_hepler(cls, forms): \"\"\"Expand", "base classes. # # SystemMacro # ============================================================================== class SystemMacro(Macro): \"\"\"SystemMacro provide some macros", "func_env, macro_env) # The body of let* has an implicit progn forms =", "*args, **kwargs): cls.__name__ = 'QUOTE' return object.__new__(cls) def __call__(self, forms, var_env, func_env, macro_env):", "of a form to be dependent on a single test-form. First test-form is", "forms, var_env, func_env, macro_env): \"\"\"Behavior of IfSystemMacro. \"\"\" from clispy.expander import Expander #", "forms = Cons(Symbol('LET'), Cons(bindings, Cons(Cons(Symbol('PROGN'), body), Null()))) return forms class LetAsterSystemMacro(SystemMacro): \"\"\"let and", "functions can be defined. \"\"\" def __new__(cls, *args, **kwargs): \"\"\"Instantiates FletSystemMacro. \"\"\" cls.__name__", "var_env, func_env, macro_env) then_form = Expander.expand(then_form, var_env, func_env, macro_env) else_form = Expander.expand(else_form, var_env,", "func_env, macro_env) # The body of labels has an implicit progn. forms =", "(the \"License\"); # you may not use this file except in compliance with", "\"\"\"let and let* create new variable bindings and execute a series of forms", "env='MACRO', status=':EXTERNAL') assign_helper(symbol_name='QUOTE', value=QuoteSystemMacro(), package_name='COMMON-LISP', env='MACRO', status='EXTERNAL') # For system functions assign_helper(symbol_name='LAMBDA', value=LambdaSystemMacro(),", "__call__(self, forms, var_env, func_env, macro_env): \"\"\"Behavior of BackquoteSystemMacro. \"\"\" return self.expand_hepler(forms.car) @classmethod def", "**kwargs): \"\"\"Instantiates DefunSystemMacro. \"\"\" cls.__name__ = 'DEFUN' return object.__new__(cls) def __call__(self, forms, var_env,", "# Unless required by applicable law or agreed to in writing, software #", "by applicable law or agreed to in writing, software # distributed under the", "locally named functions and executes a series of forms with these definition bindings.", "Cons(forms.car.cdr.car, Cons(cls.expand_hepler(forms.cdr), Null()))) else: # Expands recursively and returns cons. return Cons(Symbol('CONS'), Cons(cls.expand_hepler(forms.car),", "var_env, func_env, macro_env) forms = Cons(Symbol('IF'), Cons(test_form, Cons(then_form, Cons(else_form, Null())))) return forms class", "as well as the body. \"\"\" def __new__(cls, *args, **kwargs): \"\"\"Instantiates LabelsSystemMacro. \"\"\"", "var_env, func_env, macro_env) # The body of labels has an implicit progn. 
forms", "let* has an implicit progn forms = Cons(Symbol('LET*'), Cons(bindings, Cons(Cons(Symbol('PROGN'), body), Null()))) return", "= Expander.expand(body, var_env, func_env, macro_env) # The body of a defun has an", "function returns a form. The body of the expansion function is specified by", "file except in compliance with the License. # You may obtain a copy", "\"\"\" cls.__name__ = 'LABELS' return object.__new__(cls) def __call__(self, forms, var_env, func_env, macro_env): \"\"\"Behavior", "of the last form executed is returned as the expansion of the macro.", "is the function block name of the function-name or name, as appropriate. labels", "single test-form. First test-form is evaluated. If the result is true, then then-form", "\"\"\" def __new__(cls, *args, **kwargs): \"\"\"Instantiates DefmacroSystemMacro. \"\"\" cls.__name__ = 'DEFMACRO' return object.__new__(cls)", "The macro function is defined in the same lexical environment in which the", "and lambda etc. \"\"\" def __new__(cls, *args, **kwargs): \"\"\"Instantiates SystemMacro. \"\"\" cls.__name__ =", "operator just returns object. \"\"\" def __new__(cls, *args, **kwargs): cls.__name__ = 'QUOTE' return", "status=':EXTERNAL') assign_helper(symbol_name='DEFUN', value=DefunSystemMacro(), package_name='COMMON-LISP', env='MACRO', status=':EXTERNAL') assign_helper(symbol_name='DEFMACRO', value=DefmacroSystemMacro(), package_name='COMMON-LISP', env='MACRO', status=':EXTERNAL') assign_helper(symbol_name='BACKQUOTE', value=BackquoteSystemMacro(),", "= Expander.expand(body, var_env, macro_env, macro_env) # The body of flet has an implicit", "object.__new__(cls) def __call__(self, forms, var_env, func_env, macro_env): \"\"\"Behavior of FletSystemMacro. \"\"\" from clispy.expander", "progn. \"\"\" def __new__(cls, *args, **kwargs): \"\"\"Instantiates BlockSystemMacro. \"\"\" cls.__name__ = 'BLOCK' return", "macro_env) # The body of flet has an implicit progn. forms = Cons(Symbol('FLET'),", "bindings in parallel and let* does them sequentially. \"\"\" def __new__(cls, *args, **kwargs):", "selected is then evaluated. \"\"\" def __new__(cls, *args, **kwargs): \"\"\"Instantiates IfSystemMacro. \"\"\" cls.__name__", "definition bindings. Any number of such local functions can be defined. \"\"\" def", "block whose name is the function block name of the function-name or name,", "forms = Cons(Symbol('BLOCK'), Cons(name, Cons(Cons(Symbol('PROGN'), body), Null()))) return forms class FletSystemMacro(SystemMacro): \"\"\"flet, labels,", "**kwargs): \"\"\"Instantiates LetAsterSytemMacro. \"\"\" cls.__name__ = 'LET*' return object.__new__(cls) def __call__(self, forms, var_env,", "\"\"\"block establishes a block and then evaluates forms as an implicit progn. \"\"\"", "\"\"\" return \"#<SYSTEM-MACRO {0} {{{1:X}}}>\".format(self.__class__.__name__, id(self)) # ============================================================================== # Defines system macro classes.", "cls.__name__ = 'LABELS' return object.__new__(cls) def __call__(self, forms, var_env, func_env, macro_env): \"\"\"Behavior of", "= Cons(Symbol('IF'), Cons(test_form, Cons(then_form, Cons(else_form, Null())))) return forms class LabelsSystemMacro(SystemMacro): \"\"\"flet, labels, and", "DefunSystemMacro. \"\"\" cls.__name__ = 'DEFUN' return object.__new__(cls) def __call__(self, forms, var_env, func_env, macro_env):", "OR CONDITIONS OF ANY KIND, either express or implied. 
# See the License", "may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # #", "in order. The value of the last form executed is returned as the", "is defined in the same lexical environment in which the defmacro form appears.", "as an implicit progn. \"\"\" def __new__(cls, *args, **kwargs): \"\"\"Instantiates BlockSystemMacro. \"\"\" cls.__name__", "as the expansion of the macro. The body forms of the expansion function", "value=QuoteSystemMacro(), package_name='COMMON-LISP', env='MACRO', status='EXTERNAL') # For system functions assign_helper(symbol_name='LAMBDA', value=LambdaSystemMacro(), package_name='COMMON-LISP', env='MACRO', status=':EXTERNAL')", "for a function special form involving a lambda expression. \"\"\" def __new__(cls, *args,", "# ============================================================================== class SystemMacro(Macro): \"\"\"SystemMacro provide some macros for defmacro, defun, and lambda", "be built. \"\"\" def __new__(cls, *args, **kwargs): \"\"\"Instantiates BackquoteSystemMacro. \"\"\" cls.__name__ = 'BACKQUOTE'", "package_name='COMMON-LISP', env='MACRO', status=':EXTERNAL') assign_helper(symbol_name='DEFUN', value=DefunSystemMacro(), package_name='COMMON-LISP', env='MACRO', status=':EXTERNAL') assign_helper(symbol_name='DEFMACRO', value=DefmacroSystemMacro(), package_name='COMMON-LISP', env='MACRO', status=':EXTERNAL')", "local functions and macros, and execute forms using the local definitions. forms are", "and each macro created by macrolet are enclosed in an implicit block whose", "return object.__new__(cls) def __call__(self, forms, var_env, func_env, macro_env): \"\"\"Behavior of DefunSystemMacro. \"\"\" from", "Expander bindings, body = forms.car, forms.cdr # Expands body recursively. body = Expander.expand(body,", "forms, var_env, func_env, macro_env): \"\"\"Behavior of DefunSystemMacro. \"\"\" from clispy.expander import Expander name,", "status=':EXTERNAL') assign_helper(symbol_name='LET*', value=LetAsterSystemMacro(), package_name='COMMON-LISP', env='MACRO', status=':EXTERNAL') assign_helper(symbol_name='QUOTE', value=QuoteSystemMacro(), package_name='COMMON-LISP', env='MACRO', status='EXTERNAL') # For", "an implicit progn. forms = Cons(Symbol('BLOCK'), Cons(name, Cons(Cons(Symbol('PROGN'), body), Null()))) return forms class", "def __new__(cls, *args, **kwargs): \"\"\"Instantiates LabelsSystemMacro. \"\"\" cls.__name__ = 'LABELS' return object.__new__(cls) def", "return forms class LetSystemMacro(SystemMacro): \"\"\"let and let* create new variable bindings and execute", "body of let* has an implicit progn forms = Cons(Symbol('LET*'), Cons(bindings, Cons(Cons(Symbol('PROGN'), body),", "block. forms = Cons(Symbol('DEFMACRO'), Cons(name, Cons(params, Cons(Cons(Symbol('BLOCK'), Cons(name, body)), Null())))) return forms class", "to flet except that the scope of the defined function names for labels", "a block whose name is name. \"\"\" def __new__(cls, *args, **kwargs): \"\"\"Instantiates DefmacroSystemMacro.", "body of the expansion function is specified by forms. Forms are executed in", "__call__(self, forms, var_env, func_env, macro_env): \"\"\"Behavior of DefmacroSystemMacro. \"\"\" from clispy.expander import Expander", "then then-form is selected; otherwise else-form is selected. 
Whichever form is selected is", "macrolet define local functions and macros, and execute forms using the local definitions.", "forms = Cons(Symbol('DEFUN'), Cons(name, Cons(params, Cons(Cons(Symbol('BLOCK'), Cons(name, body)), Null())))) return forms class DefmacroSystemMacro(SystemMacro):", "\"\"\" from clispy.expander import Expander params, body = forms.car, forms.cdr # Expands body", "executed is returned as the expansion of the macro. body = Cons(Cons(Symbol('PROGN'), body),", "# For special operators assign_helper(symbol_name='BLOCK', value=BlockSystemMacro(), package_name='COMMON-LISP', env='MACRO', status=':EXTERNAL') assign_helper(symbol_name='FLET', value=FletSystemMacro(), package_name='COMMON-LISP', env='MACRO',", "executed is returned as the expansion of the macro. The body forms of", "The expansion function accepts two arguments, a form and an environment. The expansion", "the defined function names for labels encompasses the function definitions themselves as well", "__new__(cls, *args, **kwargs): cls.__name__ = 'QUOTE' return object.__new__(cls) def __call__(self, forms, var_env, func_env,", "# The body of a defun has an implicit progn. body = Cons(Cons(Symbol('PROGN'),", "body = Expander.expand(body, var_env, func_env, macro_env) # The body of labels has an", "'DEFUN' return object.__new__(cls) def __call__(self, forms, var_env, func_env, macro_env): \"\"\"Behavior of DefunSystemMacro. \"\"\"", "body recursively. body = Expander.expand(body, var_env, func_env, macro_env) # The body of let*", "\"\"\" def __new__(cls, *args, **kwargs): \"\"\"Instantiates DefunSystemMacro. \"\"\" cls.__name__ = 'DEFUN' return object.__new__(cls)", "# The body of a lambda has an implicit progn. forms = Cons(Symbol('LAMBDA'),", "\"\"\" def __new__(cls, *args, **kwargs): \"\"\"Instantiates LetSystemMacro. \"\"\" cls.__name__ = 'LET' return object.__new__(cls)", "with these definition bindings. Any number of such local functions can be defined.", "recursively. body = Expander.expand(body, var_env, func_env, macro_env) # The body of a lambda", "Cons(bindings, Cons(Cons(Symbol('PROGN'), body), Null()))) return forms class IfSystemMacro(SystemMacro): \"\"\"if allows the execution of", "= 'QUOTE' return object.__new__(cls) def __call__(self, forms, var_env, func_env, macro_env): \"\"\"Behavior of QuoteSystemMacro.", "if not isinstance(forms, Cons): # An argument is not an instance of Cons,", "associating a macro function with that name in the global environment. The macro", "by forms. Forms are executed in order. The value of the last form", "Licensed under the Apache License, Version 2.0 (the \"License\"); # you may not", "a form to be dependent on a single test-form. First test-form is evaluated.", "body of a defun has an implicit block. forms = Cons(Symbol('DEFUN'), Cons(name, Cons(params,", "as appropriate. labels is equivalent to flet except that the scope of the", "an implicit block whose name is the function block name of the function-name", "\"\"\"Behavior of BlockSystemMacro. \"\"\" from clispy.expander import Expander name, body = forms.car, forms.cdr", "of forms with these definition bindings. Any number of such local functions can", "built. \"\"\" def __new__(cls, *args, **kwargs): \"\"\"Instantiates BackquoteSystemMacro. \"\"\" cls.__name__ = 'BACKQUOTE' return", "is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF", "of LambdaSystemMacro. \"\"\" from clispy.expander import Expander params, body = forms.car, forms.cdr #", "object. 
\"\"\" def __new__(cls, *args, **kwargs): cls.__name__ = 'QUOTE' return object.__new__(cls) def __call__(self,", "var_env, func_env, macro_env): \"\"\"Behavior of LetAsterSystemMacro. \"\"\" from clispy.expander import Expander bindings, body", "object.__new__(cls) def __call__(self, forms, var_env, func_env, macro_env): \"\"\"Behavior of QuoteSystemMacro. \"\"\" # Retruns", "\"\"\" cls.__name__ = 'LET*' return object.__new__(cls) def __call__(self, forms, var_env, func_env, macro_env): \"\"\"Behavior", "returned as the expansion of the macro. The body forms of the expansion", "__new__(cls, *args, **kwargs): \"\"\"Instantiates BackquoteSystemMacro. \"\"\" cls.__name__ = 'BACKQUOTE' return object.__new__(cls) def __call__(self,", "Copyright 2019 <NAME>. All Rights Reserved. # # Licensed under the Apache License,", "'DEFMACRO' return object.__new__(cls) def __call__(self, forms, var_env, func_env, macro_env): \"\"\"Behavior of DefmacroSystemMacro. \"\"\"", "LambdaSystemMacro. \"\"\" from clispy.expander import Expander params, body = forms.car, forms.cdr # Expands", "then_form = Expander.expand(then_form, var_env, func_env, macro_env) else_form = Expander.expand(else_form, var_env, func_env, macro_env) forms", "Any number of such local functions can be defined. \"\"\" def __new__(cls, *args,", "implicitly puts a block named block-name around the body forms \"\"\" def __new__(cls,", "cls.__name__ = 'BLOCK' return object.__new__(cls) def __call__(self, forms, var_env, func_env, macro_env): \"\"\"Behavior of", "__call__(self, forms, var_env, func_env, macro_env): \"\"\"Behavior of LetSystemMacro. \"\"\" from clispy.expander import Expander", "\"\"\" def __new__(cls, *args, **kwargs): \"\"\"Instantiates LetAsterSytemMacro. \"\"\" cls.__name__ = 'LET*' return object.__new__(cls)", "of QuoteSystemMacro. \"\"\" # Retruns itself. return Cons(Symbol('QUOTE'), forms) class LambdaSystemMacro(SystemMacro): \"\"\"Provides a", "macro created by macrolet are enclosed in an implicit block whose name is", "of LablesSystemMacro. \"\"\" from clispy.expander import Expander bindings, body = forms.car, forms.cdr #", "forms, var_env, func_env, macro_env): \"\"\"Behavior of LambdaSystemMacro. \"\"\" from clispy.expander import Expander params,", "BlockSystemMacro. \"\"\" from clispy.expander import Expander name, body = forms.car, forms.cdr # Expands", "env='MACRO', status=':EXTERNAL') assign_helper(symbol_name='LET*', value=LetAsterSystemMacro(), package_name='COMMON-LISP', env='MACRO', status=':EXTERNAL') assign_helper(symbol_name='QUOTE', value=QuoteSystemMacro(), package_name='COMMON-LISP', env='MACRO', status='EXTERNAL') #", "by flet and labels and each macro created by macrolet are enclosed in", "define local functions and macros, and execute forms using the local definitions. 
forms", "package_name='COMMON-LISP', env='MACRO', status=':EXTERNAL') assign_helper(symbol_name='FLET', value=FletSystemMacro(), package_name='COMMON-LISP', env='MACRO', status=':EXTERNAL') assign_helper(symbol_name='IF', value=IfSystemMacro(), package_name='COMMON-LISP', env='MACRO', status=':EXTERNAL')", "status=':EXTERNAL') assign_helper(symbol_name='FLET', value=FletSystemMacro(), package_name='COMMON-LISP', env='MACRO', status=':EXTERNAL') assign_helper(symbol_name='IF', value=IfSystemMacro(), package_name='COMMON-LISP', env='MACRO', status=':EXTERNAL') assign_helper(symbol_name='LABELS', value=LabelsSystemMacro(),", "'LET' return object.__new__(cls) def __call__(self, forms, var_env, func_env, macro_env): \"\"\"Behavior of LetSystemMacro. \"\"\"", "from clispy.expander import Expander bindings, body = forms.car, forms.cdr # Expands body recursively.", "If else_form is Null, then else_form is set to Null. test_form, then_form, else_form", "from clispy.package import assign_helper, use_package_helper from clispy.type import Cons, Null, Symbol # ==============================================================================", "the License for the specific language governing permissions and # limitations under the", "defmacro form appears. The expansion function accepts two arguments, a form and an", "are executed in order. The value of the last form executed is returned", "defun has an implicit progn. body = Cons(Cons(Symbol('PROGN'), body), Null()) # The body", "bindings. let performs the bindings in parallel and let* does them sequentially. \"\"\"", "Cons, it is quoted. return Cons(Symbol('QUOTE'), Cons(forms, Null())) if forms.car is Symbol('UNQUOTE'): #", "lambda etc. \"\"\" def __new__(cls, *args, **kwargs): \"\"\"Instantiates SystemMacro. \"\"\" cls.__name__ = 'SYSTEM-MACRO'", "\"\"\" cls.__name__ = 'BLOCK' return object.__new__(cls) def __call__(self, forms, var_env, func_env, macro_env): \"\"\"Behavior", "test_form = Expander.expand(test_form, var_env, func_env, macro_env) then_form = Expander.expand(then_form, var_env, func_env, macro_env) else_form", "\"\"\"Behavior of FletSystemMacro. \"\"\" from clispy.expander import Expander bindings, body = forms.car, forms.cdr", "an implicit progn. body = Cons(Cons(Symbol('PROGN'), body), Null()) # The body of a", "Cons(name, Cons(params, Cons(Cons(Symbol('BLOCK'), Cons(name, body)), Null())))) return forms class DefmacroSystemMacro(SystemMacro): \"\"\"Defines name as", "Cons(Cons(Symbol('PROGN'), body), Null()))) return forms class QuoteSystemMacro(SystemMacro): \"\"\"The quote special operator just returns", "the function block name of the function-name or name, as appropriate. labels is", "var_env, func_env, macro_env): \"\"\"Behavior of LambdaSystemMacro. \"\"\" from clispy.expander import Expander params, body", "else: # Expands recursively and returns cons. return Cons(Symbol('CONS'), Cons(cls.expand_hepler(forms.car), Cons(cls.expand_hepler(forms.cdr), Null()))) #", "return object.__new__(cls) def __call__(self, forms, var_env, func_env, macro_env): \"\"\"Behavior of LetSystemMacro. \"\"\" from", "implicitly enclosed in a block whose name is name. \"\"\" def __new__(cls, *args,", "# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #", "an implicit progn forms = Cons(Symbol('LET*'), Cons(bindings, Cons(Cons(Symbol('PROGN'), body), Null()))) return forms class", "QuoteSystemMacro. \"\"\" # Retruns itself. 
return Cons(Symbol('QUOTE'), forms) class LambdaSystemMacro(SystemMacro): \"\"\"Provides a shorthand", "function with that name in the global environment. The macro function is defined", "is Symbol('UNQUOTE-SPLICING'): # Unquote-splicing (,@). return Cons(Symbol('APPEND'), Cons(forms.car.cdr.car, Cons(cls.expand_hepler(forms.cdr), Null()))) else: # Expands", "IfSystemMacro(SystemMacro): \"\"\"if allows the execution of a form to be dependent on a", "be dependent on a single test-form. First test-form is evaluated. If the result", "forms = Cons(Symbol('FLET'), Cons(bindings, Cons(Cons(Symbol('PROGN'), body), Null()))) return forms class IfSystemMacro(SystemMacro): \"\"\"if allows", "forms, var_env, func_env, macro_env): \"\"\"Behavior of BackquoteSystemMacro. \"\"\" return self.expand_hepler(forms.car) @classmethod def expand_hepler(cls,", "body = forms.car, forms.cdr # Expands body recursively. body = Expander.expand(body, var_env, func_env,", "of a block has an implicit progn. forms = Cons(Symbol('BLOCK'), Cons(name, Cons(Cons(Symbol('PROGN'), body),", "class IfSystemMacro(SystemMacro): \"\"\"if allows the execution of a form to be dependent on", "not isinstance(forms, Cons): # An argument is not an instance of Cons, it", "# ============================================================================== # For special operators assign_helper(symbol_name='BLOCK', value=BlockSystemMacro(), package_name='COMMON-LISP', env='MACRO', status=':EXTERNAL') assign_helper(symbol_name='FLET', value=FletSystemMacro(),", "classes. # # SystemMacro # ============================================================================== class SystemMacro(Macro): \"\"\"SystemMacro provide some macros for", "= forms.car, forms.cdr # Expands body recursively. body = Expander.expand(body, var_env, func_env, macro_env)", "series of forms with these definition bindings. Any number of such local functions", "= Cons(Symbol('LET*'), Cons(bindings, Cons(Cons(Symbol('PROGN'), body), Null()))) return forms class QuoteSystemMacro(SystemMacro): \"\"\"The quote special", "# SystemMacro # ============================================================================== class SystemMacro(Macro): \"\"\"SystemMacro provide some macros for defmacro, defun,", "Version 2.0 (the \"License\"); # you may not use this file except in", "return object.__new__(cls) def __call__(self, forms, var_env, func_env, macro_env): \"\"\"Behavior of BackquoteSystemMacro. \"\"\" return", "clispy.expander import Expander name, body = forms.car, forms.cdr # Expands body recursively. body", "labels encompasses the function definitions themselves as well as the body. \"\"\" def", "equivalent to flet except that the scope of the defined function names for", "macro_env) # The body of a block has an implicit progn. forms =", "function block name of the function-name or name, as appropriate. labels is equivalent", "are executed in order of occurence. The body forms (but not the lambda", "the expansion of the macro. The body forms of the expansion function (but", "forms.car, forms.cdr # Expands body recursively. body = Expander.expand(body, var_env, func_env, macro_env) #", "and returns cons. 
return Cons(Symbol('CONS'), Cons(cls.expand_hepler(forms.car), Cons(cls.expand_hepler(forms.cdr), Null()))) # ============================================================================== # Set functions", "Expander.expand(body, var_env, func_env, macro_env) # The body of let* has an implicit progn", "flet and labels and each macro created by macrolet are enclosed in an", "function names for labels encompasses the function definitions themselves as well as the", "'QUOTE' return object.__new__(cls) def __call__(self, forms, var_env, func_env, macro_env): \"\"\"Behavior of QuoteSystemMacro. \"\"\"", "= Expander.expand(body, var_env, func_env, macro_env) # The body of a lambda has an", "macro_env): \"\"\"Behavior of DefunSystemMacro. \"\"\" from clispy.expander import Expander name, params, body =", "\"\"\"Instantiates LetAsterSytemMacro. \"\"\" cls.__name__ = 'LET*' return object.__new__(cls) def __call__(self, forms, var_env, func_env,", "etc. \"\"\" def __new__(cls, *args, **kwargs): \"\"\"Instantiates SystemMacro. \"\"\" cls.__name__ = 'SYSTEM-MACRO' return", "func_env, macro_env) then_form = Expander.expand(then_form, var_env, func_env, macro_env) else_form = Expander.expand(else_form, var_env, func_env,", "The body of a defun has an implicit block. forms = Cons(Symbol('DEFUN'), Cons(name,", "'LAMBDA' return object.__new__(cls) def __call__(self, forms, var_env, func_env, macro_env): \"\"\"Behavior of LambdaSystemMacro. \"\"\"", "\"\"\"The quote special operator just returns object. \"\"\" def __new__(cls, *args, **kwargs): cls.__name__", "of DefmacroSystemMacro. \"\"\" from clispy.expander import Expander name, params, body = forms.car, forms.cdr.car,", "# Expands body recursively. test_form = Expander.expand(test_form, var_env, func_env, macro_env) then_form = Expander.expand(then_form,", "macros, and execute forms using the local definitions. forms are executed in order", "= Expander.expand(body, var_env, func_env, macro_env) # The body of labels has an implicit", "function created by flet and labels and each macro created by macrolet are", "= Cons(Symbol('FLET'), Cons(bindings, Cons(Cons(Symbol('PROGN'), body), Null()))) return forms class IfSystemMacro(SystemMacro): \"\"\"if allows the", "\"\"\"Instantiates LambdaSystemMacro. \"\"\" cls.__name__ = 'LAMBDA' return object.__new__(cls) def __call__(self, forms, var_env, func_env,", "# If else_form is Null, then else_form is set to Null. test_form, then_form,", "Null, then else_form is set to Null. test_form, then_form, else_form = forms.car, forms.cdr.car,", "block named block-name around the body forms \"\"\" def __new__(cls, *args, **kwargs): \"\"\"Instantiates", "macro function is defined in the same lexical environment in which the defmacro", "\"\"\" def __new__(cls, *args, **kwargs): \"\"\"Instantiates BackquoteSystemMacro. \"\"\" cls.__name__ = 'BACKQUOTE' return object.__new__(cls)", "= Cons(Symbol('LABELS'), Cons(bindings, Cons(Cons(Symbol('PROGN'), body), Null()))) return forms class LetSystemMacro(SystemMacro): \"\"\"let and let*", "block has an implicit progn. forms = Cons(Symbol('BLOCK'), Cons(name, Cons(Cons(Symbol('PROGN'), body), Null()))) return", "Cons(params, Cons(Cons(Symbol('BLOCK'), Cons(name, body)), Null())))) return forms class BackquoteSystemMacro(SystemMacro): \"\"\"The backquote introduces a", "*args, **kwargs): \"\"\"Instantiates IfSystemMacro. \"\"\" cls.__name__ = 'IF' return object.__new__(cls) def __call__(self, forms,", "LablesSystemMacro. 
\"\"\" from clispy.expander import Expander bindings, body = forms.car, forms.cdr # Expands", "2019 <NAME>. All Rights Reserved. # # Licensed under the Apache License, Version", "import Macro from clispy.package import assign_helper, use_package_helper from clispy.type import Cons, Null, Symbol", "macro_env): \"\"\"Behavior of BackquoteSystemMacro. \"\"\" return self.expand_hepler(forms.car) @classmethod def expand_hepler(cls, forms): \"\"\"Expand quotes", "the lambda-list) are implicitly enclosed in a block whose name is name. \"\"\"", "implicit progn. body = Cons(Cons(Symbol('PROGN'), body), Null()) # The body of a defun", "class LetAsterSystemMacro(SystemMacro): \"\"\"let and let* create new variable bindings and execute a series", "OF ANY KIND, either express or implied. # See the License for the", "named functions and executes a series of forms with these definition bindings. Any", "body forms of the expansion function (but not the lambda-list) are implicitly enclosed", "__new__(cls, *args, **kwargs): \"\"\"Instantiates DefunSystemMacro. \"\"\" cls.__name__ = 'DEFUN' return object.__new__(cls) def __call__(self,", "**kwargs): \"\"\"Instantiates SystemMacro. \"\"\" cls.__name__ = 'SYSTEM-MACRO' return object.__new__(cls) def __repr__(self): \"\"\"The official", "# ============================================================================== # Defines base classes. # # SystemMacro # ============================================================================== class SystemMacro(Macro):", "DefunSystemMacro. \"\"\" from clispy.expander import Expander name, params, body = forms.car, forms.cdr.car, forms.cdr.cdr", "# Expands body recursively. body = Expander.expand(body, var_env, macro_env, macro_env) # The body", "in the same lexical environment in which the defmacro form appears. The expansion", "for labels encompasses the function definitions themselves as well as the body. \"\"\"", "form appears. The expansion function accepts two arguments, a form and an environment.", "of a lambda has an implicit progn. forms = Cons(Symbol('LAMBDA'), Cons(params, Cons(Cons(Symbol('PROGN'), body),", "Cons(Symbol('LET'), Cons(bindings, Cons(Cons(Symbol('PROGN'), body), Null()))) return forms class LetAsterSystemMacro(SystemMacro): \"\"\"let and let* create", "'FLET' return object.__new__(cls) def __call__(self, forms, var_env, func_env, macro_env): \"\"\"Behavior of FletSystemMacro. \"\"\"", "FletSystemMacro. \"\"\" from clispy.expander import Expander bindings, body = forms.car, forms.cdr # Expands", "func_env, macro_env): \"\"\"Behavior of FletSystemMacro. \"\"\" from clispy.expander import Expander bindings, body =", "assign_helper(symbol_name='LABELS', value=LabelsSystemMacro(), package_name='COMMON-LISP', env='MACRO', status=':EXTERNAL') assign_helper(symbol_name='LET', value=LetSystemMacro(), package_name='COMMON-LISP', env='MACRO', status=':EXTERNAL') assign_helper(symbol_name='LET*', value=LetAsterSystemMacro(), package_name='COMMON-LISP',", "def __new__(cls, *args, **kwargs): \"\"\"Instantiates SystemMacro. \"\"\" cls.__name__ = 'SYSTEM-MACRO' return object.__new__(cls) def", "FletSystemMacro(SystemMacro): \"\"\"flet, labels, and macrolet define local functions and macros, and execute forms", "expansion of the macro. body = Cons(Cons(Symbol('PROGN'), body), Null()) # The body of", "function definitions themselves as well as the body. 
\"\"\" def __new__(cls, *args, **kwargs):", "env='MACRO', status='EXTERNAL') # For system functions assign_helper(symbol_name='LAMBDA', value=LambdaSystemMacro(), package_name='COMMON-LISP', env='MACRO', status=':EXTERNAL') assign_helper(symbol_name='DEFUN', value=DefunSystemMacro(),", "*args, **kwargs): \"\"\"Instantiates BlockSystemMacro. \"\"\" cls.__name__ = 'BLOCK' return object.__new__(cls) def __call__(self, forms,", "a macro by associating a macro function with that name in the global", "body)), Null())))) return forms class BackquoteSystemMacro(SystemMacro): \"\"\"The backquote introduces a template of a", "__call__(self, forms, var_env, func_env, macro_env): \"\"\"Behavior of DefunSystemMacro. \"\"\" from clispy.expander import Expander", "can be defined. \"\"\" def __new__(cls, *args, **kwargs): \"\"\"Instantiates FletSystemMacro. \"\"\" cls.__name__ =", "def __repr__(self): \"\"\"The official string representation. \"\"\" return \"#<SYSTEM-MACRO {0} {{{1:X}}}>\".format(self.__class__.__name__, id(self)) #", "that the scope of the defined function names for labels encompasses the function", "var_env, func_env, macro_env) # The body of let has an implicit progn. forms", "around the body forms \"\"\" def __new__(cls, *args, **kwargs): \"\"\"Instantiates DefunSystemMacro. \"\"\" cls.__name__", "def expand_hepler(cls, forms): \"\"\"Expand quotes recursively. \"\"\" if not isinstance(forms, Cons): # An", "**kwargs): \"\"\"Instantiates BackquoteSystemMacro. \"\"\" cls.__name__ = 'BACKQUOTE' return object.__new__(cls) def __call__(self, forms, var_env,", "= 'IF' return object.__new__(cls) def __call__(self, forms, var_env, func_env, macro_env): \"\"\"Behavior of IfSystemMacro.", "occurence. The body forms (but not the lambda list) of each function created", "# ============================================================================== # Defines system macro classes. # ============================================================================== class BlockSystemMacro(SystemMacro): \"\"\"block establishes", "forms using the local definitions. forms are executed in order of occurence. The", "forms.cdr.cdr # Expands body, recursively. body = Expander.expand(body, var_env, func_env, macro_env) # The", "or agreed to in writing, software # distributed under the License is distributed", "of let* has an implicit progn forms = Cons(Symbol('LET*'), Cons(bindings, Cons(Cons(Symbol('PROGN'), body), Null())))", "Cons(cls.expand_hepler(forms.cdr), Null()))) # ============================================================================== # Set functions related on special operators # ==============================================================================", "Cons(Symbol('LABELS'), Cons(bindings, Cons(Cons(Symbol('PROGN'), body), Null()))) return forms class LetSystemMacro(SystemMacro): \"\"\"let and let* create", "lambda-list) are implicitly enclosed in a block whose name is name. \"\"\" def", "defines locally named functions and executes a series of forms with these definition", "block-name around the body forms \"\"\" def __new__(cls, *args, **kwargs): \"\"\"Instantiates DefunSystemMacro. \"\"\"", "the function-name or name, as appropriate. labels is equivalent to flet except that", "The body of flet has an implicit progn. forms = Cons(Symbol('FLET'), Cons(bindings, Cons(Cons(Symbol('PROGN'),", "@classmethod def expand_hepler(cls, forms): \"\"\"Expand quotes recursively. 
\"\"\" if not isinstance(forms, Cons): #", "Expander.expand(then_form, var_env, func_env, macro_env) else_form = Expander.expand(else_form, var_env, func_env, macro_env) forms = Cons(Symbol('IF'),", "under the Apache License, Version 2.0 (the \"License\"); # you may not use", "such local functions can be defined. \"\"\" def __new__(cls, *args, **kwargs): \"\"\"Instantiates FletSystemMacro.", "recursively. body = Expander.expand(body, var_env, func_env, macro_env) # The value of the last", "implicit block whose name is the function block name of the function-name or", "= 'LABELS' return object.__new__(cls) def __call__(self, forms, var_env, func_env, macro_env): \"\"\"Behavior of LablesSystemMacro.", "License. # You may obtain a copy of the License at # #", "The body of a lambda has an implicit progn. forms = Cons(Symbol('LAMBDA'), Cons(params,", "Cons(Cons(Symbol('PROGN'), body), Null()))) return forms class IfSystemMacro(SystemMacro): \"\"\"if allows the execution of a", "result is true, then then-form is selected; otherwise else-form is selected. Whichever form", "from clispy.expander import Expander # If else_form is Null, then else_form is set", "\"\"\"Provides a shorthand notation for a function special form involving a lambda expression.", "class LambdaSystemMacro(SystemMacro): \"\"\"Provides a shorthand notation for a function special form involving a", "var_env, func_env, macro_env) # The body of let* has an implicit progn forms", "definitions themselves as well as the body. \"\"\" def __new__(cls, *args, **kwargs): \"\"\"Instantiates", "of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable", "class QuoteSystemMacro(SystemMacro): \"\"\"The quote special operator just returns object. \"\"\" def __new__(cls, *args,", "= Expander.expand(body, var_env, func_env, macro_env) # The body of let* has an implicit", "clispy.expander import Expander params, body = forms.car, forms.cdr # Expands body recursively. body", "sequentially. \"\"\" def __new__(cls, *args, **kwargs): \"\"\"Instantiates LetAsterSytemMacro. \"\"\" cls.__name__ = 'LET*' return", "forms = Cons(Symbol('LABELS'), Cons(bindings, Cons(Cons(Symbol('PROGN'), body), Null()))) return forms class LetSystemMacro(SystemMacro): \"\"\"let and", "sequentially. \"\"\" def __new__(cls, *args, **kwargs): \"\"\"Instantiates LetSystemMacro. \"\"\" cls.__name__ = 'LET' return", "isinstance(forms.car, Cons) and forms.car.car is Symbol('UNQUOTE-SPLICING'): # Unquote-splicing (,@). return Cons(Symbol('APPEND'), Cons(forms.car.cdr.car, Cons(cls.expand_hepler(forms.cdr),", "return forms class LabelsSystemMacro(SystemMacro): \"\"\"flet, labels, and macrolet define local functions and macros,", "of a data structure to be built. \"\"\" def __new__(cls, *args, **kwargs): \"\"\"Instantiates", "forms, var_env, func_env, macro_env): \"\"\"Behavior of QuoteSystemMacro. \"\"\" # Retruns itself. return Cons(Symbol('QUOTE'),", "in which the defmacro form appears. The expansion function accepts two arguments, a", "License, Version 2.0 (the \"License\"); # you may not use this file except", "the last form executed is returned as the expansion of the macro. The", "macro_env): \"\"\"Behavior of DefmacroSystemMacro. \"\"\" from clispy.expander import Expander name, params, body =", "special operator just returns object. \"\"\" def __new__(cls, *args, **kwargs): cls.__name__ = 'QUOTE'", "def __call__(self, forms, var_env, func_env, macro_env): \"\"\"Behavior of FletSystemMacro. \"\"\" from clispy.expander import", "(,). 
return forms.cdr.car elif isinstance(forms.car, Cons) and forms.car.car is Symbol('UNQUOTE-SPLICING'): # Unquote-splicing (,@).", "# Unquote (,). return forms.cdr.car elif isinstance(forms.car, Cons) and forms.car.car is Symbol('UNQUOTE-SPLICING'): #", "= Expander.expand(body, var_env, func_env, macro_env) # The value of the last form executed", "name of the function-name or name, as appropriate. flet defines locally named functions", "'BACKQUOTE' return object.__new__(cls) def __call__(self, forms, var_env, func_env, macro_env): \"\"\"Behavior of BackquoteSystemMacro. \"\"\"", "name as a macro by associating a macro function with that name in", "Null()))) return forms class FletSystemMacro(SystemMacro): \"\"\"flet, labels, and macrolet define local functions and", "return object.__new__(cls) def __call__(self, forms, var_env, func_env, macro_env): \"\"\"Behavior of DefmacroSystemMacro. \"\"\" from", "SystemMacro(Macro): \"\"\"SystemMacro provide some macros for defmacro, defun, and lambda etc. \"\"\" def", "return forms class FletSystemMacro(SystemMacro): \"\"\"flet, labels, and macrolet define local functions and macros,", "let* create new variable bindings and execute a series of forms that use", "let* does them sequentially. \"\"\" def __new__(cls, *args, **kwargs): \"\"\"Instantiates LetSystemMacro. \"\"\" cls.__name__", "body = Cons(Cons(Symbol('PROGN'), body), Null()) # The body of a defun has an", "Cons(name, Cons(Cons(Symbol('PROGN'), body), Null()))) return forms class FletSystemMacro(SystemMacro): \"\"\"flet, labels, and macrolet define", "# Set functions related on special operators # ============================================================================== # For special operators", "\"\"\" def __new__(cls, *args, **kwargs): \"\"\"Instantiates IfSystemMacro. \"\"\" cls.__name__ = 'IF' return object.__new__(cls)", "return forms class QuoteSystemMacro(SystemMacro): \"\"\"The quote special operator just returns object. \"\"\" def", "test_form, then_form, else_form = forms.car, forms.cdr.car, forms.cdr.cdr.car # Expands body recursively. test_form =", "copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by", "value of the last form executed is returned as the expansion of the", "object.__new__(cls) def __call__(self, forms, var_env, func_env, macro_env): \"\"\"Behavior of LambdaSystemMacro. \"\"\" from clispy.expander", "it is quoted. return Cons(Symbol('QUOTE'), Cons(forms, Null())) if forms.car is Symbol('UNQUOTE'): # Unquote", "'IF' return object.__new__(cls) def __call__(self, forms, var_env, func_env, macro_env): \"\"\"Behavior of IfSystemMacro. \"\"\"", "macro_env) # The value of the last form executed is returned as the", "Cons(bindings, Cons(Cons(Symbol('PROGN'), body), Null()))) return forms class QuoteSystemMacro(SystemMacro): \"\"\"The quote special operator just", "\"\"\" def __new__(cls, *args, **kwargs): \"\"\"Instantiates LabelsSystemMacro. \"\"\" cls.__name__ = 'LABELS' return object.__new__(cls)", "Null()))) # ============================================================================== # Set functions related on special operators # ============================================================================== #", "name, as appropriate. flet defines locally named functions and executes a series of", "data structure to be built. \"\"\" def __new__(cls, *args, **kwargs): \"\"\"Instantiates BackquoteSystemMacro. \"\"\"", "var_env, func_env, macro_env): \"\"\"Behavior of DefunSystemMacro. 
\"\"\" from clispy.expander import Expander name, params,", "executed in order. The value of the last form executed is returned as", "a defun has an implicit block. forms = Cons(Symbol('DEFUN'), Cons(name, Cons(params, Cons(Cons(Symbol('BLOCK'), Cons(name,", "forms.car.car is Symbol('UNQUOTE-SPLICING'): # Unquote-splicing (,@). return Cons(Symbol('APPEND'), Cons(forms.car.cdr.car, Cons(cls.expand_hepler(forms.cdr), Null()))) else: #", "**kwargs): \"\"\"Instantiates LabelsSystemMacro. \"\"\" cls.__name__ = 'LABELS' return object.__new__(cls) def __call__(self, forms, var_env,", "lambda list) of each function created by flet and labels and each macro", "involving a lambda expression. \"\"\" def __new__(cls, *args, **kwargs): \"\"\"Instantiates LambdaSystemMacro. \"\"\" cls.__name__", "__new__(cls, *args, **kwargs): \"\"\"Instantiates LetSystemMacro. \"\"\" cls.__name__ = 'LET' return object.__new__(cls) def __call__(self,", "body), Null()))) return forms class DefunSystemMacro(SystemMacro): \"\"\"defun implicitly puts a block named block-name", "these bindings. let performs the bindings in parallel and let* does them sequentially.", "as appropriate. flet defines locally named functions and executes a series of forms", "= Expander.expand(body, var_env, func_env, macro_env) # The body of a block has an", "The body of let has an implicit progn. forms = Cons(Symbol('LET'), Cons(bindings, Cons(Cons(Symbol('PROGN'),", "or implied. # See the License for the specific language governing permissions and", "clispy.expander import Expander name, params, body = forms.car, forms.cdr.car, forms.cdr.cdr # Expands body,", "\"\"\" def __new__(cls, *args, **kwargs): \"\"\"Instantiates FletSystemMacro. \"\"\" cls.__name__ = 'FLET' return object.__new__(cls)", "def __new__(cls, *args, **kwargs): \"\"\"Instantiates DefunSystemMacro. \"\"\" cls.__name__ = 'DEFUN' return object.__new__(cls) def", "# Expands body, recursively. body = Expander.expand(body, var_env, func_env, macro_env) # The body", "\"\"\" cls.__name__ = 'FLET' return object.__new__(cls) def __call__(self, forms, var_env, func_env, macro_env): \"\"\"Behavior", "Cons(Symbol('QUOTE'), forms) class LambdaSystemMacro(SystemMacro): \"\"\"Provides a shorthand notation for a function special form", "then else_form is set to Null. test_form, then_form, else_form = forms.car, forms.cdr.car, forms.cdr.cdr.car", "= 'SYSTEM-MACRO' return object.__new__(cls) def __repr__(self): \"\"\"The official string representation. \"\"\" return \"#<SYSTEM-MACRO", "LetAsterSystemMacro. \"\"\" from clispy.expander import Expander bindings, body = forms.car, forms.cdr # Expands", "var_env, func_env, macro_env): \"\"\"Behavior of IfSystemMacro. \"\"\" from clispy.expander import Expander # If", "return self.expand_hepler(forms.car) @classmethod def expand_hepler(cls, forms): \"\"\"Expand quotes recursively. \"\"\" if not isinstance(forms,", "__call__(self, forms, var_env, func_env, macro_env): \"\"\"Behavior of LetAsterSystemMacro. \"\"\" from clispy.expander import Expander", "under the License. # ============================================================================== from clispy.macro import Macro from clispy.package import assign_helper,", "structure to be built. \"\"\" def __new__(cls, *args, **kwargs): \"\"\"Instantiates BackquoteSystemMacro. \"\"\" cls.__name__", "form is selected is then evaluated. \"\"\" def __new__(cls, *args, **kwargs): \"\"\"Instantiates IfSystemMacro.", "name of the function-name or name, as appropriate. 
labels is equivalent to flet", "number of such local functions can be defined. \"\"\" def __new__(cls, *args, **kwargs):", "use these bindings. let performs the bindings in parallel and let* does them", "forms class DefmacroSystemMacro(SystemMacro): \"\"\"Defines name as a macro by associating a macro function", "operators assign_helper(symbol_name='BLOCK', value=BlockSystemMacro(), package_name='COMMON-LISP', env='MACRO', status=':EXTERNAL') assign_helper(symbol_name='FLET', value=FletSystemMacro(), package_name='COMMON-LISP', env='MACRO', status=':EXTERNAL') assign_helper(symbol_name='IF', value=IfSystemMacro(),", "*args, **kwargs): \"\"\"Instantiates LambdaSystemMacro. \"\"\" cls.__name__ = 'LAMBDA' return object.__new__(cls) def __call__(self, forms,", "The body of let* has an implicit progn forms = Cons(Symbol('LET*'), Cons(bindings, Cons(Cons(Symbol('PROGN'),", "themselves as well as the body. \"\"\" def __new__(cls, *args, **kwargs): \"\"\"Instantiates LabelsSystemMacro.", "class BlockSystemMacro(SystemMacro): \"\"\"block establishes a block and then evaluates forms as an implicit", "status=':EXTERNAL') assign_helper(symbol_name='QUOTE', value=QuoteSystemMacro(), package_name='COMMON-LISP', env='MACRO', status='EXTERNAL') # For system functions assign_helper(symbol_name='LAMBDA', value=LambdaSystemMacro(), package_name='COMMON-LISP',", "use this file except in compliance with the License. # You may obtain", "return object.__new__(cls) def __call__(self, forms, var_env, func_env, macro_env): \"\"\"Behavior of FletSystemMacro. \"\"\" from", "The body of a defmacro has an implicit block. forms = Cons(Symbol('DEFMACRO'), Cons(name,", "specified by forms. Forms are executed in order. The value of the last", "return forms class DefmacroSystemMacro(SystemMacro): \"\"\"Defines name as a macro by associating a macro", "each function created by flet and labels and each macro created by macrolet", "clispy.package import assign_helper, use_package_helper from clispy.type import Cons, Null, Symbol # ============================================================================== #", "# Unquote-splicing (,@). return Cons(Symbol('APPEND'), Cons(forms.car.cdr.car, Cons(cls.expand_hepler(forms.cdr), Null()))) else: # Expands recursively and", "defined. \"\"\" def __new__(cls, *args, **kwargs): \"\"\"Instantiates FletSystemMacro. \"\"\" cls.__name__ = 'FLET' return", "forms): \"\"\"Expand quotes recursively. \"\"\" if not isinstance(forms, Cons): # An argument is", "Set functions related on special operators # ============================================================================== # For special operators assign_helper(symbol_name='BLOCK',", "forms.cdr.car, forms.cdr.cdr.car # Expands body recursively. test_form = Expander.expand(test_form, var_env, func_env, macro_env) then_form", "# The body of flet has an implicit progn. forms = Cons(Symbol('FLET'), Cons(bindings,", "forms, var_env, func_env, macro_env): \"\"\"Behavior of DefmacroSystemMacro. \"\"\" from clispy.expander import Expander name,", "\"\"\" cls.__name__ = 'DEFMACRO' return object.__new__(cls) def __call__(self, forms, var_env, func_env, macro_env): \"\"\"Behavior", "\"\"\" # Retruns itself. return Cons(Symbol('QUOTE'), forms) class LambdaSystemMacro(SystemMacro): \"\"\"Provides a shorthand notation", "for the specific language governing permissions and # limitations under the License. #", "is equivalent to flet except that the scope of the defined function names", "The body of the expansion function is specified by forms. 
Forms are executed", "BackquoteSystemMacro. \"\"\" return self.expand_hepler(forms.car) @classmethod def expand_hepler(cls, forms): \"\"\"Expand quotes recursively. \"\"\" if", "execute forms using the local definitions. forms are executed in order of occurence.", "body = Expander.expand(body, var_env, func_env, macro_env) # The body of a block has", "recursively. test_form = Expander.expand(test_form, var_env, func_env, macro_env) then_form = Expander.expand(then_form, var_env, func_env, macro_env)", "Cons(then_form, Cons(else_form, Null())))) return forms class LabelsSystemMacro(SystemMacro): \"\"\"flet, labels, and macrolet define local", "def __new__(cls, *args, **kwargs): \"\"\"Instantiates DefmacroSystemMacro. \"\"\" cls.__name__ = 'DEFMACRO' return object.__new__(cls) def", "an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either", "Cons(Cons(Symbol('PROGN'), body), Null()))) return forms class LetSystemMacro(SystemMacro): \"\"\"let and let* create new variable", "of labels has an implicit progn. forms = Cons(Symbol('LABELS'), Cons(bindings, Cons(Cons(Symbol('PROGN'), body), Null())))", "# limitations under the License. # ============================================================================== from clispy.macro import Macro from clispy.package", "introduces a template of a data structure to be built. \"\"\" def __new__(cls,", "package_name='COMMON-LISP', env='MACRO', status=':EXTERNAL') assign_helper(symbol_name='BACKQUOTE', value=BackquoteSystemMacro(), package_name='COMMON-LISP', env='MACRO', status=':EXTERNAL') # COMMON-LISP-USER package use_package_helper(package_name_to_use='COMMON-LISP', package_name='COMMON-LISP-USER')", "as the expansion of the macro. body = Cons(Cons(Symbol('PROGN'), body), Null()) # The", "set to Null. test_form, then_form, else_form = forms.car, forms.cdr.car, forms.cdr.cdr.car # Expands body", "from clispy.expander import Expander name, body = forms.car, forms.cdr # Expands body recursively.", "First test-form is evaluated. If the result is true, then then-form is selected;", "returns object. \"\"\" def __new__(cls, *args, **kwargs): cls.__name__ = 'QUOTE' return object.__new__(cls) def", "SystemMacro. \"\"\" cls.__name__ = 'SYSTEM-MACRO' return object.__new__(cls) def __repr__(self): \"\"\"The official string representation.", "= Cons(Symbol('DEFMACRO'), Cons(name, Cons(params, Cons(Cons(Symbol('BLOCK'), Cons(name, body)), Null())))) return forms class BackquoteSystemMacro(SystemMacro): \"\"\"The", "is not an instance of Cons, it is quoted. return Cons(Symbol('QUOTE'), Cons(forms, Null()))", "Null()) # The body of a defun has an implicit block. forms =", "# distributed under the License is distributed on an \"AS IS\" BASIS, #", "of FletSystemMacro. \"\"\" from clispy.expander import Expander bindings, body = forms.car, forms.cdr #", "is then evaluated. \"\"\" def __new__(cls, *args, **kwargs): \"\"\"Instantiates IfSystemMacro. \"\"\" cls.__name__ =", "# Expands body recursively. body = Expander.expand(body, var_env, func_env, macro_env) # The body", "LetSystemMacro(SystemMacro): \"\"\"let and let* create new variable bindings and execute a series of", "BackquoteSystemMacro(SystemMacro): \"\"\"The backquote introduces a template of a data structure to be built.", "forms) class LambdaSystemMacro(SystemMacro): \"\"\"Provides a shorthand notation for a function special form involving", "FletSystemMacro. 
\"\"\" cls.__name__ = 'FLET' return object.__new__(cls) def __call__(self, forms, var_env, func_env, macro_env):", "body recursively. test_form = Expander.expand(test_form, var_env, func_env, macro_env) then_form = Expander.expand(then_form, var_env, func_env,", "environment. The expansion function returns a form. The body of the expansion function", "scope of the defined function names for labels encompasses the function definitions themselves", "package_name='COMMON-LISP', env='MACRO', status=':EXTERNAL') assign_helper(symbol_name='LET', value=LetSystemMacro(), package_name='COMMON-LISP', env='MACRO', status=':EXTERNAL') assign_helper(symbol_name='LET*', value=LetAsterSystemMacro(), package_name='COMMON-LISP', env='MACRO', status=':EXTERNAL')", "and macros, and execute forms using the local definitions. forms are executed in", "quote special operator just returns object. \"\"\" def __new__(cls, *args, **kwargs): cls.__name__ =", "cls.__name__ = 'FLET' return object.__new__(cls) def __call__(self, forms, var_env, func_env, macro_env): \"\"\"Behavior of", "is specified by forms. Forms are executed in order. The value of the", "with the License. # You may obtain a copy of the License at", "The body of a defun has an implicit progn. body = Cons(Cons(Symbol('PROGN'), body),", "progn. forms = Cons(Symbol('LABELS'), Cons(bindings, Cons(Cons(Symbol('PROGN'), body), Null()))) return forms class LetSystemMacro(SystemMacro): \"\"\"let", "__repr__(self): \"\"\"The official string representation. \"\"\" return \"#<SYSTEM-MACRO {0} {{{1:X}}}>\".format(self.__class__.__name__, id(self)) # ==============================================================================", "def __new__(cls, *args, **kwargs): cls.__name__ = 'QUOTE' return object.__new__(cls) def __call__(self, forms, var_env,", "QuoteSystemMacro(SystemMacro): \"\"\"The quote special operator just returns object. \"\"\" def __new__(cls, *args, **kwargs):", "local functions can be defined. \"\"\" def __new__(cls, *args, **kwargs): \"\"\"Instantiates FletSystemMacro. \"\"\"", "IfSystemMacro. \"\"\" from clispy.expander import Expander # If else_form is Null, then else_form", "law or agreed to in writing, software # distributed under the License is", "arguments, a form and an environment. The expansion function returns a form. The", "\"\"\"Instantiates DefunSystemMacro. \"\"\" cls.__name__ = 'DEFUN' return object.__new__(cls) def __call__(self, forms, var_env, func_env,", "has an implicit progn. forms = Cons(Symbol('FLET'), Cons(bindings, Cons(Cons(Symbol('PROGN'), body), Null()))) return forms", "Expander # If else_form is Null, then else_form is set to Null. test_form,", "class LetSystemMacro(SystemMacro): \"\"\"let and let* create new variable bindings and execute a series", "Cons, Null, Symbol # ============================================================================== # Defines base classes. 
# # SystemMacro #", "value=DefunSystemMacro(), package_name='COMMON-LISP', env='MACRO', status=':EXTERNAL') assign_helper(symbol_name='DEFMACRO', value=DefmacroSystemMacro(), package_name='COMMON-LISP', env='MACRO', status=':EXTERNAL') assign_helper(symbol_name='BACKQUOTE', value=BackquoteSystemMacro(), package_name='COMMON-LISP', env='MACRO',", "not the lambda-list) are implicitly enclosed in a block whose name is name.", "package_name='COMMON-LISP', env='MACRO', status='EXTERNAL') # For system functions assign_helper(symbol_name='LAMBDA', value=LambdaSystemMacro(), package_name='COMMON-LISP', env='MACRO', status=':EXTERNAL') assign_helper(symbol_name='DEFUN',", "in compliance with the License. # You may obtain a copy of the", "License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or", "returns cons. return Cons(Symbol('CONS'), Cons(cls.expand_hepler(forms.car), Cons(cls.expand_hepler(forms.cdr), Null()))) # ============================================================================== # Set functions related", "import Expander params, body = forms.car, forms.cdr # Expands body recursively. body =", "cls.__name__ = 'LET*' return object.__new__(cls) def __call__(self, forms, var_env, func_env, macro_env): \"\"\"Behavior of", "Cons(Symbol('DEFMACRO'), Cons(name, Cons(params, Cons(Cons(Symbol('BLOCK'), Cons(name, body)), Null())))) return forms class BackquoteSystemMacro(SystemMacro): \"\"\"The backquote", "the body forms \"\"\" def __new__(cls, *args, **kwargs): \"\"\"Instantiates DefunSystemMacro. \"\"\" cls.__name__ =", "the defmacro form appears. The expansion function accepts two arguments, a form and", "BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.", "\"\"\"Behavior of LablesSystemMacro. \"\"\" from clispy.expander import Expander bindings, body = forms.car, forms.cdr", "and let* create new variable bindings and execute a series of forms that", "returned as the expansion of the macro. body = Cons(Cons(Symbol('PROGN'), body), Null()) #", "block name of the function-name or name, as appropriate. flet defines locally named", "DefmacroSystemMacro(SystemMacro): \"\"\"Defines name as a macro by associating a macro function with that", "a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required", "\"\"\"SystemMacro provide some macros for defmacro, defun, and lambda etc. \"\"\" def __new__(cls,", "============================================================================== from clispy.macro import Macro from clispy.package import assign_helper, use_package_helper from clispy.type import", "def __call__(self, forms, var_env, func_env, macro_env): \"\"\"Behavior of BlockSystemMacro. \"\"\" from clispy.expander import", "function (but not the lambda-list) are implicitly enclosed in a block whose name", "Expands body recursively. body = Expander.expand(body, var_env, macro_env, macro_env) # The body of", "body of labels has an implicit progn. forms = Cons(Symbol('LABELS'), Cons(bindings, Cons(Cons(Symbol('PROGN'), body),", "just returns object. \"\"\" def __new__(cls, *args, **kwargs): cls.__name__ = 'QUOTE' return object.__new__(cls)", "defmacro has an implicit block. forms = Cons(Symbol('DEFMACRO'), Cons(name, Cons(params, Cons(Cons(Symbol('BLOCK'), Cons(name, body)),", "return Cons(Symbol('QUOTE'), forms) class LambdaSystemMacro(SystemMacro): \"\"\"Provides a shorthand notation for a function special", "the License. 
# ============================================================================== from clispy.macro import Macro from clispy.package import assign_helper, use_package_helper", "macro_env): \"\"\"Behavior of BlockSystemMacro. \"\"\" from clispy.expander import Expander name, body = forms.car,", "and let* does them sequentially. \"\"\" def __new__(cls, *args, **kwargs): \"\"\"Instantiates LetSystemMacro. \"\"\"", "Null()))) return forms class LetAsterSystemMacro(SystemMacro): \"\"\"let and let* create new variable bindings and", "is true, then then-form is selected; otherwise else-form is selected. Whichever form is", "# ============================================================================== class BlockSystemMacro(SystemMacro): \"\"\"block establishes a block and then evaluates forms as", "\"\"\"The backquote introduces a template of a data structure to be built. \"\"\"", "has an implicit block. forms = Cons(Symbol('DEFUN'), Cons(name, Cons(params, Cons(Cons(Symbol('BLOCK'), Cons(name, body)), Null()))))", "func_env, macro_env): \"\"\"Behavior of BlockSystemMacro. \"\"\" from clispy.expander import Expander name, body =", "forms.car, forms.cdr.car, forms.cdr.cdr # Expands body, recursively. body = Expander.expand(body, var_env, func_env, macro_env)", "expansion function accepts two arguments, a form and an environment. The expansion function", "expansion function is specified by forms. Forms are executed in order. The value", "cls.__name__ = 'DEFUN' return object.__new__(cls) def __call__(self, forms, var_env, func_env, macro_env): \"\"\"Behavior of", "\"\"\"Expand quotes recursively. \"\"\" if not isinstance(forms, Cons): # An argument is not", "def __new__(cls, *args, **kwargs): \"\"\"Instantiates LambdaSystemMacro. \"\"\" cls.__name__ = 'LAMBDA' return object.__new__(cls) def", "Null. test_form, then_form, else_form = forms.car, forms.cdr.car, forms.cdr.cdr.car # Expands body recursively. test_form", "new variable bindings and execute a series of forms that use these bindings.", "the last form executed is returned as the expansion of the macro. body", "\"\"\" cls.__name__ = 'SYSTEM-MACRO' return object.__new__(cls) def __repr__(self): \"\"\"The official string representation. \"\"\"", "is quoted. return Cons(Symbol('QUOTE'), Cons(forms, Null())) if forms.car is Symbol('UNQUOTE'): # Unquote (,).", "\"\"\"Instantiates LabelsSystemMacro. \"\"\" cls.__name__ = 'LABELS' return object.__new__(cls) def __call__(self, forms, var_env, func_env,", "Symbol('UNQUOTE'): # Unquote (,). return forms.cdr.car elif isinstance(forms.car, Cons) and forms.car.car is Symbol('UNQUOTE-SPLICING'):", "__new__(cls, *args, **kwargs): \"\"\"Instantiates DefmacroSystemMacro. \"\"\" cls.__name__ = 'DEFMACRO' return object.__new__(cls) def __call__(self,", "\"\"\"Instantiates FletSystemMacro. \"\"\" cls.__name__ = 'FLET' return object.__new__(cls) def __call__(self, forms, var_env, func_env,", "else-form is selected. Whichever form is selected is then evaluated. \"\"\" def __new__(cls,", "recursively. \"\"\" if not isinstance(forms, Cons): # An argument is not an instance", "dependent on a single test-form. First test-form is evaluated. If the result is", "names for labels encompasses the function definitions themselves as well as the body.", "is evaluated. If the result is true, then then-form is selected; otherwise else-form", "from clispy.type import Cons, Null, Symbol # ============================================================================== # Defines base classes. 
#", "\"\"\" def __new__(cls, *args, **kwargs): \"\"\"Instantiates SystemMacro. \"\"\" cls.__name__ = 'SYSTEM-MACRO' return object.__new__(cls)", "var_env, func_env, macro_env) # The body of a lambda has an implicit progn.", "Cons(cls.expand_hepler(forms.car), Cons(cls.expand_hepler(forms.cdr), Null()))) # ============================================================================== # Set functions related on special operators #", "distributed under the License is distributed on an \"AS IS\" BASIS, # WITHOUT", "'LET*' return object.__new__(cls) def __call__(self, forms, var_env, func_env, macro_env): \"\"\"Behavior of LetAsterSystemMacro. \"\"\"", "forms, var_env, func_env, macro_env): \"\"\"Behavior of LablesSystemMacro. \"\"\" from clispy.expander import Expander bindings,", "expansion of the macro. The body forms of the expansion function (but not", "lambda has an implicit progn. forms = Cons(Symbol('LAMBDA'), Cons(params, Cons(Cons(Symbol('PROGN'), body), Null()))) return", "a block has an implicit progn. forms = Cons(Symbol('BLOCK'), Cons(name, Cons(Cons(Symbol('PROGN'), body), Null())))", "a lambda has an implicit progn. forms = Cons(Symbol('LAMBDA'), Cons(params, Cons(Cons(Symbol('PROGN'), body), Null())))", "def __call__(self, forms, var_env, func_env, macro_env): \"\"\"Behavior of LambdaSystemMacro. \"\"\" from clispy.expander import", "func_env, macro_env) # The value of the last form executed is returned as", "return object.__new__(cls) def __call__(self, forms, var_env, func_env, macro_env): \"\"\"Behavior of QuoteSystemMacro. \"\"\" #", "object.__new__(cls) def __call__(self, forms, var_env, func_env, macro_env): \"\"\"Behavior of BlockSystemMacro. \"\"\" from clispy.expander", "class SystemMacro(Macro): \"\"\"SystemMacro provide some macros for defmacro, defun, and lambda etc. \"\"\"", "= Cons(Symbol('DEFUN'), Cons(name, Cons(params, Cons(Cons(Symbol('BLOCK'), Cons(name, body)), Null())))) return forms class DefmacroSystemMacro(SystemMacro): \"\"\"Defines", "this file except in compliance with the License. # You may obtain a", "argument is not an instance of Cons, it is quoted. return Cons(Symbol('QUOTE'), Cons(forms,", "the function-name or name, as appropriate. flet defines locally named functions and executes", "system functions assign_helper(symbol_name='LAMBDA', value=LambdaSystemMacro(), package_name='COMMON-LISP', env='MACRO', status=':EXTERNAL') assign_helper(symbol_name='DEFUN', value=DefunSystemMacro(), package_name='COMMON-LISP', env='MACRO', status=':EXTERNAL') assign_helper(symbol_name='DEFMACRO',", "the expansion function is specified by forms. Forms are executed in order. The", "enclosed in an implicit block whose name is the function block name of", "a series of forms that use these bindings. let performs the bindings in", "Null()))) return forms class DefunSystemMacro(SystemMacro): \"\"\"defun implicitly puts a block named block-name around", "defined function names for labels encompasses the function definitions themselves as well as", "and executes a series of forms with these definition bindings. Any number of", "a form and an environment. The expansion function returns a form. The body", "progn. 
forms = Cons(Symbol('LET'), Cons(bindings, Cons(Cons(Symbol('PROGN'), body), Null()))) return forms class LetAsterSystemMacro(SystemMacro): \"\"\"let", "= 'DEFUN' return object.__new__(cls) def __call__(self, forms, var_env, func_env, macro_env): \"\"\"Behavior of DefunSystemMacro.", "def __call__(self, forms, var_env, func_env, macro_env): \"\"\"Behavior of LetAsterSystemMacro. \"\"\" from clispy.expander import", "\"\"\"Behavior of DefunSystemMacro. \"\"\" from clispy.expander import Expander name, params, body = forms.car,", "*args, **kwargs): \"\"\"Instantiates FletSystemMacro. \"\"\" cls.__name__ = 'FLET' return object.__new__(cls) def __call__(self, forms,", "var_env, func_env, macro_env): \"\"\"Behavior of BlockSystemMacro. \"\"\" from clispy.expander import Expander name, body", "\"\"\"Instantiates BackquoteSystemMacro. \"\"\" cls.__name__ = 'BACKQUOTE' return object.__new__(cls) def __call__(self, forms, var_env, func_env,", "value=LambdaSystemMacro(), package_name='COMMON-LISP', env='MACRO', status=':EXTERNAL') assign_helper(symbol_name='DEFUN', value=DefunSystemMacro(), package_name='COMMON-LISP', env='MACRO', status=':EXTERNAL') assign_helper(symbol_name='DEFMACRO', value=DefmacroSystemMacro(), package_name='COMMON-LISP', env='MACRO',", "The value of the last form executed is returned as the expansion of", "assign_helper, use_package_helper from clispy.type import Cons, Null, Symbol # ============================================================================== # Defines base", "defmacro, defun, and lambda etc. \"\"\" def __new__(cls, *args, **kwargs): \"\"\"Instantiates SystemMacro. \"\"\"", "environment. The macro function is defined in the same lexical environment in which", "var_env, func_env, macro_env) # The body of a defun has an implicit progn.", "is selected; otherwise else-form is selected. Whichever form is selected is then evaluated.", "# Expands body, recursively. body = Expander.expand(body, var_env, func_env, macro_env) # The value", "# Defines system macro classes. # ============================================================================== class BlockSystemMacro(SystemMacro): \"\"\"block establishes a block", "forms.cdr # Expands body recursively. body = Expander.expand(body, var_env, macro_env, macro_env) # The", "macro. body = Cons(Cons(Symbol('PROGN'), body), Null()) # The body of a defmacro has", "macro_env) # The body of let* has an implicit progn forms = Cons(Symbol('LET*'),", "has an implicit progn. body = Cons(Cons(Symbol('PROGN'), body), Null()) # The body of", "let performs the bindings in parallel and let* does them sequentially. \"\"\" def", "value=BlockSystemMacro(), package_name='COMMON-LISP', env='MACRO', status=':EXTERNAL') assign_helper(symbol_name='FLET', value=FletSystemMacro(), package_name='COMMON-LISP', env='MACRO', status=':EXTERNAL') assign_helper(symbol_name='IF', value=IfSystemMacro(), package_name='COMMON-LISP', env='MACRO',", "*args, **kwargs): \"\"\"Instantiates SystemMacro. \"\"\" cls.__name__ = 'SYSTEM-MACRO' return object.__new__(cls) def __repr__(self): \"\"\"The", "provide some macros for defmacro, defun, and lambda etc. \"\"\" def __new__(cls, *args,", "value=FletSystemMacro(), package_name='COMMON-LISP', env='MACRO', status=':EXTERNAL') assign_helper(symbol_name='IF', value=IfSystemMacro(), package_name='COMMON-LISP', env='MACRO', status=':EXTERNAL') assign_helper(symbol_name='LABELS', value=LabelsSystemMacro(), package_name='COMMON-LISP', env='MACRO',", "an implicit progn. 
forms = Cons(Symbol('LAMBDA'), Cons(params, Cons(Cons(Symbol('PROGN'), body), Null()))) return forms class", "value=LetAsterSystemMacro(), package_name='COMMON-LISP', env='MACRO', status=':EXTERNAL') assign_helper(symbol_name='QUOTE', value=QuoteSystemMacro(), package_name='COMMON-LISP', env='MACRO', status='EXTERNAL') # For system functions", "the execution of a form to be dependent on a single test-form. First", "recursively. body = Expander.expand(body, var_env, func_env, macro_env) # The body of let* has", "Cons(Cons(Symbol('PROGN'), body), Null()))) return forms class FletSystemMacro(SystemMacro): \"\"\"flet, labels, and macrolet define local", "establishes a block and then evaluates forms as an implicit progn. \"\"\" def", "the macro. The body forms of the expansion function (but not the lambda-list)", "functions assign_helper(symbol_name='LAMBDA', value=LambdaSystemMacro(), package_name='COMMON-LISP', env='MACRO', status=':EXTERNAL') assign_helper(symbol_name='DEFUN', value=DefunSystemMacro(), package_name='COMMON-LISP', env='MACRO', status=':EXTERNAL') assign_helper(symbol_name='DEFMACRO', value=DefmacroSystemMacro(),", "# # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to", "a single test-form. First test-form is evaluated. If the result is true, then", "macros for defmacro, defun, and lambda etc. \"\"\" def __new__(cls, *args, **kwargs): \"\"\"Instantiates", "status=':EXTERNAL') assign_helper(symbol_name='LET', value=LetSystemMacro(), package_name='COMMON-LISP', env='MACRO', status=':EXTERNAL') assign_helper(symbol_name='LET*', value=LetAsterSystemMacro(), package_name='COMMON-LISP', env='MACRO', status=':EXTERNAL') assign_helper(symbol_name='QUOTE', value=QuoteSystemMacro(),", "macro_env): \"\"\"Behavior of LablesSystemMacro. \"\"\" from clispy.expander import Expander bindings, body = forms.car,", "forms, var_env, func_env, macro_env): \"\"\"Behavior of FletSystemMacro. \"\"\" from clispy.expander import Expander bindings,", "def __new__(cls, *args, **kwargs): \"\"\"Instantiates LetSystemMacro. \"\"\" cls.__name__ = 'LET' return object.__new__(cls) def", "the body. \"\"\" def __new__(cls, *args, **kwargs): \"\"\"Instantiates LabelsSystemMacro. \"\"\" cls.__name__ = 'LABELS'", "permissions and # limitations under the License. # ============================================================================== from clispy.macro import Macro", "and execute a series of forms that use these bindings. let performs the", "name, params, body = forms.car, forms.cdr.car, forms.cdr.cdr # Expands body, recursively. body =", "= Expander.expand(test_form, var_env, func_env, macro_env) then_form = Expander.expand(then_form, var_env, func_env, macro_env) else_form =", "body)), Null())))) return forms class DefmacroSystemMacro(SystemMacro): \"\"\"Defines name as a macro by associating", "and labels and each macro created by macrolet are enclosed in an implicit", "a block and then evaluates forms as an implicit progn. \"\"\" def __new__(cls,", "is set to Null. test_form, then_form, else_form = forms.car, forms.cdr.car, forms.cdr.cdr.car # Expands", "required by applicable law or agreed to in writing, software # distributed under", "two arguments, a form and an environment. 
# Copyright 2019 <NAME>. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================

from clispy.macro import Macro
from clispy.package import assign_helper, use_package_helper
from clispy.type import Cons, Null, Symbol


# ==============================================================================
# Defines base classes.
#
#     SystemMacro
# ==============================================================================

class SystemMacro(Macro):
    """SystemMacro provides macros for defmacro, defun, lambda, etc.
    """
    def __new__(cls, *args, **kwargs):
        """Instantiates SystemMacro.
        """
        cls.__name__ = 'SYSTEM-MACRO'
        return object.__new__(cls)

    def __repr__(self):
        """The official string representation.
        """
        return "#<SYSTEM-MACRO {0} {{{1:X}}}>".format(self.__class__.__name__, id(self))


# ==============================================================================
# Defines system macro classes.
# ==============================================================================

class BlockSystemMacro(SystemMacro):
    """block establishes a block and then evaluates forms as an implicit progn.
    """
    def __new__(cls, *args, **kwargs):
        """Instantiates BlockSystemMacro.
        """
        cls.__name__ = 'BLOCK'
        return object.__new__(cls)

    def __call__(self, forms, var_env, func_env, macro_env):
        """Behavior of BlockSystemMacro.
        """
        from clispy.expander import Expander

        name, body = forms.car, forms.cdr

        # Expands body recursively.
        body = Expander.expand(body, var_env, func_env, macro_env)

        # The body of a block has an implicit progn.
        forms = Cons(Symbol('BLOCK'), Cons(name, Cons(Cons(Symbol('PROGN'), body), Null())))

        return forms


class FletSystemMacro(SystemMacro):
    """flet, labels, and macrolet define local functions and macros, and
    execute forms using the local definitions. forms are executed in order of
    occurrence. The body forms (but not the lambda list) of each function
    created by flet and labels and each macro created by macrolet are enclosed
    in an implicit block whose name is the function block name of the
    function-name or name, as appropriate. flet defines locally named
    functions and executes a series of forms with these definition bindings.
    Any number of such local functions can be defined.
    """
    def __new__(cls, *args, **kwargs):
        """Instantiates FletSystemMacro.
        """
        cls.__name__ = 'FLET'
        return object.__new__(cls)

    def __call__(self, forms, var_env, func_env, macro_env):
        """Behavior of FletSystemMacro.
        """
        from clispy.expander import Expander

        bindings, body = forms.car, forms.cdr

        # Expands body recursively (func_env is passed here, matching the
        # other system macros).
        body = Expander.expand(body, var_env, func_env, macro_env)

        # The body of flet has an implicit progn.
        forms = Cons(Symbol('FLET'), Cons(bindings, Cons(Cons(Symbol('PROGN'), body), Null())))

        return forms


class IfSystemMacro(SystemMacro):
    """if allows the execution of a form to be dependent on a single
    test-form. First test-form is evaluated. If the result is true, then
    then-form is selected; otherwise else-form is selected. Whichever form is
    selected is then evaluated.
    """
    def __new__(cls, *args, **kwargs):
        """Instantiates IfSystemMacro.
        """
        cls.__name__ = 'IF'
        return object.__new__(cls)

    def __call__(self, forms, var_env, func_env, macro_env):
        """Behavior of IfSystemMacro.
        """
        from clispy.expander import Expander

        # If there is no else clause, forms.cdr.cdr.car is Null, so else_form
        # is set to Null.
        test_form, then_form, else_form = forms.car, forms.cdr.car, forms.cdr.cdr.car

        # Expands each sub-form recursively.
        test_form = Expander.expand(test_form, var_env, func_env, macro_env)
        then_form = Expander.expand(then_form, var_env, func_env, macro_env)
        else_form = Expander.expand(else_form, var_env, func_env, macro_env)

        forms = Cons(Symbol('IF'), Cons(test_form, Cons(then_form, Cons(else_form, Null()))))

        return forms


class LabelsSystemMacro(SystemMacro):
    """flet, labels, and macrolet define local functions and macros, and
    execute forms using the local definitions. forms are executed in order of
    occurrence. The body forms (but not the lambda list) of each function
    created by flet and labels and each macro created by macrolet are enclosed
    in an implicit block whose name is the function block name of the
    function-name or name, as appropriate. labels is equivalent to flet except
    that the scope of the defined function names for labels encompasses the
    function definitions themselves as well as the body.
    """
    def __new__(cls, *args, **kwargs):
        """Instantiates LabelsSystemMacro.
        """
        cls.__name__ = 'LABELS'
        return object.__new__(cls)

    def __call__(self, forms, var_env, func_env, macro_env):
        """Behavior of LabelsSystemMacro.
        """
        from clispy.expander import Expander

        bindings, body = forms.car, forms.cdr

        # Expands body recursively.
        body = Expander.expand(body, var_env, func_env, macro_env)

        # The body of labels has an implicit progn.
        forms = Cons(Symbol('LABELS'), Cons(bindings, Cons(Cons(Symbol('PROGN'), body), Null())))

        return forms


class LetSystemMacro(SystemMacro):
    """let and let* create new variable bindings and execute a series of forms
    that use these bindings. let performs the bindings in parallel and let*
    does them sequentially.
    """
    def __new__(cls, *args, **kwargs):
        """Instantiates LetSystemMacro.
        """
        cls.__name__ = 'LET'
        return object.__new__(cls)

    def __call__(self, forms, var_env, func_env, macro_env):
        """Behavior of LetSystemMacro.
        """
        from clispy.expander import Expander

        bindings, body = forms.car, forms.cdr

        # Expands body recursively.
        body = Expander.expand(body, var_env, func_env, macro_env)

        # The body of let has an implicit progn.
        forms = Cons(Symbol('LET'), Cons(bindings, Cons(Cons(Symbol('PROGN'), body), Null())))

        return forms


class LetAsterSystemMacro(SystemMacro):
    """let and let* create new variable bindings and execute a series of forms
    that use these bindings. let performs the bindings in parallel and let*
    does them sequentially.
    """
    def __new__(cls, *args, **kwargs):
        """Instantiates LetAsterSystemMacro.
        """
        cls.__name__ = 'LET*'
        return object.__new__(cls)

    def __call__(self, forms, var_env, func_env, macro_env):
        """Behavior of LetAsterSystemMacro.
        """
        from clispy.expander import Expander

        bindings, body = forms.car, forms.cdr

        # Expands body recursively.
        body = Expander.expand(body, var_env, func_env, macro_env)

        # The body of let* has an implicit progn.
        forms = Cons(Symbol('LET*'), Cons(bindings, Cons(Cons(Symbol('PROGN'), body), Null())))

        return forms


class QuoteSystemMacro(SystemMacro):
    """The quote special operator just returns object.
    """
    def __new__(cls, *args, **kwargs):
        """Instantiates QuoteSystemMacro.
        """
        cls.__name__ = 'QUOTE'
        return object.__new__(cls)

    def __call__(self, forms, var_env, func_env, macro_env):
        """Behavior of QuoteSystemMacro.
        """
        # Returns itself.
        return Cons(Symbol('QUOTE'), forms)


class LambdaSystemMacro(SystemMacro):
    """Provides a shorthand notation for a function special form involving a
    lambda expression.
    """
    def __new__(cls, *args, **kwargs):
        """Instantiates LambdaSystemMacro.
        """
        cls.__name__ = 'LAMBDA'
        return object.__new__(cls)

    def __call__(self, forms, var_env, func_env, macro_env):
        """Behavior of LambdaSystemMacro.
        """
        from clispy.expander import Expander

        params, body = forms.car, forms.cdr

        # Expands body recursively.
        body = Expander.expand(body, var_env, func_env, macro_env)

        # The body of a lambda has an implicit progn.
        forms = Cons(Symbol('LAMBDA'), Cons(params, Cons(Cons(Symbol('PROGN'), body), Null())))

        return forms


class DefunSystemMacro(SystemMacro):
    """defun implicitly puts a block named block-name around the body forms.
    """
    def __new__(cls, *args, **kwargs):
        """Instantiates DefunSystemMacro.
        """
        cls.__name__ = 'DEFUN'
        return object.__new__(cls)

    def __call__(self, forms, var_env, func_env, macro_env):
        """Behavior of DefunSystemMacro.
        """
        from clispy.expander import Expander

        name, params, body = forms.car, forms.cdr.car, forms.cdr.cdr

        # Expands body recursively.
        body = Expander.expand(body, var_env, func_env, macro_env)

        # The body of a defun has an implicit progn.
        body = Cons(Cons(Symbol('PROGN'), body), Null())

        # The body of a defun has an implicit block.
        forms = Cons(Symbol('DEFUN'), Cons(name, Cons(params, Cons(Cons(Symbol('BLOCK'), Cons(name, body)), Null()))))

        return forms


class DefmacroSystemMacro(SystemMacro):
    """Defines name as a macro by associating a macro function with that name
    in the global environment. The macro function is defined in the same
    lexical environment in which the defmacro form appears. The expansion
    function accepts two arguments, a form and an environment. The expansion
    function returns a form. The body of the expansion function is specified
    by forms. Forms are executed in order. The value of the last form executed
    is returned as the expansion of the macro. The body forms of the expansion
    function (but not the lambda-list) are implicitly enclosed in a block
    whose name is name.
    """
    def __new__(cls, *args, **kwargs):
        """Instantiates DefmacroSystemMacro.
        """
        cls.__name__ = 'DEFMACRO'
        return object.__new__(cls)

    def __call__(self, forms, var_env, func_env, macro_env):
        """Behavior of DefmacroSystemMacro.
        """
        from clispy.expander import Expander

        name, params, body = forms.car, forms.cdr.car, forms.cdr.cdr

        # Expands body recursively.
        body = Expander.expand(body, var_env, func_env, macro_env)

        # The value of the last form executed is returned as the expansion
        # of the macro.
        body = Cons(Cons(Symbol('PROGN'), body), Null())

        # The body of a defmacro has an implicit block.
        forms = Cons(Symbol('DEFMACRO'), Cons(name, Cons(params, Cons(Cons(Symbol('BLOCK'), Cons(name, body)), Null()))))

        return forms


class BackquoteSystemMacro(SystemMacro):
    """The backquote introduces a template of a data structure to be built.
    """
    def __new__(cls, *args, **kwargs):
        """Instantiates BackquoteSystemMacro.
        """
        cls.__name__ = 'BACKQUOTE'
        return object.__new__(cls)

    def __call__(self, forms, var_env, func_env, macro_env):
        """Behavior of BackquoteSystemMacro.
        """
        return self.expand_helper(forms.car)

    @classmethod
    def expand_helper(cls, forms):
        """Expands quotes recursively.
        """
        if not isinstance(forms, Cons):
            # An argument that is not an instance of Cons is quoted.
            return Cons(Symbol('QUOTE'), Cons(forms, Null()))

        if forms.car is Symbol('UNQUOTE'):
            # Unquote (,).
            return forms.cdr.car
        elif isinstance(forms.car, Cons) and forms.car.car is Symbol('UNQUOTE-SPLICING'):
            # Unquote-splicing (,@).
            return Cons(Symbol('APPEND'), Cons(forms.car.cdr.car, Cons(cls.expand_helper(forms.cdr), Null())))
        else:
            # Expands recursively and returns cons.
            return Cons(Symbol('CONS'), Cons(cls.expand_helper(forms.car), Cons(cls.expand_helper(forms.cdr), Null())))
# ==============================================================================
# Set functions related to special operators
# ==============================================================================

# For special operators
assign_helper(symbol_name='BLOCK', value=BlockSystemMacro(), package_name='COMMON-LISP', env='MACRO', status=':EXTERNAL')
assign_helper(symbol_name='FLET', value=FletSystemMacro(), package_name='COMMON-LISP', env='MACRO', status=':EXTERNAL')
assign_helper(symbol_name='IF', value=IfSystemMacro(), package_name='COMMON-LISP', env='MACRO', status=':EXTERNAL')
assign_helper(symbol_name='LABELS', value=LabelsSystemMacro(), package_name='COMMON-LISP', env='MACRO', status=':EXTERNAL')
assign_helper(symbol_name='LET', value=LetSystemMacro(), package_name='COMMON-LISP', env='MACRO', status=':EXTERNAL')
assign_helper(symbol_name='LET*', value=LetAsterSystemMacro(), package_name='COMMON-LISP', env='MACRO', status=':EXTERNAL')
assign_helper(symbol_name='QUOTE', value=QuoteSystemMacro(), package_name='COMMON-LISP', env='MACRO', status=':EXTERNAL')

# For system macros
assign_helper(symbol_name='LAMBDA', value=LambdaSystemMacro(), package_name='COMMON-LISP', env='MACRO', status=':EXTERNAL')
assign_helper(symbol_name='DEFUN', value=DefunSystemMacro(), package_name='COMMON-LISP', env='MACRO', status=':EXTERNAL')
assign_helper(symbol_name='DEFMACRO', value=DefmacroSystemMacro(), package_name='COMMON-LISP', env='MACRO', status=':EXTERNAL')
assign_helper(symbol_name='BACKQUOTE', value=BackquoteSystemMacro(), package_name='COMMON-LISP', env='MACRO', status=':EXTERNAL')

# COMMON-LISP-USER package
use_package_helper(package_name_to_use='COMMON-LISP', package_name='COMMON-LISP-USER')
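Before the next module, a minimal self-contained sketch of the rewrite pattern
the system macros above share: take the raw form apart, expand the body, and
make the implicit PROGN/BLOCK explicit in the result. Plain Python tuples
stand in for Cons cells here, and expand_defun is a hypothetical name for
illustration, not clispy's API.

def expand_defun(form):
    """('defun', name, params, *body) ->
    ('defun', name, params, ('block', name, ('progn', *body)))."""
    name, params, body = form[1], form[2], form[3:]
    # Wrap the body in an explicit PROGN, then an explicit BLOCK named after
    # the function, mirroring DefunSystemMacro above.
    return ('defun', name, params, ('block', name, ('progn',) + body))

if __name__ == '__main__':
    form = ('defun', 'add1', ('x',), ('print', 'x'), ('+', 'x', 1))
    print(expand_defun(form))
    # ('defun', 'add1', ('x',), ('block', 'add1', ('progn', ('print', 'x'), ('+', 'x', 1))))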
"""Calendar_widget.py"""
import re
import curses
import random
import calendar
import itertools
import source.config as config
from collections import namedtuple

date = namedtuple("Date", "Year Month Day")


def iter_months_years(startDate: object, endDate: object) -> tuple:
    """Returns years and months based on given start and end dates. Expected
    date format is YYYY-MM-DD. Ex. 2012-07-15
    """
    # TODO: Make the function an iterable
    months = []
    # begin with all years between start and end date
    years = [year for year in range(startDate.Year, endDate.Year + 1)]
    if len(years) > 1:
        # covering more than a single year, find the months being used
        for year in range(len(years)):
            monthsRange = (1, 13)  # a normal year covers months 1-12
            if year == 0:
                monthsRange = (startDate.Month, 13)  # first year in list
            elif year == len(years) - 1:
                monthsRange = (1, endDate.Month + 1)  # last year in list
            months.append([month for month in range(*monthsRange)])
    else:
        # dates are in the same year: grab the months between the dates
        months.append([i for i in range(startDate.Month, endDate.Month + 1)])
    # return [(year, m) for year, month in zip(years, months) for m in month]
    for year, month in zip(years, months):
        for m in month:
            yield (year, m)
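# ------------------------------------------------------------------------------
# Illustrative note (hypothetical inputs, not part of the original module):
# for a range crossing a year boundary, iter_months_years yields every
# (year, month) pair between the endpoints inclusive, e.g.
#
#     list(iter_months_years(date(2017, 12, 1), date(2018, 2, 1)))
#     -> [(2017, 12), (2018, 1), (2018, 2)]
# ------------------------------------------------------------------------------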
def days_in_month_year(startDate, endDate):
    """Returns the day/date tuple combination for each month/year input passed
    into the calendar.TextCalendar method monthdays2calendar(year, month).
    Differences in TextCalendar methods (W => number of weeks in the month):
        monthdatescalendar -> returns Wx7 matrix of datetime objects
        monthdays2calendar -> returns Wx7 matrix of tuple objects (date, day)
        monthdayscalendar  -> returns Wx7 matrix of ints representing the date
    """
    # setup calendar settings to retrieve dates based on year/month pairs
    tc = calendar.TextCalendar()
    tc.setfirstweekday(6)  # set to sunday as first day
    days_per_monthyear = dict()
    for year, month in iter_months_years(startDate, endDate):
        days_per_monthyear[(year, month)] = tc.monthdays2calendar(year, month)
    return days_per_monthyear


def parse_date(datestring: str) -> object:
    """Takes in a string object representing a formatted date. If not
    formatted correctly, raises an error giving a description of the correct
    format. Returns a date object with Year, Month, Day properties.
    """
    if not re.match(config.DATE_FORMAT_REGEX, datestring):
        error = f"{config.DATE_FORMAT_INVALID} {config.DATE_FORMAT_EXPECTED}"
        raise ValueError(error)
    return date(*[int(i) for i in datestring.split('-')])


def initialize_curses_settings():
    """Curses settings that need to be called before the rest of the program"""
    curses.curs_set(0)


def main(window):
    """Creates a navigable calendar widget for the dates passed in. Later on,
    should use min/max dates from the database holding the date infos.
    """
    initialize_curses_settings()
    loc = 0
    # dateParser(db.getMinDate, db.getMaxDate)
    start = parse_date("2017-12-1")
    end = parse_date("2018-2-1")
    # we should now have a dict mapping (year, month) to a Wx7 week matrix
    monthtable = days_in_month_year(start, end)
    window.border()
    y, x = window.getmaxyx()
    window.vline(1, 8, curses.ACS_VLINE, y - 2)
    verticaloffset = 2
    horizontaloffset = 1
    window.addstr(1, 1, "SMTWTFS")
    for month in monthtable.values():
        for week in month:
            window.addstr(verticaloffset, horizontaloffset + 9, str(week))
            weekdayindex = 0
            for day, dayofweek in week:
                if day != 0:
                    window.addstr(verticaloffset, horizontaloffset + weekdayindex, 'o')
                weekdayindex += 1
            verticaloffset += 1
    ch = window.getch()
    print(ch, curses.KEY_PPAGE == ch)  # ppage:339, npage:338


# TODO: implement program loop involving vertical/horizontal scrolling
if __name__ == "__main__":
    curses.wrapper(main)
Later on should use min/max dates from the database holding the date", "end date years = [year for year in range(startDate.Year, endDate.Year + 1)] if", "\"\"\" initialize_curses_settings() loc = 0 # dateParser(db.getMinDate, db.getMaxDate) start = parse_date(\"2017-12-1\") end =", "passed into the calendar.TextCalendar class method months2calendar(year, month). Differences in TextCalendar methods (W", "datestring): error = f\"{config.DATE_FORMAT_INVALID} {config.DATE_FORMAT_EXPECTED}\" raise ValueError(error) return date(*[int(i) for i in datestring.split('-')])", "from the database holding the date infos. \"\"\" initialize_curses_settings() loc = 0 #", "for month in range(*monthsRange)]) else: # dates are in the same year. grab", "\"\"\"Returns the day/date tuple combination for each month/year input passed into the calendar.TextCalendar", "year, month, date properties \"\"\" if not re.match(config.DATE_FORMAT_REGEX, datestring): error = f\"{config.DATE_FORMAT_INVALID} {config.DATE_FORMAT_EXPECTED}\"", "in monthtable.values(): for week in month: window.addstr(verticaloffset, horizontaloffset + 9, str(week)) weekdayindex =", "find the months being used for year in range(len(years)): monthsRange = (1, 13)", "days_in_month_year(start, end) window.border() y, x = window.getmaxyx() window.vline(1, 8, curses.ACS_VLINE, y - 2)", "object: \"\"\"Takes in a string object representing a formatted date. If not formatted", "should now have a list of lists matrix holding weeks per month/year monthtable", "\"\"\" # setup calendar settings to retrieve dates based on year/month pairs tc", "def iter_months_years(startDate: object, endDate: object) -> tuple: \"\"\"Returns years and months based on", "that need to be called before the rest of program\"\"\" curses.curs_set(0) def main(window):", "TODO: Make the function an iterable months = [] # begin with all", "= dict() for year, month in iter_months_years(startDate, endDate): days_per_monthyear[(year, month)] = tc.monthdays2calendar(year, month)", "tuple combination for each month/year input passed into the calendar.TextCalendar class method months2calendar(year,", "> 1: # covering more than a single year, find the months being", "import source.config as config from collections import namedtuple date = namedtuple(\"Date\", \"Year Month", "Day\") def iter_months_years(startDate: object, endDate: object) -> tuple: \"\"\"Returns years and months based", "for m in month] for year, month in zip(years, months): for m in", "= window.getch() print(ch, curses.KEY_PPAGE == ch) #ppage:339, #npage:338 # TODO: implement program loop", "given start and end dates. Expected date format is YYYY-MM-DD. Ex. 2012-07-15 \"\"\"", "= calendar.TextCalendar() tc.setfirstweekday(6) # set to sunday as first day days_per_monthyear = dict()", "parse_date(\"2018-2-1\") # we should now have a list of lists matrix holding weeks", "= window.getmaxyx() window.vline(1, 8, curses.ACS_VLINE, y - 2) verticaloffset = 2 horizontaloffset =", "be called before the rest of program\"\"\" curses.curs_set(0) def main(window): \"\"\"Creates a navigatable", "curses.curs_set(0) def main(window): \"\"\"Creates a navigatable calendar widget for the dates passed in.", "end dates. Expected date format is YYYY-MM-DD. Ex. 2012-07-15 \"\"\" # TODO: Make", "matrix of tuple objects (date, day) monthdayscalendar -> returns Wx7 matrix of ints", "=> number of weeks in the month): monthdatescalendar -> returns Wx7 matrix of", "in the same year. 
grab the months between the dates months.append([i for i", "len(years) - 1: monthsRange = (1, endDate.Month + 1) # last year in", "weeks in the month): monthdatescalendar -> returns Wx7 matrix of datetime objects monthdays2calendar", "day/date tuple combination for each month/year input passed into the calendar.TextCalendar class method", "a list of lists matrix holding weeks per month/year monthtable = days_in_month_year(start, end)", "month, date properties \"\"\" if not re.match(config.DATE_FORMAT_REGEX, datestring): error = f\"{config.DATE_FORMAT_INVALID} {config.DATE_FORMAT_EXPECTED}\" raise", "[] # begin with all years between start and end date years =", "month in zip(years, months) for m in month] for year, month in zip(years,", "month/year monthtable = days_in_month_year(start, end) window.border() y, x = window.getmaxyx() window.vline(1, 8, curses.ACS_VLINE,", "monthsRange = (1, 13) # normal year covers between months 1-12 if year", "to be called before the rest of program\"\"\" curses.curs_set(0) def main(window): \"\"\"Creates a", "giving description of the correct format. Returns a date object with year, month,", "the function an iterable months = [] # begin with all years between", "class method months2calendar(year, month). Differences in TextCalendar methods (W => number of weeks", "begin with all years between start and end date years = [year for", "date format is YYYY-MM-DD. Ex. 2012-07-15 \"\"\" # TODO: Make the function an", "m) def days_in_month_year(startDate, endDate): \"\"\"Returns the day/date tuple combination for each month/year input", "verticaloffset += 1 ch = window.getch() print(ch, curses.KEY_PPAGE == ch) #ppage:339, #npage:338 #", "# first year in list elif year == len(years) - 1: monthsRange =", "= days_in_month_year(start, end) window.border() y, x = window.getmaxyx() window.vline(1, 8, curses.ACS_VLINE, y -", "+ weekdayindex, 'o') weekdayindex += 1 verticaloffset += 1 ch = window.getch() print(ch,", "+= 1 ch = window.getch() print(ch, curses.KEY_PPAGE == ch) #ppage:339, #npage:338 # TODO:", "in datestring.split('-')]) def initialize_curses_settings(): \"\"\"Curses settings that need to be called before the", "(year, m) def days_in_month_year(startDate, endDate): \"\"\"Returns the day/date tuple combination for each month/year", "TextCalendar methods (W => number of weeks in the month): monthdatescalendar -> returns", "- 2) verticaloffset = 2 horizontaloffset = 1 window.addstr(1, 1, \"SMTWTFS\") for month", "# TODO: Make the function an iterable months = [] # begin with", "the day/date tuple combination for each month/year input passed into the calendar.TextCalendar class", "navigatable calendar widget for the dates passed in. Later on should use min/max", "= 2 horizontaloffset = 1 window.addstr(1, 1, \"SMTWTFS\") for month in monthtable.values(): for", "dates are in the same year. grab the months between the dates months.append([i", "setup calendar settings to retrieve dates based on year/month pairs tc = calendar.TextCalendar()", "returns Wx7 matrix of tuple objects (date, day) monthdayscalendar -> returns Wx7 matrix", "the date \"\"\" # setup calendar settings to retrieve dates based on year/month", "used for year in range(len(years)): monthsRange = (1, 13) # normal year covers", "on given start and end dates. Expected date format is YYYY-MM-DD. Ex. 2012-07-15", "month: window.addstr(verticaloffset, horizontaloffset + 9, str(week)) weekdayindex = 0 for date, dayofweek in", "widget for the dates passed in. 
Later on should use min/max dates from", "1 window.addstr(1, 1, \"SMTWTFS\") for month in monthtable.values(): for week in month: window.addstr(verticaloffset,", "# begin with all years between start and end date years = [year", "1 ch = window.getch() print(ch, curses.KEY_PPAGE == ch) #ppage:339, #npage:338 # TODO: implement", "= 0 for date, dayofweek in week: if (date) != 0: window.addstr(verticaloffset, horizontaloffset", "print(ch, curses.KEY_PPAGE == ch) #ppage:339, #npage:338 # TODO: implement program loop involving vertical/horiontal", "datestring.split('-')]) def initialize_curses_settings(): \"\"\"Curses settings that need to be called before the rest", "holding weeks per month/year monthtable = days_in_month_year(start, end) window.border() y, x = window.getmaxyx()", "month in iter_months_years(startDate, endDate): days_per_monthyear[(year, month)] = tc.monthdays2calendar(year, month) return days_per_monthyear def parse_date(datestring:", "error = f\"{config.DATE_FORMAT_INVALID} {config.DATE_FORMAT_EXPECTED}\" raise ValueError(error) return date(*[int(i) for i in datestring.split('-')]) def", "for year, month in zip(years, months) for m in month] for year, month", "weekdayindex, 'o') weekdayindex += 1 verticaloffset += 1 ch = window.getch() print(ch, curses.KEY_PPAGE", "m in month: yield (year, m) def days_in_month_year(startDate, endDate): \"\"\"Returns the day/date tuple", "Wx7 matrix of datetime objects monthdays2calendar -> returns Wx7 matrix of tuple objects", "sunday as first day days_per_monthyear = dict() for year, month in iter_months_years(startDate, endDate):", "ch = window.getch() print(ch, curses.KEY_PPAGE == ch) #ppage:339, #npage:338 # TODO: implement program", "first year in list elif year == len(years) - 1: monthsRange = (1,", "the same year. grab the months between the dates months.append([i for i in", "method months2calendar(year, month). Differences in TextCalendar methods (W => number of weeks in", "+= 1 verticaloffset += 1 ch = window.getch() print(ch, curses.KEY_PPAGE == ch) #ppage:339,", "# dates are in the same year. grab the months between the dates", "days_per_monthyear = dict() for year, month in iter_months_years(startDate, endDate): days_per_monthyear[(year, month)] = tc.monthdays2calendar(year,", "based on year/month pairs tc = calendar.TextCalendar() tc.setfirstweekday(6) # set to sunday as", "window.addstr(1, 1, \"SMTWTFS\") for month in monthtable.values(): for week in month: window.addstr(verticaloffset, horizontaloffset", "months 1-12 if year == 0: monthsRange = (startDate.Month, 13) # first year", "return [(year, m) for year, month in zip(years, months) for m in month]", "rest of program\"\"\" curses.curs_set(0) def main(window): \"\"\"Creates a navigatable calendar widget for the", "list months.append([month for month in range(*monthsRange)]) else: # dates are in the same", "will raise an error giving description of the correct format. Returns a date", "curses.ACS_VLINE, y - 2) verticaloffset = 2 horizontaloffset = 1 window.addstr(1, 1, \"SMTWTFS\")", "main(window): \"\"\"Creates a navigatable calendar widget for the dates passed in. Later on", "the calendar.TextCalendar class method months2calendar(year, month). Differences in TextCalendar methods (W => number", "Returns a date object with year, month, date properties \"\"\" if not re.match(config.DATE_FORMAT_REGEX,", "description of the correct format. 
Returns a date object with year, month, date", "retrieve dates based on year/month pairs tc = calendar.TextCalendar() tc.setfirstweekday(6) # set to", "the months between the dates months.append([i for i in range(startDate.Month, endDate.Month + 1)])", "use min/max dates from the database holding the date infos. \"\"\" initialize_curses_settings() loc", "of the correct format. Returns a date object with year, month, date properties", "a navigatable calendar widget for the dates passed in. Later on should use", "endDate: object) -> tuple: \"\"\"Returns years and months based on given start and", "= 0 # dateParser(db.getMinDate, db.getMaxDate) start = parse_date(\"2017-12-1\") end = parse_date(\"2018-2-1\") # we", "parse_date(\"2017-12-1\") end = parse_date(\"2018-2-1\") # we should now have a list of lists", "raise ValueError(error) return date(*[int(i) for i in datestring.split('-')]) def initialize_curses_settings(): \"\"\"Curses settings that", "\"\"\"Curses settings that need to be called before the rest of program\"\"\" curses.curs_set(0)", "combination for each month/year input passed into the calendar.TextCalendar class method months2calendar(year, month)." ]
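A minimal driver sketch for the widget above (the import path is hypothetical, and source.config is assumed to define DATE_FORMAT_REGEX, DATE_FORMAT_INVALID, and DATE_FORMAT_EXPECTED as parse_date expects):

import curses
from Calendar_widget import main, parse_date, days_in_month_year  # hypothetical module path

# non-curses check: three months spanning a year boundary -> three table keys
table = days_in_month_year(parse_date("2017-12-1"), parse_date("2018-2-1"))
assert sorted(table) == [(2017, 12), (2018, 1), (2018, 2)]

# curses.wrapper restores the terminal even if main() raises
curses.wrapper(main)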
# <filename>python/tvm/autotvm/tuner/sampler.py
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements.  See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership.  The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License.  You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied.  See the License for the
# specific language governing permissions and limitations
# under the License.
# pylint: disable=no-else-return,invalid-name,consider-using-enumerate,abstract-method
"""Base class for sampler

This type of sampler will build an internal method to prune or reduce
number of configs measured on hardware to speed up tuning without
performance loss.
"""
import numpy as np

from ..env import GLOBAL_SCOPE


class Sampler(object):
    """Base class for sampler

    This type of sampler will build an internal method to prune or reduce
    number of configs measured on hardware to speed up tuning without
    performance loss.

    Parameters
    ----------
    dims: list
        knob form of the dimensions for the configs
    """

    def __init__(self, dims):
        self.dims = dims

    def sample(self, xs):
        """Sample a subset of configs from a larger set of configs

        Parameters
        ----------
        xs: Array of int
            The indexes of configs from a larger set

        Returns
        -------
        a reduced set of configs
        """
        raise NotImplementedError()
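A sketch of a concrete subclass (not part of the file above) to illustrate the sample() contract; a real autotvm sampler would prune with a model of the config space, while this one just subsamples uniformly:

class RandomSampler(Sampler):
    """Uniformly keep a fixed fraction of candidate config indexes."""

    def __init__(self, dims, ratio=0.5):
        super(RandomSampler, self).__init__(dims)
        self.ratio = ratio

    def sample(self, xs):
        xs = np.asarray(xs)
        n = max(1, int(len(xs) * self.ratio))
        # without replacement, so each config index is measured at most once
        return np.random.choice(xs, size=n, replace=False)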
<script id="sf" type="x-shader/x-fragment">
precision highp float;
uniform float time;
uniform vec2 mouse;
uniform vec2 resolution;

float ball(vec2 p, float k, float d) {
    vec2 r = vec2(p.x - cos(time * k) * d, p.y + sin(time * k) * d);
    return smoothstep(0.0, 1.0, 0.03 / length(r));
}

void main(void) {
    vec2 q = gl_FragCoord.xy / resolution.xy;
    vec2 p = -1.0 + 2.0 * q;
    p.x *= resolution.x / resolution.y;
    float col = 0.0;
    for (int i = 1; i <= 7; ++i) {
        col += ball(p, float(i), 0.3);
    }
    for (int i = 1; i <= 5; ++i) {
        col += ball(p, float(i), 0.1);
    }
    gl_FragColor = vec4(col*0.8, col, col*1.8, 1.0);
}
</script>
<script id="sv" type="x-shader/x-vertex">
attribute vec4 vPosition;
void main (void) {
    gl_Position = vPosition;
}
</script>
<canvas id="cnv"></canvas>
from flask import Flask, Request, Response, request
import json


def devices():
    dict_device = request.get_data(as_text=True)
    dados_device = json.loads(dict_device)
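The fragment above stops right after parsing the request body. A hedged sketch of how such a handler is usually wired up; the route, the echo-style response, and the __main__ block are assumptions, not recovered from the original file:

app = Flask(__name__)

@app.route('/devices', methods=['POST'])  # assumed route
def devices():
    dict_device = request.get_data(as_text=True)
    dados_device = json.loads(dict_device)
    # assumption: echo the parsed payload back as JSON
    return Response(json.dumps(dados_device), mimetype='application/json')

if __name__ == '__main__':
    app.run()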
# <reponame>ausbin/qcor
import faulthandler
faulthandler.enable()

import unittest
from qcor import *

try:
    from openfermion.ops import FermionOperator as FOp
    from openfermion.ops import QubitOperator as QOp
    from openfermion.transforms import reverse_jordan_wigner, jordan_wigner

    class TestOpenFermion(unittest.TestCase):
        def test_simple_fermion(self):
            # Create Operator as OpenFermion FermionOperator
            H = FOp('', 0.0002899) + FOp('0^ 0', -.43658) + \
                FOp('1 0^', 4.2866) + FOp('1^ 0', -4.2866) + FOp('1^ 1', 12.25)

            @qjit
            def ansatz(q: qreg, theta: float):
                X(q[0])
                Ry(q[1], theta)
                CX(q[1], q[0])

            n_params = 1
            obj = createObjectiveFunction(ansatz, H, n_params,
                                          {'gradient-strategy': 'parameter-shift'})
            optimizer = createOptimizer('nlopt', {'nlopt-optimizer': 'l-bfgs'})
            results = optimizer.optimize(obj)
            self.assertAlmostEqual(results[0], -1.74, places=1)

        def test_simple_qubit(self):
            # Create Operator as OpenFermion FermionOperator
            H = QOp('', 5.907) + QOp('Y0 Y1', -2.1433) + \
                QOp('X0 X1', -2.1433) + QOp('Z0', .21829) + QOp('Z1', -6.125)

            @qjit
            def ansatz(q: qreg, theta: float):
                X(q[0])
                Ry(q[1], theta)
                CX(q[1], q[0])

            n_params = 1
            obj = createObjectiveFunction(ansatz, H, n_params,
                                          {'gradient-strategy': 'parameter-shift'})
            optimizer = createOptimizer('nlopt', {'nlopt-optimizer': 'l-bfgs'})
            results = optimizer.optimize(obj)
            self.assertAlmostEqual(results[0], -1.74, places=1)

        def test_convert_to_qcor(self):
            H = FOp('', 0.0002899) + FOp('0^ 0', -.43658) + \
                FOp('1 0^', 4.2866) + FOp('1^ 0', -4.2866) + FOp('1^ 1', 12.25)
            ours = createOperator('fermion', H)
            print(ours.toString())
            H = QOp('', 5.907) + QOp('Y0 Y1', -2.1433) + \
                QOp('X0 X1', -2.1433) + QOp('Z0', .21829) + QOp('Z1', -6.125)
            ours = createOperator('pauli', H)
            print(ours.toString())
except:
    pass

if __name__ == '__main__':
    unittest.main()
optimizer = createOptimizer('nlopt', {'nlopt-optimizer':'l-bfgs'})", "+ FOp('1^ 0', -4.2866) + FOp('1^ 1', 12.25) @qjit def ansatz(q: qreg, theta:", "openfermion.ops import QubitOperator as QOp from openfermion.transforms import reverse_jordan_wigner, jordan_wigner class TestOpenFermion(unittest.TestCase): def", "float): X(q[0]) Ry(q[1], theta) CX(q[1], q[0]) n_params = 1 obj = createObjectiveFunction(ansatz, H,", "from openfermion.ops import FermionOperator as FOp from openfermion.ops import QubitOperator as QOp from", "Create Operator as OpenFermion FermionOperator H = QOp('', 5.907) + QOp('Y0 Y1', -2.1433)" ]
[ "model.eval() for data, target in validation_batches: data, target = data.to(device), target.cuda(device) output =", "(p=0.25) self.dropout = nn.Dropout(0.25) return def forward(self, x): # add sequence of convolutional", "imagenet # the output won't show up if you don't flush it when", "= nn.Linear(FULLY_CONNECTED_OUT, BREEDS) # dropout layer (p=0.25) self.dropout = nn.Dropout(0.25) return def forward(self,", "a file \"\"\" if load_model and model_path.is_file(): model.load_state_dict(torch.load(model_path)) print(\"Starting Training\") timer.start() model_scratch =", "a file print = partial(print, flush=True) device = torch.device(\"cuda\" if torch.cuda.is_available() else \"cpu\")", "= nn.Conv2d(LAYER_ONE_OUT, LAYER_TWO_OUT, KERNEL, padding=PADDING) self.conv3 = nn.Conv2d(LAYER_TWO_OUT, LAYER_THREE_OUT, KERNEL, padding=PADDING) # max", "0.224, 0.225] IMAGE_SIZE = 224 IMAGE_HALF_SIZE = IMAGE_SIZE//2 train_transform = transforms.Compose([ transforms.RandomRotation(30), transforms.RandomResizedCrop(IMAGE_SIZE),", "* correct / total, correct, total)) return def train_and_test(train_batches: torch.utils.data.DataLoader, validate_batches: torch.utils.data.DataLoader, test_batches:", "deviations)]) test_transform = transforms.Compose([transforms.Resize(255), transforms.CenterCrop(IMAGE_SIZE), transforms.ToTensor(), transforms.Normalize(means, deviations)]) training = datasets.ImageFolder(root=str(dog_training_path.folder), transform=train_transform) validation", "x): # add sequence of convolutional and max pooling layers x = self.pool(F.relu(self.conv1(x)))", "torch import torch.nn as nn import torch.nn.functional as F import torch.optim as optimizer", "model ...'.format( validation_loss_min, validation_loss)) torch.save(model.state_dict(), save_path) validation_loss_min = validation_loss return model def test(test_batches:", "= argparse.ArgumentParser( description=\"Test or Train the Naive Dog Classifier\") parser.add_argument(\"--test\", action=\"store_true\", help=\"Only run", "= len(set(dog_training_path.folder.iterdir())) print(\"Number of Dog Breeds: {}\".format(BREEDS)) timer = Timer(beep=SPEAKABLE) means = [0.485,", "type=int, help=\"Training epochs (default: %(default)s)\") parser.add_argument( \"--epoch-offset\", default=0, type=int, help=\"Offset for the output", "data, target = data.to(device), target.cuda(device) output = model(data) loss = criterion(output, target) validation_loss", "model: nn.Module, optimizer: optimizer.SGD, criterion: nn.CrossEntropyLoss, epoch_start: int=1, save_path: str=\"model_scratch.pt\"): \"\"\"Trains the Model", "test(loaders_scratch[\"test\"], model_scratch, criterion_scratch) else: train_and_test(epochs=arguments.epochs, train_batches=loaders_scratch[\"train\"], validate_batches=loaders_scratch[\"validate\"], test_batches=loaders_scratch[\"test\"], model=model_scratch, optimizer=optimizer_scratch, criterion=criterion_scratch, epoch_start=arguments.epoch_offset, model_path=model_path.from_folder,", "batch_size=BATCH_SIZE, shuffle=True, num_workers=WORKERS) test_batches = torch.utils.data.DataLoader( testing, batch_size=BATCH_SIZE, shuffle=True, num_workers=WORKERS) loaders_scratch = dict(train=train_batches,", "Neural Network to classify dog breeds\"\"\" def __init__(self) -> None: super().__init__() self.conv1 =", "up if you don't flush it when redirecting it to a file print", "Dog Breeds: {}\".format(BREEDS)) timer = Timer(beep=SPEAKABLE) means = [0.485, 0.456, 0.406] deviations =", "Saving model ...'.format( validation_loss_min, validation_loss)) 
torch.save(model.state_dict(), save_path) validation_loss_min = validation_loss return model def", "for data, target in test_batches: data, target = data.to(device), target.to(device) output = model(data)", "nn.Module, optimizer: optimizer.SGD, criterion: nn.CrossEntropyLoss, epoch_start: int=1, save_path: str=\"model_scratch.pt\"): \"\"\"Trains the Model Args:", "in range(epoch_start, end): timer.start() training_loss = 0.0 validation_loss = 0.0 model.train() for data,", "load_model: bool=False) -> None: \"\"\"Trains and Tests the Model Args: train_batches: batch-loaders for", "linear layer (64 * 4 * 4 -> 500) self.fc1 = nn.Linear((IMAGE_HALF_SIZE//4)**2 *", "self.fc2(x) return x model_scratch = Net() if torch.cuda.is_available(): print(\"Using {} GPUs\".format(torch.cuda.device_count())) model_scratch =", "LAYER_ONE_OUT, KERNEL, padding=PADDING) self.conv2 = nn.Conv2d(LAYER_ONE_OUT, LAYER_TWO_OUT, KERNEL, padding=PADDING) self.conv3 = nn.Conv2d(LAYER_TWO_OUT, LAYER_THREE_OUT,", "optimizer.SGD, criterion: nn.CrossEntropyLoss, epoch_start: int=1, save_path: str=\"model_scratch.pt\"): \"\"\"Trains the Model Args: epochs: number", "target in train_batches: data, target = data.to(device), target.to(device) optimizer.zero_grad() output = model(data) loss", "= train(epochs=epochs, epoch_start=epoch_start, train_batches=train_batches, validation_batches=validate_batches, model=model, optimizer=optimizer, criterion=criterion, save_path=model_path) timer.end() # load the", "timer.start() test(test_batches, model, criterion) timer.end() return model_path = DataPathTwo( folder_key=\"MODELS\", filename=\"model_scratch.pt\") assert model_path.folder.is_dir()", "transforms.Compose([transforms.Resize(255), transforms.CenterCrop(IMAGE_SIZE), transforms.ToTensor(), transforms.Normalize(means, deviations)]) training = datasets.ImageFolder(root=str(dog_training_path.folder), transform=train_transform) validation = datasets.ImageFolder(root=str(dog_validation_path.folder), transform=test_transform)", "= model(data) loss = criterion(output, target) test_loss += loss.item() * data.size(0) # convert", "of times to train on the data set train_batches: the batch-loaders for training", "train_transform = transforms.Compose([ transforms.RandomRotation(30), transforms.RandomResizedCrop(IMAGE_SIZE), transforms.RandomHorizontalFlip(), transforms.ToTensor(), transforms.Normalize(means, deviations)]) test_transform = transforms.Compose([transforms.Resize(255), transforms.CenterCrop(IMAGE_SIZE),", "= nn.Linear((IMAGE_HALF_SIZE//4)**2 * LAYER_THREE_OUT, FULLY_CONNECTED_OUT) self.fc2 = nn.Linear(FULLY_CONNECTED_OUT, BREEDS) # dropout layer (p=0.25)", "x = self.dropout(F.relu(self.fc1(x))) x = self.fc2(x) return x model_scratch = Net() if torch.cuda.is_available():", "num_workers=WORKERS) loaders_scratch = dict(train=train_batches, validate=validation_batches, test=test_batches) LAYER_ONE_OUT = 16 LAYER_TWO_OUT = LAYER_ONE_OUT *", "DataPathTwo from neurotic.tangles.timer import Timer from neurotic.constants.imagenet_map import imagenet # the output won't", "to do backwards propagation epoch_start: number to start the epoch count with save_path:", "Args: train_batches: batch-loaders for training validate_batches: batch-loaders for validation test_batches: batch-loaders for testing", "* LAYER_THREE_OUT) x = self.dropout(x) x = self.dropout(F.relu(self.fc1(x))) x = self.fc2(x) return x", "torch.cuda.is_available() else \"cpu\") print(device) ImageFile.LOAD_TRUNCATED_IMAGES = True load_dotenv() dog_path = 
DataPathTwo(folder_key=\"DOG_PATH\") dog_training_path =", "0.406] deviations = [0.229, 0.224, 0.225] IMAGE_SIZE = 224 IMAGE_HALF_SIZE = IMAGE_SIZE//2 train_transform", "x = self.pool(F.relu(self.conv2(x))) x = self.pool(F.relu(self.conv3(x))) x = x.view(-1, (IMAGE_HALF_SIZE//4)**2 * LAYER_THREE_OUT) x", "torch.utils.data.DataLoader, validation_batches: torch.utils.data.DataLoader, model: nn.Module, optimizer: optimizer.SGD, criterion: nn.CrossEntropyLoss, epoch_start: int=1, save_path: str=\"model_scratch.pt\"):", "Tests the Model Args: train_batches: batch-loaders for training validate_batches: batch-loaders for validation test_batches:", "= 0.0 validation_loss = 0.0 model.train() for data, target in train_batches: data, target", "int=1, load_model: bool=False) -> None: \"\"\"Trains and Tests the Model Args: train_batches: batch-loaders", "number of times to train on the data set epoch_start: number to start", "the data set train_batches: the batch-loaders for training validation_batches: batch-loaders for validation model:", "nn.Linear(FULLY_CONNECTED_OUT, BREEDS) # dropout layer (p=0.25) self.dropout = nn.Dropout(0.25) return def forward(self, x):", "data.size(0) test_loss /= len(test_batches.dataset) print('Test Loss: {:.6f}\\n'.format(test_loss)) print('\\nTest Accuracy: %2d%% (%2d/%2d)' % (", "* 4 * 4 -> 500) self.fc1 = nn.Linear((IMAGE_HALF_SIZE//4)**2 * LAYER_THREE_OUT, FULLY_CONNECTED_OUT) self.fc2", "torch.nn as nn import torch.nn.functional as F import torch.optim as optimizer import torchvision.transforms", "if you don't flush it when redirecting it to a file print =", "model_scratch = Net() if torch.cuda.is_available(): print(\"Using {} GPUs\".format(torch.cuda.device_count())) model_scratch = nn.DataParallel(model_scratch) model_scratch.to(device) criterion_scratch", "optimizer.step() training_loss += loss.item() * data.size(0) model.eval() for data, target in validation_batches: data,", "test(test_batches: torch.utils.data.DataLoader, model: nn.Module, criterion: nn.CrossEntropyLoss) -> None: \"\"\"Test the model Args: test_batches:", "# dropout layer (p=0.25) self.dropout = nn.Dropout(0.25) return def forward(self, x): # add", "target.to(device) output = model(data) loss = criterion(output, target) test_loss += loss.item() * data.size(0)", "len(validation_batches.dataset) timer.end() print('Epoch: {} \\tTraining Loss: {:.6f} \\tValidation Loss: {:.6f}'.format( epoch, training_loss, validation_loss", "deviations)]) training = datasets.ImageFolder(root=str(dog_training_path.folder), transform=train_transform) validation = datasets.ImageFolder(root=str(dog_validation_path.folder), transform=test_transform) testing = datasets.ImageFolder(root=str(dog_testing_path.folder), transform=test_transform)", "test_loss /= len(test_batches.dataset) print('Test Loss: {:.6f}\\n'.format(test_loss)) print('\\nTest Accuracy: %2d%% (%2d/%2d)' % ( 100.", "validation_loss += loss.item() * data.size(0) training_loss /= len(train_batches.dataset) validation_loss /= len(validation_batches.dataset) timer.end() print('Epoch:", "int=10, epoch_start: int=1, load_model: bool=False) -> None: \"\"\"Trains and Tests the Model Args:", "testing = datasets.ImageFolder(root=str(dog_testing_path.folder), transform=test_transform) BATCH_SIZE = 10 WORKERS = 0 train_batches = torch.utils.data.DataLoader(training,", "decreased ({:.6f} --> {:.6f}). 
Saving model ...'.format( validation_loss_min, validation_loss)) torch.save(model.state_dict(), save_path) validation_loss_min =", "x = self.pool(F.relu(self.conv3(x))) x = x.view(-1, (IMAGE_HALF_SIZE//4)**2 * LAYER_THREE_OUT) x = self.dropout(x) x", "sequence of convolutional and max pooling layers x = self.pool(F.relu(self.conv1(x))) x = self.pool(F.relu(self.conv2(x)))", "(IMAGE_HALF_SIZE//4)**2 * LAYER_THREE_OUT) x = self.dropout(x) x = self.dropout(F.relu(self.fc1(x))) x = self.fc2(x) return", "= Timer(beep=SPEAKABLE) means = [0.485, 0.456, 0.406] deviations = [0.229, 0.224, 0.225] IMAGE_SIZE", "optimizer: the gradient descent object criterion: object to do backwards propagation epoch_start: number", "0. correct = 0. total = 0. model.eval() for data, target in test_batches:", "ImageFile from torchvision import datasets import numpy import torch import torch.nn as nn", "the model from a file \"\"\" if load_model and model_path.is_file(): model.load_state_dict(torch.load(model_path)) print(\"Starting Training\")", "forward(self, x): # add sequence of convolutional and max pooling layers x =", "test_batches: batch loader of test images model: the network to test criterion: calculator", "target.cuda(device) output = model(data) loss = criterion(output, target) validation_loss += loss.item() * data.size(0)", "momentum=0.9) def train(epochs: int, train_batches: torch.utils.data.DataLoader, validation_batches: torch.utils.data.DataLoader, model: nn.Module, optimizer: optimizer.SGD, criterion:", "target) validation_loss += loss.item() * data.size(0) training_loss /= len(train_batches.dataset) validation_loss /= len(validation_batches.dataset) timer.end()", "/= len(train_batches.dataset) validation_loss /= len(validation_batches.dataset) timer.end() print('Epoch: {} \\tTraining Loss: {:.6f} \\tValidation Loss:", "model_path = DataPathTwo( folder_key=\"MODELS\", filename=\"model_scratch.pt\") assert model_path.folder.is_dir() if __name__ == \"__main__\": parser =", "from neurotic.tangles.data_paths import DataPathTwo from neurotic.tangles.timer import Timer from neurotic.constants.imagenet_map import imagenet #", "4 * 4 -> 500) self.fc1 = nn.Linear((IMAGE_HALF_SIZE//4)**2 * LAYER_THREE_OUT, FULLY_CONNECTED_OUT) self.fc2 =", "= [0.485, 0.456, 0.406] deviations = [0.229, 0.224, 0.225] IMAGE_SIZE = 224 IMAGE_HALF_SIZE", "self.pool = nn.MaxPool2d(2, 2) # linear layer (64 * 4 * 4 ->", "arguments.test: test(loaders_scratch[\"test\"], model_scratch, criterion_scratch) else: train_and_test(epochs=arguments.epochs, train_batches=loaders_scratch[\"train\"], validate_batches=loaders_scratch[\"validate\"], test_batches=loaders_scratch[\"test\"], model=model_scratch, optimizer=optimizer_scratch, criterion=criterion_scratch, epoch_start=arguments.epoch_offset,", "criterion_scratch) else: train_and_test(epochs=arguments.epochs, train_batches=loaders_scratch[\"train\"], validate_batches=loaders_scratch[\"validate\"], test_batches=loaders_scratch[\"test\"], model=model_scratch, optimizer=optimizer_scratch, criterion=criterion_scratch, epoch_start=arguments.epoch_offset, model_path=model_path.from_folder, load_model=not arguments.restart)", "= parser.parse_args() if arguments.test: test(loaders_scratch[\"test\"], model_scratch, criterion_scratch) else: train_and_test(epochs=arguments.epochs, train_batches=loaders_scratch[\"train\"], validate_batches=loaders_scratch[\"validate\"], test_batches=loaders_scratch[\"test\"], model=model_scratch,", "predictions = output.data.max(1, keepdim=True)[1] # compare 
predictions to true label correct += numpy.sum(", "\"cpu\") print(device) ImageFile.LOAD_TRUNCATED_IMAGES = True load_dotenv() dog_path = DataPathTwo(folder_key=\"DOG_PATH\") dog_training_path = DataPathTwo(folder_key=\"DOG_TRAIN\") dog_testing_path", "test_batches = torch.utils.data.DataLoader( testing, batch_size=BATCH_SIZE, shuffle=True, num_workers=WORKERS) loaders_scratch = dict(train=train_batches, validate=validation_batches, test=test_batches) LAYER_ONE_OUT", "# linear layer (64 * 4 * 4 -> 500) self.fc1 = nn.Linear((IMAGE_HALF_SIZE//4)**2", "output won't show up if you don't flush it when redirecting it to", "pooling layers x = self.pool(F.relu(self.conv1(x))) x = self.pool(F.relu(self.conv2(x))) x = self.pool(F.relu(self.conv3(x))) x =", "model def test(test_batches: torch.utils.data.DataLoader, model: nn.Module, criterion: nn.CrossEntropyLoss) -> None: \"\"\"Test the model", "nn.Conv2d(LAYER_TWO_OUT, LAYER_THREE_OUT, KERNEL, padding=PADDING) # max pooling layer self.pool = nn.MaxPool2d(2, 2) #", "GPUs\".format(torch.cuda.device_count())) model_scratch = nn.DataParallel(model_scratch) model_scratch.to(device) criterion_scratch = nn.CrossEntropyLoss() optimizer_scratch = optimizer.SGD(model_scratch.parameters(), lr=0.001, momentum=0.9)", "model(data) loss = criterion(output, target) loss.backward() optimizer.step() training_loss += loss.item() * data.size(0) model.eval()", "train_batches: data, target = data.to(device), target.to(device) optimizer.zero_grad() output = model(data) loss = criterion(output,", "validation_batches: data, target = data.to(device), target.cuda(device) output = model(data) loss = criterion(output, target)", "None: \"\"\"Trains and Tests the Model Args: train_batches: batch-loaders for training validate_batches: batch-loaders", "as transforms # this project from neurotic.tangles.data_paths import DataPathTwo from neurotic.tangles.timer import Timer", "target = data.to(device), target.to(device) output = model(data) loss = criterion(output, target) test_loss +=", "loss.item() * data.size(0) model.eval() for data, target in validation_batches: data, target = data.to(device),", "= 0. total = 0. model.eval() for data, target in test_batches: data, target", "the gradient descent object criterion: object to do backwards propagation epochs: number of", "do backwards propagation epochs: number of times to train on the data set", "10 WORKERS = 0 train_batches = torch.utils.data.DataLoader(training, batch_size=BATCH_SIZE, shuffle=True, num_workers=WORKERS) validation_batches = torch.utils.data.DataLoader(", "F import torch.optim as optimizer import torchvision.transforms as transforms # this project from", "training_loss /= len(train_batches.dataset) validation_loss /= len(validation_batches.dataset) timer.end() print('Epoch: {} \\tTraining Loss: {:.6f} \\tValidation", "if __name__ == \"__main__\": parser = argparse.ArgumentParser( description=\"Test or Train the Naive Dog", "transform=test_transform) testing = datasets.ImageFolder(root=str(dog_testing_path.folder), transform=test_transform) BATCH_SIZE = 10 WORKERS = 0 train_batches =", "loss = criterion(output, target) test_loss += loss.item() * data.size(0) # convert output probabilities", "criterion: calculator for the loss \"\"\" test_loss = 0. correct = 0. 
total", "load_dotenv() dog_path = DataPathTwo(folder_key=\"DOG_PATH\") dog_training_path = DataPathTwo(folder_key=\"DOG_TRAIN\") dog_testing_path = DataPathTwo(folder_key=\"DOG_TEST\") dog_validation_path = DataPathTwo(folder_key=\"DOG_VALIDATE\")", "validation_loss return model def test(test_batches: torch.utils.data.DataLoader, model: nn.Module, criterion: nn.CrossEntropyLoss) -> None: \"\"\"Test", "python from functools import partial import argparse import os # pypi from dotenv", "test_batches: data, target = data.to(device), target.to(device) output = model(data) loss = criterion(output, target)", "loaders_scratch = dict(train=train_batches, validate=validation_batches, test=test_batches) LAYER_ONE_OUT = 16 LAYER_TWO_OUT = LAYER_ONE_OUT * 2", "(default: %(default)s)\") parser.add_argument(\"--restart\", action=\"store_true\", help=\"Wipe out old model.\") arguments = parser.parse_args() if arguments.test:", "criterion: nn.CrossEntropyLoss, epoch_start: int=1, save_path: str=\"model_scratch.pt\"): \"\"\"Trains the Model Args: epochs: number of", "criterion=criterion, save_path=model_path) timer.end() # load the best model model.load_state_dict(torch.load(model_path)) print(\"Starting Testing\") timer.start() test(test_batches,", "save the best model optimizer: the gradient descent object criterion: object to do", "target = data.to(device), target.to(device) optimizer.zero_grad() output = model(data) loss = criterion(output, target) loss.backward()", "network to test criterion: calculator for the loss \"\"\" test_loss = 0. correct", "object criterion: object to do backwards propagation epochs: number of times to train", "train on the data set epoch_start: number to start the epoch count with", "if torch.cuda.is_available() else \"cpu\") print(device) ImageFile.LOAD_TRUNCATED_IMAGES = True load_dotenv() dog_path = DataPathTwo(folder_key=\"DOG_PATH\") dog_training_path", "= criterion(output, target) test_loss += loss.item() * data.size(0) # convert output probabilities to", "int, train_batches: torch.utils.data.DataLoader, validation_batches: torch.utils.data.DataLoader, model: nn.Module, optimizer: optimizer.SGD, criterion: nn.CrossEntropyLoss, epoch_start: int=1,", "KERNEL, padding=PADDING) # max pooling layer self.pool = nn.MaxPool2d(2, 2) # linear layer", "= x.view(-1, (IMAGE_HALF_SIZE//4)**2 * LAYER_THREE_OUT) x = self.dropout(x) x = self.dropout(F.relu(self.fc1(x))) x =", "test_loss = 0. correct = 0. total = 0. 
model.eval() for data, target", "the epoch count with save_path: path to save the best network parameters \"\"\"", "for the output of epochs (default: %(default)s)\") parser.add_argument(\"--restart\", action=\"store_true\", help=\"Wipe out old model.\")", "== \"__main__\": parser = argparse.ArgumentParser( description=\"Test or Train the Naive Dog Classifier\") parser.add_argument(\"--test\",", "and model_path.is_file(): model.load_state_dict(torch.load(model_path)) print(\"Starting Training\") timer.start() model_scratch = train(epochs=epochs, epoch_start=epoch_start, train_batches=train_batches, validation_batches=validate_batches, model=model,", "target.data.view_as(predictions))).cpu().numpy()) total += data.size(0) test_loss /= len(test_batches.dataset) print('Test Loss: {:.6f}\\n'.format(test_loss)) print('\\nTest Accuracy: %2d%%", "= output.data.max(1, keepdim=True)[1] # compare predictions to true label correct += numpy.sum( numpy.squeeze(", "total += data.size(0) test_loss /= len(test_batches.dataset) print('Test Loss: {:.6f}\\n'.format(test_loss)) print('\\nTest Accuracy: %2d%% (%2d/%2d)'", "criterion: nn.CrossEntropyLoss, epochs: int=10, epoch_start: int=1, load_model: bool=False) -> None: \"\"\"Trains and Tests", "flush it when redirecting it to a file print = partial(print, flush=True) device", "and Tests the Model Args: train_batches: batch-loaders for training validate_batches: batch-loaders for validation", "train(epochs: int, train_batches: torch.utils.data.DataLoader, validation_batches: torch.utils.data.DataLoader, model: nn.Module, optimizer: optimizer.SGD, criterion: nn.CrossEntropyLoss, epoch_start:", "model_path: Path, optimizer: optimizer.SGD, criterion: nn.CrossEntropyLoss, epochs: int=10, epoch_start: int=1, load_model: bool=False) ->", "/= len(validation_batches.dataset) timer.end() print('Epoch: {} \\tTraining Loss: {:.6f} \\tValidation Loss: {:.6f}'.format( epoch, training_loss,", "action=\"store_true\", help=\"Only run the test\") parser.add_argument(\"--epochs\", default=10, type=int, help=\"Training epochs (default: %(default)s)\") parser.add_argument(", "descent object criterion: object to do backwards propagation epochs: number of times to", "batch_size=BATCH_SIZE, shuffle=True, num_workers=WORKERS) validation_batches = torch.utils.data.DataLoader( validation, batch_size=BATCH_SIZE, shuffle=True, num_workers=WORKERS) test_batches = torch.utils.data.DataLoader(", "self.conv2 = nn.Conv2d(LAYER_ONE_OUT, LAYER_TWO_OUT, KERNEL, padding=PADDING) self.conv3 = nn.Conv2d(LAYER_TWO_OUT, LAYER_THREE_OUT, KERNEL, padding=PADDING) #", "backwards propagation epochs: number of times to train on the data set epoch_start:", "= validation_loss return model def test(test_batches: torch.utils.data.DataLoader, model: nn.Module, criterion: nn.CrossEntropyLoss) -> None:", "import load_dotenv from PIL import Image, ImageFile from torchvision import datasets import numpy", "to a file print = partial(print, flush=True) device = torch.device(\"cuda\" if torch.cuda.is_available() else", "total, correct, total)) return def train_and_test(train_batches: torch.utils.data.DataLoader, validate_batches: torch.utils.data.DataLoader, test_batches: torch.utils.data.DataLoader, model: nn.Module,", "description=\"Test or Train the Naive Dog Classifier\") parser.add_argument(\"--test\", action=\"store_true\", help=\"Only run the test\")", "epoch in range(epoch_start, end): timer.start() training_loss = 0.0 validation_loss = 0.0 model.train() for", "in validation_batches: data, target = 
data.to(device), target.cuda(device) output = model(data) loss = criterion(output,", "file print = partial(print, flush=True) device = torch.device(\"cuda\" if torch.cuda.is_available() else \"cpu\") print(device)", "IMAGE_SIZE = 224 IMAGE_HALF_SIZE = IMAGE_SIZE//2 train_transform = transforms.Compose([ transforms.RandomRotation(30), transforms.RandomResizedCrop(IMAGE_SIZE), transforms.RandomHorizontalFlip(), transforms.ToTensor(),", "breeds\"\"\" def __init__(self) -> None: super().__init__() self.conv1 = nn.Conv2d(3, LAYER_ONE_OUT, KERNEL, padding=PADDING) self.conv2", "criterion(output, target) validation_loss += loss.item() * data.size(0) training_loss /= len(train_batches.dataset) validation_loss /= len(validation_batches.dataset)", "as nn import torch.nn.functional as F import torch.optim as optimizer import torchvision.transforms as", "def test(test_batches: torch.utils.data.DataLoader, model: nn.Module, criterion: nn.CrossEntropyLoss) -> None: \"\"\"Test the model Args:", "or Train the Naive Dog Classifier\") parser.add_argument(\"--test\", action=\"store_true\", help=\"Only run the test\") parser.add_argument(\"--epochs\",", "validation_loss_min: print('Validation loss decreased ({:.6f} --> {:.6f}). Saving model ...'.format( validation_loss_min, validation_loss)) torch.save(model.state_dict(),", "classify dog breeds\"\"\" def __init__(self) -> None: super().__init__() self.conv1 = nn.Conv2d(3, LAYER_ONE_OUT, KERNEL,", "to test criterion: calculator for the loss \"\"\" test_loss = 0. correct =", "neurotic.tangles.data_paths import DataPathTwo from neurotic.tangles.timer import Timer from neurotic.constants.imagenet_map import imagenet # the", "dog_path = DataPathTwo(folder_key=\"DOG_PATH\") dog_training_path = DataPathTwo(folder_key=\"DOG_TRAIN\") dog_testing_path = DataPathTwo(folder_key=\"DOG_TEST\") dog_validation_path = DataPathTwo(folder_key=\"DOG_VALIDATE\") human_path", "lr=0.001, momentum=0.9) def train(epochs: int, train_batches: torch.utils.data.DataLoader, validation_batches: torch.utils.data.DataLoader, model: nn.Module, optimizer: optimizer.SGD,", "run the test\") parser.add_argument(\"--epochs\", default=10, type=int, help=\"Training epochs (default: %(default)s)\") parser.add_argument( \"--epoch-offset\", default=0,", "training validate_batches: batch-loaders for validation test_batches: batch-loaders for testing model: the network to", "+= data.size(0) test_loss /= len(test_batches.dataset) print('Test Loss: {:.6f}\\n'.format(test_loss)) print('\\nTest Accuracy: %2d%% (%2d/%2d)' %", "dog breeds\"\"\" def __init__(self) -> None: super().__init__() self.conv1 = nn.Conv2d(3, LAYER_ONE_OUT, KERNEL, padding=PADDING)", "validation_loss < validation_loss_min: print('Validation loss decreased ({:.6f} --> {:.6f}). 
Saving model ...'.format( validation_loss_min,", "testing model: the network to train model_path: where to save the best model", "training_loss = 0.0 validation_loss = 0.0 model.train() for data, target in train_batches: data,", "model.load_state_dict(torch.load(model_path)) print(\"Starting Testing\") timer.start() test(test_batches, model, criterion) timer.end() return model_path = DataPathTwo( folder_key=\"MODELS\",", "x.view(-1, (IMAGE_HALF_SIZE//4)**2 * LAYER_THREE_OUT) x = self.dropout(x) x = self.dropout(F.relu(self.fc1(x))) x = self.fc2(x)", "the network to train model_path: where to save the best model optimizer: the", "return model def test(test_batches: torch.utils.data.DataLoader, model: nn.Module, criterion: nn.CrossEntropyLoss) -> None: \"\"\"Test the", "\\tTraining Loss: {:.6f} \\tValidation Loss: {:.6f}'.format( epoch, training_loss, validation_loss )) if validation_loss <", "# pypi from dotenv import load_dotenv from PIL import Image, ImageFile from torchvision", "shuffle=True, num_workers=WORKERS) test_batches = torch.utils.data.DataLoader( testing, batch_size=BATCH_SIZE, shuffle=True, num_workers=WORKERS) loaders_scratch = dict(train=train_batches, validate=validation_batches,", ")) if validation_loss < validation_loss_min: print('Validation loss decreased ({:.6f} --> {:.6f}). Saving model", "nn import torch.nn.functional as F import torch.optim as optimizer import torchvision.transforms as transforms", "= True load_dotenv() dog_path = DataPathTwo(folder_key=\"DOG_PATH\") dog_training_path = DataPathTwo(folder_key=\"DOG_TRAIN\") dog_testing_path = DataPathTwo(folder_key=\"DOG_TEST\") dog_validation_path", "= data.to(device), target.to(device) output = model(data) loss = criterion(output, target) test_loss += loss.item()", "optimizer import torchvision.transforms as transforms # this project from neurotic.tangles.data_paths import DataPathTwo from", "# this project from neurotic.tangles.data_paths import DataPathTwo from neurotic.tangles.timer import Timer from neurotic.constants.imagenet_map", "= optimizer.SGD(model_scratch.parameters(), lr=0.001, momentum=0.9) def train(epochs: int, train_batches: torch.utils.data.DataLoader, validation_batches: torch.utils.data.DataLoader, model: nn.Module,", "import argparse import os # pypi from dotenv import load_dotenv from PIL import", "= IMAGE_SIZE//2 train_transform = transforms.Compose([ transforms.RandomRotation(30), transforms.RandomResizedCrop(IMAGE_SIZE), transforms.RandomHorizontalFlip(), transforms.ToTensor(), transforms.Normalize(means, deviations)]) test_transform =", "nn.Module, criterion: nn.CrossEntropyLoss) -> None: \"\"\"Test the model Args: test_batches: batch loader of", "help=\"Training epochs (default: %(default)s)\") parser.add_argument( \"--epoch-offset\", default=0, type=int, help=\"Offset for the output of", "import torch.optim as optimizer import torchvision.transforms as transforms # this project from neurotic.tangles.data_paths", "optimizer: optimizer.SGD, criterion: nn.CrossEntropyLoss, epoch_start: int=1, save_path: str=\"model_scratch.pt\"): \"\"\"Trains the Model Args: epochs:", "= DataPathTwo( folder_key=\"MODELS\", filename=\"model_scratch.pt\") assert model_path.folder.is_dir() if __name__ == \"__main__\": parser = argparse.ArgumentParser(", "to do backwards propagation epochs: number of times to train on the data", "won't show up if you don't flush it when redirecting it to a", "epoch count with save_path: path to save the best network parameters \"\"\" validation_loss_min", "save_path) 
validation_loss_min = validation_loss return model def test(test_batches: torch.utils.data.DataLoader, model: nn.Module, criterion: nn.CrossEntropyLoss)", "with load_model: whether to load the model from a file \"\"\" if load_model", "\"__main__\": parser = argparse.ArgumentParser( description=\"Test or Train the Naive Dog Classifier\") parser.add_argument(\"--test\", action=\"store_true\",", "loss.item() * data.size(0) training_loss /= len(train_batches.dataset) validation_loss /= len(validation_batches.dataset) timer.end() print('Epoch: {} \\tTraining", "set epoch_start: number to start the epoch count with load_model: whether to load", "training_loss, validation_loss )) if validation_loss < validation_loss_min: print('Validation loss decreased ({:.6f} --> {:.6f}).", "target) loss.backward() optimizer.step() training_loss += loss.item() * data.size(0) model.eval() for data, target in", "len(set(dog_training_path.folder.iterdir())) print(\"Number of Dog Breeds: {}\".format(BREEDS)) timer = Timer(beep=SPEAKABLE) means = [0.485, 0.456,", "numpy.sum( numpy.squeeze( predictions.eq( target.data.view_as(predictions))).cpu().numpy()) total += data.size(0) test_loss /= len(test_batches.dataset) print('Test Loss: {:.6f}\\n'.format(test_loss))", "(%2d/%2d)' % ( 100. * correct / total, correct, total)) return def train_and_test(train_batches:", "the test\") parser.add_argument(\"--epochs\", default=10, type=int, help=\"Training epochs (default: %(default)s)\") parser.add_argument( \"--epoch-offset\", default=0, type=int,", "self.pool(F.relu(self.conv2(x))) x = self.pool(F.relu(self.conv3(x))) x = x.view(-1, (IMAGE_HALF_SIZE//4)**2 * LAYER_THREE_OUT) x = self.dropout(x)", "train_batches: torch.utils.data.DataLoader, validation_batches: torch.utils.data.DataLoader, model: nn.Module, optimizer: optimizer.SGD, criterion: nn.CrossEntropyLoss, epoch_start: int=1, save_path:", "model: the network to test criterion: calculator for the loss \"\"\" test_loss =", "count with load_model: whether to load the model from a file \"\"\" if", "number to start the epoch count with load_model: whether to load the model", "* data.size(0) model.eval() for data, target in validation_batches: data, target = data.to(device), target.cuda(device)", "validate_batches: torch.utils.data.DataLoader, test_batches: torch.utils.data.DataLoader, model: nn.Module, model_path: Path, optimizer: optimizer.SGD, criterion: nn.CrossEntropyLoss, epochs:", "self.dropout(x) x = self.dropout(F.relu(self.fc1(x))) x = self.fc2(x) return x model_scratch = Net() if", "LAYER_TWO_OUT = LAYER_ONE_OUT * 2 LAYER_THREE_OUT = LAYER_TWO_OUT * 2 KERNEL = 3", "1 FULLY_CONNECTED_OUT = 500 class Net(nn.Module): \"\"\"Naive Neural Network to classify dog breeds\"\"\"", "= criterion(output, target) validation_loss += loss.item() * data.size(0) training_loss /= len(train_batches.dataset) validation_loss /=", "parser.parse_args() if arguments.test: test(loaders_scratch[\"test\"], model_scratch, criterion_scratch) else: train_and_test(epochs=arguments.epochs, train_batches=loaders_scratch[\"train\"], validate_batches=loaders_scratch[\"validate\"], test_batches=loaders_scratch[\"test\"], model=model_scratch, optimizer=optimizer_scratch,", "epochs: int=10, epoch_start: int=1, load_model: bool=False) -> None: \"\"\"Trains and Tests the Model", "the Model Args: epochs: number of times to train on the data set", "* LAYER_THREE_OUT, FULLY_CONNECTED_OUT) self.fc2 = nn.Linear(FULLY_CONNECTED_OUT, BREEDS) # dropout layer (p=0.25) self.dropout =", "print(\"Number of Dog 
Breeds: {}\".format(BREEDS)) timer = Timer(beep=SPEAKABLE) means = [0.485, 0.456, 0.406]", "transforms.CenterCrop(IMAGE_SIZE), transforms.ToTensor(), transforms.Normalize(means, deviations)]) training = datasets.ImageFolder(root=str(dog_training_path.folder), transform=train_transform) validation = datasets.ImageFolder(root=str(dog_validation_path.folder), transform=test_transform) testing", "= 1 FULLY_CONNECTED_OUT = 500 class Net(nn.Module): \"\"\"Naive Neural Network to classify dog", "save_path: str=\"model_scratch.pt\"): \"\"\"Trains the Model Args: epochs: number of times to train on", "to start the epoch count with load_model: whether to load the model from", "= partial(print, flush=True) device = torch.device(\"cuda\" if torch.cuda.is_available() else \"cpu\") print(device) ImageFile.LOAD_TRUNCATED_IMAGES =", "torch.nn.functional as F import torch.optim as optimizer import torchvision.transforms as transforms # this", "nn.Conv2d(3, LAYER_ONE_OUT, KERNEL, padding=PADDING) self.conv2 = nn.Conv2d(LAYER_ONE_OUT, LAYER_TWO_OUT, KERNEL, padding=PADDING) self.conv3 = nn.Conv2d(LAYER_TWO_OUT,", "from dotenv import load_dotenv from PIL import Image, ImageFile from torchvision import datasets", "2 KERNEL = 3 PADDING = 1 FULLY_CONNECTED_OUT = 500 class Net(nn.Module): \"\"\"Naive", "on the data set epoch_start: number to start the epoch count with load_model:", "model model.load_state_dict(torch.load(model_path)) print(\"Starting Testing\") timer.start() test(test_batches, model, criterion) timer.end() return model_path = DataPathTwo(", "0.0 validation_loss = 0.0 model.train() for data, target in train_batches: data, target =", "{} GPUs\".format(torch.cuda.device_count())) model_scratch = nn.DataParallel(model_scratch) model_scratch.to(device) criterion_scratch = nn.CrossEntropyLoss() optimizer_scratch = optimizer.SGD(model_scratch.parameters(), lr=0.001,", "epoch_start: int=1, save_path: str=\"model_scratch.pt\"): \"\"\"Trains the Model Args: epochs: number of times to", "dropout layer (p=0.25) self.dropout = nn.Dropout(0.25) return def forward(self, x): # add sequence", "train_batches = torch.utils.data.DataLoader(training, batch_size=BATCH_SIZE, shuffle=True, num_workers=WORKERS) validation_batches = torch.utils.data.DataLoader( validation, batch_size=BATCH_SIZE, shuffle=True, num_workers=WORKERS)", "loss = criterion(output, target) validation_loss += loss.item() * data.size(0) training_loss /= len(train_batches.dataset) validation_loss", "# compare predictions to true label correct += numpy.sum( numpy.squeeze( predictions.eq( target.data.view_as(predictions))).cpu().numpy()) total", "dog_training_path = DataPathTwo(folder_key=\"DOG_TRAIN\") dog_testing_path = DataPathTwo(folder_key=\"DOG_TEST\") dog_validation_path = DataPathTwo(folder_key=\"DOG_VALIDATE\") human_path = DataPathTwo(folder_key=\"HUMAN_PATH\") BREEDS", "the best model optimizer: the gradient descent object criterion: object to do backwards", "train optimizer: the gradient descent object criterion: object to do backwards propagation epoch_start:", "parameters \"\"\" validation_loss_min = numpy.Inf end = epoch_start + epochs for epoch in", "= 3 PADDING = 1 FULLY_CONNECTED_OUT = 500 class Net(nn.Module): \"\"\"Naive Neural Network", "train_batches: the batch-loaders for training validation_batches: batch-loaders for validation model: the network to", "best model model.load_state_dict(torch.load(model_path)) print(\"Starting Testing\") timer.start() test(test_batches, model, criterion) timer.end() return model_path =", "= 
DataPathTwo(folder_key=\"HUMAN_PATH\") BREEDS = len(set(dog_training_path.folder.iterdir())) print(\"Number of Dog Breeds: {}\".format(BREEDS)) timer = Timer(beep=SPEAKABLE)", "the epoch count with load_model: whether to load the model from a file", "the best model model.load_state_dict(torch.load(model_path)) print(\"Starting Testing\") timer.start() test(test_batches, model, criterion) timer.end() return model_path", "default=10, type=int, help=\"Training epochs (default: %(default)s)\") parser.add_argument( \"--epoch-offset\", default=0, type=int, help=\"Offset for the", "training = datasets.ImageFolder(root=str(dog_training_path.folder), transform=train_transform) validation = datasets.ImageFolder(root=str(dog_validation_path.folder), transform=test_transform) testing = datasets.ImageFolder(root=str(dog_testing_path.folder), transform=test_transform) BATCH_SIZE", "{} \\tTraining Loss: {:.6f} \\tValidation Loss: {:.6f}'.format( epoch, training_loss, validation_loss )) if validation_loss", "= nn.CrossEntropyLoss() optimizer_scratch = optimizer.SGD(model_scratch.parameters(), lr=0.001, momentum=0.9) def train(epochs: int, train_batches: torch.utils.data.DataLoader, validation_batches:", "epochs (default: %(default)s)\") parser.add_argument( \"--epoch-offset\", default=0, type=int, help=\"Offset for the output of epochs", "train_and_test(train_batches: torch.utils.data.DataLoader, validate_batches: torch.utils.data.DataLoader, test_batches: torch.utils.data.DataLoader, model: nn.Module, model_path: Path, optimizer: optimizer.SGD, criterion:", "= DataPathTwo(folder_key=\"DOG_VALIDATE\") human_path = DataPathTwo(folder_key=\"HUMAN_PATH\") BREEDS = len(set(dog_training_path.folder.iterdir())) print(\"Number of Dog Breeds: {}\".format(BREEDS))", "partial(print, flush=True) device = torch.device(\"cuda\" if torch.cuda.is_available() else \"cpu\") print(device) ImageFile.LOAD_TRUNCATED_IMAGES = True", "BREEDS) # dropout layer (p=0.25) self.dropout = nn.Dropout(0.25) return def forward(self, x): #", "epoch_start + epochs for epoch in range(epoch_start, end): timer.start() training_loss = 0.0 validation_loss", "the network to test criterion: calculator for the loss \"\"\" test_loss = 0.", "of times to train on the data set epoch_start: number to start the", "layer (64 * 4 * 4 -> 500) self.fc1 = nn.Linear((IMAGE_HALF_SIZE//4)**2 * LAYER_THREE_OUT,", "parser = argparse.ArgumentParser( description=\"Test or Train the Naive Dog Classifier\") parser.add_argument(\"--test\", action=\"store_true\", help=\"Only", "batch_size=BATCH_SIZE, shuffle=True, num_workers=WORKERS) loaders_scratch = dict(train=train_batches, validate=validation_batches, test=test_batches) LAYER_ONE_OUT = 16 LAYER_TWO_OUT =", "-> None: \"\"\"Test the model Args: test_batches: batch loader of test images model:", "data.to(device), target.cuda(device) output = model(data) loss = criterion(output, target) validation_loss += loss.item() *", "print(device) ImageFile.LOAD_TRUNCATED_IMAGES = True load_dotenv() dog_path = DataPathTwo(folder_key=\"DOG_PATH\") dog_training_path = DataPathTwo(folder_key=\"DOG_TRAIN\") dog_testing_path =", "nn.Module, model_path: Path, optimizer: optimizer.SGD, criterion: nn.CrossEntropyLoss, epochs: int=10, epoch_start: int=1, load_model: bool=False)", "= criterion(output, target) loss.backward() optimizer.step() training_loss += loss.item() * data.size(0) model.eval() for data,", "= torch.device(\"cuda\" if torch.cuda.is_available() else \"cpu\") print(device) ImageFile.LOAD_TRUNCATED_IMAGES = True 
load_dotenv() dog_path =", "test(test_batches, model, criterion) timer.end() return model_path = DataPathTwo( folder_key=\"MODELS\", filename=\"model_scratch.pt\") assert model_path.folder.is_dir() if", "{:.6f} \\tValidation Loss: {:.6f}'.format( epoch, training_loss, validation_loss )) if validation_loss < validation_loss_min: print('Validation", "nn.CrossEntropyLoss, epoch_start: int=1, save_path: str=\"model_scratch.pt\"): \"\"\"Trains the Model Args: epochs: number of times", "= Net() if torch.cuda.is_available(): print(\"Using {} GPUs\".format(torch.cuda.device_count())) model_scratch = nn.DataParallel(model_scratch) model_scratch.to(device) criterion_scratch =", "int=1, save_path: str=\"model_scratch.pt\"): \"\"\"Trains the Model Args: epochs: number of times to train", "model_path.is_file(): model.load_state_dict(torch.load(model_path)) print(\"Starting Training\") timer.start() model_scratch = train(epochs=epochs, epoch_start=epoch_start, train_batches=train_batches, validation_batches=validate_batches, model=model, optimizer=optimizer,", "x = self.dropout(x) x = self.dropout(F.relu(self.fc1(x))) x = self.fc2(x) return x model_scratch =", "print(\"Using {} GPUs\".format(torch.cuda.device_count())) model_scratch = nn.DataParallel(model_scratch) model_scratch.to(device) criterion_scratch = nn.CrossEntropyLoss() optimizer_scratch = optimizer.SGD(model_scratch.parameters(),", "device = torch.device(\"cuda\" if torch.cuda.is_available() else \"cpu\") print(device) ImageFile.LOAD_TRUNCATED_IMAGES = True load_dotenv() dog_path", "nn.DataParallel(model_scratch) model_scratch.to(device) criterion_scratch = nn.CrossEntropyLoss() optimizer_scratch = optimizer.SGD(model_scratch.parameters(), lr=0.001, momentum=0.9) def train(epochs: int,", "\"--epoch-offset\", default=0, type=int, help=\"Offset for the output of epochs (default: %(default)s)\") parser.add_argument(\"--restart\", action=\"store_true\",", "on the data set train_batches: the batch-loaders for training validation_batches: batch-loaders for validation", "self.dropout(F.relu(self.fc1(x))) x = self.fc2(x) return x model_scratch = Net() if torch.cuda.is_available(): print(\"Using {}", "data, target in train_batches: data, target = data.to(device), target.to(device) optimizer.zero_grad() output = model(data)", "= DataPathTwo(folder_key=\"DOG_TEST\") dog_validation_path = DataPathTwo(folder_key=\"DOG_VALIDATE\") human_path = DataPathTwo(folder_key=\"HUMAN_PATH\") BREEDS = len(set(dog_training_path.folder.iterdir())) print(\"Number of", "DataPathTwo(folder_key=\"DOG_PATH\") dog_training_path = DataPathTwo(folder_key=\"DOG_TRAIN\") dog_testing_path = DataPathTwo(folder_key=\"DOG_TEST\") dog_validation_path = DataPathTwo(folder_key=\"DOG_VALIDATE\") human_path = DataPathTwo(folder_key=\"HUMAN_PATH\")", "validation test_batches: batch-loaders for testing model: the network to train model_path: where to", "whether to load the model from a file \"\"\" if load_model and model_path.is_file():", "redirecting it to a file print = partial(print, flush=True) device = torch.device(\"cuda\" if", "backwards propagation epoch_start: number to start the epoch count with save_path: path to", "None: super().__init__() self.conv1 = nn.Conv2d(3, LAYER_ONE_OUT, KERNEL, padding=PADDING) self.conv2 = nn.Conv2d(LAYER_ONE_OUT, LAYER_TWO_OUT, KERNEL,", "target in validation_batches: data, target = data.to(device), target.cuda(device) output = model(data) loss =", "the loss \"\"\" test_loss = 0. correct = 0. total = 0. 
model.eval()", "= transforms.Compose([transforms.Resize(255), transforms.CenterCrop(IMAGE_SIZE), transforms.ToTensor(), transforms.Normalize(means, deviations)]) training = datasets.ImageFolder(root=str(dog_training_path.folder), transform=train_transform) validation = datasets.ImageFolder(root=str(dog_validation_path.folder),", "correct = 0. total = 0. model.eval() for data, target in test_batches: data,", "epoch, training_loss, validation_loss )) if validation_loss < validation_loss_min: print('Validation loss decreased ({:.6f} -->", "-> None: super().__init__() self.conv1 = nn.Conv2d(3, LAYER_ONE_OUT, KERNEL, padding=PADDING) self.conv2 = nn.Conv2d(LAYER_ONE_OUT, LAYER_TWO_OUT,", "model: the network to train model_path: where to save the best model optimizer:", "= data.to(device), target.cuda(device) output = model(data) loss = criterion(output, target) validation_loss += loss.item()", "for data, target in validation_batches: data, target = data.to(device), target.cuda(device) output = model(data)", "ImageFile.LOAD_TRUNCATED_IMAGES = True load_dotenv() dog_path = DataPathTwo(folder_key=\"DOG_PATH\") dog_training_path = DataPathTwo(folder_key=\"DOG_TRAIN\") dog_testing_path = DataPathTwo(folder_key=\"DOG_TEST\")", "from torchvision import datasets import numpy import torch import torch.nn as nn import", "% ( 100. * correct / total, correct, total)) return def train_and_test(train_batches: torch.utils.data.DataLoader,", "DataPathTwo(folder_key=\"DOG_VALIDATE\") human_path = DataPathTwo(folder_key=\"HUMAN_PATH\") BREEDS = len(set(dog_training_path.folder.iterdir())) print(\"Number of Dog Breeds: {}\".format(BREEDS)) timer", "load the best model model.load_state_dict(torch.load(model_path)) print(\"Starting Testing\") timer.start() test(test_batches, model, criterion) timer.end() return", "DataPathTwo(folder_key=\"DOG_TRAIN\") dog_testing_path = DataPathTwo(folder_key=\"DOG_TEST\") dog_validation_path = DataPathTwo(folder_key=\"DOG_VALIDATE\") human_path = DataPathTwo(folder_key=\"HUMAN_PATH\") BREEDS = len(set(dog_training_path.folder.iterdir()))", "= nn.MaxPool2d(2, 2) # linear layer (64 * 4 * 4 -> 500)", "in train_batches: data, target = data.to(device), target.to(device) optimizer.zero_grad() output = model(data) loss =", "model, criterion) timer.end() return model_path = DataPathTwo( folder_key=\"MODELS\", filename=\"model_scratch.pt\") assert model_path.folder.is_dir() if __name__", "for validation model: the network to train optimizer: the gradient descent object criterion:", "shuffle=True, num_workers=WORKERS) loaders_scratch = dict(train=train_batches, validate=validation_batches, test=test_batches) LAYER_ONE_OUT = 16 LAYER_TWO_OUT = LAYER_ONE_OUT", "= 16 LAYER_TWO_OUT = LAYER_ONE_OUT * 2 LAYER_THREE_OUT = LAYER_TWO_OUT * 2 KERNEL", "nn.CrossEntropyLoss) -> None: \"\"\"Test the model Args: test_batches: batch loader of test images", "calculator for the loss \"\"\" test_loss = 0. correct = 0. total =", "the batch-loaders for training validation_batches: batch-loaders for validation model: the network to train", "loader of test images model: the network to test criterion: calculator for the", "\"\"\"Trains and Tests the Model Args: train_batches: batch-loaders for training validate_batches: batch-loaders for", "# python from functools import partial import argparse import os # pypi from", "{:.6f}). 
Saving model ...'.format( validation_loss_min, validation_loss)) torch.save(model.state_dict(), save_path) validation_loss_min = validation_loss return model", "PADDING = 1 FULLY_CONNECTED_OUT = 500 class Net(nn.Module): \"\"\"Naive Neural Network to classify", "data set epoch_start: number to start the epoch count with load_model: whether to", "= [0.229, 0.224, 0.225] IMAGE_SIZE = 224 IMAGE_HALF_SIZE = IMAGE_SIZE//2 train_transform = transforms.Compose([", "correct / total, correct, total)) return def train_and_test(train_batches: torch.utils.data.DataLoader, validate_batches: torch.utils.data.DataLoader, test_batches: torch.utils.data.DataLoader,", "filename=\"model_scratch.pt\") assert model_path.folder.is_dir() if __name__ == \"__main__\": parser = argparse.ArgumentParser( description=\"Test or Train", "import partial import argparse import os # pypi from dotenv import load_dotenv from", "to train on the data set train_batches: the batch-loaders for training validation_batches: batch-loaders", "total = 0. model.eval() for data, target in test_batches: data, target = data.to(device),", "test_batches: torch.utils.data.DataLoader, model: nn.Module, model_path: Path, optimizer: optimizer.SGD, criterion: nn.CrossEntropyLoss, epochs: int=10, epoch_start:", "the network to train optimizer: the gradient descent object criterion: object to do", "model Args: test_batches: batch loader of test images model: the network to test", "of test images model: the network to test criterion: calculator for the loss", "optimizer.zero_grad() output = model(data) loss = criterion(output, target) loss.backward() optimizer.step() training_loss += loss.item()", "torch.utils.data.DataLoader( testing, batch_size=BATCH_SIZE, shuffle=True, num_workers=WORKERS) loaders_scratch = dict(train=train_batches, validate=validation_batches, test=test_batches) LAYER_ONE_OUT = 16", "validate=validation_batches, test=test_batches) LAYER_ONE_OUT = 16 LAYER_TWO_OUT = LAYER_ONE_OUT * 2 LAYER_THREE_OUT = LAYER_TWO_OUT", "print(\"Starting Training\") timer.start() model_scratch = train(epochs=epochs, epoch_start=epoch_start, train_batches=train_batches, validation_batches=validate_batches, model=model, optimizer=optimizer, criterion=criterion, save_path=model_path)", "x = self.fc2(x) return x model_scratch = Net() if torch.cuda.is_available(): print(\"Using {} GPUs\".format(torch.cuda.device_count()))", "import DataPathTwo from neurotic.tangles.timer import Timer from neurotic.constants.imagenet_map import imagenet # the output", "label correct += numpy.sum( numpy.squeeze( predictions.eq( target.data.view_as(predictions))).cpu().numpy()) total += data.size(0) test_loss /= len(test_batches.dataset)", "(64 * 4 * 4 -> 500) self.fc1 = nn.Linear((IMAGE_HALF_SIZE//4)**2 * LAYER_THREE_OUT, FULLY_CONNECTED_OUT)", "= self.pool(F.relu(self.conv1(x))) x = self.pool(F.relu(self.conv2(x))) x = self.pool(F.relu(self.conv3(x))) x = x.view(-1, (IMAGE_HALF_SIZE//4)**2 *", "epochs for epoch in range(epoch_start, end): timer.start() training_loss = 0.0 validation_loss = 0.0", "DataPathTwo( folder_key=\"MODELS\", filename=\"model_scratch.pt\") assert model_path.folder.is_dir() if __name__ == \"__main__\": parser = argparse.ArgumentParser( description=\"Test", "testing, batch_size=BATCH_SIZE, shuffle=True, num_workers=WORKERS) loaders_scratch = dict(train=train_batches, validate=validation_batches, test=test_batches) LAYER_ONE_OUT = 16 LAYER_TWO_OUT", "nn.CrossEntropyLoss() optimizer_scratch = optimizer.SGD(model_scratch.parameters(), lr=0.001, 
momentum=0.9) def train(epochs: int, train_batches: torch.utils.data.DataLoader, validation_batches: torch.utils.data.DataLoader,", "start the epoch count with load_model: whether to load the model from a", "predictions.eq( target.data.view_as(predictions))).cpu().numpy()) total += data.size(0) test_loss /= len(test_batches.dataset) print('Test Loss: {:.6f}\\n'.format(test_loss)) print('\\nTest Accuracy:", "epoch_start: int=1, load_model: bool=False) -> None: \"\"\"Trains and Tests the Model Args: train_batches:", "model: nn.Module, criterion: nn.CrossEntropyLoss) -> None: \"\"\"Test the model Args: test_batches: batch loader", "Args: test_batches: batch loader of test images model: the network to test criterion:", "torch.utils.data.DataLoader, model: nn.Module, criterion: nn.CrossEntropyLoss) -> None: \"\"\"Test the model Args: test_batches: batch", "print('Epoch: {} \\tTraining Loss: {:.6f} \\tValidation Loss: {:.6f}'.format( epoch, training_loss, validation_loss )) if", "self.pool(F.relu(self.conv3(x))) x = x.view(-1, (IMAGE_HALF_SIZE//4)**2 * LAYER_THREE_OUT) x = self.dropout(x) x = self.dropout(F.relu(self.fc1(x)))", "validation_batches: torch.utils.data.DataLoader, model: nn.Module, optimizer: optimizer.SGD, criterion: nn.CrossEntropyLoss, epoch_start: int=1, save_path: str=\"model_scratch.pt\"): \"\"\"Trains", "= self.dropout(F.relu(self.fc1(x))) x = self.fc2(x) return x model_scratch = Net() if torch.cuda.is_available(): print(\"Using", "train_batches: batch-loaders for training validate_batches: batch-loaders for validation test_batches: batch-loaders for testing model:", "layer (p=0.25) self.dropout = nn.Dropout(0.25) return def forward(self, x): # add sequence of", "0.0 model.train() for data, target in train_batches: data, target = data.to(device), target.to(device) optimizer.zero_grad()", "means = [0.485, 0.456, 0.406] deviations = [0.229, 0.224, 0.225] IMAGE_SIZE = 224", "of epochs (default: %(default)s)\") parser.add_argument(\"--restart\", action=\"store_true\", help=\"Wipe out old model.\") arguments = parser.parse_args()", "Network to classify dog breeds\"\"\" def __init__(self) -> None: super().__init__() self.conv1 = nn.Conv2d(3,", "data.size(0) model.eval() for data, target in validation_batches: data, target = data.to(device), target.cuda(device) output", "= nn.Conv2d(3, LAYER_ONE_OUT, KERNEL, padding=PADDING) self.conv2 = nn.Conv2d(LAYER_ONE_OUT, LAYER_TWO_OUT, KERNEL, padding=PADDING) self.conv3 =", "\"\"\" validation_loss_min = numpy.Inf end = epoch_start + epochs for epoch in range(epoch_start,", "= LAYER_TWO_OUT * 2 KERNEL = 3 PADDING = 1 FULLY_CONNECTED_OUT = 500", "model_scratch = train(epochs=epochs, epoch_start=epoch_start, train_batches=train_batches, validation_batches=validate_batches, model=model, optimizer=optimizer, criterion=criterion, save_path=model_path) timer.end() # load", "import torch.nn.functional as F import torch.optim as optimizer import torchvision.transforms as transforms #", "print = partial(print, flush=True) device = torch.device(\"cuda\" if torch.cuda.is_available() else \"cpu\") print(device) ImageFile.LOAD_TRUNCATED_IMAGES", "you don't flush it when redirecting it to a file print = partial(print,", "<reponame>necromuralist/Neurotic-Networking # python from functools import partial import argparse import os # pypi", "\"\"\"Test the model Args: test_batches: batch loader of test images model: the network", "4 -> 500) self.fc1 = nn.Linear((IMAGE_HALF_SIZE//4)**2 * LAYER_THREE_OUT, FULLY_CONNECTED_OUT) self.fc2 = 
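# Illustrative sketch: pull a single batch to confirm the loader shapes
# (BATCH_SIZE images of 3 x IMAGE_SIZE x IMAGE_SIZE plus one label each):
#
#     images, labels = next(iter(train_batches))
#     print(images.shape)  # torch.Size([10, 3, 224, 224])
#     print(labels.shape)  # torch.Size([10])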
LAYER_ONE_OUT = 16
LAYER_TWO_OUT = LAYER_ONE_OUT * 2
LAYER_THREE_OUT = LAYER_TWO_OUT * 2
KERNEL = 3
PADDING = 1
FULLY_CONNECTED_OUT = 500


class Net(nn.Module):
    """Naive Neural Network to classify dog breeds"""
    def __init__(self) -> None:
        super().__init__()
        self.conv1 = nn.Conv2d(3, LAYER_ONE_OUT, KERNEL, padding=PADDING)
        self.conv2 = nn.Conv2d(LAYER_ONE_OUT, LAYER_TWO_OUT, KERNEL,
                               padding=PADDING)
        self.conv3 = nn.Conv2d(LAYER_TWO_OUT, LAYER_THREE_OUT, KERNEL,
                               padding=PADDING)
        # max pooling layer
        self.pool = nn.MaxPool2d(2, 2)
        # linear layer (64 * 28 * 28 -> 500): the three poolings halve the
        # 224-pixel image down to 28 pixels a side
        self.fc1 = nn.Linear((IMAGE_HALF_SIZE//4)**2 * LAYER_THREE_OUT,
                             FULLY_CONNECTED_OUT)
        self.fc2 = nn.Linear(FULLY_CONNECTED_OUT, BREEDS)
        # dropout layer (p=0.25)
        self.dropout = nn.Dropout(0.25)
        return

    def forward(self, x):
        # add sequence of convolutional and max pooling layers
        x = self.pool(F.relu(self.conv1(x)))
        x = self.pool(F.relu(self.conv2(x)))
        x = self.pool(F.relu(self.conv3(x)))
        # flatten, then dropout and the fully-connected layers
        x = x.view(-1, (IMAGE_HALF_SIZE//4)**2 * LAYER_THREE_OUT)
        x = self.dropout(x)
        x = self.dropout(F.relu(self.fc1(x)))
        x = self.fc2(x)
        return x
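# Illustrative sketch (hypothetical helper, not referenced elsewhere in this
# script): a dummy forward pass verifies the fc1 sizing above and that Net
# emits one logit per breed.
def check_output_shape(batch_size: int=2) -> None:
    """Push a zero-tensor through Net and check the output shape"""
    dummy = torch.zeros(batch_size, 3, IMAGE_SIZE, IMAGE_SIZE)
    logits = Net()(dummy)
    assert logits.shape == (batch_size, BREEDS)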
model_scratch = Net()
if torch.cuda.is_available():
    print("Using {} GPUs".format(torch.cuda.device_count()))
    model_scratch = nn.DataParallel(model_scratch)
model_scratch.to(device)

criterion_scratch = nn.CrossEntropyLoss()
optimizer_scratch = optimizer.SGD(model_scratch.parameters(), lr=0.001,
                                  momentum=0.9)
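# Note: once the model is wrapped in nn.DataParallel its state_dict keys carry
# a "module." prefix, so the checkpoints saved below only load cleanly back
# into the same wrapped model (which is how this script uses them).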
def train(epochs: int,
          train_batches: torch.utils.data.DataLoader,
          validation_batches: torch.utils.data.DataLoader,
          model: nn.Module,
          optimizer: optimizer.SGD,
          criterion: nn.CrossEntropyLoss,
          epoch_start: int=1,
          save_path: str="model_scratch.pt"):
    """Trains the Model

    Args:
     epochs: number of times to train on the data set
     train_batches: the batch-loaders for training
     validation_batches: batch-loaders for validation
     model: the network to train
     optimizer: the gradient descent object
     criterion: object to do backwards propagation
     epoch_start: number to start the epoch count with
     save_path: path to save the best network parameters
    """
    validation_loss_min = numpy.inf
    end = epoch_start + epochs
    for epoch in range(epoch_start, end):
        timer.start()
        training_loss = 0.0
        validation_loss = 0.0

        model.train()
        for data, target in train_batches:
            data, target = data.to(device), target.to(device)
            optimizer.zero_grad()
            output = model(data)
            loss = criterion(output, target)
            loss.backward()
            optimizer.step()
            training_loss += loss.item() * data.size(0)

        model.eval()
        for data, target in validation_batches:
            data, target = data.to(device), target.to(device)
            output = model(data)
            loss = criterion(output, target)
            validation_loss += loss.item() * data.size(0)

        training_loss /= len(train_batches.dataset)
        validation_loss /= len(validation_batches.dataset)
        timer.end()
        print('Epoch: {} \tTraining Loss: {:.6f} \tValidation Loss: {:.6f}'.format(
            epoch,
            training_loss,
            validation_loss
        ))
        if validation_loss < validation_loss_min:
            print('Validation loss decreased ({:.6f} --> {:.6f}). '
                  'Saving model ...'.format(validation_loss_min,
                                            validation_loss))
            torch.save(model.state_dict(), save_path)
            validation_loss_min = validation_loss
    return model
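# A possible refinement (a sketch, not what train does above): running the
# validation loop inside `with torch.no_grad():` would skip gradient tracking
# and cut memory use without changing the reported losses.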
def test(test_batches: torch.utils.data.DataLoader,
         model: nn.Module,
         criterion: nn.CrossEntropyLoss) -> None:
    """Test the model

    Args:
     test_batches: batch loader of test images
     model: the network to test
     criterion: calculator for the loss
    """
    test_loss = 0.
    correct = 0.
    total = 0.

    model.eval()
    for data, target in test_batches:
        data, target = data.to(device), target.to(device)
        output = model(data)
        loss = criterion(output, target)
        test_loss += loss.item() * data.size(0)
        # convert output probabilities to predicted class
        predictions = output.data.max(1, keepdim=True)[1]
        # compare predictions to true label
        correct += numpy.sum(
            numpy.squeeze(
                predictions.eq(
                    target.data.view_as(predictions))).cpu().numpy())
        total += data.size(0)
    test_loss /= len(test_batches.dataset)
    print('Test Loss: {:.6f}\n'.format(test_loss))
    print('\nTest Accuracy: %2d%% (%2d/%2d)' % (
        100. * correct / total, correct, total))
    return
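# The prediction step above is equivalent to the terser (alternative sketch):
#
#     predictions = output.argmax(dim=1, keepdim=True)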
def train_and_test(train_batches: torch.utils.data.DataLoader,
                   validate_batches: torch.utils.data.DataLoader,
                   test_batches: torch.utils.data.DataLoader,
                   model: nn.Module,
                   model_path: Path,
                   optimizer: optimizer.SGD,
                   criterion: nn.CrossEntropyLoss,
                   epochs: int=10,
                   epoch_start: int=1,
                   load_model: bool=False) -> None:
    """Trains and Tests the Model

    Args:
     train_batches: batch-loaders for training
     validate_batches: batch-loaders for validation
     test_batches: batch-loaders for testing
     model: the network to train
     model_path: where to save the best model
     optimizer: the gradient descent object
     criterion: object to do backwards propagation
     epochs: number of times to train on the data set
     epoch_start: number to start the epoch count with
     load_model: whether to load the model from a file
    """
    if load_model and model_path.is_file():
        model.load_state_dict(torch.load(model_path))
    print("Starting Training")
    timer.start()
    model = train(epochs=epochs,
                  epoch_start=epoch_start,
                  train_batches=train_batches,
                  validation_batches=validate_batches,
                  model=model,
                  optimizer=optimizer,
                  criterion=criterion,
                  save_path=model_path)
    timer.end()
    # load the best model
    model.load_state_dict(torch.load(model_path))
    print("Starting Testing")
    timer.start()
    test(test_batches, model, criterion)
    timer.end()
    return
model_path = DataPathTwo(
    folder_key="MODELS",
    filename="model_scratch.pt")
assert model_path.folder.is_dir()

if __name__ == "__main__":
    parser = argparse.ArgumentParser(
        description="Test or Train the Naive Dog Classifier")
    parser.add_argument("--test", action="store_true",
                        help="Only run the test")
    parser.add_argument("--epochs", default=10, type=int,
                        help="Training epochs (default: %(default)s)")
    parser.add_argument(
        "--epoch-offset", default=0, type=int,
        help="Offset for the output of epochs (default: %(default)s)")
    parser.add_argument("--restart", action="store_true",
                        help="Wipe out old model.")
    arguments = parser.parse_args()
    if arguments.test:
        test(loaders_scratch["test"], model_scratch, criterion_scratch)
    else:
        train_and_test(epochs=arguments.epochs,
                       train_batches=loaders_scratch["train"],
                       validate_batches=loaders_scratch["validate"],
                       test_batches=loaders_scratch["test"],
                       model=model_scratch,
                       optimizer=optimizer_scratch,
                       criterion=criterion_scratch,
                       epoch_start=arguments.epoch_offset,
                       model_path=model_path.from_folder,
                       load_model=not arguments.restart)
transforms.CenterCrop(IMAGE_SIZE), transforms.ToTensor(), transforms.Normalize(means, deviations)]) training = datasets.ImageFolder(root=str(dog_training_path.folder),", "for training validation_batches: batch-loaders for validation model: the network to train optimizer: the" ]
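The fc1 sizing falls straight out of the pooling arithmetic: three 2x2 max pools halve 224 three times (224 -> 112 -> 56 -> 28), so the flattened input is 28 * 28 * LAYER_THREE_OUT. A minimal sanity check, assuming only the definitions above:

# Sanity check of the flattened feature size feeding fc1 (assumes the
# module above has been loaded; BREEDS comes from the training folder).
net = Net()
dummy = torch.zeros(1, 3, IMAGE_SIZE, IMAGE_SIZE)  # one fake RGB image
assert (IMAGE_HALF_SIZE // 4) ** 2 * LAYER_THREE_OUT == 28 * 28 * 64
print(net(dummy).shape)  # torch.Size([1, BREEDS])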
[ "in items_and_annotations} == set( [i.reference_id for i in items] ) ref_item = dataset.refloc(target_item.reference_id)", "for _ in items_and_annotations} == set( [i.reference_id for i in items] ) ref_item", "dataset.items_and_annotations() target_item = items[0] assert {_[\"item\"].reference_id for _ in items_and_annotations} == set( [i.reference_id", "items = dataset.items items_and_annotations = dataset.items_and_annotations() target_item = items[0] assert {_[\"item\"].reference_id for _", "test_get_pandaset_items(CLIENT): dataset: Dataset = CLIENT.get_dataset(PANDASET_ID) items = dataset.items items_and_annotations = dataset.items_and_annotations() target_item =", "dataset: Dataset = CLIENT.get_dataset(PANDASET_ID) items = dataset.items items_and_annotations = dataset.items_and_annotations() target_item = items[0]", "Dataset PANDASET_ID = \"ds_bwhjbyfb8mjj0ykagxf0\" def test_get_pandaset_items(CLIENT): dataset: Dataset = CLIENT.get_dataset(PANDASET_ID) items = dataset.items", "== set( [i.reference_id for i in items] ) ref_item = dataset.refloc(target_item.reference_id) assert ref_item[\"item\"]", "{_[\"item\"].reference_id for _ in items_and_annotations} == set( [i.reference_id for i in items] )", "_ in items_and_annotations} == set( [i.reference_id for i in items] ) ref_item =", "i in items] ) ref_item = dataset.refloc(target_item.reference_id) assert ref_item[\"item\"] == target_item index_item =", "= CLIENT.get_dataset(PANDASET_ID) items = dataset.items items_and_annotations = dataset.items_and_annotations() target_item = items[0] assert {_[\"item\"].reference_id", "items] ) ref_item = dataset.refloc(target_item.reference_id) assert ref_item[\"item\"] == target_item index_item = dataset.iloc(0) assert", "dataset.items items_and_annotations = dataset.items_and_annotations() target_item = items[0] assert {_[\"item\"].reference_id for _ in items_and_annotations}", "Dataset = CLIENT.get_dataset(PANDASET_ID) items = dataset.items items_and_annotations = dataset.items_and_annotations() target_item = items[0] assert", "= dataset.items items_and_annotations = dataset.items_and_annotations() target_item = items[0] assert {_[\"item\"].reference_id for _ in", "import Dataset PANDASET_ID = \"ds_bwhjbyfb8mjj0ykagxf0\" def test_get_pandaset_items(CLIENT): dataset: Dataset = CLIENT.get_dataset(PANDASET_ID) items =", "PANDASET_ID = \"ds_bwhjbyfb8mjj0ykagxf0\" def test_get_pandaset_items(CLIENT): dataset: Dataset = CLIENT.get_dataset(PANDASET_ID) items = dataset.items items_and_annotations", "= dataset.items_and_annotations() target_item = items[0] assert {_[\"item\"].reference_id for _ in items_and_annotations} == set(", "in items] ) ref_item = dataset.refloc(target_item.reference_id) assert ref_item[\"item\"] == target_item index_item = dataset.iloc(0)", "\"ds_bwhjbyfb8mjj0ykagxf0\" def test_get_pandaset_items(CLIENT): dataset: Dataset = CLIENT.get_dataset(PANDASET_ID) items = dataset.items items_and_annotations = dataset.items_and_annotations()", "nucleus.dataset import Dataset PANDASET_ID = \"ds_bwhjbyfb8mjj0ykagxf0\" def test_get_pandaset_items(CLIENT): dataset: Dataset = CLIENT.get_dataset(PANDASET_ID) items", "for i in items] ) ref_item = dataset.refloc(target_item.reference_id) assert ref_item[\"item\"] == target_item index_item", "items[0] assert {_[\"item\"].reference_id for _ in items_and_annotations} == set( [i.reference_id for i in", "target_item = items[0] assert {_[\"item\"].reference_id for _ in items_and_annotations} == set( [i.reference_id for", ") ref_item = 
dataset.refloc(target_item.reference_id) assert ref_item[\"item\"] == target_item index_item = dataset.iloc(0) assert index_item[\"item\"]", "from nucleus.dataset import Dataset PANDASET_ID = \"ds_bwhjbyfb8mjj0ykagxf0\" def test_get_pandaset_items(CLIENT): dataset: Dataset = CLIENT.get_dataset(PANDASET_ID)", "[i.reference_id for i in items] ) ref_item = dataset.refloc(target_item.reference_id) assert ref_item[\"item\"] == target_item", "= \"ds_bwhjbyfb8mjj0ykagxf0\" def test_get_pandaset_items(CLIENT): dataset: Dataset = CLIENT.get_dataset(PANDASET_ID) items = dataset.items items_and_annotations =", "items_and_annotations = dataset.items_and_annotations() target_item = items[0] assert {_[\"item\"].reference_id for _ in items_and_annotations} ==", "def test_get_pandaset_items(CLIENT): dataset: Dataset = CLIENT.get_dataset(PANDASET_ID) items = dataset.items items_and_annotations = dataset.items_and_annotations() target_item", "items_and_annotations} == set( [i.reference_id for i in items] ) ref_item = dataset.refloc(target_item.reference_id) assert", "set( [i.reference_id for i in items] ) ref_item = dataset.refloc(target_item.reference_id) assert ref_item[\"item\"] ==", "= dataset.refloc(target_item.reference_id) assert ref_item[\"item\"] == target_item index_item = dataset.iloc(0) assert index_item[\"item\"] in items", "assert {_[\"item\"].reference_id for _ in items_and_annotations} == set( [i.reference_id for i in items]", "CLIENT.get_dataset(PANDASET_ID) items = dataset.items items_and_annotations = dataset.items_and_annotations() target_item = items[0] assert {_[\"item\"].reference_id for", "= items[0] assert {_[\"item\"].reference_id for _ in items_and_annotations} == set( [i.reference_id for i", "ref_item = dataset.refloc(target_item.reference_id) assert ref_item[\"item\"] == target_item index_item = dataset.iloc(0) assert index_item[\"item\"] in" ]
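CLIENT here is presumably a pytest fixture wired up elsewhere in the test suite; a minimal sketch of what such a conftest.py could look like (the fixture name matches the test's argument, and the API key is a placeholder, not the suite's real setup):

# Hypothetical conftest.py; nucleus.NucleusClient is the Scale Nucleus
# client class, but the key below is a stand-in.
import pytest
import nucleus


@pytest.fixture
def CLIENT():
    return nucleus.NucleusClient("YOUR_SCALE_API_KEY")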
[ "getpass.getuser() if username.lower() == 'youval': osType = sys.platform if osType.startswith('win'): tempdir = (r'C:\\Phenix\\Dev\\Work\\work\\NCS\\junk')", "phenix_sources\\phenix_regression\\development\\ncs_constraints.py \"\"\" username = getpass.getuser() if username.lower() == 'youval': osType = sys.platform if", "if username.lower() == 'youval': osType = sys.platform if osType.startswith('win'): tempdir = (r'C:\\Phenix\\Dev\\Work\\work\\NCS\\junk') else:", "getpass import sys import os def run(file_name): pdb_processed_file = monomer_library.pdb_interpretation.run(args=[file_name], assume_hydrogens_all_missing=False, hard_minimum_nonbonded_distance=0.0, nonbonded_distance_threshold=None,", "def run(file_name): pdb_processed_file = monomer_library.pdb_interpretation.run(args=[file_name], assume_hydrogens_all_missing=False, hard_minimum_nonbonded_distance=0.0, nonbonded_distance_threshold=None, substitute_non_crystallographic_unit_cell_if_necessary=True) grm = pdb_processed_file.geometry_restraints_manager() print", "sys.platform if osType.startswith('win'): tempdir = (r'C:\\Phenix\\Dev\\Work\\work\\NCS\\junk') else: tempdir = ('/net/cci/youval/Work/work/NCS/junk') os.chdir(tempdir) if __name__=='__main__':", "import monomer_library import mmtbx.monomer_library.server import getpass import sys import os def run(file_name): pdb_processed_file", "grm = pdb_processed_file.geometry_restraints_manager() print 'done' def set_test_folder(): \"\"\" Change working directory to avoid", "sys import os def run(file_name): pdb_processed_file = monomer_library.pdb_interpretation.run(args=[file_name], assume_hydrogens_all_missing=False, hard_minimum_nonbonded_distance=0.0, nonbonded_distance_threshold=None, substitute_non_crystallographic_unit_cell_if_necessary=True) grm", "username.lower() == 'youval': osType = sys.platform if osType.startswith('win'): tempdir = (r'C:\\Phenix\\Dev\\Work\\work\\NCS\\junk') else: tempdir", "'youval': osType = sys.platform if osType.startswith('win'): tempdir = (r'C:\\Phenix\\Dev\\Work\\work\\NCS\\junk') else: tempdir = ('/net/cci/youval/Work/work/NCS/junk')", "print 'done' def set_test_folder(): \"\"\" Change working directory to avoid littering of phenix_sources\\phenix_regression\\development\\ncs_constraints.py", "import mmtbx.monomer_library.pdb_interpretation from mmtbx import monomer_library import mmtbx.monomer_library.server import getpass import sys import", "avoid littering of phenix_sources\\phenix_regression\\development\\ncs_constraints.py \"\"\" username = getpass.getuser() if username.lower() == 'youval': osType", "monomer_library import mmtbx.monomer_library.server import getpass import sys import os def run(file_name): pdb_processed_file =", "'done' def set_test_folder(): \"\"\" Change working directory to avoid littering of phenix_sources\\phenix_regression\\development\\ncs_constraints.py \"\"\"", "= sys.platform if osType.startswith('win'): tempdir = (r'C:\\Phenix\\Dev\\Work\\work\\NCS\\junk') else: tempdir = ('/net/cci/youval/Work/work/NCS/junk') os.chdir(tempdir) if", "if osType.startswith('win'): tempdir = (r'C:\\Phenix\\Dev\\Work\\work\\NCS\\junk') else: tempdir = ('/net/cci/youval/Work/work/NCS/junk') os.chdir(tempdir) if __name__=='__main__': set_test_folder()", "osType.startswith('win'): tempdir = (r'C:\\Phenix\\Dev\\Work\\work\\NCS\\junk') else: tempdir = ('/net/cci/youval/Work/work/NCS/junk') os.chdir(tempdir) if __name__=='__main__': set_test_folder() run('full_asu.pdb')", "assume_hydrogens_all_missing=False, 
hard_minimum_nonbonded_distance=0.0, nonbonded_distance_threshold=None, substitute_non_crystallographic_unit_cell_if_necessary=True) grm = pdb_processed_file.geometry_restraints_manager() print 'done' def set_test_folder(): \"\"\" Change", "import getpass import sys import os def run(file_name): pdb_processed_file = monomer_library.pdb_interpretation.run(args=[file_name], assume_hydrogens_all_missing=False, hard_minimum_nonbonded_distance=0.0,", "= getpass.getuser() if username.lower() == 'youval': osType = sys.platform if osType.startswith('win'): tempdir =", "hard_minimum_nonbonded_distance=0.0, nonbonded_distance_threshold=None, substitute_non_crystallographic_unit_cell_if_necessary=True) grm = pdb_processed_file.geometry_restraints_manager() print 'done' def set_test_folder(): \"\"\" Change working", "nonbonded_distance_threshold=None, substitute_non_crystallographic_unit_cell_if_necessary=True) grm = pdb_processed_file.geometry_restraints_manager() print 'done' def set_test_folder(): \"\"\" Change working directory", "os def run(file_name): pdb_processed_file = monomer_library.pdb_interpretation.run(args=[file_name], assume_hydrogens_all_missing=False, hard_minimum_nonbonded_distance=0.0, nonbonded_distance_threshold=None, substitute_non_crystallographic_unit_cell_if_necessary=True) grm = pdb_processed_file.geometry_restraints_manager()", "\"\"\" Change working directory to avoid littering of phenix_sources\\phenix_regression\\development\\ncs_constraints.py \"\"\" username = getpass.getuser()", "set_test_folder(): \"\"\" Change working directory to avoid littering of phenix_sources\\phenix_regression\\development\\ncs_constraints.py \"\"\" username =", "of phenix_sources\\phenix_regression\\development\\ncs_constraints.py \"\"\" username = getpass.getuser() if username.lower() == 'youval': osType = sys.platform", "division import mmtbx.monomer_library.pdb_interpretation from mmtbx import monomer_library import mmtbx.monomer_library.server import getpass import sys", "littering of phenix_sources\\phenix_regression\\development\\ncs_constraints.py \"\"\" username = getpass.getuser() if username.lower() == 'youval': osType =", "import os def run(file_name): pdb_processed_file = monomer_library.pdb_interpretation.run(args=[file_name], assume_hydrogens_all_missing=False, hard_minimum_nonbonded_distance=0.0, nonbonded_distance_threshold=None, substitute_non_crystallographic_unit_cell_if_necessary=True) grm =", "Change working directory to avoid littering of phenix_sources\\phenix_regression\\development\\ncs_constraints.py \"\"\" username = getpass.getuser() if", "directory to avoid littering of phenix_sources\\phenix_regression\\development\\ncs_constraints.py \"\"\" username = getpass.getuser() if username.lower() ==", "mmtbx import monomer_library import mmtbx.monomer_library.server import getpass import sys import os def run(file_name):", "from __future__ import division import mmtbx.monomer_library.pdb_interpretation from mmtbx import monomer_library import mmtbx.monomer_library.server import", "working directory to avoid littering of phenix_sources\\phenix_regression\\development\\ncs_constraints.py \"\"\" username = getpass.getuser() if username.lower()", "\"\"\" username = getpass.getuser() if username.lower() == 'youval': osType = sys.platform if osType.startswith('win'):", "def set_test_folder(): \"\"\" Change working directory to avoid littering of phenix_sources\\phenix_regression\\development\\ncs_constraints.py \"\"\" username", "osType = sys.platform if 
osType.startswith('win'): tempdir = (r'C:\\Phenix\\Dev\\Work\\work\\NCS\\junk') else: tempdir = ('/net/cci/youval/Work/work/NCS/junk') os.chdir(tempdir)", "pdb_processed_file = monomer_library.pdb_interpretation.run(args=[file_name], assume_hydrogens_all_missing=False, hard_minimum_nonbonded_distance=0.0, nonbonded_distance_threshold=None, substitute_non_crystallographic_unit_cell_if_necessary=True) grm = pdb_processed_file.geometry_restraints_manager() print 'done' def", "from mmtbx import monomer_library import mmtbx.monomer_library.server import getpass import sys import os def", "__future__ import division import mmtbx.monomer_library.pdb_interpretation from mmtbx import monomer_library import mmtbx.monomer_library.server import getpass", "mmtbx.monomer_library.pdb_interpretation from mmtbx import monomer_library import mmtbx.monomer_library.server import getpass import sys import os", "<gh_stars>0 from __future__ import division import mmtbx.monomer_library.pdb_interpretation from mmtbx import monomer_library import mmtbx.monomer_library.server", "run(file_name): pdb_processed_file = monomer_library.pdb_interpretation.run(args=[file_name], assume_hydrogens_all_missing=False, hard_minimum_nonbonded_distance=0.0, nonbonded_distance_threshold=None, substitute_non_crystallographic_unit_cell_if_necessary=True) grm = pdb_processed_file.geometry_restraints_manager() print 'done'", "mmtbx.monomer_library.server import getpass import sys import os def run(file_name): pdb_processed_file = monomer_library.pdb_interpretation.run(args=[file_name], assume_hydrogens_all_missing=False,", "import sys import os def run(file_name): pdb_processed_file = monomer_library.pdb_interpretation.run(args=[file_name], assume_hydrogens_all_missing=False, hard_minimum_nonbonded_distance=0.0, nonbonded_distance_threshold=None, substitute_non_crystallographic_unit_cell_if_necessary=True)", "== 'youval': osType = sys.platform if osType.startswith('win'): tempdir = (r'C:\\Phenix\\Dev\\Work\\work\\NCS\\junk') else: tempdir =", "= monomer_library.pdb_interpretation.run(args=[file_name], assume_hydrogens_all_missing=False, hard_minimum_nonbonded_distance=0.0, nonbonded_distance_threshold=None, substitute_non_crystallographic_unit_cell_if_necessary=True) grm = pdb_processed_file.geometry_restraints_manager() print 'done' def set_test_folder():", "monomer_library.pdb_interpretation.run(args=[file_name], assume_hydrogens_all_missing=False, hard_minimum_nonbonded_distance=0.0, nonbonded_distance_threshold=None, substitute_non_crystallographic_unit_cell_if_necessary=True) grm = pdb_processed_file.geometry_restraints_manager() print 'done' def set_test_folder(): \"\"\"", "substitute_non_crystallographic_unit_cell_if_necessary=True) grm = pdb_processed_file.geometry_restraints_manager() print 'done' def set_test_folder(): \"\"\" Change working directory to", "= pdb_processed_file.geometry_restraints_manager() print 'done' def set_test_folder(): \"\"\" Change working directory to avoid littering", "pdb_processed_file.geometry_restraints_manager() print 'done' def set_test_folder(): \"\"\" Change working directory to avoid littering of", "to avoid littering of phenix_sources\\phenix_regression\\development\\ncs_constraints.py \"\"\" username = getpass.getuser() if username.lower() == 'youval':", "username = getpass.getuser() if username.lower() == 'youval': osType = sys.platform if osType.startswith('win'): tempdir", "import mmtbx.monomer_library.server import getpass import sys import os def run(file_name): pdb_processed_file = 
monomer_library.pdb_interpretation.run(args=[file_name],", "import division import mmtbx.monomer_library.pdb_interpretation from mmtbx import monomer_library import mmtbx.monomer_library.server import getpass import" ]
[ "title = parse.urlparse(url).netloc prefix = '\\033[32m' if code == 404 else '\\033[31m' suffix", "== 404 else '\\033[31m' suffix = '\\033[0m' result = '{}{}{}'.format(prefix, code, suffix) print(title.ljust(16),", "] def inspect_status_code(url): try: response = request.urlopen(url) return response.code except error.HTTPError as e:", "suffix) print(title.ljust(16), result) def main(): if len(sys.argv) < 2: print('usage: python3 main.py ${USER_ID}')", "= inspect_status_code(url+user_id) title = parse.urlparse(url).netloc prefix = '\\033[32m' if code == 404 else", "multiprocessing import Process urls = [ 'https://github.com/', 'https://twitter.com/', 'https://hub.docker.com/v2/users/' ] def inspect_status_code(url): try:", "if code == 404 else '\\033[31m' suffix = '\\033[0m' result = '{}{}{}'.format(prefix, code,", "= [Process(target=inspect, args=(url, user_id)).start() for url in urls] if __name__ == '__main__': main()", "= request.urlopen(url) return response.code except error.HTTPError as e: return e.code def inspect(url, user_id):", "def inspect(url, user_id): code = inspect_status_code(url+user_id) title = parse.urlparse(url).netloc prefix = '\\033[32m' if", "len(sys.argv) < 2: print('usage: python3 main.py ${USER_ID}') exit(1) user_id = sys.argv[1] ps =", "if len(sys.argv) < 2: print('usage: python3 main.py ${USER_ID}') exit(1) user_id = sys.argv[1] ps", "error from multiprocessing import Process urls = [ 'https://github.com/', 'https://twitter.com/', 'https://hub.docker.com/v2/users/' ] def", "'https://github.com/', 'https://twitter.com/', 'https://hub.docker.com/v2/users/' ] def inspect_status_code(url): try: response = request.urlopen(url) return response.code except", "'https://hub.docker.com/v2/users/' ] def inspect_status_code(url): try: response = request.urlopen(url) return response.code except error.HTTPError as", "else '\\033[31m' suffix = '\\033[0m' result = '{}{}{}'.format(prefix, code, suffix) print(title.ljust(16), result) def", "import request, parse, error from multiprocessing import Process urls = [ 'https://github.com/', 'https://twitter.com/',", "except error.HTTPError as e: return e.code def inspect(url, user_id): code = inspect_status_code(url+user_id) title", "parse.urlparse(url).netloc prefix = '\\033[32m' if code == 404 else '\\033[31m' suffix = '\\033[0m'", "prefix = '\\033[32m' if code == 404 else '\\033[31m' suffix = '\\033[0m' result", "= sys.argv[1] ps = [Process(target=inspect, args=(url, user_id)).start() for url in urls] if __name__", "'{}{}{}'.format(prefix, code, suffix) print(title.ljust(16), result) def main(): if len(sys.argv) < 2: print('usage: python3", "parse, error from multiprocessing import Process urls = [ 'https://github.com/', 'https://twitter.com/', 'https://hub.docker.com/v2/users/' ]", "[ 'https://github.com/', 'https://twitter.com/', 'https://hub.docker.com/v2/users/' ] def inspect_status_code(url): try: response = request.urlopen(url) return response.code", "sys from urllib import request, parse, error from multiprocessing import Process urls =", "code = inspect_status_code(url+user_id) title = parse.urlparse(url).netloc prefix = '\\033[32m' if code == 404", "try: response = request.urlopen(url) return response.code except error.HTTPError as e: return e.code def", "import sys from urllib import request, parse, error from multiprocessing import Process urls", "Process urls = [ 'https://github.com/', 'https://twitter.com/', 'https://hub.docker.com/v2/users/' ] def inspect_status_code(url): try: response =", "response = 
request.urlopen(url) return response.code except error.HTTPError as e: return e.code def inspect(url,", "'https://twitter.com/', 'https://hub.docker.com/v2/users/' ] def inspect_status_code(url): try: response = request.urlopen(url) return response.code except error.HTTPError", "main.py ${USER_ID}') exit(1) user_id = sys.argv[1] ps = [Process(target=inspect, args=(url, user_id)).start() for url", "code, suffix) print(title.ljust(16), result) def main(): if len(sys.argv) < 2: print('usage: python3 main.py", "return response.code except error.HTTPError as e: return e.code def inspect(url, user_id): code =", "= '\\033[32m' if code == 404 else '\\033[31m' suffix = '\\033[0m' result =", "inspect_status_code(url): try: response = request.urlopen(url) return response.code except error.HTTPError as e: return e.code", "error.HTTPError as e: return e.code def inspect(url, user_id): code = inspect_status_code(url+user_id) title =", "404 else '\\033[31m' suffix = '\\033[0m' result = '{}{}{}'.format(prefix, code, suffix) print(title.ljust(16), result)", "e.code def inspect(url, user_id): code = inspect_status_code(url+user_id) title = parse.urlparse(url).netloc prefix = '\\033[32m'", "result) def main(): if len(sys.argv) < 2: print('usage: python3 main.py ${USER_ID}') exit(1) user_id", "return e.code def inspect(url, user_id): code = inspect_status_code(url+user_id) title = parse.urlparse(url).netloc prefix =", "code == 404 else '\\033[31m' suffix = '\\033[0m' result = '{}{}{}'.format(prefix, code, suffix)", "'\\033[0m' result = '{}{}{}'.format(prefix, code, suffix) print(title.ljust(16), result) def main(): if len(sys.argv) <", "request.urlopen(url) return response.code except error.HTTPError as e: return e.code def inspect(url, user_id): code", "python3 main.py ${USER_ID}') exit(1) user_id = sys.argv[1] ps = [Process(target=inspect, args=(url, user_id)).start() for", "response.code except error.HTTPError as e: return e.code def inspect(url, user_id): code = inspect_status_code(url+user_id)", "user_id = sys.argv[1] ps = [Process(target=inspect, args=(url, user_id)).start() for url in urls] if", "user_id): code = inspect_status_code(url+user_id) title = parse.urlparse(url).netloc prefix = '\\033[32m' if code ==", "= [ 'https://github.com/', 'https://twitter.com/', 'https://hub.docker.com/v2/users/' ] def inspect_status_code(url): try: response = request.urlopen(url) return", "request, parse, error from multiprocessing import Process urls = [ 'https://github.com/', 'https://twitter.com/', 'https://hub.docker.com/v2/users/'", "result = '{}{}{}'.format(prefix, code, suffix) print(title.ljust(16), result) def main(): if len(sys.argv) < 2:", "= '{}{}{}'.format(prefix, code, suffix) print(title.ljust(16), result) def main(): if len(sys.argv) < 2: print('usage:", "e: return e.code def inspect(url, user_id): code = inspect_status_code(url+user_id) title = parse.urlparse(url).netloc prefix", "as e: return e.code def inspect(url, user_id): code = inspect_status_code(url+user_id) title = parse.urlparse(url).netloc", "main(): if len(sys.argv) < 2: print('usage: python3 main.py ${USER_ID}') exit(1) user_id = sys.argv[1]", "def inspect_status_code(url): try: response = request.urlopen(url) return response.code except error.HTTPError as e: return", "2: print('usage: python3 main.py ${USER_ID}') exit(1) user_id = sys.argv[1] ps = [Process(target=inspect, args=(url,", "ps = [Process(target=inspect, args=(url, user_id)).start() for url in urls] if __name__ == '__main__':", "'\\033[32m' if code == 404 else '\\033[31m' 
suffix = '\\033[0m' result = '{}{}{}'.format(prefix,", "print(title.ljust(16), result) def main(): if len(sys.argv) < 2: print('usage: python3 main.py ${USER_ID}') exit(1)", "from multiprocessing import Process urls = [ 'https://github.com/', 'https://twitter.com/', 'https://hub.docker.com/v2/users/' ] def inspect_status_code(url):", "import Process urls = [ 'https://github.com/', 'https://twitter.com/', 'https://hub.docker.com/v2/users/' ] def inspect_status_code(url): try: response", "def main(): if len(sys.argv) < 2: print('usage: python3 main.py ${USER_ID}') exit(1) user_id =", "${USER_ID}') exit(1) user_id = sys.argv[1] ps = [Process(target=inspect, args=(url, user_id)).start() for url in", "sys.argv[1] ps = [Process(target=inspect, args=(url, user_id)).start() for url in urls] if __name__ ==", "exit(1) user_id = sys.argv[1] ps = [Process(target=inspect, args=(url, user_id)).start() for url in urls]", "urllib import request, parse, error from multiprocessing import Process urls = [ 'https://github.com/',", "print('usage: python3 main.py ${USER_ID}') exit(1) user_id = sys.argv[1] ps = [Process(target=inspect, args=(url, user_id)).start()", "inspect(url, user_id): code = inspect_status_code(url+user_id) title = parse.urlparse(url).netloc prefix = '\\033[32m' if code", "urls = [ 'https://github.com/', 'https://twitter.com/', 'https://hub.docker.com/v2/users/' ] def inspect_status_code(url): try: response = request.urlopen(url)", "= parse.urlparse(url).netloc prefix = '\\033[32m' if code == 404 else '\\033[31m' suffix =", "from urllib import request, parse, error from multiprocessing import Process urls = [", "= '\\033[0m' result = '{}{}{}'.format(prefix, code, suffix) print(title.ljust(16), result) def main(): if len(sys.argv)", "inspect_status_code(url+user_id) title = parse.urlparse(url).netloc prefix = '\\033[32m' if code == 404 else '\\033[31m'", "'\\033[31m' suffix = '\\033[0m' result = '{}{}{}'.format(prefix, code, suffix) print(title.ljust(16), result) def main():", "< 2: print('usage: python3 main.py ${USER_ID}') exit(1) user_id = sys.argv[1] ps = [Process(target=inspect,", "suffix = '\\033[0m' result = '{}{}{}'.format(prefix, code, suffix) print(title.ljust(16), result) def main(): if" ]
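One process per URL works, but the checks are I/O-bound, so threads are usually the lighter choice. A minimal alternative sketch reusing urls and inspect from above (main_threaded is a hypothetical name, not part of the original script):

# Thread-based variant of main(): same fan-out, no extra processes.
from concurrent.futures import ThreadPoolExecutor


def main_threaded(user_id):
    with ThreadPoolExecutor(max_workers=len(urls)) as pool:
        for url in urls:
            pool.submit(inspect, url, user_id)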
[ "for i, cap in enumerate(right_captions): end = right_caption_lengths[i] collate_data['right_captions'][i, :end] = cap[:end] #", "batch_size, num_workers): self.data_dir = data_dir self.which_set = which_set self.dataset_name = dataset_name assert self.which_set", "COCOTextImageDataLoader(BaseDataLoader): \"\"\" COCO Image Caption Model Data Loader \"\"\" def __init__(self, data_dir, which_set,", "= {} # Sort a data list by right caption length (descending order).", "{} # Sort a data list by right caption length (descending order). data.sort(key=lambda", "self.dataset_name = dataset_name assert self.which_set in {'train', 'valid', 'test'} self.image_size = (image_size, image_size)", "shuffle=False, num_workers=0, collate_fn=text_image_collate_fn) class COCOTextImageDataLoader(BaseDataLoader): \"\"\" COCO Image Caption Model Data Loader \"\"\"", "255] to a torch in range [-1.0, 1.0] mean = torch.tensor([0.5, 0.5, 0.5],", "= dataset_name assert self.which_set in {'train', 'valid', 'test'} self.image_size = (image_size, image_size) self.batch_size", "255] to a torch in range [-1.0, 1.0] self.transform = transforms.Compose([ transforms.RandomHorizontalFlip(), transforms.ToTensor(),", "print('right images 256 shape:', data['right_images_256'].shape) print(\"right embed shape:\", data['right_embeds'].shape) print(\"right caption shape:\", data['right_captions'].shape)", "(descending order). data.sort(key=lambda x: x['right_caption'].size(0), reverse=True) collate_data['right_img_id'] = [] collate_data['class_id'] = [] collate_data['right_txt']", "torch.stack(wrong_images_128, 0) collate_data['wrong_images_256'] = torch.stack(wrong_images_256, 0) return collate_data class TextImageDataLoader(DataLoader): def __init__(self, data_dir,", "wrong_images_128 = [] wrong_images_256 = [] for i in range(len(data)): class_ids.append(data[i]['right_img_id']) collate_data['class_id'].append(data[i]['right_class_id']) collate_data['right_txt'].append(data[i]['right_txt'])", "wrong_embeds = [] wrong_images_32 = [] wrong_images_64 = [] wrong_images_128 = [] wrong_images_256", "which_set self.validation_split = validation_split assert self.which_set in {'train', 'val', 'test'} self.image_size = (image_size,", "torch.stack(right_embeds, 0) collate_data['right_images_32'] = torch.stack(right_images_32, 0) collate_data['right_images_64'] = torch.stack(right_images_64, 0) collate_data['right_images_128'] = torch.stack(right_images_128,", "in right_captions] collate_data['right_caption_lengths'] = torch.LongTensor(right_caption_lengths) collate_data['right_captions'] = torch.zeros(len(right_caption_lengths), max(right_caption_lengths)).long() for i, cap in", "data_dir='/Users/leon/Projects/I2T2I/data/coco/', # dataset_name=\"birds\", which_set='val', image_size=256, batch_size=16, validation_split=0.05, num_workers=0) print(len(data_loader.dataset.vocab)) print(len(data_loader.dataset.vocab.word2idx)) for i, data", "vocab_from_file=True) # self.n_samples = len(self.dataset) if self.which_set == 'train': super(COCOTextImageDataLoader, self).__init__( dataset=self.dataset, batch_size=self.batch_size,", "print('right images 128 shape:', data['right_images_128'].shape) print('right images 256 shape:', data['right_images_256'].shape) print(\"right embed shape:\",", "x: len(x), reverse=True) wrong_caption_lengths = [len(cap) for cap in wrong_captions] collate_data['wrong_caption_lengths'] = torch.LongTensor(wrong_caption_lengths)", "wrong_captions.sort(key=lambda x: len(x), reverse=True) wrong_caption_lengths = [len(cap) 
for cap in wrong_captions] collate_data['wrong_caption_lengths'] =", "print(\"wrong caption shape:\", data['wrong_captions'].shape) print(\"wrong caption lengths:\", data['wrong_caption_lengths']) print(\"wrong txt:\", data[\"wrong_txt\"]) if i", "= torch.stack(right_images_128, 0) collate_data['right_images_256'] = torch.stack(right_images_256, 0) collate_data['wrong_embeds'] = torch.stack(wrong_embeds, 0) collate_data['wrong_images_32'] =", "torch.LongTensor(right_caption_lengths) collate_data['right_captions'] = torch.zeros(len(right_caption_lengths), max(right_caption_lengths)).long() for i, cap in enumerate(right_captions): end = right_caption_lengths[i]", "else: super(COCOTextImageDataLoader, self).__init__( dataset=self.dataset, batch_size=self.batch_size, shuffle=False, validation_split=0, num_workers=self.num_workers, collate_fn=text_image_collate_fn) if __name__ == '__main__':", "dataset_name assert self.which_set in {'train', 'valid', 'test'} self.image_size = (image_size, image_size) self.batch_size =", "len(self.dataset) if self.which_set == 'train' or self.which_set == 'valid': super(TextImageDataLoader, self).__init__( dataset=self.dataset, batch_size=self.batch_size,", "print(\"class_ids:\", data[\"class_id\"]) print('right images 32 shape:', data['right_images_32'].shape) print('right images 64 shape:', data['right_images_64'].shape) print('right", "wrong_images_32.append(data[i]['wrong_image_32']) wrong_images_64.append(data[i]['wrong_image_64']) wrong_images_128.append(data[i]['wrong_image_128']) wrong_images_256.append(data[i]['wrong_image_256']) # sort and get captions, lengths, images, embeds, etc.", "torch.stack(wrong_images_64, 0) collate_data['wrong_images_128'] = torch.stack(wrong_images_128, 0) collate_data['wrong_images_256'] = torch.stack(wrong_images_256, 0) return collate_data class", "batch_size=self.batch_size, shuffle=True, num_workers=self.num_workers, collate_fn=text_image_collate_fn ) else: super(TextImageDataLoader, self).__init__( dataset=self.dataset, batch_size=self.batch_size, shuffle=False, num_workers=0, collate_fn=text_image_collate_fn)", "[] wrong_captions = [] wrong_embeds = [] wrong_images_32 = [] wrong_images_64 = []", "32 shape:', data['right_images_32'].shape) print('right images 64 shape:', data['right_images_64'].shape) print('right images 128 shape:', data['right_images_128'].shape)", "0.5], dtype=torch.float32) std = torch.tensor([0.5, 0.5, 0.5], dtype=torch.float32) if which_set == 'val' or", "= [len(cap) for cap in right_captions] collate_data['right_caption_lengths'] = torch.LongTensor(right_caption_lengths) collate_data['right_captions'] = torch.zeros(len(right_caption_lengths), max(right_caption_lengths)).long()", "shape:\", data['wrong_embeds'].shape) print(\"wrong caption shape:\", data['wrong_captions'].shape) print(\"wrong caption lengths:\", data['wrong_caption_lengths']) print(\"wrong txt:\", data[\"wrong_txt\"])", "[len(cap) for cap in wrong_captions] collate_data['wrong_caption_lengths'] = torch.LongTensor(wrong_caption_lengths) collate_data['wrong_captions'] = torch.zeros(len(wrong_caption_lengths), max(wrong_caption_lengths)).long() for", "collate_data['wrong_embeds'] = torch.stack(wrong_embeds, 0) collate_data['wrong_images_32'] = torch.stack(wrong_images_32, 0) collate_data['wrong_images_64'] = torch.stack(wrong_images_64, 0) collate_data['wrong_images_128']", "0) collate_data['wrong_embeds'] = torch.stack(wrong_embeds, 0) collate_data['wrong_images_32'] = torch.stack(wrong_images_32, 0) collate_data['wrong_images_64'] 
= torch.stack(wrong_images_64, 0)", "from base import BaseDataLoader def text_image_collate_fn(data): collate_data = {} # Sort a data", "= [] right_captions = [] right_embeds = [] right_images_32 = [] right_images_64 =", "caption shape:\", data['wrong_captions'].shape) print(\"wrong caption lengths:\", data['wrong_caption_lengths']) print(\"wrong txt:\", data[\"wrong_txt\"]) if i ==", "for i, data in enumerate(data_loader): print(i) print(\"right_img_id:\", data['right_img_id']) # print(\"class_ids:\", data[\"class_id\"]) print('right images", "wrong_captions = [] wrong_embeds = [] wrong_images_32 = [] wrong_images_64 = [] wrong_images_128", "self).__init__( dataset=self.dataset, batch_size=self.batch_size, shuffle=False, validation_split=0, num_workers=self.num_workers, collate_fn=text_image_collate_fn) if __name__ == '__main__': data_loader =", "TextImageDataLoader(DataLoader): def __init__(self, data_dir, dataset_name, which_set, image_size, batch_size, num_workers): self.data_dir = data_dir self.which_set", "print(len(data_loader.dataset.vocab)) print(len(data_loader.dataset.vocab.word2idx)) for i, data in enumerate(data_loader): print(i) print(\"right_img_id:\", data['right_img_id']) # print(\"class_ids:\", data[\"class_id\"])", "to a torch in range [-1.0, 1.0] mean = torch.tensor([0.5, 0.5, 0.5], dtype=torch.float32)", "by right caption length (descending order). data.sort(key=lambda x: x['right_caption'].size(0), reverse=True) collate_data['right_img_id'] = []", "\"\"\" def __init__(self, data_dir, which_set, image_size, batch_size, validation_split, num_workers): self.data_dir = data_dir self.which_set", "0.5], dtype=torch.float32) if which_set == 'val' or which_set == 'test': self.transform = transforms.Compose([", "which_set == 'val' or which_set == 'test': self.transform = transforms.Compose([ transforms.RandomHorizontalFlip(), transforms.ToTensor(), transforms.Normalize(mean=mean,", "range [0, 255] to a torch in range [-1.0, 1.0] self.transform = transforms.Compose([", "[-1.0, 1.0] mean = torch.tensor([0.5, 0.5, 0.5], dtype=torch.float32) std = torch.tensor([0.5, 0.5, 0.5],", "base import BaseDataLoader def text_image_collate_fn(data): collate_data = {} # Sort a data list", "256 shape:', data['right_images_256'].shape) print(\"right embed shape:\", data['right_embeds'].shape) print(\"right caption shape:\", data['right_captions'].shape) print(\"right caption", "# sort and get captions, lengths, images, embeds, etc. 
wrong_captions.sort(key=lambda x: len(x), reverse=True)", "shuffle=True, num_workers=self.num_workers, collate_fn=text_image_collate_fn ) else: super(TextImageDataLoader, self).__init__( dataset=self.dataset, batch_size=self.batch_size, shuffle=False, num_workers=0, collate_fn=text_image_collate_fn) class", "range(len(data)): class_ids.append(data[i]['right_img_id']) collate_data['class_id'].append(data[i]['right_class_id']) collate_data['right_txt'].append(data[i]['right_txt']) right_captions.append(data[i]['right_caption']) right_embeds.append(data[i]['right_embed']) right_images_32.append(data[i]['right_image_32']) right_images_64.append(data[i]['right_image_64']) right_images_128.append(data[i]['right_image_128']) right_images_256.append(data[i]['right_image_256']) collate_data['wrong_txt'].append(data[i]['wrong_txt']) wrong_captions.append(data[i]['wrong_caption']) wrong_embeds.append(data[i]['wrong_embed'])", "validation_split=0, num_workers=self.num_workers, collate_fn=text_image_collate_fn) if __name__ == '__main__': data_loader = COCOTextImageDataLoader( data_dir='/Users/leon/Projects/I2T2I/data/coco/', # dataset_name=\"birds\",", "or self.which_set == 'valid': super(TextImageDataLoader, self).__init__( dataset=self.dataset, batch_size=self.batch_size, shuffle=True, num_workers=self.num_workers, collate_fn=text_image_collate_fn ) else:", "[] right_captions = [] right_embeds = [] right_images_32 = [] right_images_64 = []", "self.batch_size = batch_size self.num_workers = num_workers # transforms.ToTensor convert PIL images in range", "sort and get captions, lengths, images, embeds, etc. wrong_captions.sort(key=lambda x: len(x), reverse=True) wrong_caption_lengths", "range [0, 255] to a torch in range [-1.0, 1.0] mean = torch.tensor([0.5,", "COCOTextImageDataset(self.data_dir, self.which_set, self.transform, vocab_from_file=True) # self.n_samples = len(self.dataset) if self.which_set == 'train': super(COCOTextImageDataLoader,", "data['wrong_img_id']) print('wrong images 32 shape:', data['wrong_images_32'].shape) print('wrong images 64 shape:', data['wrong_images_64'].shape) print('wrong images", "transforms.Compose([ transforms.RandomHorizontalFlip(), transforms.ToTensor(), transforms.Normalize(mean=[0.5, 0.5, 0.5], std=[0.5, 0.5, 0.5]) ]) self.dataset = TextImageDataset(self.data_dir,", "torch in range [-1.0, 1.0] self.transform = transforms.Compose([ transforms.RandomHorizontalFlip(), transforms.ToTensor(), transforms.Normalize(mean=[0.5, 0.5, 0.5],", "transforms.Normalize(mean=[0.5, 0.5, 0.5], std=[0.5, 0.5, 0.5]) ]) self.dataset = TextImageDataset(self.data_dir, self.dataset_name, self.which_set, self.transform,", "in range [0, 255] to a torch in range [-1.0, 1.0] mean =", "images 64 shape:', data['wrong_images_64'].shape) print('wrong images 128 shape:', data['wrong_images_128'].shape) print('wrong images 256 shape:',", "mean = torch.tensor([0.5, 0.5, 0.5], dtype=torch.float32) std = torch.tensor([0.5, 0.5, 0.5], dtype=torch.float32) if", "range [-1.0, 1.0] mean = torch.tensor([0.5, 0.5, 0.5], dtype=torch.float32) std = torch.tensor([0.5, 0.5,", "wrong_images_64 = [] wrong_images_128 = [] wrong_images_256 = [] for i in range(len(data)):", "numpy as np from torch.utils.data import DataLoader from torchvision import transforms from data_loader.datasets_custom", "embeds, etc. 
wrong_captions.sort(key=lambda x: len(x), reverse=True) wrong_caption_lengths = [len(cap) for cap in wrong_captions]", "right_captions = [] right_embeds = [] right_images_32 = [] right_images_64 = [] right_images_128", "shape:\", data['wrong_captions'].shape) print(\"wrong caption lengths:\", data['wrong_caption_lengths']) print(\"wrong txt:\", data[\"wrong_txt\"]) if i == 10:", "64 shape:', data['wrong_images_64'].shape) print('wrong images 128 shape:', data['wrong_images_128'].shape) print('wrong images 256 shape:', data['wrong_images_256'].shape)", "right_captions.append(data[i]['right_caption']) right_embeds.append(data[i]['right_embed']) right_images_32.append(data[i]['right_image_32']) right_images_64.append(data[i]['right_image_64']) right_images_128.append(data[i]['right_image_128']) right_images_256.append(data[i]['right_image_256']) collate_data['wrong_txt'].append(data[i]['wrong_txt']) wrong_captions.append(data[i]['wrong_caption']) wrong_embeds.append(data[i]['wrong_embed']) wrong_images_32.append(data[i]['wrong_image_32']) wrong_images_64.append(data[i]['wrong_image_64']) wrong_images_128.append(data[i]['wrong_image_128']) wrong_images_256.append(data[i]['wrong_image_256'])", "else: super(TextImageDataLoader, self).__init__( dataset=self.dataset, batch_size=self.batch_size, shuffle=False, num_workers=0, collate_fn=text_image_collate_fn) class COCOTextImageDataLoader(BaseDataLoader): \"\"\" COCO Image", "torch.stack(right_images_256, 0) collate_data['wrong_embeds'] = torch.stack(wrong_embeds, 0) collate_data['wrong_images_32'] = torch.stack(wrong_images_32, 0) collate_data['wrong_images_64'] = torch.stack(wrong_images_64,", "i, cap in enumerate(wrong_captions): end = wrong_caption_lengths[i] collate_data['wrong_captions'][i, :end] = cap[:end] collate_data['class_id'] =", "collate_data['wrong_img_id'] = [] collate_data['wrong_txt'] = [] wrong_captions = [] wrong_embeds = [] wrong_images_32", "right_embeds = [] right_images_32 = [] right_images_64 = [] right_images_128 = [] right_images_256", "transforms.RandomHorizontalFlip(), transforms.ToTensor(), transforms.Normalize(mean=mean, std=std) ]) else: self.transform = transforms.Compose([ # transforms.RandomHorizontalFlip(), transforms.ToTensor(), transforms.Normalize(mean=mean,", "images in range [0, 255] to a torch in range [-1.0, 1.0] self.transform", "lengths:\", data['right_caption_lengths']) print(\"right txt:\", data[\"right_txt\"]) print(\"wrong_img_id:\", data['wrong_img_id']) print('wrong images 32 shape:', data['wrong_images_32'].shape) print('wrong", "= [] class_ids = [] right_captions = [] right_embeds = [] right_images_32 =", "for cap in right_captions] collate_data['right_caption_lengths'] = torch.LongTensor(right_caption_lengths) collate_data['right_captions'] = torch.zeros(len(right_caption_lengths), max(right_caption_lengths)).long() for i,", "torch.stack(wrong_embeds, 0) collate_data['wrong_images_32'] = torch.stack(wrong_images_32, 0) collate_data['wrong_images_64'] = torch.stack(wrong_images_64, 0) collate_data['wrong_images_128'] = torch.stack(wrong_images_128,", "text_image_collate_fn(data): collate_data = {} # Sort a data list by right caption length", "collate_data['right_img_id'] = [] collate_data['class_id'] = [] collate_data['right_txt'] = [] class_ids = [] right_captions", "return collate_data class TextImageDataLoader(DataLoader): def __init__(self, data_dir, dataset_name, which_set, image_size, batch_size, num_workers): self.data_dir", "collate_fn=text_image_collate_fn) class 
COCOTextImageDataLoader(BaseDataLoader): \"\"\" COCO Image Caption Model Data Loader \"\"\" def __init__(self,", "shape:', data['wrong_images_64'].shape) print('wrong images 128 shape:', data['wrong_images_128'].shape) print('wrong images 256 shape:', data['wrong_images_256'].shape) print(\"wrong", "lengths, images, embeds, etc. right_caption_lengths = [len(cap) for cap in right_captions] collate_data['right_caption_lengths'] =", "self.data_dir = data_dir self.which_set = which_set self.dataset_name = dataset_name assert self.which_set in {'train',", "self.data_dir = data_dir self.which_set = which_set self.validation_split = validation_split assert self.which_set in {'train',", "32 shape:', data['wrong_images_32'].shape) print('wrong images 64 shape:', data['wrong_images_64'].shape) print('wrong images 128 shape:', data['wrong_images_128'].shape)", "in enumerate(wrong_captions): end = wrong_caption_lengths[i] collate_data['wrong_captions'][i, :end] = cap[:end] collate_data['class_id'] = np.stack(class_ids) collate_data['right_embeds']", "== 'train' or self.which_set == 'valid': super(TextImageDataLoader, self).__init__( dataset=self.dataset, batch_size=self.batch_size, shuffle=True, num_workers=self.num_workers, collate_fn=text_image_collate_fn", "[] right_images_256 = [] collate_data['wrong_img_id'] = [] collate_data['wrong_txt'] = [] wrong_captions = []", "in enumerate(right_captions): end = right_caption_lengths[i] collate_data['right_captions'][i, :end] = cap[:end] # sort and get", "image_size=256, batch_size=16, validation_split=0.05, num_workers=0) print(len(data_loader.dataset.vocab)) print(len(data_loader.dataset.vocab.word2idx)) for i, data in enumerate(data_loader): print(i) print(\"right_img_id:\",", "dtype=torch.float32) std = torch.tensor([0.5, 0.5, 0.5], dtype=torch.float32) if which_set == 'val' or which_set", "a torch in range [-1.0, 1.0] self.transform = transforms.Compose([ transforms.RandomHorizontalFlip(), transforms.ToTensor(), transforms.Normalize(mean=[0.5, 0.5,", "images 32 shape:', data['wrong_images_32'].shape) print('wrong images 64 shape:', data['wrong_images_64'].shape) print('wrong images 128 shape:',", "= torch.stack(right_embeds, 0) collate_data['right_images_32'] = torch.stack(right_images_32, 0) collate_data['right_images_64'] = torch.stack(right_images_64, 0) collate_data['right_images_128'] =", "collate_data['wrong_txt'].append(data[i]['wrong_txt']) wrong_captions.append(data[i]['wrong_caption']) wrong_embeds.append(data[i]['wrong_embed']) wrong_images_32.append(data[i]['wrong_image_32']) wrong_images_64.append(data[i]['wrong_image_64']) wrong_images_128.append(data[i]['wrong_image_128']) wrong_images_256.append(data[i]['wrong_image_256']) # sort and get captions, lengths,", "len(x), reverse=True) wrong_caption_lengths = [len(cap) for cap in wrong_captions] collate_data['wrong_caption_lengths'] = torch.LongTensor(wrong_caption_lengths) collate_data['wrong_captions']", "= cap[:end] # sort and get captions, lengths, images, embeds, etc. 
wrong_captions.sort(key=lambda x:", "[] right_images_32 = [] right_images_64 = [] right_images_128 = [] right_images_256 = []", "self.num_workers = num_workers # transforms.ToTensor convert PIL images in range [0, 255] to", "= [] right_embeds = [] right_images_32 = [] right_images_64 = [] right_images_128 =", "[] right_embeds = [] right_images_32 = [] right_images_64 = [] right_images_128 = []", "max(right_caption_lengths)).long() for i, cap in enumerate(right_captions): end = right_caption_lengths[i] collate_data['right_captions'][i, :end] = cap[:end]", "right_images_128 = [] right_images_256 = [] collate_data['wrong_img_id'] = [] collate_data['wrong_txt'] = [] wrong_captions", "self.which_set in {'train', 'val', 'test'} self.image_size = (image_size, image_size) self.batch_size = batch_size self.num_workers", "as np from torch.utils.data import DataLoader from torchvision import transforms from data_loader.datasets_custom import", "0) collate_data['right_images_256'] = torch.stack(right_images_256, 0) collate_data['wrong_embeds'] = torch.stack(wrong_embeds, 0) collate_data['wrong_images_32'] = torch.stack(wrong_images_32, 0)", "i, cap in enumerate(right_captions): end = right_caption_lengths[i] collate_data['right_captions'][i, :end] = cap[:end] # sort", "128 shape:', data['wrong_images_128'].shape) print('wrong images 256 shape:', data['wrong_images_256'].shape) print(\"wrong embed shape:\", data['wrong_embeds'].shape) print(\"wrong", "right_images_32 = [] right_images_64 = [] right_images_128 = [] right_images_256 = [] collate_data['wrong_img_id']", "data['right_images_64'].shape) print('right images 128 shape:', data['right_images_128'].shape) print('right images 256 shape:', data['right_images_256'].shape) print(\"right embed", "print(i) print(\"right_img_id:\", data['right_img_id']) # print(\"class_ids:\", data[\"class_id\"]) print('right images 32 shape:', data['right_images_32'].shape) print('right images", "shape:', data['right_images_256'].shape) print(\"right embed shape:\", data['right_embeds'].shape) print(\"right caption shape:\", data['right_captions'].shape) print(\"right caption lengths:\",", "data[\"right_txt\"]) print(\"wrong_img_id:\", data['wrong_img_id']) print('wrong images 32 shape:', data['wrong_images_32'].shape) print('wrong images 64 shape:', data['wrong_images_64'].shape)", "right_images_256 = [] collate_data['wrong_img_id'] = [] collate_data['wrong_txt'] = [] wrong_captions = [] wrong_embeds", "collate_data['right_images_128'] = torch.stack(right_images_128, 0) collate_data['right_images_256'] = torch.stack(right_images_256, 0) collate_data['wrong_embeds'] = torch.stack(wrong_embeds, 0) collate_data['wrong_images_32']", "= torch.stack(wrong_images_256, 0) return collate_data class TextImageDataLoader(DataLoader): def __init__(self, data_dir, dataset_name, which_set, image_size,", "batch_size=self.batch_size, shuffle=True, validation_split=validation_split, num_workers=self.num_workers, collate_fn=text_image_collate_fn ) else: super(COCOTextImageDataLoader, self).__init__( dataset=self.dataset, batch_size=self.batch_size, shuffle=False, validation_split=0,", "= validation_split assert self.which_set in {'train', 'val', 'test'} self.image_size = (image_size, image_size) self.batch_size", "data['wrong_images_32'].shape) print('wrong images 64 shape:', data['wrong_images_64'].shape) print('wrong images 128 shape:', data['wrong_images_128'].shape) print('wrong images", "self.which_set == 'train' or self.which_set == 'valid': super(TextImageDataLoader, 
self).__init__( dataset=self.dataset, batch_size=self.batch_size, shuffle=True, num_workers=self.num_workers,", "shuffle=False, validation_split=0, num_workers=self.num_workers, collate_fn=text_image_collate_fn) if __name__ == '__main__': data_loader = COCOTextImageDataLoader( data_dir='/Users/leon/Projects/I2T2I/data/coco/', #", "0) collate_data['wrong_images_64'] = torch.stack(wrong_images_64, 0) collate_data['wrong_images_128'] = torch.stack(wrong_images_128, 0) collate_data['wrong_images_256'] = torch.stack(wrong_images_256, 0)", "order). data.sort(key=lambda x: x['right_caption'].size(0), reverse=True) collate_data['right_img_id'] = [] collate_data['class_id'] = [] collate_data['right_txt'] =", "cap in wrong_captions] collate_data['wrong_caption_lengths'] = torch.LongTensor(wrong_caption_lengths) collate_data['wrong_captions'] = torch.zeros(len(wrong_caption_lengths), max(wrong_caption_lengths)).long() for i, cap", "txt:\", data[\"right_txt\"]) print(\"wrong_img_id:\", data['wrong_img_id']) print('wrong images 32 shape:', data['wrong_images_32'].shape) print('wrong images 64 shape:',", "images 256 shape:', data['right_images_256'].shape) print(\"right embed shape:\", data['right_embeds'].shape) print(\"right caption shape:\", data['right_captions'].shape) print(\"right", "'__main__': data_loader = COCOTextImageDataLoader( data_dir='/Users/leon/Projects/I2T2I/data/coco/', # dataset_name=\"birds\", which_set='val', image_size=256, batch_size=16, validation_split=0.05, num_workers=0) print(len(data_loader.dataset.vocab))", "reverse=True) collate_data['right_img_id'] = [] collate_data['class_id'] = [] collate_data['right_txt'] = [] class_ids = []", "wrong_images_64.append(data[i]['wrong_image_64']) wrong_images_128.append(data[i]['wrong_image_128']) wrong_images_256.append(data[i]['wrong_image_256']) # sort and get captions, lengths, images, embeds, etc. 
right_caption_lengths", "super(TextImageDataLoader, self).__init__( dataset=self.dataset, batch_size=self.batch_size, shuffle=True, num_workers=self.num_workers, collate_fn=text_image_collate_fn ) else: super(TextImageDataLoader, self).__init__( dataset=self.dataset, batch_size=self.batch_size,", "TextImageDataset, COCOTextImageDataset from base import BaseDataLoader def text_image_collate_fn(data): collate_data = {} # Sort", "self.image_size = (image_size, image_size) self.batch_size = batch_size self.num_workers = num_workers # transforms.ToTensor convert", "0.5], std=[0.5, 0.5, 0.5]) ]) self.dataset = TextImageDataset(self.data_dir, self.dataset_name, self.which_set, self.transform, vocab_from_file=False) self.n_samples", "right_images_32.append(data[i]['right_image_32']) right_images_64.append(data[i]['right_image_64']) right_images_128.append(data[i]['right_image_128']) right_images_256.append(data[i]['right_image_256']) collate_data['wrong_txt'].append(data[i]['wrong_txt']) wrong_captions.append(data[i]['wrong_caption']) wrong_embeds.append(data[i]['wrong_embed']) wrong_images_32.append(data[i]['wrong_image_32']) wrong_images_64.append(data[i]['wrong_image_64']) wrong_images_128.append(data[i]['wrong_image_128']) wrong_images_256.append(data[i]['wrong_image_256']) # sort", "collate_data['right_txt'] = [] class_ids = [] right_captions = [] right_embeds = [] right_images_32", "transforms.RandomHorizontalFlip(), transforms.ToTensor(), transforms.Normalize(mean=mean, std=std) ]) self.dataset = COCOTextImageDataset(self.data_dir, self.which_set, self.transform, vocab_from_file=True) # self.n_samples", "= torch.stack(wrong_images_32, 0) collate_data['wrong_images_64'] = torch.stack(wrong_images_64, 0) collate_data['wrong_images_128'] = torch.stack(wrong_images_128, 0) collate_data['wrong_images_256'] =", "transforms.ToTensor convert PIL images in range [0, 255] to a torch in range", "= [] for i in range(len(data)): class_ids.append(data[i]['right_img_id']) collate_data['class_id'].append(data[i]['right_class_id']) collate_data['right_txt'].append(data[i]['right_txt']) right_captions.append(data[i]['right_caption']) right_embeds.append(data[i]['right_embed']) right_images_32.append(data[i]['right_image_32']) right_images_64.append(data[i]['right_image_64'])", "cap[:end] collate_data['class_id'] = np.stack(class_ids) collate_data['right_embeds'] = torch.stack(right_embeds, 0) collate_data['right_images_32'] = torch.stack(right_images_32, 0) collate_data['right_images_64']", "print('wrong images 128 shape:', data['wrong_images_128'].shape) print('wrong images 256 shape:', data['wrong_images_256'].shape) print(\"wrong embed shape:\",", "if self.which_set == 'train' or self.which_set == 'valid': super(TextImageDataLoader, self).__init__( dataset=self.dataset, batch_size=self.batch_size, shuffle=True,", "shuffle=True, validation_split=validation_split, num_workers=self.num_workers, collate_fn=text_image_collate_fn ) else: super(COCOTextImageDataLoader, self).__init__( dataset=self.dataset, batch_size=self.batch_size, shuffle=False, validation_split=0, num_workers=self.num_workers,", "collate_data['wrong_images_32'] = torch.stack(wrong_images_32, 0) collate_data['wrong_images_64'] = torch.stack(wrong_images_64, 0) collate_data['wrong_images_128'] = torch.stack(wrong_images_128, 0) collate_data['wrong_images_256']", "print('wrong images 64 shape:', data['wrong_images_64'].shape) print('wrong images 128 shape:', data['wrong_images_128'].shape) print('wrong images 256", "64 shape:', 
data['right_images_64'].shape) print('right images 128 shape:', data['right_images_128'].shape) print('right images 256 shape:', data['right_images_256'].shape)", "self.which_set in {'train', 'valid', 'test'} self.image_size = (image_size, image_size) self.batch_size = batch_size self.num_workers", "wrong_images_128.append(data[i]['wrong_image_128']) wrong_images_256.append(data[i]['wrong_image_256']) # sort and get captions, lengths, images, embeds, etc. right_caption_lengths =", "collate_data['right_captions'][i, :end] = cap[:end] # sort and get captions, lengths, images, embeds, etc.", "collate_data['wrong_images_64'] = torch.stack(wrong_images_64, 0) collate_data['wrong_images_128'] = torch.stack(wrong_images_128, 0) collate_data['wrong_images_256'] = torch.stack(wrong_images_256, 0) return", "collate_fn=text_image_collate_fn ) else: super(TextImageDataLoader, self).__init__( dataset=self.dataset, batch_size=self.batch_size, shuffle=False, num_workers=0, collate_fn=text_image_collate_fn) class COCOTextImageDataLoader(BaseDataLoader): \"\"\"", "0) collate_data['wrong_images_256'] = torch.stack(wrong_images_256, 0) return collate_data class TextImageDataLoader(DataLoader): def __init__(self, data_dir, dataset_name,", "etc. wrong_captions.sort(key=lambda x: len(x), reverse=True) wrong_caption_lengths = [len(cap) for cap in wrong_captions] collate_data['wrong_caption_lengths']", "torch.stack(right_images_128, 0) collate_data['right_images_256'] = torch.stack(right_images_256, 0) collate_data['wrong_embeds'] = torch.stack(wrong_embeds, 0) collate_data['wrong_images_32'] = torch.stack(wrong_images_32,", "super(COCOTextImageDataLoader, self).__init__( dataset=self.dataset, batch_size=self.batch_size, shuffle=True, validation_split=validation_split, num_workers=self.num_workers, collate_fn=text_image_collate_fn ) else: super(COCOTextImageDataLoader, self).__init__( dataset=self.dataset,", "import numpy as np from torch.utils.data import DataLoader from torchvision import transforms from", "super(COCOTextImageDataLoader, self).__init__( dataset=self.dataset, batch_size=self.batch_size, shuffle=False, validation_split=0, num_workers=self.num_workers, collate_fn=text_image_collate_fn) if __name__ == '__main__': data_loader", "shape:', data['wrong_images_32'].shape) print('wrong images 64 shape:', data['wrong_images_64'].shape) print('wrong images 128 shape:', data['wrong_images_128'].shape) print('wrong", "collate_data['wrong_txt'] = [] wrong_captions = [] wrong_embeds = [] wrong_images_32 = [] wrong_images_64", "data['right_images_256'].shape) print(\"right embed shape:\", data['right_embeds'].shape) print(\"right caption shape:\", data['right_captions'].shape) print(\"right caption lengths:\", data['right_caption_lengths'])", "data['right_embeds'].shape) print(\"right caption shape:\", data['right_captions'].shape) print(\"right caption lengths:\", data['right_caption_lengths']) print(\"right txt:\", data[\"right_txt\"]) print(\"wrong_img_id:\",", "# print(\"class_ids:\", data[\"class_id\"]) print('right images 32 shape:', data['right_images_32'].shape) print('right images 64 shape:', data['right_images_64'].shape)", "collate_data = {} # Sort a data list by right caption length (descending", "# transforms.RandomHorizontalFlip(), transforms.ToTensor(), transforms.Normalize(mean=mean, std=std) ]) self.dataset = COCOTextImageDataset(self.data_dir, self.which_set, self.transform, vocab_from_file=True) #", "# transforms.ToTensor convert PIL images in range [0, 255] to a torch in", "self.transform = 
transforms.Compose([ transforms.RandomHorizontalFlip(), transforms.ToTensor(), transforms.Normalize(mean=[0.5, 0.5, 0.5], std=[0.5, 0.5, 0.5]) ]) self.dataset", "# sort and get captions, lengths, images, embeds, etc. right_caption_lengths = [len(cap) for", "num_workers # transforms.ToTensor convert PIL images in range [0, 255] to a torch", "collate_data['wrong_captions'][i, :end] = cap[:end] collate_data['class_id'] = np.stack(class_ids) collate_data['right_embeds'] = torch.stack(right_embeds, 0) collate_data['right_images_32'] =", "# dataset_name=\"birds\", which_set='val', image_size=256, batch_size=16, validation_split=0.05, num_workers=0) print(len(data_loader.dataset.vocab)) print(len(data_loader.dataset.vocab.word2idx)) for i, data in", "end = right_caption_lengths[i] collate_data['right_captions'][i, :end] = cap[:end] # sort and get captions, lengths,", "= transforms.Compose([ transforms.RandomHorizontalFlip(), transforms.ToTensor(), transforms.Normalize(mean=mean, std=std) ]) else: self.transform = transforms.Compose([ # transforms.RandomHorizontalFlip(),", "self.which_set = which_set self.dataset_name = dataset_name assert self.which_set in {'train', 'valid', 'test'} self.image_size", "torch.stack(wrong_images_32, 0) collate_data['wrong_images_64'] = torch.stack(wrong_images_64, 0) collate_data['wrong_images_128'] = torch.stack(wrong_images_128, 0) collate_data['wrong_images_256'] = torch.stack(wrong_images_256,", "image_size) self.batch_size = batch_size self.num_workers = num_workers # transforms.ToTensor convert PIL images in", "0) collate_data['wrong_images_128'] = torch.stack(wrong_images_128, 0) collate_data['wrong_images_256'] = torch.stack(wrong_images_256, 0) return collate_data class TextImageDataLoader(DataLoader):", "a data list by right caption length (descending order). 
data.sort(key=lambda x: x['right_caption'].size(0), reverse=True)", ") else: super(TextImageDataLoader, self).__init__( dataset=self.dataset, batch_size=self.batch_size, shuffle=False, num_workers=0, collate_fn=text_image_collate_fn) class COCOTextImageDataLoader(BaseDataLoader): \"\"\" COCO", "Caption Model Data Loader \"\"\" def __init__(self, data_dir, which_set, image_size, batch_size, validation_split, num_workers):", "self.transform, vocab_from_file=True) # self.n_samples = len(self.dataset) if self.which_set == 'train': super(COCOTextImageDataLoader, self).__init__( dataset=self.dataset,", "i in range(len(data)): class_ids.append(data[i]['right_img_id']) collate_data['class_id'].append(data[i]['right_class_id']) collate_data['right_txt'].append(data[i]['right_txt']) right_captions.append(data[i]['right_caption']) right_embeds.append(data[i]['right_embed']) right_images_32.append(data[i]['right_image_32']) right_images_64.append(data[i]['right_image_64']) right_images_128.append(data[i]['right_image_128']) right_images_256.append(data[i]['right_image_256']) collate_data['wrong_txt'].append(data[i]['wrong_txt'])", "== 'train': super(COCOTextImageDataLoader, self).__init__( dataset=self.dataset, batch_size=self.batch_size, shuffle=True, validation_split=validation_split, num_workers=self.num_workers, collate_fn=text_image_collate_fn ) else: super(COCOTextImageDataLoader,", "which_set='val', image_size=256, batch_size=16, validation_split=0.05, num_workers=0) print(len(data_loader.dataset.vocab)) print(len(data_loader.dataset.vocab.word2idx)) for i, data in enumerate(data_loader): print(i)", "data['wrong_images_256'].shape) print(\"wrong embed shape:\", data['wrong_embeds'].shape) print(\"wrong caption shape:\", data['wrong_captions'].shape) print(\"wrong caption lengths:\", data['wrong_caption_lengths'])", "= data_dir self.which_set = which_set self.validation_split = validation_split assert self.which_set in {'train', 'val',", "get captions, lengths, images, embeds, etc. right_caption_lengths = [len(cap) for cap in right_captions]", "self).__init__( dataset=self.dataset, batch_size=self.batch_size, shuffle=True, num_workers=self.num_workers, collate_fn=text_image_collate_fn ) else: super(TextImageDataLoader, self).__init__( dataset=self.dataset, batch_size=self.batch_size, shuffle=False,", "1.0] mean = torch.tensor([0.5, 0.5, 0.5], dtype=torch.float32) std = torch.tensor([0.5, 0.5, 0.5], dtype=torch.float32)", "wrong_images_256.append(data[i]['wrong_image_256']) # sort and get captions, lengths, images, embeds, etc. 
right_caption_lengths = [len(cap)", "data_loader.datasets_custom import TextImageDataset, COCOTextImageDataset from base import BaseDataLoader def text_image_collate_fn(data): collate_data = {}", "= torch.LongTensor(wrong_caption_lengths) collate_data['wrong_captions'] = torch.zeros(len(wrong_caption_lengths), max(wrong_caption_lengths)).long() for i, cap in enumerate(wrong_captions): end =", "dataset_name, which_set, image_size, batch_size, num_workers): self.data_dir = data_dir self.which_set = which_set self.dataset_name =", "shape:\", data['right_captions'].shape) print(\"right caption lengths:\", data['right_caption_lengths']) print(\"right txt:\", data[\"right_txt\"]) print(\"wrong_img_id:\", data['wrong_img_id']) print('wrong images", "= [] right_images_32 = [] right_images_64 = [] right_images_128 = [] right_images_256 =", "assert self.which_set in {'train', 'val', 'test'} self.image_size = (image_size, image_size) self.batch_size = batch_size", "= transforms.Compose([ # transforms.RandomHorizontalFlip(), transforms.ToTensor(), transforms.Normalize(mean=mean, std=std) ]) self.dataset = COCOTextImageDataset(self.data_dir, self.which_set, self.transform,", "= COCOTextImageDataset(self.data_dir, self.which_set, self.transform, vocab_from_file=True) # self.n_samples = len(self.dataset) if self.which_set == 'train':", "256 shape:', data['wrong_images_256'].shape) print(\"wrong embed shape:\", data['wrong_embeds'].shape) print(\"wrong caption shape:\", data['wrong_captions'].shape) print(\"wrong caption", "captions, lengths, images, embeds, etc. right_caption_lengths = [len(cap) for cap in right_captions] collate_data['right_caption_lengths']", "= num_workers # transforms.ToTensor convert PIL images in range [0, 255] to a", "torch.stack(right_images_64, 0) collate_data['right_images_128'] = torch.stack(right_images_128, 0) collate_data['right_images_256'] = torch.stack(right_images_256, 0) collate_data['wrong_embeds'] = torch.stack(wrong_embeds,", "enumerate(right_captions): end = right_caption_lengths[i] collate_data['right_captions'][i, :end] = cap[:end] # sort and get captions,", "TextImageDataset(self.data_dir, self.dataset_name, self.which_set, self.transform, vocab_from_file=False) self.n_samples = len(self.dataset) if self.which_set == 'train' or", "1.0] self.transform = transforms.Compose([ transforms.RandomHorizontalFlip(), transforms.ToTensor(), transforms.Normalize(mean=[0.5, 0.5, 0.5], std=[0.5, 0.5, 0.5]) ])", "= [] right_images_64 = [] right_images_128 = [] right_images_256 = [] collate_data['wrong_img_id'] =", "0) collate_data['right_images_64'] = torch.stack(right_images_64, 0) collate_data['right_images_128'] = torch.stack(right_images_128, 0) collate_data['right_images_256'] = torch.stack(right_images_256, 0)", "{'train', 'val', 'test'} self.image_size = (image_size, image_size) self.batch_size = batch_size self.num_workers = num_workers", "self.which_set = which_set self.validation_split = validation_split assert self.which_set in {'train', 'val', 'test'} self.image_size", "'valid', 'test'} self.image_size = (image_size, image_size) self.batch_size = batch_size self.num_workers = num_workers #", "right_images_256.append(data[i]['right_image_256']) collate_data['wrong_txt'].append(data[i]['wrong_txt']) wrong_captions.append(data[i]['wrong_caption']) wrong_embeds.append(data[i]['wrong_embed']) wrong_images_32.append(data[i]['wrong_image_32']) wrong_images_64.append(data[i]['wrong_image_64']) wrong_images_128.append(data[i]['wrong_image_128']) 
wrong_images_256.append(data[i]['wrong_image_256']) # sort and get captions,", "self.n_samples = len(self.dataset) if self.which_set == 'train': super(COCOTextImageDataLoader, self).__init__( dataset=self.dataset, batch_size=self.batch_size, shuffle=True, validation_split=validation_split,", "print(\"wrong embed shape:\", data['wrong_embeds'].shape) print(\"wrong caption shape:\", data['wrong_captions'].shape) print(\"wrong caption lengths:\", data['wrong_caption_lengths']) print(\"wrong", "self.validation_split = validation_split assert self.which_set in {'train', 'val', 'test'} self.image_size = (image_size, image_size)", "Data Loader \"\"\" def __init__(self, data_dir, which_set, image_size, batch_size, validation_split, num_workers): self.data_dir =", "wrong_captions] collate_data['wrong_caption_lengths'] = torch.LongTensor(wrong_caption_lengths) collate_data['wrong_captions'] = torch.zeros(len(wrong_caption_lengths), max(wrong_caption_lengths)).long() for i, cap in enumerate(wrong_captions):", "= data_dir self.which_set = which_set self.dataset_name = dataset_name assert self.which_set in {'train', 'valid',", "dataset=self.dataset, batch_size=self.batch_size, shuffle=False, num_workers=0, collate_fn=text_image_collate_fn) class COCOTextImageDataLoader(BaseDataLoader): \"\"\" COCO Image Caption Model Data", "dtype=torch.float32) if which_set == 'val' or which_set == 'test': self.transform = transforms.Compose([ transforms.RandomHorizontalFlip(),", "transforms.Normalize(mean=mean, std=std) ]) else: self.transform = transforms.Compose([ # transforms.RandomHorizontalFlip(), transforms.ToTensor(), transforms.Normalize(mean=mean, std=std) ])", "= COCOTextImageDataLoader( data_dir='/Users/leon/Projects/I2T2I/data/coco/', # dataset_name=\"birds\", which_set='val', image_size=256, batch_size=16, validation_split=0.05, num_workers=0) print(len(data_loader.dataset.vocab)) print(len(data_loader.dataset.vocab.word2idx)) for", "'train' or self.which_set == 'valid': super(TextImageDataLoader, self).__init__( dataset=self.dataset, batch_size=self.batch_size, shuffle=True, num_workers=self.num_workers, collate_fn=text_image_collate_fn )", "def __init__(self, data_dir, which_set, image_size, batch_size, validation_split, num_workers): self.data_dir = data_dir self.which_set =", "sort and get captions, lengths, images, embeds, etc. 
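
# The descending-length sort performed by the collate function is exactly the
# layout torch.nn.utils.rnn.pack_padded_sequence expects by default. A minimal
# sketch of a caption encoder consuming a collated batch follows; the
# embedding and GRU sizes are illustrative placeholders, not values taken from
# this repository, and nothing below is called at import time.
def _packed_caption_encoder_sketch(collate_data, vocab_size=5000):
    import torch.nn as nn
    from torch.nn.utils.rnn import pack_padded_sequence

    embedding = nn.Embedding(vocab_size, 256)        # placeholder sizes
    rnn = nn.GRU(input_size=256, hidden_size=512, batch_first=True)

    captions = collate_data['right_captions']        # (batch, max_len), padded
    lengths = collate_data['right_caption_lengths']  # true lengths, sorted desc

    packed = pack_padded_sequence(embedding(captions), lengths, batch_first=True)
    _, hidden = rnn(packed)                          # hidden: (1, batch, 512)
    return hidden.squeeze(0)                         # (batch, 512)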


class TextImageDataLoader(DataLoader):
    """Data loader for the custom TextImageDataset (e.g. dataset_name="birds")."""

    def __init__(self, data_dir, dataset_name, which_set, image_size, batch_size, num_workers):
        self.data_dir = data_dir
        self.which_set = which_set
        self.dataset_name = dataset_name
        assert self.which_set in {'train', 'valid', 'test'}

        self.image_size = (image_size, image_size)
        self.batch_size = batch_size
        self.num_workers = num_workers

        # ToTensor maps PIL images from [0, 255] to [0.0, 1.0]; Normalize with
        # mean = std = 0.5 then rescales each channel to [-1.0, 1.0].
        self.transform = transforms.Compose([
            transforms.RandomHorizontalFlip(),
            transforms.ToTensor(),
            transforms.Normalize(mean=[0.5, 0.5, 0.5], std=[0.5, 0.5, 0.5])
        ])

        self.dataset = TextImageDataset(self.data_dir, self.dataset_name, self.which_set,
                                        self.transform, vocab_from_file=False)
        self.n_samples = len(self.dataset)

        if self.which_set == 'train' or self.which_set == 'valid':
            super(TextImageDataLoader, self).__init__(
                dataset=self.dataset,
                batch_size=self.batch_size,
                shuffle=True,
                num_workers=self.num_workers,
                collate_fn=text_image_collate_fn
            )
        else:
            super(TextImageDataLoader, self).__init__(
                dataset=self.dataset,
                batch_size=self.batch_size,
                shuffle=False,
                num_workers=0,
                collate_fn=text_image_collate_fn
            )
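
# Illustrative usage of TextImageDataLoader (the data_dir below is a
# placeholder path, not a path shipped with this repository):
#
#   loader = TextImageDataLoader(data_dir='data/birds/', dataset_name='birds',
#                                which_set='train', image_size=256,
#                                batch_size=16, num_workers=4)
#   batch = next(iter(loader))
#   # batch['right_images_64'] stacks the 64x64 crops of the batch;
#   # batch['right_captions'] is padded to the longest caption in the batch.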


class COCOTextImageDataLoader(BaseDataLoader):
    """
    COCO Image Caption Model Data Loader
    """

    def __init__(self, data_dir, which_set, image_size, batch_size, validation_split, num_workers):
        self.data_dir = data_dir
        self.which_set = which_set
        self.validation_split = validation_split
        assert self.which_set in {'train', 'val', 'test'}

        self.image_size = (image_size, image_size)
        self.batch_size = batch_size
        self.num_workers = num_workers

        # ToTensor maps PIL images from [0, 255] to [0.0, 1.0]; Normalize with
        # mean = std = 0.5 then rescales each channel to [-1.0, 1.0]. Random
        # horizontal flipping is applied as augmentation on the training set
        # only, never at evaluation time.
        mean = torch.tensor([0.5, 0.5, 0.5], dtype=torch.float32)
        std = torch.tensor([0.5, 0.5, 0.5], dtype=torch.float32)
        if which_set == 'train':
            self.transform = transforms.Compose([
                transforms.RandomHorizontalFlip(),
                transforms.ToTensor(),
                transforms.Normalize(mean=mean, std=std)
            ])
        else:
            self.transform = transforms.Compose([
                transforms.ToTensor(),
                transforms.Normalize(mean=mean, std=std)
            ])

        self.dataset = COCOTextImageDataset(self.data_dir, self.which_set,
                                            self.transform, vocab_from_file=True)

        if self.which_set == 'train':
            super(COCOTextImageDataLoader, self).__init__(
                dataset=self.dataset,
                batch_size=self.batch_size,
                shuffle=True,
                validation_split=validation_split,
                num_workers=self.num_workers,
                collate_fn=text_image_collate_fn
            )
        else:
            super(COCOTextImageDataLoader, self).__init__(
                dataset=self.dataset,
                batch_size=self.batch_size,
                shuffle=False,
                validation_split=0,
                num_workers=self.num_workers,
                collate_fn=text_image_collate_fn
            )
lengths:\", data['wrong_caption_lengths']) print(\"wrong txt:\",", "collate_data class TextImageDataLoader(DataLoader): def __init__(self, data_dir, dataset_name, which_set, image_size, batch_size, num_workers): self.data_dir =", "= batch_size self.num_workers = num_workers # transforms.ToTensor convert PIL images in range [0,", "max(wrong_caption_lengths)).long() for i, cap in enumerate(wrong_captions): end = wrong_caption_lengths[i] collate_data['wrong_captions'][i, :end] = cap[:end]", "std=std) ]) else: self.transform = transforms.Compose([ # transforms.RandomHorizontalFlip(), transforms.ToTensor(), transforms.Normalize(mean=mean, std=std) ]) self.dataset", "collate_data['right_captions'] = torch.zeros(len(right_caption_lengths), max(right_caption_lengths)).long() for i, cap in enumerate(right_captions): end = right_caption_lengths[i] collate_data['right_captions'][i,", "'test'} self.image_size = (image_size, image_size) self.batch_size = batch_size self.num_workers = num_workers # transforms.ToTensor", "0) collate_data['wrong_images_32'] = torch.stack(wrong_images_32, 0) collate_data['wrong_images_64'] = torch.stack(wrong_images_64, 0) collate_data['wrong_images_128'] = torch.stack(wrong_images_128, 0)", "wrong_images_32 = [] wrong_images_64 = [] wrong_images_128 = [] wrong_images_256 = [] for", "torch import numpy as np from torch.utils.data import DataLoader from torchvision import transforms", "= [] collate_data['class_id'] = [] collate_data['right_txt'] = [] class_ids = [] right_captions =", "right_caption_lengths = [len(cap) for cap in right_captions] collate_data['right_caption_lengths'] = torch.LongTensor(right_caption_lengths) collate_data['right_captions'] = torch.zeros(len(right_caption_lengths),", "[-1.0, 1.0] self.transform = transforms.Compose([ transforms.RandomHorizontalFlip(), transforms.ToTensor(), transforms.Normalize(mean=[0.5, 0.5, 0.5], std=[0.5, 0.5, 0.5])", "np.stack(class_ids) collate_data['right_embeds'] = torch.stack(right_embeds, 0) collate_data['right_images_32'] = torch.stack(right_images_32, 0) collate_data['right_images_64'] = torch.stack(right_images_64, 0)", "= which_set self.validation_split = validation_split assert self.which_set in {'train', 'val', 'test'} self.image_size =", "end = wrong_caption_lengths[i] collate_data['wrong_captions'][i, :end] = cap[:end] collate_data['class_id'] = np.stack(class_ids) collate_data['right_embeds'] = torch.stack(right_embeds,", "# self.n_samples = len(self.dataset) if self.which_set == 'train': super(COCOTextImageDataLoader, self).__init__( dataset=self.dataset, batch_size=self.batch_size, shuffle=True,", "transforms.Normalize(mean=mean, std=std) ]) self.dataset = COCOTextImageDataset(self.data_dir, self.which_set, self.transform, vocab_from_file=True) # self.n_samples = len(self.dataset)", "COCOTextImageDataset from base import BaseDataLoader def text_image_collate_fn(data): collate_data = {} # Sort a", "(image_size, image_size) self.batch_size = batch_size self.num_workers = num_workers # transforms.ToTensor convert PIL images", "'test': self.transform = transforms.Compose([ transforms.RandomHorizontalFlip(), transforms.ToTensor(), transforms.Normalize(mean=mean, std=std) ]) else: self.transform = transforms.Compose([", "self.which_set, self.transform, vocab_from_file=False) self.n_samples = len(self.dataset) if self.which_set == 'train' or self.which_set ==", "validation_split, num_workers): self.data_dir = data_dir self.which_set = which_set self.validation_split = validation_split assert self.which_set", 
"shape:\", data['right_embeds'].shape) print(\"right caption shape:\", data['right_captions'].shape) print(\"right caption lengths:\", data['right_caption_lengths']) print(\"right txt:\", data[\"right_txt\"])", "in {'train', 'valid', 'test'} self.image_size = (image_size, image_size) self.batch_size = batch_size self.num_workers =", "data['right_caption_lengths']) print(\"right txt:\", data[\"right_txt\"]) print(\"wrong_img_id:\", data['wrong_img_id']) print('wrong images 32 shape:', data['wrong_images_32'].shape) print('wrong images", "= right_caption_lengths[i] collate_data['right_captions'][i, :end] = cap[:end] # sort and get captions, lengths, images,", "'val' or which_set == 'test': self.transform = transforms.Compose([ transforms.RandomHorizontalFlip(), transforms.ToTensor(), transforms.Normalize(mean=mean, std=std) ])", "shape:', data['right_images_128'].shape) print('right images 256 shape:', data['right_images_256'].shape) print(\"right embed shape:\", data['right_embeds'].shape) print(\"right caption", "std=std) ]) self.dataset = COCOTextImageDataset(self.data_dir, self.which_set, self.transform, vocab_from_file=True) # self.n_samples = len(self.dataset) if", "data['right_captions'].shape) print(\"right caption lengths:\", data['right_caption_lengths']) print(\"right txt:\", data[\"right_txt\"]) print(\"wrong_img_id:\", data['wrong_img_id']) print('wrong images 32", "[] collate_data['wrong_txt'] = [] wrong_captions = [] wrong_embeds = [] wrong_images_32 = []", "# Sort a data list by right caption length (descending order). data.sort(key=lambda x:", "[] right_images_128 = [] right_images_256 = [] collate_data['wrong_img_id'] = [] collate_data['wrong_txt'] = []", "length (descending order). data.sort(key=lambda x: x['right_caption'].size(0), reverse=True) collate_data['right_img_id'] = [] collate_data['class_id'] = []", "lengths, images, embeds, etc. 
wrong_captions.sort(key=lambda x: len(x), reverse=True) wrong_caption_lengths = [len(cap) for cap", "= [] wrong_images_128 = [] wrong_images_256 = [] for i in range(len(data)): class_ids.append(data[i]['right_img_id'])", "[0, 255] to a torch in range [-1.0, 1.0] self.transform = transforms.Compose([ transforms.RandomHorizontalFlip(),", "== 'val' or which_set == 'test': self.transform = transforms.Compose([ transforms.RandomHorizontalFlip(), transforms.ToTensor(), transforms.Normalize(mean=mean, std=std)", "__name__ == '__main__': data_loader = COCOTextImageDataLoader( data_dir='/Users/leon/Projects/I2T2I/data/coco/', # dataset_name=\"birds\", which_set='val', image_size=256, batch_size=16, validation_split=0.05,", "self.transform = transforms.Compose([ # transforms.RandomHorizontalFlip(), transforms.ToTensor(), transforms.Normalize(mean=mean, std=std) ]) self.dataset = COCOTextImageDataset(self.data_dir, self.which_set,", "cap in enumerate(wrong_captions): end = wrong_caption_lengths[i] collate_data['wrong_captions'][i, :end] = cap[:end] collate_data['class_id'] = np.stack(class_ids)", "= [] wrong_embeds = [] wrong_images_32 = [] wrong_images_64 = [] wrong_images_128 =", "= torch.stack(wrong_embeds, 0) collate_data['wrong_images_32'] = torch.stack(wrong_images_32, 0) collate_data['wrong_images_64'] = torch.stack(wrong_images_64, 0) collate_data['wrong_images_128'] =", "transforms from data_loader.datasets_custom import TextImageDataset, COCOTextImageDataset from base import BaseDataLoader def text_image_collate_fn(data): collate_data", "== '__main__': data_loader = COCOTextImageDataLoader( data_dir='/Users/leon/Projects/I2T2I/data/coco/', # dataset_name=\"birds\", which_set='val', image_size=256, batch_size=16, validation_split=0.05, num_workers=0)", "[] collate_data['class_id'] = [] collate_data['right_txt'] = [] class_ids = [] right_captions = []", "right_images_64.append(data[i]['right_image_64']) right_images_128.append(data[i]['right_image_128']) right_images_256.append(data[i]['right_image_256']) collate_data['wrong_txt'].append(data[i]['wrong_txt']) wrong_captions.append(data[i]['wrong_caption']) wrong_embeds.append(data[i]['wrong_embed']) wrong_images_32.append(data[i]['wrong_image_32']) wrong_images_64.append(data[i]['wrong_image_64']) wrong_images_128.append(data[i]['wrong_image_128']) wrong_images_256.append(data[i]['wrong_image_256']) # sort and", "]) self.dataset = COCOTextImageDataset(self.data_dir, self.which_set, self.transform, vocab_from_file=True) # self.n_samples = len(self.dataset) if self.which_set", "= [len(cap) for cap in wrong_captions] collate_data['wrong_caption_lengths'] = torch.LongTensor(wrong_caption_lengths) collate_data['wrong_captions'] = torch.zeros(len(wrong_caption_lengths), max(wrong_caption_lengths)).long()", "which_set == 'test': self.transform = transforms.Compose([ transforms.RandomHorizontalFlip(), transforms.ToTensor(), transforms.Normalize(mean=mean, std=std) ]) else: self.transform", "data list by right caption length (descending order). 
data.sort(key=lambda x: x['right_caption'].size(0), reverse=True) collate_data['right_img_id']", "0.5, 0.5], dtype=torch.float32) std = torch.tensor([0.5, 0.5, 0.5], dtype=torch.float32) if which_set == 'val'", "std=[0.5, 0.5, 0.5]) ]) self.dataset = TextImageDataset(self.data_dir, self.dataset_name, self.which_set, self.transform, vocab_from_file=False) self.n_samples =", "print('right images 32 shape:', data['right_images_32'].shape) print('right images 64 shape:', data['right_images_64'].shape) print('right images 128", "transforms.ToTensor(), transforms.Normalize(mean=mean, std=std) ]) self.dataset = COCOTextImageDataset(self.data_dir, self.which_set, self.transform, vocab_from_file=True) # self.n_samples =", "a torch in range [-1.0, 1.0] mean = torch.tensor([0.5, 0.5, 0.5], dtype=torch.float32) std", "num_workers): self.data_dir = data_dir self.which_set = which_set self.validation_split = validation_split assert self.which_set in", "self).__init__( dataset=self.dataset, batch_size=self.batch_size, shuffle=True, validation_split=validation_split, num_workers=self.num_workers, collate_fn=text_image_collate_fn ) else: super(COCOTextImageDataLoader, self).__init__( dataset=self.dataset, batch_size=self.batch_size,", "in {'train', 'val', 'test'} self.image_size = (image_size, image_size) self.batch_size = batch_size self.num_workers =", "print(len(data_loader.dataset.vocab.word2idx)) for i, data in enumerate(data_loader): print(i) print(\"right_img_id:\", data['right_img_id']) # print(\"class_ids:\", data[\"class_id\"]) print('right", "images 128 shape:', data['wrong_images_128'].shape) print('wrong images 256 shape:', data['wrong_images_256'].shape) print(\"wrong embed shape:\", data['wrong_embeds'].shape)" ]
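# A minimal, self-contained sketch of the caption-padding step that
# text_image_collate_fn performs above, run on dummy tensors. Nothing in this
# snippet is part of the original file; the variable names are hypothetical.
import torch

captions = [torch.tensor([3, 7, 9, 2]), torch.tensor([5, 1])]  # already length-sorted
lengths = [len(cap) for cap in captions]
padded = torch.zeros(len(lengths), max(lengths)).long()
for i, cap in enumerate(captions):
    padded[i, :lengths[i]] = cap
# padded is now:
# tensor([[3, 7, 9, 2],
#         [5, 1, 0, 0]])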
import pytest
from sqlalchemy.exc import ProgrammingError
from sqlalchemy_continuum.utils import count_versions

from kokon.orm import Guest
from kokon.utils.db import DB

from tests.helpers import admin_session


def test_app_user():
    with admin_session() as session:
        session.execute("TRUNCATE guests_version RESTART IDENTITY;")
        session.execute("TRUNCATE guests RESTART IDENTITY;")
        session.execute("TRUNCATE transaction RESTART IDENTITY;")

    with DB().acquire() as session:
        # creates a guest without error and version as well
        guid = "74b86069-c837-4431-a7ee-3a4aedda978b"
        guest = Guest(
            guid=guid,
            full_name="<NAME>",
            email="<EMAIL>",
            phone_number="100-330-497",
            people_in_group=4,
            adult_male_count=0,
            adult_female_count=2,
            children_ages=[1, 10],
            have_pets=False,
            how_long_to_stay="1w",
            updated_by_id="782962fc-dc11-4a33-8f08-b7da532dd40d",
        )
        session.add(guest)
        session.commit()
        session.refresh(guest)
        assert guest.claimed_by_id is None

        # trigger works
        claimed_at = guest.claimed_at
        assert claimed_at is not None

        guest.adult_male_count = 1
        session.commit()

        with pytest.raises(ProgrammingError):
            _ = guest.versions[0]

    with admin_session() as session:
        guest = session.query(Guest).where(Guest.guid == guid).one()
        assert count_versions(guest) == 2
        assert str(guest.versions[0].guid) == guid
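# Hedged sketch of the SQLAlchemy-Continuum wiring that gives a model the
# `.versions` relationship and makes count_versions() meaningful. This is an
# assumption about how kokon.orm is set up, not code from that package; the
# Article model below is a hypothetical stand-in for Guest.
import sqlalchemy as sa
from sqlalchemy.orm import declarative_base
from sqlalchemy_continuum import make_versioned

make_versioned(user_cls=None)  # must run before the mapped classes are defined

Base = declarative_base()


class Article(Base):
    __tablename__ = "article"
    __versioned__ = {}  # opt this model into versioning
    id = sa.Column(sa.Integer, primary_key=True)
    name = sa.Column(sa.Unicode(255))


sa.orm.configure_mappers()  # Continuum builds the ArticleVersion class here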
<reponame>crccheck/atx-bandc
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os

import dj_database_url
from project_runpy import env


BASE_DIR = os.path.dirname(__file__)

SECRET_KEY = env.get("SECRET_KEY", "Rotom")

DEBUG = env.get("DEBUG", False)

ALLOWED_HOSTS = ["*"]

INSTALLED_APPS = (
    "bandc.apps.agenda.apps.AgendaConfig",
    "django.contrib.admin",
    "django.contrib.auth",
    "django.contrib.contenttypes",
    "django.contrib.sessions",
    "django.contrib.messages",
    "django.contrib.staticfiles",
    # support
    "django_extensions",
    "django_object_actions",
    "bootstrap_pagination",
)

MIDDLEWARE = (
    "django.middleware.security.SecurityMiddleware",
    "django.contrib.sessions.middleware.SessionMiddleware",
    "django.middleware.common.CommonMiddleware",
    "django.middleware.csrf.CsrfViewMiddleware",
    "django.contrib.auth.middleware.AuthenticationMiddleware",
    "django.contrib.messages.middleware.MessageMiddleware",
    "django.middleware.clickjacking.XFrameOptionsMiddleware",
)

ROOT_URLCONF = "bandc.urls"

WSGI_APPLICATION = "bandc.wsgi.application"

# Database
# https://docs.djangoproject.com/en/stable/ref/settings/#databases
DATABASES = {"default": dj_database_url.config(default="sqlite:///bandc.db")}

DEFAULT_AUTO_FIELD = "django.db.models.AutoField"

# Password validation
# https://docs.djangoproject.com/en/3.0/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
    {
        "NAME": "django.contrib.auth.password_validation.UserAttributeSimilarityValidator",
    },
    {"NAME": "django.contrib.auth.password_validation.MinimumLengthValidator"},
    {"NAME": "django.contrib.auth.password_validation.CommonPasswordValidator"},
    {"NAME": "django.contrib.auth.password_validation.NumericPasswordValidator"},
]

# Internationalization
# https://docs.djangoproject.com/en/stable/topics/i18n/
LANGUAGE_CODE = "en-us"
TIME_ZONE = "America/Chicago"
USE_I18N = False
USE_L10N = False
USE_TZ = True

TEMPLATES = [
    {
        "BACKEND": "django.template.backends.django.DjangoTemplates",
        "DIRS": [os.path.join(BASE_DIR, "templates")],
        "APP_DIRS": True,
        "OPTIONS": {
            "context_processors": [
                "django.template.context_processors.debug",
                "django.template.context_processors.request",
                "django.contrib.auth.context_processors.auth",
                "django.contrib.messages.context_processors.messages",
                "bandc.context_processors.base_url",
            ],
            "debug": DEBUG,
        },
    },
]

# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/dev/howto/static-files/
STATIC_URL = "/static/"
STATICFILES_DIRS = (os.path.join(BASE_DIR, "static"),)
MEDIA_ROOT = os.path.join(BASE_DIR, "..", "media")
MEDIA_URL = "/media/"

LOGGING = {
    "version": 1,
    "disable_existing_loggers": False,
    "root": {"level": os.environ.get("LOG_LEVEL", "WARNING"), "handlers": ["console"]},
    "formatters": {
        "dev": {
            "format": "%(levelname)s %(name)s %(message)s",
            # 'datefmt': '%Y-%m-%dT%H:%M:%S%z',
            # I want milliseconds but Python doesn't make it easy
            # "class": "pythonjsonlogger.jsonlogger.JsonFormatter",
        },
    },
    "filters": {
        "require_debug_false": {"()": "django.utils.log.RequireDebugFalse"},
        "require_debug_true": {"()": "django.utils.log.RequireDebugTrue"},
        "readable_sql": {"()": "project_runpy.ReadableSqlFilter"},
    },
    "handlers": {
        "console": {
            "level": "DEBUG",
            "formatter": "dev",
            "class": "project_runpy.ColorizingStreamHandler",
        },
    },
    "loggers": {
        "django.db.backends": {
            "level": "DEBUG" if env.get("SQL", False) else "INFO",
            "handlers": ["console"],
            "filters": ["require_debug_true", "readable_sql"],
            "propagate": False,
        },
        "sh": {"level": "WARNING", "propagate": False},
        "pdfminer": {"level": "WARNING", "propagate": False},
        "factory": {"level": "ERROR", "propagate": False},
    },
}
"env.get(\"SQL\", False) else \"INFO\", \"handlers\": [\"console\"], \"filters\": [\"require_debug_true\", \"readable_sql\"], \"propagate\": False, }, \"sh\":", "os.path.join(BASE_DIR, ...) import os import dj_database_url from project_runpy import env BASE_DIR = os.path.dirname(__file__)", "\"django.template.context_processors.debug\", \"django.template.context_processors.request\", \"django.contrib.auth.context_processors.auth\", \"django.contrib.messages.context_processors.messages\", \"bandc.context_processors.base_url\", ], \"debug\": DEBUG, }, }, ] # Static", "\"America/Chicago\" USE_I18N = False USE_L10N = False USE_TZ = True TEMPLATES = [", "\"django.contrib.staticfiles\", # support \"django_extensions\", \"django_object_actions\", \"bootstrap_pagination\", ) MIDDLEWARE = ( \"django.middleware.security.SecurityMiddleware\", \"django.contrib.sessions.middleware.SessionMiddleware\", \"django.middleware.common.CommonMiddleware\",", "\"project_runpy.ReadableSqlFilter\"}, }, \"handlers\": { \"console\": { \"level\": \"DEBUG\", \"formatter\": \"dev\", \"class\": \"project_runpy.ColorizingStreamHandler\", },", "\"django.middleware.security.SecurityMiddleware\", \"django.contrib.sessions.middleware.SessionMiddleware\", \"django.middleware.common.CommonMiddleware\", \"django.middleware.csrf.CsrfViewMiddleware\", \"django.contrib.auth.middleware.AuthenticationMiddleware\", \"django.contrib.messages.middleware.MessageMiddleware\", \"django.middleware.clickjacking.XFrameOptionsMiddleware\", ) ROOT_URLCONF = \"bandc.urls\" WSGI_APPLICATION =", "want milliseconds but Python doesn't make it easy # \"class\": \"pythonjsonlogger.jsonlogger.JsonFormatter\", }, },", "\"bootstrap_pagination\", ) MIDDLEWARE = ( \"django.middleware.security.SecurityMiddleware\", \"django.contrib.sessions.middleware.SessionMiddleware\", \"django.middleware.common.CommonMiddleware\", \"django.middleware.csrf.CsrfViewMiddleware\", \"django.contrib.auth.middleware.AuthenticationMiddleware\", \"django.contrib.messages.middleware.MessageMiddleware\", \"django.middleware.clickjacking.XFrameOptionsMiddleware\", )", "\"level\": \"DEBUG\" if env.get(\"SQL\", False) else \"INFO\", \"handlers\": [\"console\"], \"filters\": [\"require_debug_true\", \"readable_sql\"], \"propagate\":", "\"DEBUG\" if env.get(\"SQL\", False) else \"INFO\", \"handlers\": [\"console\"], \"filters\": [\"require_debug_true\", \"readable_sql\"], \"propagate\": False,", "# Password validation # https://docs.djangoproject.com/en/3.0/ref/settings/#auth-password-validators AUTH_PASSWORD_VALIDATORS = [ { \"NAME\": \"django.contrib.auth.password_validation.UserAttributeSimilarityValidator\", }, {\"NAME\":", "AUTH_PASSWORD_VALIDATORS = [ { \"NAME\": \"django.contrib.auth.password_validation.UserAttributeSimilarityValidator\", }, {\"NAME\": \"django.contrib.auth.password_validation.MinimumLengthValidator\"}, {\"NAME\": \"django.contrib.auth.password_validation.CommonPasswordValidator\"}, {\"NAME\": \"django.contrib.auth.password_validation.NumericPasswordValidator\"},", "= os.path.dirname(__file__) SECRET_KEY = env.get(\"SECRET_KEY\", \"Rotom\") DEBUG = env.get(\"DEBUG\", False) ALLOWED_HOSTS = [\"*\"]", "] # Internationalization # https://docs.djangoproject.com/en/stable/topics/i18n/ LANGUAGE_CODE = \"en-us\" TIME_ZONE = \"America/Chicago\" USE_I18N =", "{ \"django.db.backends\": { \"level\": \"DEBUG\" if env.get(\"SQL\", False) else \"INFO\", \"handlers\": [\"console\"], \"filters\":", "from project_runpy import env BASE_DIR = os.path.dirname(__file__) SECRET_KEY 
= env.get(\"SECRET_KEY\", \"Rotom\") DEBUG =", "[\"console\"]}, \"formatters\": { \"dev\": { \"format\": \"%(levelname)s %(name)s %(message)s\", # 'datefmt': '%Y-%m-%dT%H:%M:%S%z', #", "dj_database_url from project_runpy import env BASE_DIR = os.path.dirname(__file__) SECRET_KEY = env.get(\"SECRET_KEY\", \"Rotom\") DEBUG", "import env BASE_DIR = os.path.dirname(__file__) SECRET_KEY = env.get(\"SECRET_KEY\", \"Rotom\") DEBUG = env.get(\"DEBUG\", False)", "project_runpy import env BASE_DIR = os.path.dirname(__file__) SECRET_KEY = env.get(\"SECRET_KEY\", \"Rotom\") DEBUG = env.get(\"DEBUG\",", "( \"django.middleware.security.SecurityMiddleware\", \"django.contrib.sessions.middleware.SessionMiddleware\", \"django.middleware.common.CommonMiddleware\", \"django.middleware.csrf.CsrfViewMiddleware\", \"django.contrib.auth.middleware.AuthenticationMiddleware\", \"django.contrib.messages.middleware.MessageMiddleware\", \"django.middleware.clickjacking.XFrameOptionsMiddleware\", ) ROOT_URLCONF = \"bandc.urls\" WSGI_APPLICATION", "Build paths inside the project like this: os.path.join(BASE_DIR, ...) import os import dj_database_url", "DEFAULT_AUTO_FIELD = \"django.db.models.AutoField\" # Password validation # https://docs.djangoproject.com/en/3.0/ref/settings/#auth-password-validators AUTH_PASSWORD_VALIDATORS = [ { \"NAME\":", "}, \"sh\": {\"level\": \"WARNING\", \"propagate\": False}, \"pdfminer\": {\"level\": \"WARNING\", \"propagate\": False}, \"factory\": {\"level\":", "\"django.contrib.messages.middleware.MessageMiddleware\", \"django.middleware.clickjacking.XFrameOptionsMiddleware\", ) ROOT_URLCONF = \"bandc.urls\" WSGI_APPLICATION = \"bandc.wsgi.application\" # Database # https://docs.djangoproject.com/en/stable/ref/settings/#databases", "\"debug\": DEBUG, }, }, ] # Static files (CSS, JavaScript, Images) # https://docs.djangoproject.com/en/dev/howto/static-files/", "LANGUAGE_CODE = \"en-us\" TIME_ZONE = \"America/Chicago\" USE_I18N = False USE_L10N = False USE_TZ", "files (CSS, JavaScript, Images) # https://docs.djangoproject.com/en/dev/howto/static-files/ STATIC_URL = \"/static/\" STATICFILES_DIRS = (os.path.join(BASE_DIR, \"static\"),)", "= [ { \"BACKEND\": \"django.template.backends.django.DjangoTemplates\", \"DIRS\": [os.path.join(BASE_DIR, \"templates\")], \"APP_DIRS\": True, \"OPTIONS\": { \"context_processors\":", "= os.path.join(BASE_DIR, \"..\", \"media\") MEDIA_URL = \"/media/\" LOGGING = { \"version\": 1, \"disable_existing_loggers\":", "\"django.utils.log.RequireDebugFalse\"}, \"require_debug_true\": {\"()\": \"django.utils.log.RequireDebugTrue\"}, \"readable_sql\": {\"()\": \"project_runpy.ReadableSqlFilter\"}, }, \"handlers\": { \"console\": { \"level\":", "\"dev\", \"class\": \"project_runpy.ColorizingStreamHandler\", }, }, \"loggers\": { \"django.db.backends\": { \"level\": \"DEBUG\" if env.get(\"SQL\",", "\"BACKEND\": \"django.template.backends.django.DjangoTemplates\", \"DIRS\": [os.path.join(BASE_DIR, \"templates\")], \"APP_DIRS\": True, \"OPTIONS\": { \"context_processors\": [ \"django.template.context_processors.debug\", \"django.template.context_processors.request\",", "TEMPLATES = [ { \"BACKEND\": \"django.template.backends.django.DjangoTemplates\", \"DIRS\": [os.path.join(BASE_DIR, \"templates\")], \"APP_DIRS\": True, \"OPTIONS\": {", "\"class\": \"pythonjsonlogger.jsonlogger.JsonFormatter\", }, }, \"filters\": { \"require_debug_false\": {\"()\": \"django.utils.log.RequireDebugFalse\"}, \"require_debug_true\": {\"()\": \"django.utils.log.RequireDebugTrue\"}, 
\"readable_sql\":", "\"django.contrib.auth.password_validation.CommonPasswordValidator\"}, {\"NAME\": \"django.contrib.auth.password_validation.NumericPasswordValidator\"}, ] # Internationalization # https://docs.djangoproject.com/en/stable/topics/i18n/ LANGUAGE_CODE = \"en-us\" TIME_ZONE =", "\"handlers\": [\"console\"]}, \"formatters\": { \"dev\": { \"format\": \"%(levelname)s %(name)s %(message)s\", # 'datefmt': '%Y-%m-%dT%H:%M:%S%z',", "\"project_runpy.ColorizingStreamHandler\", }, }, \"loggers\": { \"django.db.backends\": { \"level\": \"DEBUG\" if env.get(\"SQL\", False) else", "\"django.template.backends.django.DjangoTemplates\", \"DIRS\": [os.path.join(BASE_DIR, \"templates\")], \"APP_DIRS\": True, \"OPTIONS\": { \"context_processors\": [ \"django.template.context_processors.debug\", \"django.template.context_processors.request\", \"django.contrib.auth.context_processors.auth\",", "dj_database_url.config(default=\"sqlite:///bandc.db\")} DEFAULT_AUTO_FIELD = \"django.db.models.AutoField\" # Password validation # https://docs.djangoproject.com/en/3.0/ref/settings/#auth-password-validators AUTH_PASSWORD_VALIDATORS = [ {", "Database # https://docs.djangoproject.com/en/stable/ref/settings/#databases DATABASES = {\"default\": dj_database_url.config(default=\"sqlite:///bandc.db\")} DEFAULT_AUTO_FIELD = \"django.db.models.AutoField\" # Password validation", "Images) # https://docs.djangoproject.com/en/dev/howto/static-files/ STATIC_URL = \"/static/\" STATICFILES_DIRS = (os.path.join(BASE_DIR, \"static\"),) MEDIA_ROOT = os.path.join(BASE_DIR,", "STATIC_URL = \"/static/\" STATICFILES_DIRS = (os.path.join(BASE_DIR, \"static\"),) MEDIA_ROOT = os.path.join(BASE_DIR, \"..\", \"media\") MEDIA_URL", "os.path.join(BASE_DIR, \"..\", \"media\") MEDIA_URL = \"/media/\" LOGGING = { \"version\": 1, \"disable_existing_loggers\": False,", "env.get(\"SECRET_KEY\", \"Rotom\") DEBUG = env.get(\"DEBUG\", False) ALLOWED_HOSTS = [\"*\"] INSTALLED_APPS = ( \"bandc.apps.agenda.apps.AgendaConfig\",", "= [ { \"NAME\": \"django.contrib.auth.password_validation.UserAttributeSimilarityValidator\", }, {\"NAME\": \"django.contrib.auth.password_validation.MinimumLengthValidator\"}, {\"NAME\": \"django.contrib.auth.password_validation.CommonPasswordValidator\"}, {\"NAME\": \"django.contrib.auth.password_validation.NumericPasswordValidator\"}, ]", "= \"/media/\" LOGGING = { \"version\": 1, \"disable_existing_loggers\": False, \"root\": {\"level\": os.environ.get(\"LOG_LEVEL\", \"WARNING\"),", "\"INFO\", \"handlers\": [\"console\"], \"filters\": [\"require_debug_true\", \"readable_sql\"], \"propagate\": False, }, \"sh\": {\"level\": \"WARNING\", \"propagate\":", "# Internationalization # https://docs.djangoproject.com/en/stable/topics/i18n/ LANGUAGE_CODE = \"en-us\" TIME_ZONE = \"America/Chicago\" USE_I18N = False", "\"media\") MEDIA_URL = \"/media/\" LOGGING = { \"version\": 1, \"disable_existing_loggers\": False, \"root\": {\"level\":", "\"bandc.apps.agenda.apps.AgendaConfig\", \"django.contrib.admin\", \"django.contrib.auth\", \"django.contrib.contenttypes\", \"django.contrib.sessions\", \"django.contrib.messages\", \"django.contrib.staticfiles\", # support \"django_extensions\", \"django_object_actions\", \"bootstrap_pagination\", )", "USE_L10N = False USE_TZ = True TEMPLATES = [ { \"BACKEND\": \"django.template.backends.django.DjangoTemplates\", \"DIRS\":", "{ \"BACKEND\": \"django.template.backends.django.DjangoTemplates\", \"DIRS\": [os.path.join(BASE_DIR, \"templates\")], \"APP_DIRS\": True, \"OPTIONS\": { 
\"context_processors\": [ \"django.template.context_processors.debug\",", "https://docs.djangoproject.com/en/stable/topics/i18n/ LANGUAGE_CODE = \"en-us\" TIME_ZONE = \"America/Chicago\" USE_I18N = False USE_L10N = False", "] # Static files (CSS, JavaScript, Images) # https://docs.djangoproject.com/en/dev/howto/static-files/ STATIC_URL = \"/static/\" STATICFILES_DIRS", "False, }, \"sh\": {\"level\": \"WARNING\", \"propagate\": False}, \"pdfminer\": {\"level\": \"WARNING\", \"propagate\": False}, \"factory\":", "{\"NAME\": \"django.contrib.auth.password_validation.CommonPasswordValidator\"}, {\"NAME\": \"django.contrib.auth.password_validation.NumericPasswordValidator\"}, ] # Internationalization # https://docs.djangoproject.com/en/stable/topics/i18n/ LANGUAGE_CODE = \"en-us\" TIME_ZONE", "False USE_L10N = False USE_TZ = True TEMPLATES = [ { \"BACKEND\": \"django.template.backends.django.DjangoTemplates\",", "[\"*\"] INSTALLED_APPS = ( \"bandc.apps.agenda.apps.AgendaConfig\", \"django.contrib.admin\", \"django.contrib.auth\", \"django.contrib.contenttypes\", \"django.contrib.sessions\", \"django.contrib.messages\", \"django.contrib.staticfiles\", # support", "\"django.middleware.common.CommonMiddleware\", \"django.middleware.csrf.CsrfViewMiddleware\", \"django.contrib.auth.middleware.AuthenticationMiddleware\", \"django.contrib.messages.middleware.MessageMiddleware\", \"django.middleware.clickjacking.XFrameOptionsMiddleware\", ) ROOT_URLCONF = \"bandc.urls\" WSGI_APPLICATION = \"bandc.wsgi.application\" #", "{ \"context_processors\": [ \"django.template.context_processors.debug\", \"django.template.context_processors.request\", \"django.contrib.auth.context_processors.auth\", \"django.contrib.messages.context_processors.messages\", \"bandc.context_processors.base_url\", ], \"debug\": DEBUG, }, },", "}, \"filters\": { \"require_debug_false\": {\"()\": \"django.utils.log.RequireDebugFalse\"}, \"require_debug_true\": {\"()\": \"django.utils.log.RequireDebugTrue\"}, \"readable_sql\": {\"()\": \"project_runpy.ReadableSqlFilter\"}, },", "{\"NAME\": \"django.contrib.auth.password_validation.NumericPasswordValidator\"}, ] # Internationalization # https://docs.djangoproject.com/en/stable/topics/i18n/ LANGUAGE_CODE = \"en-us\" TIME_ZONE = \"America/Chicago\"" ]
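# --- Illustrative sketch, not part of the original settings file ---
# The DATABASES line above delegates to dj_database_url: it reads the
# DATABASE_URL environment variable and parses it into Django's DATABASES
# dict format, falling back to the sqlite URL passed as ``default``. The URL
# below is a made-up example, shown only to illustrate the parsed shape.
#
#     >>> import dj_database_url
#     >>> cfg = dj_database_url.parse("postgres://user:secret@localhost:5432/bandc")
#     >>> cfg["NAME"], cfg["HOST"]
#     ('bandc', 'localhost')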
<filename>algorithms_keeper/api.py
from typing import Mapping, Tuple

from gidgethub.abc import UTF_8_CHARSET
from gidgethub.aiohttp import GitHubAPI as BaseGitHubAPI

from .log import STATUS_OK, inject_status_color, logger

TOKEN_ENDPOINT = "access_tokens"


class GitHubAPI(BaseGitHubAPI):  # pragma: no cover

    LOG_FORMAT = 'api "%(method)s %(path)s %(data)s %(version)s" => %(status)s'

    async def _request(
        self, method: str, url: str, headers: Mapping[str, str], body: bytes = b""
    ) -> Tuple[int, Mapping[str, str], bytes]:
        """This is the same method as `gidgethub.aiohttp.GitHubAPI._request` with
        the addition of logging the request-response cycle. No need to cover this
        function.

        The logger information will be useful to know what actions the bot made.

        INFO: All actions taken by the bot.
        ERROR: Unknown error in the API call.
        """
        async with self._session.request(
            method, url, headers=headers, data=body
        ) as response:
            # We don't want to reveal the `installation_id` from the URL.
            if response.url.name != TOKEN_ENDPOINT:
                inject_status_color(response.status)
                data = "NONE" if body == b"" else body.decode(UTF_8_CHARSET)
                loggerlevel = (
                    logger.info if response.status in STATUS_OK else logger.error
                )
                loggerlevel(
                    self.LOG_FORMAT,
                    {
                        "method": method,
                        # host is always going to be 'api.github.com'.
                        "path": response.url.raw_path_qs,
                        "version": f"{response.url.scheme.upper()}/"
                        f"{response.version.major}.{response.version.minor}",
                        "data": data,
                        "status": f"{response.status}:{response.reason}",
                    },
                )
            return response.status, response.headers, await response.read()
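# --- Illustrative usage sketch, not part of algorithms_keeper itself ---
# The subclass above is a drop-in replacement for gidgethub's aiohttp client:
# every call funnels through the overridden ``_request`` and gets logged.
# The requester string and repository path below are made-up examples.
import aiohttp


async def _example_fetch(oauth_token: str):
    async with aiohttp.ClientSession() as session:
        gh = GitHubAPI(session, "algorithms-keeper", oauth_token=oauth_token)
        # On success this logs something like
        # 'api "GET /repos/user/repo NONE HTTPS/1.1" => 200:OK' at INFO level.
        return await gh.getitem("/repos/user/repo")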
<filename>pymecompress/codecs.py
"""
numcodecs compatible compression and quantization codecs.
"""

from . import bcl

import numcodecs
from numcodecs.abc import Codec


class Huffman(Codec):
    codec_id = 'pymecompress.huffman'

    def encode(self, buf):
        return bcl.huffman_compress_buffer(buf)

    def decode(self, buf, out=None):
        return bcl.huffman_decompress_buffer(buf, out)

    def get_config(self):
        return {'codec_id': self.codec_id}

    @classmethod
    def from_config(cls, config):
        return cls()


numcodecs.register_codec(Huffman)


class HuffmanQuant16(Codec):
    codec_id = 'pymecompress.quant16'

    def __init__(self, offset=0, scale=1):
        self._offset = offset
        self._scale = scale

    def encode(self, buf):
        return bcl.huffman_compress_quant_buffer(buf, self._offset, self._scale)

    def decode(self, buf, out=None):
        ret = bcl.huffman_decompress_buffer(buf, None)
        ret = (ret * ret) / self._scale + self._offset
        if out is None:
            out = ret
        else:
            out[:] = ret
        return out

    def get_config(self):
        return {'codec_id': self.codec_id, 'offset': self._offset, 'scale': self._scale}

    @classmethod
    def from_config(cls, config):
        return cls(offset=config.get('offset', 0), scale=config.get('scale', 1))


numcodecs.register_codec(HuffmanQuant16)
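# --- Illustrative sketch, not part of the original module ---
# Both classes self-register via numcodecs.register_codec, so they can be
# looked up by codec_id and used anywhere numcodecs codecs are accepted
# (e.g. as a zarr compressor). Huffman is lossless; HuffmanQuant16 is lossy:
# judging from ``decode`` above (which squares and rescales the result), the
# C side presumably stores sqrt((x - offset) * scale) before Huffman coding.
#
#     import numpy as np
#     import numcodecs
#
#     huff = numcodecs.get_codec({'id': 'pymecompress.huffman'})
#     raw = np.arange(256, dtype=np.uint8)
#     restored = huff.decode(huff.encode(raw))  # lossless round trip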
<filename>test.py
import torch
from cppexample import normalize, gaussian


def main():
    x = torch.tensor([1, 2, 3], dtype=torch.float)
    y = normalize(x)
    print(y)
    x = gaussian(x, 0.0, 1.0, 1.0)
    print(x)


if __name__ == '__main__':
    main()
[ "super().__init__() self.M = M self.angle = 359 / 10 * self.M def forward(self,", "__init__(self, M): super().__init__() self.M = M def forward(self, img): return ttf.functional.autocontrast(img) class Sharpness(nn.Module):", "import torch.nn as nn from torchvision import transforms as ttf class RandAugment(nn.Module): def", "super().__init__() self.M = M def forward(self, img): return ttf.functional.equalize(img) class Solarize(nn.Module): def __init__(self,", "M): super().__init__() self.M = M def forward(self, img): return img class Contrast(nn.Module): def", "M def forward(self, img): return img class Contrast(nn.Module): def __init__(self, M): super().__init__() self.M", "[self.angle, 0]) class ShearY(nn.Module): def __init__(self, M): super().__init__() self.M = M self.angle =", "M): super().__init__() self.M = M def forward(self, img): return ttf.functional.adjust_contrast(img, self.M / 5.)", "359 / 10 * self.M - 180 def forward(self, img): return ttf.functional.affine(img, 0,", "ttf.functional.adjust_brightness(img, self.M / 5.) class Equalize(nn.Module): def __init__(self, M): super().__init__() self.M = M", "img): self.aug_index = torch.randperm(len(self.aug_list))[:self.N] self.augmentations = nn.ModuleList([]) for aug_id in self.aug_index: self.augmentations.append(self.aug_list[aug_id](self.M)) self.augmentations", "def __init__(self, M): super().__init__() self.M = M def forward(self, img): return ttf.functional.adjust_sharpness(img, self.M", "= M self.angle = 359 / 10 * self.M - 180 def forward(self,", "(max_size - 1) / 10 * self.M], 1, [0, 0]) class AutoContrast(nn.Module): def", "10 * self.M - 180 def forward(self, img): return ttf.functional.affine(img, 0, [0, 0],", "M def forward(self, img): return ttf.functional.solarize(img, (10 - self.M) * 25.5) class Posterize(nn.Module):", "self.M = M def forward(self, img): return ttf.functional.autocontrast(img) class Sharpness(nn.Module): def __init__(self, M):", "class Brightness(nn.Module): def __init__(self, M): super().__init__() self.M = M def forward(self, img): return", "= img.size()[1] except TypeError: max_size = img.size()[1] return ttf.functional.affine(img, 0, [0, (max_size -", "def forward(self, img): return ttf.functional.adjust_saturation(img, self.M / 5.) class Brightness(nn.Module): def __init__(self, M):", "M): super().__init__() self.M = M self.angle = 359 / 10 * self.M def", "def forward(self, img): return img class Contrast(nn.Module): def __init__(self, M): super().__init__() self.M =", "try: max_size = img.size()[1] except TypeError: max_size = img.size()[1] return ttf.functional.affine(img, 0, [0,", "N self.M = M self.aug_list = [Rotate, ShearX, ShearY, TranslateX, TranslateY, AutoContrast, Sharpness,", "max_size = img.size()[1] return ttf.functional.affine(img, 0, [0, (max_size - 1) / 10 *", "translate y translate x autoContrast sharpness identity contrast color brightness eqaulize solarize posterize", "import transforms as ttf class RandAugment(nn.Module): def __init__(self, N, M): super().__init__() \"\"\" rotate", "def forward(self, img): return ttf.functional.rotate(img, self.angle) class ShearX(nn.Module): def __init__(self, M): super().__init__() self.M", "return ttf.functional.rotate(img, self.angle) class ShearX(nn.Module): def __init__(self, M): super().__init__() self.M = M self.angle", "ttf.functional.adjust_sharpness(img, self.M / 5.) 
class Identity(nn.Module): def __init__(self, M): super().__init__() self.M = M", "return ttf.functional.affine(img, 0, [(max_size - 1) / 10 * self.M, 0], 1, [0,", "1, [0, 0]) class TranslateY(nn.Module): def __init__(self, M): super().__init__() self.M = M def", "M def forward(self, img): return ttf.functional.adjust_brightness(img, self.M / 5.) class Equalize(nn.Module): def __init__(self,", "ttf.functional.affine(img, 0, [0, 0], 1, [0, self.angle]) class TranslateX(nn.Module): def __init__(self, M): super().__init__()", "1, [0, 0]) class AutoContrast(nn.Module): def __init__(self, M): super().__init__() self.M = M def", "M): super().__init__() self.M = M def forward(self, img): return ttf.functional.equalize(img) class Solarize(nn.Module): def", "return ttf.functional.affine(img, 0, [0, (max_size - 1) / 10 * self.M], 1, [0,", "return ttf.functional.adjust_sharpness(img, self.M / 5.) class Identity(nn.Module): def __init__(self, M): super().__init__() self.M =", "= img.size()[0] except TypeError: max_size = img.size()[0] return ttf.functional.affine(img, 0, [(max_size - 1)", "Identity(nn.Module): def __init__(self, M): super().__init__() self.M = M def forward(self, img): return img", "/ 10 * self.M def forward(self, img): return ttf.functional.rotate(img, self.angle) class ShearX(nn.Module): def", "- 180 def forward(self, img): return ttf.functional.affine(img, 0, [0, 0], 1, [self.angle, 0])", "super().__init__() self.M = M def forward(self, img): try: max_size = img.size()[1] except TypeError:", "class RandAugment(nn.Module): def __init__(self, N, M): super().__init__() \"\"\" rotate shear x shear y", "return ttf.functional.affine(img, 0, [0, 0], 1, [0, self.angle]) class TranslateX(nn.Module): def __init__(self, M):", "M): super().__init__() self.M = M def forward(self, img): return ttf.functional.autocontrast(img) class Sharpness(nn.Module): def", "__init__(self, M): super().__init__() self.M = M def forward(self, img): return ttf.functional.solarize(img, (10 -", "* 25.5) class Posterize(nn.Module): def __init__(self, M): super().__init__() self.M = M def forward(self,", "class Posterize(nn.Module): def __init__(self, M): super().__init__() self.M = M def forward(self, img): return", "sharpness identity contrast color brightness eqaulize solarize posterize \"\"\" self.N = N self.M", "def __init__(self, M): super().__init__() self.M = M def forward(self, img): return ttf.functional.adjust_brightness(img, self.M", "solarize posterize \"\"\" self.N = N self.M = M self.aug_list = [Rotate, ShearX,", "return ttf.functional.solarize(img, (10 - self.M) * 25.5) class Posterize(nn.Module): def __init__(self, M): super().__init__()", "import torch import torch.nn as nn from torchvision import transforms as ttf class", "TranslateX, TranslateY, AutoContrast, Sharpness, Identity, Contrast, Color, Brightness, Equalize, Solarize, Posterize] def forward(self,", "autoContrast sharpness identity contrast color brightness eqaulize solarize posterize \"\"\" self.N = N", "= M def forward(self, img): return ttf.functional.adjust_contrast(img, self.M / 5.) class Color(nn.Module): def", "[Rotate, ShearX, ShearY, TranslateX, TranslateY, AutoContrast, Sharpness, Identity, Contrast, Color, Brightness, Equalize, Solarize,", "class Rotate(nn.Module): def __init__(self, M): super().__init__() self.M = M self.angle = 359 /", "super().__init__() self.M = M def forward(self, img): return ttf.functional.adjust_contrast(img, self.M / 5.) 
class", "self.aug_list = [Rotate, ShearX, ShearY, TranslateX, TranslateY, AutoContrast, Sharpness, Identity, Contrast, Color, Brightness,", "M): super().__init__() self.M = M def forward(self, img): return ttf.functional.adjust_brightness(img, self.M / 5.)", "* self.M def forward(self, img): return ttf.functional.rotate(img, self.angle) class ShearX(nn.Module): def __init__(self, M):", "ttf.functional.affine(img, 0, [(max_size - 1) / 10 * self.M, 0], 1, [0, 0])", "0]) class TranslateY(nn.Module): def __init__(self, M): super().__init__() self.M = M def forward(self, img):", "ShearX(nn.Module): def __init__(self, M): super().__init__() self.M = M self.angle = 359 / 10", "= [Rotate, ShearX, ShearY, TranslateX, TranslateY, AutoContrast, Sharpness, Identity, Contrast, Color, Brightness, Equalize,", "\"\"\" self.N = N self.M = M self.aug_list = [Rotate, ShearX, ShearY, TranslateX,", "self.M = M def forward(self, img): return ttf.functional.equalize(img) class Solarize(nn.Module): def __init__(self, M):", "= torch.randperm(len(self.aug_list))[:self.N] self.augmentations = nn.ModuleList([]) for aug_id in self.aug_index: self.augmentations.append(self.aug_list[aug_id](self.M)) self.augmentations = nn.Sequential(*self.augmentations)", "forward(self, img): return ttf.functional.adjust_brightness(img, self.M / 5.) class Equalize(nn.Module): def __init__(self, M): super().__init__()", "in self.aug_index: self.augmentations.append(self.aug_list[aug_id](self.M)) self.augmentations = nn.Sequential(*self.augmentations) return self.augmentations(img) class Rotate(nn.Module): def __init__(self, M):", "self.angle = 359 / 10 * self.M - 180 def forward(self, img): return", "0, [(max_size - 1) / 10 * self.M, 0], 1, [0, 0]) class", "= M def forward(self, img): return ttf.functional.posterize(img, round((10 - self.M) / 10 *", "super().__init__() self.M = M def forward(self, img): try: max_size = img.size()[0] except TypeError:", "25.5) class Posterize(nn.Module): def __init__(self, M): super().__init__() self.M = M def forward(self, img):", "nn.Sequential(*self.augmentations) return self.augmentations(img) class Rotate(nn.Module): def __init__(self, M): super().__init__() self.M = M self.angle", "M): super().__init__() self.M = M def forward(self, img): return ttf.functional.adjust_sharpness(img, self.M / 5.)", "= M def forward(self, img): try: max_size = img.size()[1] except TypeError: max_size =", "img.size()[1] return ttf.functional.affine(img, 0, [0, (max_size - 1) / 10 * self.M], 1,", "color brightness eqaulize solarize posterize \"\"\" self.N = N self.M = M self.aug_list", "ShearX, ShearY, TranslateX, TranslateY, AutoContrast, Sharpness, Identity, Contrast, Color, Brightness, Equalize, Solarize, Posterize]", "M def forward(self, img): return ttf.functional.posterize(img, round((10 - self.M) / 10 * 8))", "AutoContrast(nn.Module): def __init__(self, M): super().__init__() self.M = M def forward(self, img): return ttf.functional.autocontrast(img)", "0, [0, (max_size - 1) / 10 * self.M], 1, [0, 0]) class", "0]) class AutoContrast(nn.Module): def __init__(self, M): super().__init__() self.M = M def forward(self, img):", "return ttf.functional.adjust_contrast(img, self.M / 5.) class Color(nn.Module): def __init__(self, M): super().__init__() self.M =", "[0, (max_size - 1) / 10 * self.M], 1, [0, 0]) class AutoContrast(nn.Module):", "Sharpness(nn.Module): def __init__(self, M): super().__init__() self.M = M def forward(self, img): return ttf.functional.adjust_sharpness(img,", "/ 5.) 
class Color(nn.Module): def __init__(self, M): super().__init__() self.M = M def forward(self,", "self.M = M def forward(self, img): return ttf.functional.adjust_brightness(img, self.M / 5.) class Equalize(nn.Module):", "5.) class Equalize(nn.Module): def __init__(self, M): super().__init__() self.M = M def forward(self, img):", "img): return ttf.functional.equalize(img) class Solarize(nn.Module): def __init__(self, M): super().__init__() self.M = M def", "forward(self, img): return ttf.functional.affine(img, 0, [0, 0], 1, [self.angle, 0]) class ShearY(nn.Module): def", "Equalize, Solarize, Posterize] def forward(self, img): self.aug_index = torch.randperm(len(self.aug_list))[:self.N] self.augmentations = nn.ModuleList([]) for", "def forward(self, img): try: max_size = img.size()[1] except TypeError: max_size = img.size()[1] return", "TranslateY, AutoContrast, Sharpness, Identity, Contrast, Color, Brightness, Equalize, Solarize, Posterize] def forward(self, img):", "180 def forward(self, img): return ttf.functional.affine(img, 0, [0, 0], 1, [self.angle, 0]) class", "img.size()[0] except TypeError: max_size = img.size()[0] return ttf.functional.affine(img, 0, [(max_size - 1) /", "def forward(self, img): return ttf.functional.adjust_brightness(img, self.M / 5.) class Equalize(nn.Module): def __init__(self, M):", "self.M = M def forward(self, img): return img class Contrast(nn.Module): def __init__(self, M):", "ttf.functional.adjust_saturation(img, self.M / 5.) class Brightness(nn.Module): def __init__(self, M): super().__init__() self.M = M", "0], 1, [0, 0]) class TranslateY(nn.Module): def __init__(self, M): super().__init__() self.M = M", "0, [0, 0], 1, [self.angle, 0]) class ShearY(nn.Module): def __init__(self, M): super().__init__() self.M", "from torchvision import transforms as ttf class RandAugment(nn.Module): def __init__(self, N, M): super().__init__()", "torch.randperm(len(self.aug_list))[:self.N] self.augmentations = nn.ModuleList([]) for aug_id in self.aug_index: self.augmentations.append(self.aug_list[aug_id](self.M)) self.augmentations = nn.Sequential(*self.augmentations) return", "torch import torch.nn as nn from torchvision import transforms as ttf class RandAugment(nn.Module):", "0], 1, [0, self.angle]) class TranslateX(nn.Module): def __init__(self, M): super().__init__() self.M = M", "def forward(self, img): self.aug_index = torch.randperm(len(self.aug_list))[:self.N] self.augmentations = nn.ModuleList([]) for aug_id in self.aug_index:", "forward(self, img): return ttf.functional.equalize(img) class Solarize(nn.Module): def __init__(self, M): super().__init__() self.M = M", "return self.augmentations(img) class Rotate(nn.Module): def __init__(self, M): super().__init__() self.M = M self.angle =", "self.M = M self.angle = 359 / 10 * self.M def forward(self, img):", "forward(self, img): return ttf.functional.rotate(img, self.angle) class ShearX(nn.Module): def __init__(self, M): super().__init__() self.M =", "= 359 / 10 * self.M - 180 def forward(self, img): return ttf.functional.affine(img,", "forward(self, img): return ttf.functional.adjust_sharpness(img, self.M / 5.) 
class Identity(nn.Module): def __init__(self, M): super().__init__()", "max_size = img.size()[0] return ttf.functional.affine(img, 0, [(max_size - 1) / 10 * self.M,", "forward(self, img): try: max_size = img.size()[0] except TypeError: max_size = img.size()[0] return ttf.functional.affine(img,", "self.M = M def forward(self, img): return ttf.functional.posterize(img, round((10 - self.M) / 10", "def __init__(self, M): super().__init__() self.M = M def forward(self, img): return ttf.functional.solarize(img, (10", "/ 10 * self.M - 180 def forward(self, img): return ttf.functional.affine(img, 0, [0,", "M def forward(self, img): return ttf.functional.adjust_contrast(img, self.M / 5.) class Color(nn.Module): def __init__(self,", "= nn.ModuleList([]) for aug_id in self.aug_index: self.augmentations.append(self.aug_list[aug_id](self.M)) self.augmentations = nn.Sequential(*self.augmentations) return self.augmentations(img) class", "def __init__(self, M): super().__init__() self.M = M def forward(self, img): return img class", "[0, 0], 1, [self.angle, 0]) class ShearY(nn.Module): def __init__(self, M): super().__init__() self.M =", "Brightness(nn.Module): def __init__(self, M): super().__init__() self.M = M def forward(self, img): return ttf.functional.adjust_brightness(img,", "TranslateY(nn.Module): def __init__(self, M): super().__init__() self.M = M def forward(self, img): try: max_size", "as ttf class RandAugment(nn.Module): def __init__(self, N, M): super().__init__() \"\"\" rotate shear x", "10 * self.M def forward(self, img): return ttf.functional.rotate(img, self.angle) class ShearX(nn.Module): def __init__(self,", "Solarize(nn.Module): def __init__(self, M): super().__init__() self.M = M def forward(self, img): return ttf.functional.solarize(img,", "M self.aug_list = [Rotate, ShearX, ShearY, TranslateX, TranslateY, AutoContrast, Sharpness, Identity, Contrast, Color,", "- 180 def forward(self, img): return ttf.functional.affine(img, 0, [0, 0], 1, [0, self.angle])", "(10 - self.M) * 25.5) class Posterize(nn.Module): def __init__(self, M): super().__init__() self.M =", "img): return ttf.functional.adjust_brightness(img, self.M / 5.) class Equalize(nn.Module): def __init__(self, M): super().__init__() self.M", "self.M = M self.angle = 359 / 10 * self.M - 180 def", "img): return ttf.functional.adjust_sharpness(img, self.M / 5.) class Identity(nn.Module): def __init__(self, M): super().__init__() self.M", "translate x autoContrast sharpness identity contrast color brightness eqaulize solarize posterize \"\"\" self.N", "= M def forward(self, img): return ttf.functional.adjust_saturation(img, self.M / 5.) 
class Brightness(nn.Module): def", "forward(self, img): self.aug_index = torch.randperm(len(self.aug_list))[:self.N] self.augmentations = nn.ModuleList([]) for aug_id in self.aug_index: self.augmentations.append(self.aug_list[aug_id](self.M))", "self.N = N self.M = M self.aug_list = [Rotate, ShearX, ShearY, TranslateX, TranslateY,", "M def forward(self, img): return ttf.functional.autocontrast(img) class Sharpness(nn.Module): def __init__(self, M): super().__init__() self.M", "self.augmentations = nn.ModuleList([]) for aug_id in self.aug_index: self.augmentations.append(self.aug_list[aug_id](self.M)) self.augmentations = nn.Sequential(*self.augmentations) return self.augmentations(img)", "def __init__(self, N, M): super().__init__() \"\"\" rotate shear x shear y translate y", "self.M = M self.aug_list = [Rotate, ShearX, ShearY, TranslateX, TranslateY, AutoContrast, Sharpness, Identity,", "super().__init__() self.M = M def forward(self, img): return ttf.functional.adjust_brightness(img, self.M / 5.) class", "return ttf.functional.autocontrast(img) class Sharpness(nn.Module): def __init__(self, M): super().__init__() self.M = M def forward(self,", "shear x shear y translate y translate x autoContrast sharpness identity contrast color", "5.) class Identity(nn.Module): def __init__(self, M): super().__init__() self.M = M def forward(self, img):", "y translate x autoContrast sharpness identity contrast color brightness eqaulize solarize posterize \"\"\"", "img): return img class Contrast(nn.Module): def __init__(self, M): super().__init__() self.M = M def", "ttf.functional.solarize(img, (10 - self.M) * 25.5) class Posterize(nn.Module): def __init__(self, M): super().__init__() self.M", "180 def forward(self, img): return ttf.functional.affine(img, 0, [0, 0], 1, [0, self.angle]) class", "shear y translate y translate x autoContrast sharpness identity contrast color brightness eqaulize", "nn from torchvision import transforms as ttf class RandAugment(nn.Module): def __init__(self, N, M):", "- 1) / 10 * self.M], 1, [0, 0]) class AutoContrast(nn.Module): def __init__(self,", "img): return ttf.functional.solarize(img, (10 - self.M) * 25.5) class Posterize(nn.Module): def __init__(self, M):", "for aug_id in self.aug_index: self.augmentations.append(self.aug_list[aug_id](self.M)) self.augmentations = nn.Sequential(*self.augmentations) return self.augmentations(img) class Rotate(nn.Module): def", "self.M / 5.) class Color(nn.Module): def __init__(self, M): super().__init__() self.M = M def", "self.M = M def forward(self, img): return ttf.functional.solarize(img, (10 - self.M) * 25.5)", "Solarize, Posterize] def forward(self, img): self.aug_index = torch.randperm(len(self.aug_list))[:self.N] self.augmentations = nn.ModuleList([]) for aug_id", "__init__(self, M): super().__init__() self.M = M def forward(self, img): return ttf.functional.adjust_brightness(img, self.M /", "class Color(nn.Module): def __init__(self, M): super().__init__() self.M = M def forward(self, img): return", "Color(nn.Module): def __init__(self, M): super().__init__() self.M = M def forward(self, img): return ttf.functional.adjust_saturation(img,", "def forward(self, img): return ttf.functional.adjust_contrast(img, self.M / 5.) 
class Color(nn.Module): def __init__(self, M):", "self.aug_index: self.augmentations.append(self.aug_list[aug_id](self.M)) self.augmentations = nn.Sequential(*self.augmentations) return self.augmentations(img) class Rotate(nn.Module): def __init__(self, M): super().__init__()", "eqaulize solarize posterize \"\"\" self.N = N self.M = M self.aug_list = [Rotate,", "def __init__(self, M): super().__init__() self.M = M def forward(self, img): return ttf.functional.adjust_saturation(img, self.M", "__init__(self, M): super().__init__() self.M = M self.angle = 359 / 10 * self.M", "\"\"\" rotate shear x shear y translate y translate x autoContrast sharpness identity", "= M def forward(self, img): try: max_size = img.size()[0] except TypeError: max_size =", "__init__(self, M): super().__init__() self.M = M def forward(self, img): return ttf.functional.adjust_sharpness(img, self.M /", "ShearY(nn.Module): def __init__(self, M): super().__init__() self.M = M self.angle = 359 / 10", "def forward(self, img): return ttf.functional.autocontrast(img) class Sharpness(nn.Module): def __init__(self, M): super().__init__() self.M =", "img): return ttf.functional.adjust_contrast(img, self.M / 5.) class Color(nn.Module): def __init__(self, M): super().__init__() self.M", "super().__init__() self.M = M def forward(self, img): return ttf.functional.posterize(img, round((10 - self.M) /", "return ttf.functional.affine(img, 0, [0, 0], 1, [self.angle, 0]) class ShearY(nn.Module): def __init__(self, M):", "def __init__(self, M): super().__init__() self.M = M def forward(self, img): return ttf.functional.autocontrast(img) class", "M): super().__init__() self.M = M def forward(self, img): return ttf.functional.adjust_saturation(img, self.M / 5.)", "/ 5.) class Equalize(nn.Module): def __init__(self, M): super().__init__() self.M = M def forward(self,", "[0, self.angle]) class TranslateX(nn.Module): def __init__(self, M): super().__init__() self.M = M def forward(self,", "posterize \"\"\" self.N = N self.M = M self.aug_list = [Rotate, ShearX, ShearY,", "ttf.functional.equalize(img) class Solarize(nn.Module): def __init__(self, M): super().__init__() self.M = M def forward(self, img):", "def forward(self, img): try: max_size = img.size()[0] except TypeError: max_size = img.size()[0] return", "torch.nn as nn from torchvision import transforms as ttf class RandAugment(nn.Module): def __init__(self,", "self.M = M def forward(self, img): return ttf.functional.adjust_sharpness(img, self.M / 5.) class Identity(nn.Module):", "def forward(self, img): return ttf.functional.affine(img, 0, [0, 0], 1, [self.angle, 0]) class ShearY(nn.Module):", "ttf.functional.autocontrast(img) class Sharpness(nn.Module): def __init__(self, M): super().__init__() self.M = M def forward(self, img):", "M): super().__init__() self.M = M def forward(self, img): return ttf.functional.solarize(img, (10 - self.M)", "img): return ttf.functional.adjust_saturation(img, self.M / 5.) 
class Brightness(nn.Module): def __init__(self, M): super().__init__() self.M", "TranslateX(nn.Module): def __init__(self, M): super().__init__() self.M = M def forward(self, img): try: max_size", "self.M], 1, [0, 0]) class AutoContrast(nn.Module): def __init__(self, M): super().__init__() self.M = M", "ttf.functional.rotate(img, self.angle) class ShearX(nn.Module): def __init__(self, M): super().__init__() self.M = M self.angle =", "Posterize(nn.Module): def __init__(self, M): super().__init__() self.M = M def forward(self, img): return ttf.functional.posterize(img,", "def forward(self, img): return ttf.functional.affine(img, 0, [0, 0], 1, [0, self.angle]) class TranslateX(nn.Module):", "= nn.Sequential(*self.augmentations) return self.augmentations(img) class Rotate(nn.Module): def __init__(self, M): super().__init__() self.M = M", "self.M, 0], 1, [0, 0]) class TranslateY(nn.Module): def __init__(self, M): super().__init__() self.M =", "super().__init__() self.M = M self.angle = 359 / 10 * self.M - 180", "TypeError: max_size = img.size()[0] return ttf.functional.affine(img, 0, [(max_size - 1) / 10 *", "self.angle) class ShearX(nn.Module): def __init__(self, M): super().__init__() self.M = M self.angle = 359", "self.augmentations = nn.Sequential(*self.augmentations) return self.augmentations(img) class Rotate(nn.Module): def __init__(self, M): super().__init__() self.M =", "__init__(self, M): super().__init__() self.M = M def forward(self, img): return ttf.functional.posterize(img, round((10 -", "class ShearY(nn.Module): def __init__(self, M): super().__init__() self.M = M self.angle = 359 /", "brightness eqaulize solarize posterize \"\"\" self.N = N self.M = M self.aug_list =", "M def forward(self, img): try: max_size = img.size()[1] except TypeError: max_size = img.size()[1]", "except TypeError: max_size = img.size()[0] return ttf.functional.affine(img, 0, [(max_size - 1) / 10", "img.size()[0] return ttf.functional.affine(img, 0, [(max_size - 1) / 10 * self.M, 0], 1,", "M): super().__init__() self.M = M def forward(self, img): try: max_size = img.size()[0] except", "x shear y translate y translate x autoContrast sharpness identity contrast color brightness", "__init__(self, M): super().__init__() self.M = M def forward(self, img): return ttf.functional.equalize(img) class Solarize(nn.Module):", "M): super().__init__() \"\"\" rotate shear x shear y translate y translate x autoContrast", "return img class Contrast(nn.Module): def __init__(self, M): super().__init__() self.M = M def forward(self,", "__init__(self, M): super().__init__() self.M = M def forward(self, img): try: max_size = img.size()[1]", "5.) class Brightness(nn.Module): def __init__(self, M): super().__init__() self.M = M def forward(self, img):", "M): super().__init__() self.M = M def forward(self, img): try: max_size = img.size()[1] except", "__init__(self, N, M): super().__init__() \"\"\" rotate shear x shear y translate y translate", "class Solarize(nn.Module): def __init__(self, M): super().__init__() self.M = M def forward(self, img): return", "except TypeError: max_size = img.size()[1] return ttf.functional.affine(img, 0, [0, (max_size - 1) /", "self.M / 5.) 
class Equalize(nn.Module): def __init__(self, M): super().__init__() self.M = M def", "0], 1, [self.angle, 0]) class ShearY(nn.Module): def __init__(self, M): super().__init__() self.M = M", "1) / 10 * self.M, 0], 1, [0, 0]) class TranslateY(nn.Module): def __init__(self,", "super().__init__() self.M = M def forward(self, img): return ttf.functional.adjust_saturation(img, self.M / 5.) class", "def forward(self, img): return ttf.functional.solarize(img, (10 - self.M) * 25.5) class Posterize(nn.Module): def", "self.aug_index = torch.randperm(len(self.aug_list))[:self.N] self.augmentations = nn.ModuleList([]) for aug_id in self.aug_index: self.augmentations.append(self.aug_list[aug_id](self.M)) self.augmentations =", "* self.M - 180 def forward(self, img): return ttf.functional.affine(img, 0, [0, 0], 1,", "- self.M) * 25.5) class Posterize(nn.Module): def __init__(self, M): super().__init__() self.M = M", "self.M / 5.) class Brightness(nn.Module): def __init__(self, M): super().__init__() self.M = M def", "super().__init__() self.M = M def forward(self, img): return ttf.functional.adjust_sharpness(img, self.M / 5.) class", "[0, 0]) class TranslateY(nn.Module): def __init__(self, M): super().__init__() self.M = M def forward(self,", "self.M - 180 def forward(self, img): return ttf.functional.affine(img, 0, [0, 0], 1, [self.angle,", "M def forward(self, img): return ttf.functional.adjust_sharpness(img, self.M / 5.) class Identity(nn.Module): def __init__(self,", "forward(self, img): return ttf.functional.adjust_contrast(img, self.M / 5.) class Color(nn.Module): def __init__(self, M): super().__init__()", "/ 10 * self.M, 0], 1, [0, 0]) class TranslateY(nn.Module): def __init__(self, M):", "class Identity(nn.Module): def __init__(self, M): super().__init__() self.M = M def forward(self, img): return", "transforms as ttf class RandAugment(nn.Module): def __init__(self, N, M): super().__init__() \"\"\" rotate shear", "M def forward(self, img): try: max_size = img.size()[0] except TypeError: max_size = img.size()[0]", "Brightness, Equalize, Solarize, Posterize] def forward(self, img): self.aug_index = torch.randperm(len(self.aug_list))[:self.N] self.augmentations = nn.ModuleList([])", "self.angle = 359 / 10 * self.M def forward(self, img): return ttf.functional.rotate(img, self.angle)", "1, [0, self.angle]) class TranslateX(nn.Module): def __init__(self, M): super().__init__() self.M = M def", "Contrast(nn.Module): def __init__(self, M): super().__init__() self.M = M def forward(self, img): return ttf.functional.adjust_contrast(img,", "[0, 0], 1, [0, self.angle]) class TranslateX(nn.Module): def __init__(self, M): super().__init__() self.M =", "Equalize(nn.Module): def __init__(self, M): super().__init__() self.M = M def forward(self, img): return ttf.functional.equalize(img)", "try: max_size = img.size()[0] except TypeError: max_size = img.size()[0] return ttf.functional.affine(img, 0, [(max_size", "as nn from torchvision import transforms as ttf class RandAugment(nn.Module): def __init__(self, N,", "__init__(self, M): super().__init__() self.M = M def forward(self, img): return ttf.functional.adjust_saturation(img, self.M /", "10 * self.M], 1, [0, 0]) class AutoContrast(nn.Module): def __init__(self, M): super().__init__() self.M", "M def forward(self, img): return ttf.functional.adjust_saturation(img, self.M / 5.) 
class Brightness(nn.Module): def __init__(self,", "AutoContrast, Sharpness, Identity, Contrast, Color, Brightness, Equalize, Solarize, Posterize] def forward(self, img): self.aug_index", "img): return ttf.functional.autocontrast(img) class Sharpness(nn.Module): def __init__(self, M): super().__init__() self.M = M def", "5.) class Color(nn.Module): def __init__(self, M): super().__init__() self.M = M def forward(self, img):", "return ttf.functional.equalize(img) class Solarize(nn.Module): def __init__(self, M): super().__init__() self.M = M def forward(self,", "contrast color brightness eqaulize solarize posterize \"\"\" self.N = N self.M = M", "identity contrast color brightness eqaulize solarize posterize \"\"\" self.N = N self.M =", "ttf.functional.affine(img, 0, [0, 0], 1, [self.angle, 0]) class ShearY(nn.Module): def __init__(self, M): super().__init__()", "forward(self, img): return img class Contrast(nn.Module): def __init__(self, M): super().__init__() self.M = M", "class Sharpness(nn.Module): def __init__(self, M): super().__init__() self.M = M def forward(self, img): return", "Contrast, Color, Brightness, Equalize, Solarize, Posterize] def forward(self, img): self.aug_index = torch.randperm(len(self.aug_list))[:self.N] self.augmentations", "super().__init__() self.M = M def forward(self, img): return ttf.functional.solarize(img, (10 - self.M) *", "class Contrast(nn.Module): def __init__(self, M): super().__init__() self.M = M def forward(self, img): return", "self.M = M def forward(self, img): return ttf.functional.adjust_saturation(img, self.M / 5.) class Brightness(nn.Module):", "0]) class ShearY(nn.Module): def __init__(self, M): super().__init__() self.M = M self.angle = 359", "class Equalize(nn.Module): def __init__(self, M): super().__init__() self.M = M def forward(self, img): return", "N, M): super().__init__() \"\"\" rotate shear x shear y translate y translate x", "self.M = M def forward(self, img): return ttf.functional.adjust_contrast(img, self.M / 5.) class Color(nn.Module):", "[0, 0]) class AutoContrast(nn.Module): def __init__(self, M): super().__init__() self.M = M def forward(self,", "= img.size()[1] return ttf.functional.affine(img, 0, [0, (max_size - 1) / 10 * self.M],", "= M def forward(self, img): return ttf.functional.adjust_sharpness(img, self.M / 5.) class Identity(nn.Module): def", "= M def forward(self, img): return ttf.functional.equalize(img) class Solarize(nn.Module): def __init__(self, M): super().__init__()", "super().__init__() self.M = M def forward(self, img): return img class Contrast(nn.Module): def __init__(self,", "= N self.M = M self.aug_list = [Rotate, ShearX, ShearY, TranslateX, TranslateY, AutoContrast,", "class ShearX(nn.Module): def __init__(self, M): super().__init__() self.M = M self.angle = 359 /", "def forward(self, img): return ttf.functional.adjust_sharpness(img, self.M / 5.) 
class Identity(nn.Module): def __init__(self, M):", "class TranslateX(nn.Module): def __init__(self, M): super().__init__() self.M = M def forward(self, img): try:", "self.angle]) class TranslateX(nn.Module): def __init__(self, M): super().__init__() self.M = M def forward(self, img):", "1, [self.angle, 0]) class ShearY(nn.Module): def __init__(self, M): super().__init__() self.M = M self.angle", "img): return ttf.functional.rotate(img, self.angle) class ShearX(nn.Module): def __init__(self, M): super().__init__() self.M = M", "TypeError: max_size = img.size()[1] return ttf.functional.affine(img, 0, [0, (max_size - 1) / 10", "self.M - 180 def forward(self, img): return ttf.functional.affine(img, 0, [0, 0], 1, [0,", "Identity, Contrast, Color, Brightness, Equalize, Solarize, Posterize] def forward(self, img): self.aug_index = torch.randperm(len(self.aug_list))[:self.N]", "Rotate(nn.Module): def __init__(self, M): super().__init__() self.M = M self.angle = 359 / 10", "img): return ttf.functional.affine(img, 0, [0, 0], 1, [0, self.angle]) class TranslateX(nn.Module): def __init__(self,", "10 * self.M, 0], 1, [0, 0]) class TranslateY(nn.Module): def __init__(self, M): super().__init__()", "ttf class RandAugment(nn.Module): def __init__(self, N, M): super().__init__() \"\"\" rotate shear x shear", "forward(self, img): return ttf.functional.affine(img, 0, [0, 0], 1, [0, self.angle]) class TranslateX(nn.Module): def", "= img.size()[0] return ttf.functional.affine(img, 0, [(max_size - 1) / 10 * self.M, 0],", "359 / 10 * self.M def forward(self, img): return ttf.functional.rotate(img, self.angle) class ShearX(nn.Module):", "def __init__(self, M): super().__init__() self.M = M def forward(self, img): return ttf.functional.equalize(img) class", "M def forward(self, img): return ttf.functional.equalize(img) class Solarize(nn.Module): def __init__(self, M): super().__init__() self.M", "/ 5.) class Identity(nn.Module): def __init__(self, M): super().__init__() self.M = M def forward(self,", "/ 5.) class Brightness(nn.Module): def __init__(self, M): super().__init__() self.M = M def forward(self,", "def __init__(self, M): super().__init__() self.M = M def forward(self, img): try: max_size =", "self.M = M def forward(self, img): try: max_size = img.size()[0] except TypeError: max_size", "self.augmentations.append(self.aug_list[aug_id](self.M)) self.augmentations = nn.Sequential(*self.augmentations) return self.augmentations(img) class Rotate(nn.Module): def __init__(self, M): super().__init__() self.M", "Color, Brightness, Equalize, Solarize, Posterize] def forward(self, img): self.aug_index = torch.randperm(len(self.aug_list))[:self.N] self.augmentations =", "ttf.functional.adjust_contrast(img, self.M / 5.) class Color(nn.Module): def __init__(self, M): super().__init__() self.M = M", "max_size = img.size()[1] except TypeError: max_size = img.size()[1] return ttf.functional.affine(img, 0, [0, (max_size", "self.M / 5.) class Identity(nn.Module): def __init__(self, M): super().__init__() self.M = M def", "= M self.angle = 359 / 10 * self.M def forward(self, img): return", "M): super().__init__() self.M = M self.angle = 359 / 10 * self.M -", "forward(self, img): return ttf.functional.adjust_saturation(img, self.M / 5.) 
class Brightness(nn.Module): def __init__(self, M): super().__init__()", "self.augmentations(img) class Rotate(nn.Module): def __init__(self, M): super().__init__() self.M = M self.angle = 359", "def __init__(self, M): super().__init__() self.M = M def forward(self, img): return ttf.functional.adjust_contrast(img, self.M", "rotate shear x shear y translate y translate x autoContrast sharpness identity contrast", "= M self.aug_list = [Rotate, ShearX, ShearY, TranslateX, TranslateY, AutoContrast, Sharpness, Identity, Contrast,", "0, [0, 0], 1, [0, self.angle]) class TranslateX(nn.Module): def __init__(self, M): super().__init__() self.M", "forward(self, img): return ttf.functional.autocontrast(img) class Sharpness(nn.Module): def __init__(self, M): super().__init__() self.M = M", "def __init__(self, M): super().__init__() self.M = M self.angle = 359 / 10 *", "= M def forward(self, img): return ttf.functional.solarize(img, (10 - self.M) * 25.5) class", "img): try: max_size = img.size()[1] except TypeError: max_size = img.size()[1] return ttf.functional.affine(img, 0,", "torchvision import transforms as ttf class RandAugment(nn.Module): def __init__(self, N, M): super().__init__() \"\"\"", "img class Contrast(nn.Module): def __init__(self, M): super().__init__() self.M = M def forward(self, img):", "self.M) * 25.5) class Posterize(nn.Module): def __init__(self, M): super().__init__() self.M = M def", "M self.angle = 359 / 10 * self.M def forward(self, img): return ttf.functional.rotate(img,", "def __init__(self, M): super().__init__() self.M = M def forward(self, img): return ttf.functional.posterize(img, round((10", "super().__init__() self.M = M def forward(self, img): return ttf.functional.autocontrast(img) class Sharpness(nn.Module): def __init__(self,", "max_size = img.size()[0] except TypeError: max_size = img.size()[0] return ttf.functional.affine(img, 0, [(max_size -", "return ttf.functional.adjust_brightness(img, self.M / 5.) class Equalize(nn.Module): def __init__(self, M): super().__init__() self.M =", "ttf.functional.affine(img, 0, [0, (max_size - 1) / 10 * self.M], 1, [0, 0])", "def forward(self, img): return ttf.functional.equalize(img) class Solarize(nn.Module): def __init__(self, M): super().__init__() self.M =", "forward(self, img): try: max_size = img.size()[1] except TypeError: max_size = img.size()[1] return ttf.functional.affine(img,", "- 1) / 10 * self.M, 0], 1, [0, 0]) class TranslateY(nn.Module): def", "img): try: max_size = img.size()[0] except TypeError: max_size = img.size()[0] return ttf.functional.affine(img, 0,", "* self.M], 1, [0, 0]) class AutoContrast(nn.Module): def __init__(self, M): super().__init__() self.M =", "= M def forward(self, img): return img class Contrast(nn.Module): def __init__(self, M): super().__init__()", "[(max_size - 1) / 10 * self.M, 0], 1, [0, 0]) class TranslateY(nn.Module):", "return ttf.functional.adjust_saturation(img, self.M / 5.) 
class Brightness(nn.Module): def __init__(self, M): super().__init__() self.M =", "RandAugment(nn.Module): def __init__(self, N, M): super().__init__() \"\"\" rotate shear x shear y translate", "nn.ModuleList([]) for aug_id in self.aug_index: self.augmentations.append(self.aug_list[aug_id](self.M)) self.augmentations = nn.Sequential(*self.augmentations) return self.augmentations(img) class Rotate(nn.Module):", "x autoContrast sharpness identity contrast color brightness eqaulize solarize posterize \"\"\" self.N =", "__init__(self, M): super().__init__() self.M = M def forward(self, img): try: max_size = img.size()[0]", "super().__init__() \"\"\" rotate shear x shear y translate y translate x autoContrast sharpness", "/ 10 * self.M], 1, [0, 0]) class AutoContrast(nn.Module): def __init__(self, M): super().__init__()", "self.M = M def forward(self, img): try: max_size = img.size()[1] except TypeError: max_size", "= M def forward(self, img): return ttf.functional.adjust_brightness(img, self.M / 5.) class Equalize(nn.Module): def", "ShearY, TranslateX, TranslateY, AutoContrast, Sharpness, Identity, Contrast, Color, Brightness, Equalize, Solarize, Posterize] def", "1) / 10 * self.M], 1, [0, 0]) class AutoContrast(nn.Module): def __init__(self, M):", "* self.M, 0], 1, [0, 0]) class TranslateY(nn.Module): def __init__(self, M): super().__init__() self.M", "Sharpness, Identity, Contrast, Color, Brightness, Equalize, Solarize, Posterize] def forward(self, img): self.aug_index =", "= M def forward(self, img): return ttf.functional.autocontrast(img) class Sharpness(nn.Module): def __init__(self, M): super().__init__()", "self.M def forward(self, img): return ttf.functional.rotate(img, self.angle) class ShearX(nn.Module): def __init__(self, M): super().__init__()", "img.size()[1] except TypeError: max_size = img.size()[1] return ttf.functional.affine(img, 0, [0, (max_size - 1)", "class AutoContrast(nn.Module): def __init__(self, M): super().__init__() self.M = M def forward(self, img): return", "aug_id in self.aug_index: self.augmentations.append(self.aug_list[aug_id](self.M)) self.augmentations = nn.Sequential(*self.augmentations) return self.augmentations(img) class Rotate(nn.Module): def __init__(self,", "M self.angle = 359 / 10 * self.M - 180 def forward(self, img):", "Posterize] def forward(self, img): self.aug_index = torch.randperm(len(self.aug_list))[:self.N] self.augmentations = nn.ModuleList([]) for aug_id in", "= 359 / 10 * self.M def forward(self, img): return ttf.functional.rotate(img, self.angle) class", "forward(self, img): return ttf.functional.solarize(img, (10 - self.M) * 25.5) class Posterize(nn.Module): def __init__(self,", "img): return ttf.functional.affine(img, 0, [0, 0], 1, [self.angle, 0]) class ShearY(nn.Module): def __init__(self,", "class TranslateY(nn.Module): def __init__(self, M): super().__init__() self.M = M def forward(self, img): try:", "__init__(self, M): super().__init__() self.M = M def forward(self, img): return ttf.functional.adjust_contrast(img, self.M /", "M): super().__init__() self.M = M def forward(self, img): return ttf.functional.posterize(img, round((10 - self.M)", "y translate y translate x autoContrast sharpness identity contrast color brightness eqaulize solarize", "__init__(self, M): super().__init__() self.M = M def forward(self, img): return img class Contrast(nn.Module):" ]
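# A hedged usage sketch (N, M and the image shape are illustrative values,
# not taken from the source). RandAugment re-samples its op subset on every
# forward call, so repeated calls apply different augmentation sequences.
if __name__ == "__main__":
    augment = RandAugment(N=2, M=9)
    img = torch.randint(0, 256, (3, 224, 224), dtype=torch.uint8)  # fake CHW image
    out = augment(img)
    print(out.shape)  # same CHW shape as the input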
[ "Testload(unittest.TestCase): def setUp(self): self.kuorma_1=load(name=\"Lattialämmitys\", ID=12345, sensorPin=\"P11\", relayPin=2, maximumCurrent=10, phase=1, priority=0) self.kuorma_2=load(\"Kiuas\",12346,\"P12\",2,10,1,0) def tearDown(self):", "load from main import * #Testaa funktiot pääohjelmasta class TestMain(unittest.TestCase): def test_openLoads(self): #Testi", "val_to_volt(2000) self.assertEqual(result_1, 1.1) self.assertEqual(result_2, 0.0806) self.assertEqual(result_3, 0.5372) def test_adc_read(self): sensorPin='P14' result = adc_read(sensorPin)", "from lib.classes import load from main import * #Testaa funktiot pääohjelmasta class TestMain(unittest.TestCase):", "0) def test_adc_save(self): #Testi tähän pass #Testaa luokan \"load\" metodit class Testload(unittest.TestCase): def", "result_1 = val_to_volt(4095) result_2 = val_to_volt(300) result_3 = val_to_volt(2000) self.assertEqual(result_1, 1.1) self.assertEqual(result_2, 0.0806)", "= val_to_volt(2000) self.assertEqual(result_1, 1.1) self.assertEqual(result_2, 0.0806) self.assertEqual(result_3, 0.5372) def test_adc_read(self): sensorPin='P14' result =", "TestMittaus(unittest.TestCase): def test_val_to_volt(self): result_1 = val_to_volt(4095) result_2 = val_to_volt(300) result_3 = val_to_volt(2000) self.assertEqual(result_1,", "test_getName(self): result_1 = self.kuorma_1.getName() result_2 = self.kuorma_2.getName() self.assertEqual(result_1, self.kuorma_1._load__name) self.assertEqual(result_2, self.kuorma_2._load__name) def test_getCons(self):", "pass #Testaa luokan \"load\" metodit class Testload(unittest.TestCase): def setUp(self): self.kuorma_1=load(name=\"Lattialämmitys\", ID=12345, sensorPin=\"P11\", relayPin=2,", "testi tähän result_1 = self.kuorma_1.resetHour() result_2 = self.kuorma_2.resetHour() self.assertEqual(result_1, 1) self.assertEqual(result_2, 1) def", "test.py import unittest from lib.mittaus import * from lib.classes import load from main", "#Testi tähän pass def test_openPhases(self): #Testi tähön pass def test_openMonthMax(self): #Testi tähän pass", "objektin def test_getConsAll(self): result = getConsAll() val = isinstance(result, object) self.assertTrue(val) #Testaa paketin", "result_2 = self.kuorma_2.getCons() self.assertGreaterEqual(result_1, 0) self.assertGreaterEqual(result_2, 0) def test_info(self): #Testi tähän pass if", "def setUp(self): self.kuorma_1=load(name=\"Lattialämmitys\", ID=12345, sensorPin=\"P11\", relayPin=2, maximumCurrent=10, phase=1, priority=0) self.kuorma_2=load(\"Kiuas\",12346,\"P12\",2,10,1,0) def tearDown(self): #Testi", "def test_getCons(self): #Parempi testi tähän result_1 = self.kuorma_1.getCons() result_2 = self.kuorma_2.getCons() self.assertGreaterEqual(result_1, 0)", "self.assertGreaterEqual(result_1, 0) self.assertGreaterEqual(result_2, 0) def test_info(self): #Testi tähän pass if __name__ == \"__main__\":", "= isinstance(result, object) self.assertTrue(val) #Testaa paketin \"mittaus\" funktiot class TestMittaus(unittest.TestCase): def test_val_to_volt(self): result_1", "result_2 = self.kuorma_2.getName() self.assertEqual(result_1, self.kuorma_1._load__name) self.assertEqual(result_2, self.kuorma_2._load__name) def test_getCons(self): #Parempi testi tähän result_1", "testi tähän result_1 = self.kuorma_1.getCons() result_2 = self.kuorma_2.getCons() self.assertGreaterEqual(result_1, 0) self.assertGreaterEqual(result_2, 0) def", "self.kuorma_2._load__name) def test_getCons(self): #Parempi testi tähän result_1 = self.kuorma_1.getCons() result_2 = self.kuorma_2.getCons() 
self.assertGreaterEqual(result_1,", "def test_adc_save(self): #Testi tähän pass #Testaa luokan \"load\" metodit class Testload(unittest.TestCase): def setUp(self):", "py -m unittest test.py import unittest from lib.mittaus import * from lib.classes import", "= adc_read(sensorPin) bol = isinstance(result, int) self.assertTrue(bol) self.assertGreaterEqual(result, 0) def test_adc_save(self): #Testi tähän", "pass def test_changeRelayPin(self): #Testi tähän pass def test_resetHour(self): #Parempi testi tähän result_1 =", "0.0806) self.assertEqual(result_3, 0.5372) def test_adc_read(self): sensorPin='P14' result = adc_read(sensorPin) bol = isinstance(result, int)", "tähän pass #Testaa luokan \"load\" metodit class Testload(unittest.TestCase): def setUp(self): self.kuorma_1=load(name=\"Lattialämmitys\", ID=12345, sensorPin=\"P11\",", "* #Testaa funktiot pääohjelmasta class TestMain(unittest.TestCase): def test_openLoads(self): #Testi tähän pass def test_openPhases(self):", "lib.classes import load from main import * #Testaa funktiot pääohjelmasta class TestMain(unittest.TestCase): def", "isinstance(result, int) self.assertTrue(bol) self.assertGreaterEqual(result, 0) def test_adc_save(self): #Testi tähän pass #Testaa luokan \"load\"", "1) def test_getName(self): result_1 = self.kuorma_1.getName() result_2 = self.kuorma_2.getName() self.assertEqual(result_1, self.kuorma_1._load__name) self.assertEqual(result_2, self.kuorma_2._load__name)", "class TestMittaus(unittest.TestCase): def test_val_to_volt(self): result_1 = val_to_volt(4095) result_2 = val_to_volt(300) result_3 = val_to_volt(2000)", "import * from lib.classes import load from main import * #Testaa funktiot pääohjelmasta", "= isinstance(result, int) self.assertTrue(bol) self.assertGreaterEqual(result, 0) def test_adc_save(self): #Testi tähän pass #Testaa luokan", "class TestMain(unittest.TestCase): def test_openLoads(self): #Testi tähän pass def test_openPhases(self): #Testi tähön pass def", "1) self.assertEqual(result_2, 1) def test_getName(self): result_1 = self.kuorma_1.getName() result_2 = self.kuorma_2.getName() self.assertEqual(result_1, self.kuorma_1._load__name)", "#Aja konsolissa: py -m unittest test.py import unittest from lib.mittaus import * from", "from main import * #Testaa funktiot pääohjelmasta class TestMain(unittest.TestCase): def test_openLoads(self): #Testi tähän", "result_1 = self.kuorma_1.getCons() result_2 = self.kuorma_2.getCons() self.assertGreaterEqual(result_1, 0) self.assertGreaterEqual(result_2, 0) def test_info(self): #Testi", "maximumCurrent=10, phase=1, priority=0) self.kuorma_2=load(\"Kiuas\",12346,\"P12\",2,10,1,0) def tearDown(self): #Testi tähän pass def test_changeRelayPin(self): #Testi tähän", "metodit class Testload(unittest.TestCase): def setUp(self): self.kuorma_1=load(name=\"Lattialämmitys\", ID=12345, sensorPin=\"P11\", relayPin=2, maximumCurrent=10, phase=1, priority=0) self.kuorma_2=load(\"Kiuas\",12346,\"P12\",2,10,1,0)", "#Testi tähän pass def test_changeRelayPin(self): #Testi tähän pass def test_resetHour(self): #Parempi testi tähän", "#Testi tähön pass def test_openMonthMax(self): #Testi tähän pass #Testaa palauttaako getConsAll() objektin def", "#Testi tähän pass #Testaa luokan \"load\" metodit class Testload(unittest.TestCase): def setUp(self): self.kuorma_1=load(name=\"Lattialämmitys\", ID=12345,", "import * #Testaa funktiot pääohjelmasta class TestMain(unittest.TestCase): def test_openLoads(self): #Testi tähän pass def", "sensorPin=\"P11\", relayPin=2, maximumCurrent=10, phase=1, 
priority=0) self.kuorma_2=load(\"Kiuas\",12346,\"P12\",2,10,1,0) def tearDown(self): #Testi tähän pass def test_changeRelayPin(self):", "def test_val_to_volt(self): result_1 = val_to_volt(4095) result_2 = val_to_volt(300) result_3 = val_to_volt(2000) self.assertEqual(result_1, 1.1)", "priority=0) self.kuorma_2=load(\"Kiuas\",12346,\"P12\",2,10,1,0) def tearDown(self): #Testi tähän pass def test_changeRelayPin(self): #Testi tähän pass def", "tähän pass #Testaa palauttaako getConsAll() objektin def test_getConsAll(self): result = getConsAll() val =", "#Testi tähän pass #Testaa palauttaako getConsAll() objektin def test_getConsAll(self): result = getConsAll() val", "0.5372) def test_adc_read(self): sensorPin='P14' result = adc_read(sensorPin) bol = isinstance(result, int) self.assertTrue(bol) self.assertGreaterEqual(result,", "def test_changeRelayPin(self): #Testi tähän pass def test_resetHour(self): #Parempi testi tähän result_1 = self.kuorma_1.resetHour()", "= self.kuorma_1.resetHour() result_2 = self.kuorma_2.resetHour() self.assertEqual(result_1, 1) self.assertEqual(result_2, 1) def test_getName(self): result_1 =", "tähän pass def test_changeRelayPin(self): #Testi tähän pass def test_resetHour(self): #Parempi testi tähän result_1", "def test_getConsAll(self): result = getConsAll() val = isinstance(result, object) self.assertTrue(val) #Testaa paketin \"mittaus\"", "pass def test_openPhases(self): #Testi tähön pass def test_openMonthMax(self): #Testi tähän pass #Testaa palauttaako", "result = adc_read(sensorPin) bol = isinstance(result, int) self.assertTrue(bol) self.assertGreaterEqual(result, 0) def test_adc_save(self): #Testi", "import load from main import * #Testaa funktiot pääohjelmasta class TestMain(unittest.TestCase): def test_openLoads(self):", "test_adc_save(self): #Testi tähän pass #Testaa luokan \"load\" metodit class Testload(unittest.TestCase): def setUp(self): self.kuorma_1=load(name=\"Lattialämmitys\",", "self.assertEqual(result_2, 0.0806) self.assertEqual(result_3, 0.5372) def test_adc_read(self): sensorPin='P14' result = adc_read(sensorPin) bol = isinstance(result,", "self.assertTrue(bol) self.assertGreaterEqual(result, 0) def test_adc_save(self): #Testi tähän pass #Testaa luokan \"load\" metodit class", "#Testi tähän pass def test_resetHour(self): #Parempi testi tähän result_1 = self.kuorma_1.resetHour() result_2 =", "= self.kuorma_2.getCons() self.assertGreaterEqual(result_1, 0) self.assertGreaterEqual(result_2, 0) def test_info(self): #Testi tähän pass if __name__", "getConsAll() objektin def test_getConsAll(self): result = getConsAll() val = isinstance(result, object) self.assertTrue(val) #Testaa", "paketin \"mittaus\" funktiot class TestMittaus(unittest.TestCase): def test_val_to_volt(self): result_1 = val_to_volt(4095) result_2 = val_to_volt(300)", "def test_openPhases(self): #Testi tähön pass def test_openMonthMax(self): #Testi tähän pass #Testaa palauttaako getConsAll()", "test_val_to_volt(self): result_1 = val_to_volt(4095) result_2 = val_to_volt(300) result_3 = val_to_volt(2000) self.assertEqual(result_1, 1.1) self.assertEqual(result_2,", "getConsAll() val = isinstance(result, object) self.assertTrue(val) #Testaa paketin \"mittaus\" funktiot class TestMittaus(unittest.TestCase): def", "test_resetHour(self): #Parempi testi tähän result_1 = self.kuorma_1.resetHour() result_2 = self.kuorma_2.resetHour() self.assertEqual(result_1, 1) self.assertEqual(result_2,", "#Testaa luokan \"load\" metodit class Testload(unittest.TestCase): def setUp(self): 
self.kuorma_1=load(name=\"Lattialämmitys\", ID=12345, sensorPin=\"P11\", relayPin=2, maximumCurrent=10,", "self.assertGreaterEqual(result, 0) def test_adc_save(self): #Testi tähän pass #Testaa luokan \"load\" metodit class Testload(unittest.TestCase):", "self.kuorma_2=load(\"Kiuas\",12346,\"P12\",2,10,1,0) def tearDown(self): #Testi tähän pass def test_changeRelayPin(self): #Testi tähän pass def test_resetHour(self):", "def tearDown(self): #Testi tähän pass def test_changeRelayPin(self): #Testi tähän pass def test_resetHour(self): #Parempi", "pass def test_openMonthMax(self): #Testi tähän pass #Testaa palauttaako getConsAll() objektin def test_getConsAll(self): result", "#Parempi testi tähän result_1 = self.kuorma_1.resetHour() result_2 = self.kuorma_2.resetHour() self.assertEqual(result_1, 1) self.assertEqual(result_2, 1)", "lib.mittaus import * from lib.classes import load from main import * #Testaa funktiot", "luokan \"load\" metodit class Testload(unittest.TestCase): def setUp(self): self.kuorma_1=load(name=\"Lattialämmitys\", ID=12345, sensorPin=\"P11\", relayPin=2, maximumCurrent=10, phase=1,", "self.kuorma_2.getCons() self.assertGreaterEqual(result_1, 0) self.assertGreaterEqual(result_2, 0) def test_info(self): #Testi tähän pass if __name__ ==", "val_to_volt(4095) result_2 = val_to_volt(300) result_3 = val_to_volt(2000) self.assertEqual(result_1, 1.1) self.assertEqual(result_2, 0.0806) self.assertEqual(result_3, 0.5372)", "adc_read(sensorPin) bol = isinstance(result, int) self.assertTrue(bol) self.assertGreaterEqual(result, 0) def test_adc_save(self): #Testi tähän pass", "import unittest from lib.mittaus import * from lib.classes import load from main import", "val = isinstance(result, object) self.assertTrue(val) #Testaa paketin \"mittaus\" funktiot class TestMittaus(unittest.TestCase): def test_val_to_volt(self):", "ID=12345, sensorPin=\"P11\", relayPin=2, maximumCurrent=10, phase=1, priority=0) self.kuorma_2=load(\"Kiuas\",12346,\"P12\",2,10,1,0) def tearDown(self): #Testi tähän pass def", "test_openPhases(self): #Testi tähön pass def test_openMonthMax(self): #Testi tähän pass #Testaa palauttaako getConsAll() objektin", "= self.kuorma_1.getCons() result_2 = self.kuorma_2.getCons() self.assertGreaterEqual(result_1, 0) self.assertGreaterEqual(result_2, 0) def test_info(self): #Testi tähän", "funktiot class TestMittaus(unittest.TestCase): def test_val_to_volt(self): result_1 = val_to_volt(4095) result_2 = val_to_volt(300) result_3 =", "\"load\" metodit class Testload(unittest.TestCase): def setUp(self): self.kuorma_1=load(name=\"Lattialämmitys\", ID=12345, sensorPin=\"P11\", relayPin=2, maximumCurrent=10, phase=1, priority=0)", "self.assertEqual(result_2, self.kuorma_2._load__name) def test_getCons(self): #Parempi testi tähän result_1 = self.kuorma_1.getCons() result_2 = self.kuorma_2.getCons()", "<reponame>riikkano/protopaja2018<gh_stars>0 #Aja konsolissa: py -m unittest test.py import unittest from lib.mittaus import *", "= self.kuorma_1.getName() result_2 = self.kuorma_2.getName() self.assertEqual(result_1, self.kuorma_1._load__name) self.assertEqual(result_2, self.kuorma_2._load__name) def test_getCons(self): #Parempi testi", "test_openLoads(self): #Testi tähän pass def test_openPhases(self): #Testi tähön pass def test_openMonthMax(self): #Testi tähän", "self.kuorma_1.resetHour() result_2 = self.kuorma_2.resetHour() self.assertEqual(result_1, 1) self.assertEqual(result_2, 1) def test_getName(self): result_1 = self.kuorma_1.getName()", "test_getCons(self): #Parempi testi tähän 
result_1 = self.kuorma_1.getCons() result_2 = self.kuorma_2.getCons() self.assertGreaterEqual(result_1, 0) self.assertGreaterEqual(result_2,", "object) self.assertTrue(val) #Testaa paketin \"mittaus\" funktiot class TestMittaus(unittest.TestCase): def test_val_to_volt(self): result_1 = val_to_volt(4095)", "#Testaa paketin \"mittaus\" funktiot class TestMittaus(unittest.TestCase): def test_val_to_volt(self): result_1 = val_to_volt(4095) result_2 =", "val_to_volt(300) result_3 = val_to_volt(2000) self.assertEqual(result_1, 1.1) self.assertEqual(result_2, 0.0806) self.assertEqual(result_3, 0.5372) def test_adc_read(self): sensorPin='P14'", "test_changeRelayPin(self): #Testi tähän pass def test_resetHour(self): #Parempi testi tähän result_1 = self.kuorma_1.resetHour() result_2", "= getConsAll() val = isinstance(result, object) self.assertTrue(val) #Testaa paketin \"mittaus\" funktiot class TestMittaus(unittest.TestCase):", "self.assertEqual(result_1, 1.1) self.assertEqual(result_2, 0.0806) self.assertEqual(result_3, 0.5372) def test_adc_read(self): sensorPin='P14' result = adc_read(sensorPin) bol", "result_2 = val_to_volt(300) result_3 = val_to_volt(2000) self.assertEqual(result_1, 1.1) self.assertEqual(result_2, 0.0806) self.assertEqual(result_3, 0.5372) def", "test_openMonthMax(self): #Testi tähän pass #Testaa palauttaako getConsAll() objektin def test_getConsAll(self): result = getConsAll()", "* from lib.classes import load from main import * #Testaa funktiot pääohjelmasta class", "tearDown(self): #Testi tähän pass def test_changeRelayPin(self): #Testi tähän pass def test_resetHour(self): #Parempi testi", "self.kuorma_2.resetHour() self.assertEqual(result_1, 1) self.assertEqual(result_2, 1) def test_getName(self): result_1 = self.kuorma_1.getName() result_2 = self.kuorma_2.getName()", "setUp(self): self.kuorma_1=load(name=\"Lattialämmitys\", ID=12345, sensorPin=\"P11\", relayPin=2, maximumCurrent=10, phase=1, priority=0) self.kuorma_2=load(\"Kiuas\",12346,\"P12\",2,10,1,0) def tearDown(self): #Testi tähän", "#Testaa funktiot pääohjelmasta class TestMain(unittest.TestCase): def test_openLoads(self): #Testi tähän pass def test_openPhases(self): #Testi", "def test_getName(self): result_1 = self.kuorma_1.getName() result_2 = self.kuorma_2.getName() self.assertEqual(result_1, self.kuorma_1._load__name) self.assertEqual(result_2, self.kuorma_2._load__name) def", "tähän result_1 = self.kuorma_1.getCons() result_2 = self.kuorma_2.getCons() self.assertGreaterEqual(result_1, 0) self.assertGreaterEqual(result_2, 0) def test_info(self):", "from lib.mittaus import * from lib.classes import load from main import * #Testaa", "pääohjelmasta class TestMain(unittest.TestCase): def test_openLoads(self): #Testi tähän pass def test_openPhases(self): #Testi tähön pass", "-m unittest test.py import unittest from lib.mittaus import * from lib.classes import load", "self.kuorma_1._load__name) self.assertEqual(result_2, self.kuorma_2._load__name) def test_getCons(self): #Parempi testi tähän result_1 = self.kuorma_1.getCons() result_2 =", "konsolissa: py -m unittest test.py import unittest from lib.mittaus import * from lib.classes", "pass #Testaa palauttaako getConsAll() objektin def test_getConsAll(self): result = getConsAll() val = isinstance(result,", "= self.kuorma_2.resetHour() self.assertEqual(result_1, 1) self.assertEqual(result_2, 1) def test_getName(self): result_1 = self.kuorma_1.getName() result_2 =", "self.assertTrue(val) #Testaa paketin \"mittaus\" funktiot class TestMittaus(unittest.TestCase): def 
#python -m unittest test.py
import unittest
from lib.mittaus import *
from lib.classes import load
from main import *

# Test the functions of the main program
class TestMain(unittest.TestCase):
    def test_openLoads(self):
        # Test goes here
        pass

    def test_openPhases(self):
        # Test goes here
        pass

    def test_openMonthMax(self):
        # Test goes here
        pass

    # Test whether getConsAll() returns an object
    def test_getConsAll(self):
        result = getConsAll()
        val = isinstance(result, object)
        self.assertTrue(val)

# Test the functions of the "mittaus" package
class TestMittaus(unittest.TestCase):
    def test_val_to_volt(self):
        result_1 = val_to_volt(4095)
        result_2 = val_to_volt(300)
        result_3 = val_to_volt(2000)
        self.assertEqual(result_1, 1.1)
        self.assertEqual(result_2, 0.0806)
        self.assertEqual(result_3, 0.5372)

    def test_adc_read(self):
        sensorPin = 'P14'
        result = adc_read(sensorPin)
        bol = isinstance(result, int)
        self.assertTrue(bol)
        self.assertGreaterEqual(result, 0)

    def test_adc_save(self):
        # Test goes here
        pass

# Test the methods of the "load" class
class Testload(unittest.TestCase):
    def setUp(self):
        self.kuorma_1 = load(name="Lattialämmitys", ID=12345, sensorPin="P11", relayPin=2,
                             maximumCurrent=10, phase=1, priority=0)
        self.kuorma_2 = load("Kiuas", 12346, "P12", 2, 10, 1, 0)

    def tearDown(self):
        # Test goes here
        pass

    def test_changeRelayPin(self):
        # Test goes here
        pass

    def test_resetHour(self):
        # A better test goes here
        result_1 = self.kuorma_1.resetHour()
        result_2 = self.kuorma_2.resetHour()
        self.assertEqual(result_1, 1)
        self.assertEqual(result_2, 1)

    def test_getName(self):
        result_1 = self.kuorma_1.getName()
        result_2 = self.kuorma_2.getName()
        self.assertEqual(result_1, self.kuorma_1._load__name)
        self.assertEqual(result_2, self.kuorma_2._load__name)

    def test_getCons(self):
        # A better test goes here
        result_1 = self.kuorma_1.getCons()
        result_2 = self.kuorma_2.getCons()
        self.assertGreaterEqual(result_1, 0)
        self.assertGreaterEqual(result_2, 0)

    def test_info(self):
        # Test goes here
        pass

if __name__ == "__main__":
    unittest.main()
[ "NON_INHERITABLE_PATTERN = re.compile(\"^qual(.)*|ikats(.)*|funcId\") class IkatsTimeseriesMgr(IkatsGenericApiEndPoint): \"\"\" Ikats EndPoint specific to Timeseries management \"\"\"", "check_type(value=ts, allowed_types=[str, Timeseries], var_name=\"ts\", raise_exception=True) tsuid = ts if isinstance(ts, Timeseries): if ts.tsuid", "raise ValueError(\"Timeseries object shall have set at least tsuid or fid\") return self.dm_client.ts_delete(tsuid=tsuid,", "original timeseries where metadata shall be taken from (except intrinsic ones, eg. *qual_nb_points*)", ":type raise_exception: bool :returns: retrieved functional identifier value :rtype: str :raises TypeError: if", "*ts* is not a str nor a Timeseries :raises IkatsNotFoundError: if timeseries is", "tsuid of the timeseries or Timeseries Object to remove :param raise_exception: (optional) Indicates", "\"\"\" check_type(value=ts, allowed_types=[str, Timeseries], var_name=\"ts\", raise_exception=True) tsuid = ts if isinstance(ts, Timeseries): if", "value=end_date, dtype=MDType.DATE) # qual_nb_points self.dm_client.metadata_update(tsuid=ts.tsuid, name='qual_nb_points', value=nb_points, data_type=MDType.NUMBER, force_create=True) ts.metadata.set(name='qual_nb_points', value=nb_points, dtype=MDType.NUMBER) #", "via spark for example) :param fid: Functional Identifier of the TS in Ikats", "TypeError: if *ts* is not a str nor a Timeseries :raises IkatsNotFoundError: if", "param. :param tsuid: one tsuid value :param raise_exception: Allow to specify if the", "self.dm_client = DatamodelClient(session=self.api.session) def new(self, fid=None, data=None): \"\"\" Create an empty local Timeseries", "If the timeseries is a new one (object has no tsuid defined), the", "License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required", "-at least- one dataset \"\"\" check_type(value=ts, allowed_types=[str, Timeseries], var_name=\"ts\", raise_exception=True) tsuid = ts", "if no TSUID is present in *ts* object, the *ikats_start_date*, *ikats_end_date* and *qual_nb_points*", "WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the", "force the creation of the metadata generate_metadata = True # Add points to", "raise_exception: bool :returns: retrieved TSUID value or None if not found :rtype: str", "from ikats.client.opentsdb_client import OpenTSDBClient from ikats.client.opentsdb_stub import OpenTSDBStub from ikats.exceptions import (IkatsConflictError, IkatsException,", "% (fid, tsuid)) except IkatsNotFoundError: # Creation of a new tsuid metric, tags", "is None: sd = ts.metadata.get(name=\"ikats_start_date\") check_is_valid_epoch(value=sd, raise_exception=True) if ed is None: ed =", "the action :rtype: bool :raises TypeError: if *ts* is not a str nor", "of the timeseries or Timeseries Object to remove :param raise_exception: (optional) Indicates if", "ts def get(self, fid=None, tsuid=None): \"\"\" Returns an existing Timeseries object by providing", "*generate_metadata* is set or if no TSUID is present in *ts* object, the", "data_points except ValueError: raise IkatsNotFoundError(\"TS data points couldn't be retrieved properly\") def inherit(self,", "600 \"\"\" try: return self.dm_client.get_func_id_from_tsuid(tsuid=tsuid) except IkatsException: if raise_exception: raise return None def", "generate_metadata=True, raise_exception=True): \"\"\" Import timeseries data points to database or update an existing", "Timeseries (if fid not provided) If fid is set, the identifier will be", "License for the specific language governing permissions and limitations under the License. \"\"\"", "TS object in IKATS (which will inherit) :param parent: TS object in IKATS", "raise IkatsNotFoundError(\"TS data points couldn't be retrieved properly\") def inherit(self, ts, parent): \"\"\"", "ikats.client.datamodel_stub import DatamodelStub from ikats.client.opentsdb_client import OpenTSDBClient from ikats.client.opentsdb_stub import OpenTSDBStub from ikats.exceptions", "Copyright 2019 CS Systèmes d'Information Licensed under the Apache License, Version 2.0 (the", "that if timeseries belongs to a dataset it will not be removed Returns", "and number of points in *ts.data* *parent* is the original timeseries where metadata", "fid is set, the identifier will be created to database :param fid: Identifier", "try: data_points = self.tsdb_client.get_ts_by_tsuid(tsuid=ts.tsuid, sd=sd, ed=ed) # Return the points return data_points except", "to create :param parent: (optional) Timeseries object of inheritance parent :param generate_metadata: Generate", "| flight_phase: 8 | } will find the TS having the following metadata:", "# Check if fid already associated to an existing tsuid tsuid = self.dm_client.get_tsuid_from_fid(fid=fid)", "to create (if provided) :param data: List of data points as numpy array", "the License is distributed on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS", ":param parent: (optional) Timeseries object of inheritance parent :param generate_metadata: Generate metadata (set", "self.dm_client.get_ts_list()] def fetch(self, ts, sd=None, ed=None): \"\"\" Retrieve the data corresponding to a", ":type fid: str :returns: A prepared Timeseries object :rtype: Timeseries :raises IkatsConflictError: if", "used before create method in case of parallel creation of data (import data", "raise_exception: (optional) Indicates if IKATS exceptions shall be raised (True, default) or not", "np.array :returns: the Timeseries object :rtype: Timeseries :raises IkatsConflictError: if *fid* already present", ":param fid: Identifier to create (if provided) :param data: List of data points", "object shall have set at least tsuid or 
fid\") return self.dm_client.ts_delete(tsuid=tsuid, raise_exception=raise_exception) def", "(timestamp in ms from epoch) :type ts: Timeseries :type sd: int or None", "constraint: | { | frequency: [1, 2], | flight_phase: 8 | } will", "TSUID associated to the functional ID param. :param fid: the functional Identifier :param", "< 600 \"\"\" try: return self.dm_client.get_func_id_from_tsuid(tsuid=tsuid) except IkatsException: if raise_exception: raise return None", "data: List of data points as numpy array or python 2D-list :type fid:", "action may take a while :returns: the list of Timeseries object :rtype: list", "tsuid defined), the computation of the metadata is forced Returns a boolean status", "data points :rtype: np.array :raises TypeError: if *ts* is not a Timeseries object", "not an int :raises IkatsNotFoundError: if TS data points couldn't be retrieved properly", "with status : 500 <= status < 600 \"\"\" try: return self.dm_client.get_func_id_from_tsuid(tsuid=tsuid) except", "raise_exception=True) tsuid = ts if isinstance(ts, Timeseries): if ts.tsuid is not None: tsuid", "future use. Shall be used before create method in case of parallel creation", "by applicable law or agreed to in writing, software distributed under the License", "ikats.client.opentsdb_stub import OpenTSDBStub from ikats.exceptions import (IkatsConflictError, IkatsException, IkatsNotFoundError) from ikats.lib import (MDType,", "check_is_fid_valid(fid=fid) # Check if fid already associated to an existing tsuid try: return", "create method in case of parallel creation of data (import data via spark", "TSUID start_date, end_date, nb_points = self.tsdb_client.add_points(tsuid=ts.tsuid, data=ts.data) if generate_metadata: # ikats_start_date self.dm_client.metadata_update(tsuid=ts.tsuid, name='ikats_start_date',", "a boolean status of the action (True means \"OK\", False means \"errors occurred\")", "when it is defined if parent is not None: self.inherit(ts=ts, parent=parent) except IkatsException:", "a Timeseries object :raises TypeError: if *sd* is not an int :raises TypeError:", "OR frequency == 2) | AND | flight_phase == 8 :param constraint: constraint", "str :raises ValueError: no functional ID matching the tsuid :raises ServerError: http answer", "Indicates if IKATS exceptions shall be raised (True, default) or not (False) :type", "\"\"\" check_is_fid_valid(fid=fid) # Check if fid already associated to an existing tsuid try:", "corresponding to a Timeseries object as a numpy array .. note:: if omitted,", "boolean status of the action (True means \"OK\", False means \"errors occurred\") :param", "else: raise ValueError(\"Timeseries object shall have set at least tsuid or fid\") return", "a dataset it will not be removed Returns a boolean status of the", "raise_exception=True): \"\"\" Retrieve the TSUID associated to the functional ID param. :param fid:", "the action shall assert if not found or not :type fid: str :type", "assert if not found or not :type tsuid: str :type raise_exception: bool :returns:", "raise_exception: raise return None def _create_ref(self, fid): \"\"\" Create a reference of timeseries", "metadata constraint provided in parameter, the method get a TS list matching these", "an existing tsuid try: return self.dm_client.get_tsuid_from_fid(fid=fid) except IkatsException: if raise_exception: raise return None", "TS list matching these constraints Example of constraint: | { | frequency: [1,", "associate it to fid in temporal database for future use. 
Shall be used", ":param raise_exception: (optional) Indicates if IKATS exceptions shall be raised (True, default) or", "if ts.tsuid is None: ts.tsuid = self._create_ref(ts.fid).tsuid # If the TS is fresh,", "ikats.manager.generic_mgr_ import IkatsGenericApiEndPoint from ikats.objects import Timeseries NON_INHERITABLE_PATTERN = re.compile(\"^qual(.)*|ikats(.)*|funcId\") class IkatsTimeseriesMgr(IkatsGenericApiEndPoint): \"\"\"", "self.dm_client.ts_delete(tsuid=tsuid, raise_exception=raise_exception) def list(self): \"\"\" Get the list of all Timeseries from database", "to this TSUID start_date, end_date, nb_points = self.tsdb_client.add_points(tsuid=ts.tsuid, data=ts.data) if generate_metadata: # ikats_start_date", "generate_metadata: # ikats_start_date self.dm_client.metadata_update(tsuid=ts.tsuid, name='ikats_start_date', value=start_date, data_type=MDType.DATE, force_create=True) ts.metadata.set(name='ikats_start_date', value=start_date, dtype=MDType.DATE) # ikats_end_date", "%s\", parent, exception) def find_from_meta(self, constraint=None): \"\"\" From a metadata constraint provided in", "be used before create method in case of parallel creation of data (import", "return ts def get(self, fid=None, tsuid=None): \"\"\" Returns an existing Timeseries object by", "self.tsdb_client = OpenTSDBStub(session=self.api.session) self.dm_client = DatamodelStub(session=self.api.session) else: self.tsdb_client = OpenTSDBClient(session=self.api.session) self.dm_client = DatamodelClient(session=self.api.session)", "qual_nb_points self.dm_client.metadata_update(tsuid=ts.tsuid, name='qual_nb_points', value=nb_points, data_type=MDType.NUMBER, force_create=True) ts.metadata.set(name='qual_nb_points', value=nb_points, dtype=MDType.NUMBER) # Inherit from parent", "ms from epoch) :param ed: (optional) ending date (timestamp in ms from epoch)", "the functional ID param. :param fid: the functional Identifier :param raise_exception: Allow to", "*ts.data* *parent* is the original timeseries where metadata shall be taken from (except", "OR CONDITIONS OF ANY KIND, either express or implied. See the License for", "a valid Timeseries object \"\"\" # Input checks check_type(ts, Timeseries, \"ts\", raise_exception=True) check_type(parent,", ":param fid: FID of the Timeseries :param tsuid: TSUID of the Timeseries :type", "by the first point date, last point date and number of points in", "found or not :type tsuid: str :type raise_exception: bool :returns: retrieved functional identifier", ":raises ValueError: no functional ID matching the tsuid :raises ServerError: http answer with", "`new`) \"\"\" check_is_fid_valid(fid, raise_exception=True) try: # Check if fid already associated to an", "except IkatsException: if raise_exception: raise return None def _create_ref(self, fid): \"\"\" Create a", "is None: ts = Timeseries(api=self.api) else: ts = self._create_ref(fid=fid) ts.data = data return", "*ts* is not a Timeseries object :raises TypeError: if *sd* is not an", "is not an int :raises IkatsNotFoundError: if TS data points couldn't be retrieved", "import re from ikats.client.datamodel_client import DatamodelClient from ikats.client.datamodel_stub import DatamodelStub from ikats.client.opentsdb_client import", "the action (True means \"OK\", False means \"errors occurred\") :param ts: Timeseries object", "may not use this file except in compliance with the License. 
You may", "constraint provided in parameter, the method get a TS list matching these constraints", "under the License is distributed on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR", "and tsuid are mutually exclusive\") if fid is not None: tsuid = self.fid2tsuid(fid=fid,", "tsuid: str :type raise_exception: bool :returns: retrieved functional identifier value :rtype: str :raises", "*fid* already present in database (use `get` instead of `new`) \"\"\" if fid", "super(IkatsTimeseriesMgr, self).__init__(*args, **kwargs) if self.api.emulate: self.tsdb_client = OpenTSDBStub(session=self.api.session) self.dm_client = DatamodelStub(session=self.api.session) else: self.tsdb_client", ":raises ValueError: if both *fid* and *tsuid* are set (or none of them)", "ikats.lib import (MDType, check_is_fid_valid, check_is_valid_epoch, check_type) from ikats.manager.generic_mgr_ import IkatsGenericApiEndPoint from ikats.objects import", "not a Timeseries object :raises TypeError: if *sd* is not an int :raises", "object :param sd: (optional) starting date (timestamp in ms from epoch) :param ed:", "set at least tsuid or fid\") return self.dm_client.ts_delete(tsuid=tsuid, raise_exception=raise_exception) def list(self): \"\"\" Get", "\"\"\" Create an empty local Timeseries (if fid not provided) If fid is", "timeseries belongs to a dataset it will not be removed Returns a boolean", "metadata generate_metadata = True # Add points to this TSUID start_date, end_date, nb_points", "number of points in *ts.data* *parent* is the original timeseries where metadata shall", "IkatsNotFoundError) from ikats.lib import (MDType, check_is_fid_valid, check_is_valid_epoch, check_type) from ikats.manager.generic_mgr_ import IkatsGenericApiEndPoint from", "provided) if ts.tsuid is None: ts.tsuid = self._create_ref(ts.fid).tsuid # If the TS is", "exceptions shall be raised (True, default) or not (False) :type ts: Timeseries :type", "self.dm_client.get_tsuid_from_fid(fid=ts.fid) except IkatsException: if raise_exception: raise return False else: raise ValueError(\"Timeseries object shall", "value=nb_points, dtype=MDType.NUMBER) # Inherit from parent when it is defined if parent is", "metadata Note that if timeseries belongs to a dataset it will not be", ":raises TypeError: if *ts* is not a str nor a Timeseries :raises IkatsNotFoundError:", "None: self.inherit(ts=ts, parent=parent) except IkatsException: if raise_exception: raise return False return True def", "object :rtype: Timeseries :raises IkatsConflictError: if FID already present in database (use `get`", "True # Add points to this TSUID start_date, end_date, nb_points = self.tsdb_client.add_points(tsuid=ts.tsuid, data=ts.data)", "to specify if the action shall assert if not found or not :type", "**kwargs) if self.api.emulate: self.tsdb_client = OpenTSDBStub(session=self.api.session) self.dm_client = DatamodelStub(session=self.api.session) else: self.tsdb_client = OpenTSDBClient(session=self.api.session)", "self.dm_client.get_func_id_from_tsuid(tsuid=tsuid) except IkatsException: if raise_exception: raise return None def fid2tsuid(self, fid, raise_exception=True): \"\"\"", ":type fid: str :type data: list or np.array :returns: the Timeseries object :rtype:", "if isinstance(ts, Timeseries): if ts.tsuid is not None: tsuid = ts.tsuid elif ts.fid", "to a *ts* object and all associated metadata Note that if timeseries belongs", "and limitations under the License. 
\"\"\" import re from ikats.client.datamodel_client import DatamodelClient from", "IkatsConflictError(\"%s already associated to an existing tsuid: %s\" % (fid, tsuid)) except IkatsNotFoundError:", "if raise_exception: raise return False return True def delete(self, ts, raise_exception=True): \"\"\" Delete", "of the TS in Ikats :type fid: str :returns: A prepared Timeseries object", "ServerError: http answer with status : 500 <= status < 600 \"\"\" try:", "has no tsuid defined), the computation of the metadata is forced Returns a", "and associate it to fid in temporal database for future use. Shall be", "to a dataset it will not be removed Returns a boolean status of", "# -*- coding: utf-8 -*- \"\"\" Copyright 2019 CS Systèmes d'Information Licensed under", "from parent when it is defined if parent is not None: self.inherit(ts=ts, parent=parent)", "data_type=MDType.NUMBER, force_create=True) ts.metadata.set(name='qual_nb_points', value=nb_points, dtype=MDType.NUMBER) # Inherit from parent when it is defined", "TS having the following metadata: | (frequency == 1 OR frequency == 2)", "to -at least- one dataset \"\"\" check_type(value=ts, allowed_types=[str, Timeseries], var_name=\"ts\", raise_exception=True) tsuid =", "None :returns: The data points :rtype: np.array :raises TypeError: if *ts* is not", "\"generate_metadata\", raise_exception=True) check_is_fid_valid(ts.fid, raise_exception=True) try: # First, we shall create the TSUID reference", "in parameter, the method get a TS list matching these constraints Example of", "object and all associated metadata Note that if timeseries belongs to a dataset", "if the identifier was not found in database \"\"\" if bool(fid) == bool(tsuid):", "of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to", "list of Timeseries object :rtype: list \"\"\" return [Timeseries(tsuid=x[\"tsuid\"], fid=x[\"funcId\"], api=self.api) for x", "Licensed under the Apache License, Version 2.0 (the \"License\"); you may not use", ":returns: the status of the action :rtype: bool :raises TypeError: if *ts* is", "parent's metadata according to a pattern (not all metadata inherited) :param ts: TS", "set or if no TSUID is present in *ts* object, the *ikats_start_date*, *ikats_end_date*", "fid2tsuid(self, fid, raise_exception=True): \"\"\" Retrieve the TSUID associated to the functional ID param.", "the TS is fresh, we force the creation of the metadata generate_metadata =", "starting date (timestamp in ms from epoch) :param ed: (optional) ending date (timestamp", ":raises IkatsConflictError: if FID already present in database (use `get` instead of `new`)", ":param tsuid: TSUID of the Timeseries :type fid: str :type tsuid: str :returns:", "Timeseries object \"\"\" # Input checks check_type(ts, Timeseries, \"ts\", raise_exception=True) check_type(parent, [Timeseries, None],", "def find_from_meta(self, constraint=None): \"\"\" From a metadata constraint provided in parameter, the method", "parent=parent) except IkatsException: if raise_exception: raise return False return True def delete(self, ts,", "will be overwritten by the first point date, last point date and number", ":raises IkatsNotFoundError: if the identifier was not found in database \"\"\" if bool(fid)", "IkatsTimeseriesMgr(IkatsGenericApiEndPoint): \"\"\" Ikats EndPoint specific to Timeseries management \"\"\" def __init__(self, *args, **kwargs):", "action :rtype: bool :raises TypeError: if *ts* is not a str nor a", "existing tsuid tsuid = 
self.dm_client.get_tsuid_from_fid(fid=fid) # if fid already exists in database, raise", "Timeseries object :rtype: list \"\"\" return [Timeseries(tsuid=x[\"tsuid\"], fid=x[\"funcId\"], api=self.api) for x in self.dm_client.get_ts_list()]", "http answer with status : 500 <= status < 600 \"\"\" try: return", "False means \"errors occurred\") :param ts: tsuid of the timeseries or Timeseries Object", "of `new`) \"\"\" if fid is None: ts = Timeseries(api=self.api) else: ts =", "*qual_nb_points*) If the timeseries is a new one (object has no tsuid defined),", "Timeseries object :rtype: Timeseries :raises IkatsConflictError: if FID already present in database (use", "def inherit(self, ts, parent): \"\"\" Make a timeseries inherit of parent's metadata according", "a reference of timeseries in temporal data database and associate it to fid", "the action :rtype: bool :raises TypeError: if *ts* is not a valid Timeseries", "None :type ed: int or None :returns: The data points :rtype: np.array :raises", "point date and number of points in *ts.data* *parent* is the original timeseries", "tags=tags) # finally importing tsuid/fid pair in non temporal database self.dm_client.import_fid(tsuid=tsuid, fid=fid) return", "not (False) :type ts: str or Timeseries :type raise_exception: bool :returns: the status", "d'Information Licensed under the Apache License, Version 2.0 (the \"License\"); you may not", "not use this file except in compliance with the License. You may obtain", "tsuid param. :param tsuid: one tsuid value :param raise_exception: Allow to specify if", "omitted, *sd* (start date) and *ed* (end date) will be retrieved from metadata", "ID associated to the tsuid param. :param tsuid: one tsuid value :param raise_exception:", "take a while :returns: the list of Timeseries object :rtype: list \"\"\" return", "`new`) \"\"\" if fid is None: ts = Timeseries(api=self.api) else: ts = self._create_ref(fid=fid)", "timeseries or Timeseries Object to remove :param raise_exception: (optional) Indicates if IKATS exceptions", "bool :returns: retrieved TSUID value or None if not found :rtype: str :raises", "except IkatsException: if raise_exception: raise return False return True def delete(self, ts, raise_exception=True):", "\"\"\" Ikats EndPoint specific to Timeseries management \"\"\" def __init__(self, *args, **kwargs): super(IkatsTimeseriesMgr,", "FID or TSUID (only one shall be provided) :param fid: FID of the", "tsuid = ts.tsuid elif ts.fid is not None: try: tsuid = self.dm_client.get_tsuid_from_fid(fid=ts.fid) except", "== 8 :param constraint: constraint definition :type constraint: dict :returns: list of TSUID", ":raises TypeError: if *sd* is not an int :raises TypeError: if *ed* is", "numpy array or python 2D-list :type fid: str :type data: list or np.array", "ending date (timestamp in ms from epoch) :type ts: Timeseries :type sd: int", "tsuid = ts if isinstance(ts, Timeseries): if ts.tsuid is not None: tsuid =", "nb_points = self.tsdb_client.add_points(tsuid=ts.tsuid, data=ts.data) if generate_metadata: # ikats_start_date self.dm_client.metadata_update(tsuid=ts.tsuid, name='ikats_start_date', value=start_date, data_type=MDType.DATE, force_create=True)", "if self.api.emulate: self.tsdb_client = OpenTSDBStub(session=self.api.session) self.dm_client = DatamodelStub(session=self.api.session) else: self.tsdb_client = OpenTSDBClient(session=self.api.session) self.dm_client", "8 | } will find the TS having the following metadata: | (frequency", "the metadata generate_metadata = True # Add points to this TSUID 
start_date, end_date,", "be taken from (except intrinsic ones, eg. *qual_nb_points*) If the timeseries is a", ":param fid: the functional Identifier :param raise_exception: Allow to specify if the action", "python 2D-list :type fid: str :type data: list or np.array :returns: the Timeseries", "shall assert if not found or not :type tsuid: str :type raise_exception: bool", "will be created to database :param fid: Identifier to create (if provided) :param", "raise_exception=True) try: # First, we shall create the TSUID reference (if not provided)", "2.0 (the \"License\"); you may not use this file except in compliance with", ": 500 <= status < 600 \"\"\" try: return self.dm_client.get_func_id_from_tsuid(tsuid=tsuid) except IkatsException: if", "nothing will be inherited; \\nreason: %s\", parent, exception) def find_from_meta(self, constraint=None): \"\"\" From", "is the original timeseries where metadata shall be taken from (except intrinsic ones,", "ts.tsuid elif ts.fid is not None: try: tsuid = self.dm_client.get_tsuid_from_fid(fid=ts.fid) except IkatsException: if", "the Timeseries object :rtype: Timeseries :raises IkatsConflictError: if *fid* already present in database", "doing partial import) (Default: True) :param raise_exception: Indicates if exceptions shall be raised", "copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed", "self.tsdb_client.get_ts_by_tsuid(tsuid=ts.tsuid, sd=sd, ed=ed) # Return the points return data_points except ValueError: raise IkatsNotFoundError(\"TS", "language governing permissions and limitations under the License. \"\"\" import re from ikats.client.datamodel_client", "fetch(self, ts, sd=None, ed=None): \"\"\" Retrieve the data corresponding to a Timeseries object", "if *ed* is not an int :raises IkatsNotFoundError: if TS data points couldn't", "an existing tsuid tsuid = self.dm_client.get_tsuid_from_fid(fid=fid) # if fid already exists in database,", "the points return data_points except ValueError: raise IkatsNotFoundError(\"TS data points couldn't be retrieved", "status < 600 \"\"\" try: return self.dm_client.get_func_id_from_tsuid(tsuid=tsuid) except IkatsException: if raise_exception: raise return", "if timeseries belongs to -at least- one dataset \"\"\" check_type(value=ts, allowed_types=[str, Timeseries], var_name=\"ts\",", "forced Returns a boolean status of the action (True means \"OK\", False means", "A prepared Timeseries object :rtype: Timeseries :raises IkatsConflictError: if FID already present in", "param. :param fid: the functional Identifier :param raise_exception: Allow to specify if the", "of data points as numpy array or python 2D-list :type fid: str :type", "data corresponding to a Timeseries object as a numpy array .. 
note:: if", ":raises ServerError: http answer with status : 500 <= status < 600 \"\"\"", "if you want a fixed windowed range, set *sd* and *ed* manually (but", "is not a str nor a Timeseries :raises IkatsNotFoundError: if timeseries is not", "\\nreason: %s\", parent, exception) def find_from_meta(self, constraint=None): \"\"\" From a metadata constraint provided", ":rtype: np.array :raises TypeError: if *ts* is not a Timeseries object :raises TypeError:", "if *fid* already present in database (use `get` instead of `new`) \"\"\" if", "force_create=True) ts.metadata.set(name='qual_nb_points', value=nb_points, dtype=MDType.NUMBER) # Inherit from parent when it is defined if", "already associated to an existing tsuid: %s\" % (fid, tsuid)) except IkatsNotFoundError: #", "# ikats_end_date self.dm_client.metadata_update(tsuid=ts.tsuid, name='ikats_end_date', value=end_date, data_type=MDType.DATE, force_create=True) ts.metadata.set(name='ikats_end_date', value=end_date, dtype=MDType.DATE) # qual_nb_points self.dm_client.metadata_update(tsuid=ts.tsuid,", "IkatsConflictError: if FID already present in database (use `get` instead of `new`) \"\"\"", "ed = ts.metadata.get(name=\"ikats_end_date\") check_is_valid_epoch(value=ed, raise_exception=True) try: data_points = self.tsdb_client.get_ts_by_tsuid(tsuid=ts.tsuid, sd=sd, ed=ed) # Return", "None def _create_ref(self, fid): \"\"\" Create a reference of timeseries in temporal data", "except IkatsException: if raise_exception: raise return False else: raise ValueError(\"Timeseries object shall have", "the computation of the metadata is forced Returns a boolean status of the", "tsuid=None): \"\"\" Returns an existing Timeseries object by providing either its FID or", "matching these constraints Example of constraint: | { | frequency: [1, 2], |", "if raise_exception: raise return False else: raise ValueError(\"Timeseries object shall have set at", "(True means \"OK\", False means \"errors occurred\") :param ts: Timeseries object containing information", "or not (False) :type ts: str or Timeseries :type raise_exception: bool :returns: the", "a Timeseries :raises IkatsNotFoundError: if timeseries is not found on server :raises IkatsConflictError:", "fid is not None: tsuid = self.fid2tsuid(fid=fid, raise_exception=True) return Timeseries(api=self.api, tsuid=tsuid, fid=fid) def", "from metadata if you want a fixed windowed range, set *sd* and *ed*", "if TS data points couldn't be retrieved properly \"\"\" check_type(value=ts, allowed_types=Timeseries, var_name=\"ts\", raise_exception=True)", "not a valid Timeseries object \"\"\" # Input checks check_type(ts, Timeseries, \"ts\", raise_exception=True)", "re.compile(\"^qual(.)*|ikats(.)*|funcId\") class IkatsTimeseriesMgr(IkatsGenericApiEndPoint): \"\"\" Ikats EndPoint specific to Timeseries management \"\"\" def __init__(self,", "raise_exception=raise_exception) def list(self): \"\"\" Get the list of all Timeseries from database ..", "str :raises TypeError: if fid is not str :raises IkatsNotFoundError: no match \"\"\"", "Input checks check_type(ts, Timeseries, \"ts\", raise_exception=True) check_type(parent, [Timeseries, None], \"parent\", raise_exception=True) check_type(generate_metadata, bool,", "True) :param raise_exception: Indicates if exceptions shall be raised (True, default) or not", "to database :param fid: Identifier to create (if provided) :param data: List of", "self.tsdb_client.gen_metric_tags() tsuid = self.tsdb_client.assign_metric(metric=metric, tags=tags) # finally importing tsuid/fid pair in 
non temporal", "\"\"\" From a metadata constraint provided in parameter, the method get a TS", "def get(self, fid=None, tsuid=None): \"\"\" Returns an existing Timeseries object by providing either", "dict :raises TypeError: if *constraint* is not a dict \"\"\" return self.dm_client.get_ts_from_metadata(constraint=constraint) def", "return self.dm_client.get_func_id_from_tsuid(tsuid=tsuid) except IkatsException: if raise_exception: raise return None def fid2tsuid(self, fid, raise_exception=True):", "if ed is None: ed = ts.metadata.get(name=\"ikats_end_date\") check_is_valid_epoch(value=ed, raise_exception=True) try: data_points = self.tsdb_client.get_ts_by_tsuid(tsuid=ts.tsuid,", "windowed range, set *sd* and *ed* manually (but be aware that the TS", "Create an empty local Timeseries (if fid not provided) If fid is set,", "points if *generate_metadata* is set or if no TSUID is present in *ts*", "if tsuid is not a defined str :raises ValueError: no functional ID matching", "Flag metadata as \"not deleted\" result[meta_name][\"deleted\"] = False if not NON_INHERITABLE_PATTERN.match(meta_name): self.dm_client.metadata_create(tsuid=ts.tsuid, name=meta_name,", "least- one dataset \"\"\" check_type(value=ts, allowed_types=[str, Timeseries], var_name=\"ts\", raise_exception=True) tsuid = ts if", "ts.fid is not None: try: tsuid = self.dm_client.get_tsuid_from_fid(fid=ts.fid) except IkatsException: if raise_exception: raise", "when doing partial import) (Default: True) :param raise_exception: Indicates if exceptions shall be", "be not completely gathered) :param ts: Timeseries object :param sd: (optional) starting date", "the TSUID associated to the functional ID param. :param fid: the functional Identifier", "ts: Timeseries :param parent: Timeseries \"\"\" try: result = self.dm_client.metadata_get_typed([parent.tsuid])[parent.tsuid] for meta_name in", "providing either its FID or TSUID (only one shall be provided) :param fid:", "parameter, the method get a TS list matching these constraints Example of constraint:", ":raises TypeError: if tsuid is not a defined str :raises ValueError: no functional", "raised (True, default) or not (False) :type ts: Timeseries :type parent: Timeseries :type", "in Ikats :type fid: str :returns: A prepared Timeseries object :rtype: Timeseries :raises", ":returns: A prepared Timeseries object :rtype: Timeseries :raises IkatsConflictError: if FID already present", "specific language governing permissions and limitations under the License. 
\"\"\" import re from", "if exceptions shall be raised (True, default) or not (False) :type ts: Timeseries", "found on server :raises IkatsConflictError: if timeseries belongs to -at least- one dataset", ":raises IkatsNotFoundError: if TS data points couldn't be retrieved properly \"\"\" check_type(value=ts, allowed_types=Timeseries,", "from ikats.client.datamodel_stub import DatamodelStub from ikats.client.opentsdb_client import OpenTSDBClient from ikats.client.opentsdb_stub import OpenTSDBStub from", "str :type tsuid: str :returns: The Timeseries object :rtype: Timeseries :raises ValueError: if", "TSUID (only one shall be provided) :param fid: FID of the Timeseries :param", "an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express", "import) (Default: True) :param raise_exception: Indicates if exceptions shall be raised (True, default)", "present in database (use `get` instead of `new`) \"\"\" check_is_fid_valid(fid, raise_exception=True) try: #", "data=None): \"\"\" Create an empty local Timeseries (if fid not provided) If fid", "ts if isinstance(ts, Timeseries): if ts.tsuid is not None: tsuid = ts.tsuid elif", "ID param. :param fid: the functional Identifier :param raise_exception: Allow to specify if", "obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law", "fid already associated to an existing tsuid tsuid = self.dm_client.get_tsuid_from_fid(fid=fid) # if fid", "object in IKATS (which will inherit) :param parent: TS object in IKATS of", "Retrieve the functional ID associated to the tsuid param. :param tsuid: one tsuid", "new(self, fid=None, data=None): \"\"\" Create an empty local Timeseries (if fid not provided)", "else: self.tsdb_client = OpenTSDBClient(session=self.api.session) self.dm_client = DatamodelClient(session=self.api.session) def new(self, fid=None, data=None): \"\"\" Create", "Timeseries from database .. note:: This action may take a while :returns: the", "if the action shall assert if not found or not :type fid: str", "np.array :raises TypeError: if *ts* is not a Timeseries object :raises TypeError: if", "fid: FID of the Timeseries :param tsuid: TSUID of the Timeseries :type fid:", "exception raise IkatsConflictError(\"%s already associated to an existing tsuid: %s\" % (fid, tsuid))", "not found or not :type tsuid: str :type raise_exception: bool :returns: retrieved functional", "License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing,", "\"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or", "raise_exception: raise return None def fid2tsuid(self, fid, raise_exception=True): \"\"\" Retrieve the TSUID associated", "a pattern (not all metadata inherited) :param ts: TS object in IKATS (which", "tsuid is not a defined str :raises ValueError: no functional ID matching the", "= ts if isinstance(ts, Timeseries): if ts.tsuid is not None: tsuid = ts.tsuid", ":raises IkatsNotFoundError: if timeseries is not found on server :raises IkatsConflictError: if timeseries", "str or Timeseries :type raise_exception: bool :returns: the status of the action :rtype:", "\"OK\", False means \"errors occurred\") :param ts: Timeseries object containing information about what", "as numpy array or python 2D-list :type fid: str :type data: list or", "raise_exception=True): \"\"\" Import timeseries data points to database or update an existing timeseries", "metadata shall be taken from (except intrinsic ones, eg. 
*qual_nb_points*) If the timeseries", "fid in temporal database for future use. Shall be used before create method", "(import data via spark for example) :param fid: Functional Identifier of the TS", "if omitted, *sd* (start date) and *ed* (end date) will be retrieved from", "return Timeseries(api=self.api, tsuid=tsuid, fid=fid) def save(self, ts, parent=None, generate_metadata=True, raise_exception=True): \"\"\" Import timeseries", "False else: raise ValueError(\"Timeseries object shall have set at least tsuid or fid\")", "already exists in database, raise a conflict exception raise IkatsConflictError(\"%s already associated to", "Timeseries :raises IkatsConflictError: if FID already present in database (use `get` instead of", "a timeseries inherit of parent's metadata according to a pattern (not all metadata", "IkatsConflictError: if timeseries belongs to -at least- one dataset \"\"\" check_type(value=ts, allowed_types=[str, Timeseries],", "ikats.client.opentsdb_client import OpenTSDBClient from ikats.client.opentsdb_stub import OpenTSDBStub from ikats.exceptions import (IkatsConflictError, IkatsException, IkatsNotFoundError)", "ts.tsuid = self._create_ref(ts.fid).tsuid # If the TS is fresh, we force the creation", "and *tsuid* are set (or none of them) :raises IkatsNotFoundError: if the identifier", "(not all metadata inherited) :param ts: TS object in IKATS (which will inherit)", "no TSUID is present in *ts* object, the *ikats_start_date*, *ikats_end_date* and *qual_nb_points* will", "timeseries belongs to -at least- one dataset \"\"\" check_type(value=ts, allowed_types=[str, Timeseries], var_name=\"ts\", raise_exception=True)", "OpenTSDBClient from ikats.client.opentsdb_stub import OpenTSDBStub from ikats.exceptions import (IkatsConflictError, IkatsException, IkatsNotFoundError) from ikats.lib", "not a str nor a Timeseries :raises IkatsNotFoundError: if timeseries is not found", "or update an existing timeseries with new points if *generate_metadata* is set or", "is not a valid Timeseries object \"\"\" # Input checks check_type(ts, Timeseries, \"ts\",", "self.tsdb_client.add_points(tsuid=ts.tsuid, data=ts.data) if generate_metadata: # ikats_start_date self.dm_client.metadata_update(tsuid=ts.tsuid, name='ikats_start_date', value=start_date, data_type=MDType.DATE, force_create=True) ts.metadata.set(name='ikats_start_date', value=start_date,", "parent): \"\"\" Make a timeseries inherit of parent's metadata according to a pattern", "was not found in database \"\"\" if bool(fid) == bool(tsuid): raise ValueError(\"fid and", "*ed* is not an int :raises IkatsNotFoundError: if TS data points couldn't be", "raised (True, default) or not (False) :type ts: str or Timeseries :type raise_exception:", "data corresponding to a *ts* object and all associated metadata Note that if", "force_create=True) ts.metadata.set(name='ikats_end_date', value=end_date, dtype=MDType.DATE) # qual_nb_points self.dm_client.metadata_update(tsuid=ts.tsuid, name='qual_nb_points', value=nb_points, data_type=MDType.NUMBER, force_create=True) ts.metadata.set(name='qual_nb_points', value=nb_points,", "shall assert if not found or not :type fid: str :type raise_exception: bool", "fid=None, data=None): \"\"\" Create an empty local Timeseries (if fid not provided) If", "assert if not found or not :type fid: str :type raise_exception: bool :returns:", "check_type(value=ts, allowed_types=Timeseries, var_name=\"ts\", raise_exception=True) check_type(value=sd, allowed_types=[int, None], var_name=\"sd\", raise_exception=True) 
check_type(value=ed, allowed_types=[int, None], var_name=\"ed\",", "or fid\") return self.dm_client.ts_delete(tsuid=tsuid, raise_exception=raise_exception) def list(self): \"\"\" Get the list of all", "\"\"\" Retrieve the TSUID associated to the functional ID param. :param fid: the", "(True, default) or not (False) :type ts: Timeseries :type parent: Timeseries :type generate_metadata:", ":type constraint: dict :returns: list of TSUID matching the constraints :rtype: dict :raises", "value=nb_points, data_type=MDType.NUMBER, force_create=True) ts.metadata.set(name='qual_nb_points', value=nb_points, dtype=MDType.NUMBER) # Inherit from parent when it is", "\"errors occurred\") :param ts: tsuid of the timeseries or Timeseries Object to remove", "not be removed Returns a boolean status of the action (True means \"OK\",", "numpy array .. note:: if omitted, *sd* (start date) and *ed* (end date)", "*args, **kwargs): super(IkatsTimeseriesMgr, self).__init__(*args, **kwargs) if self.api.emulate: self.tsdb_client = OpenTSDBStub(session=self.api.session) self.dm_client = DatamodelStub(session=self.api.session)", "object :rtype: list \"\"\" return [Timeseries(tsuid=x[\"tsuid\"], fid=x[\"funcId\"], api=self.api) for x in self.dm_client.get_ts_list()] def", "the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in", "*qual_nb_points* will be overwritten by the first point date, last point date and", "parent, exception) def find_from_meta(self, constraint=None): \"\"\" From a metadata constraint provided in parameter,", "OpenTSDBStub(session=self.api.session) self.dm_client = DatamodelStub(session=self.api.session) else: self.tsdb_client = OpenTSDBClient(session=self.api.session) self.dm_client = DatamodelClient(session=self.api.session) def new(self,", "couldn't be retrieved properly\") def inherit(self, ts, parent): \"\"\" Make a timeseries inherit", "date (timestamp in ms from epoch) :type ts: Timeseries :type sd: int or", "from ikats.lib import (MDType, check_is_fid_valid, check_is_valid_epoch, check_type) from ikats.manager.generic_mgr_ import IkatsGenericApiEndPoint from ikats.objects", "not None: self.inherit(ts=ts, parent=parent) except IkatsException: if raise_exception: raise return False return True", "is None: ed = ts.metadata.get(name=\"ikats_end_date\") check_is_valid_epoch(value=ed, raise_exception=True) try: data_points = self.tsdb_client.get_ts_by_tsuid(tsuid=ts.tsuid, sd=sd, ed=ed)", "Timeseries \"\"\" try: result = self.dm_client.metadata_get_typed([parent.tsuid])[parent.tsuid] for meta_name in result: # Flag metadata", "# Flag metadata as \"not deleted\" result[meta_name][\"deleted\"] = False if not NON_INHERITABLE_PATTERN.match(meta_name): self.dm_client.metadata_create(tsuid=ts.tsuid,", ":raises TypeError: if fid is not str :raises IkatsNotFoundError: no match \"\"\" check_is_fid_valid(fid=fid)", "of parallel creation of data (import data via spark for example) :param fid:", "is forced Returns a boolean status of the action (True means \"OK\", False", "local Timeseries (if fid not provided) If fid is set, the identifier will", "the TSUID reference (if not provided) if ts.tsuid is None: ts.tsuid = self._create_ref(ts.fid).tsuid", "IkatsNotFoundError: if TS data points couldn't be retrieved properly \"\"\" check_type(value=ts, allowed_types=Timeseries, var_name=\"ts\",", "case of parallel creation of data (import data via spark for example) :param", "self.dm_client.metadata_create(tsuid=ts.tsuid, name=meta_name, 
value=result[meta_name][\"value\"], data_type=MDType(result[meta_name][\"dtype\"]), force_update=True) except(ValueError, TypeError, SystemError) as exception: self.api.session.log.warning( \"Can't get", "= False if not NON_INHERITABLE_PATTERN.match(meta_name): self.dm_client.metadata_create(tsuid=ts.tsuid, name=meta_name, value=result[meta_name][\"value\"], data_type=MDType(result[meta_name][\"dtype\"]), force_update=True) except(ValueError, TypeError, SystemError)", "if the action shall assert if not found or not :type tsuid: str", "the functional Identifier :param raise_exception: Allow to specify if the action shall assert", "\"\"\" return [Timeseries(tsuid=x[\"tsuid\"], fid=x[\"funcId\"], api=self.api) for x in self.dm_client.get_ts_list()] def fetch(self, ts, sd=None,", "and *ed* (end date) will be retrieved from metadata if you want a", "# qual_nb_points self.dm_client.metadata_update(tsuid=ts.tsuid, name='qual_nb_points', value=nb_points, data_type=MDType.NUMBER, force_create=True) ts.metadata.set(name='qual_nb_points', value=nb_points, dtype=MDType.NUMBER) # Inherit from", "self.dm_client.metadata_update(tsuid=ts.tsuid, name='qual_nb_points', value=nb_points, data_type=MDType.NUMBER, force_create=True) ts.metadata.set(name='qual_nb_points', value=nb_points, dtype=MDType.NUMBER) # Inherit from parent when", "= OpenTSDBClient(session=self.api.session) self.dm_client = DatamodelClient(session=self.api.session) def new(self, fid=None, data=None): \"\"\" Create an empty", "except ValueError: raise IkatsNotFoundError(\"TS data points couldn't be retrieved properly\") def inherit(self, ts,", "ValueError: no functional ID matching the tsuid :raises ServerError: http answer with status", "not a defined str :raises ValueError: no functional ID matching the tsuid :raises", "the timeseries or Timeseries Object to remove :param raise_exception: (optional) Indicates if IKATS", "get(self, fid=None, tsuid=None): \"\"\" Returns an existing Timeseries object by providing either its", "(optional) starting date (timestamp in ms from epoch) :param ed: (optional) ending date", "be retrieved properly \"\"\" check_type(value=ts, allowed_types=Timeseries, var_name=\"ts\", raise_exception=True) check_type(value=sd, allowed_types=[int, None], var_name=\"sd\", raise_exception=True)", "Unless required by applicable law or agreed to in writing, software distributed under", "TS is fresh, we force the creation of the metadata generate_metadata = True", "from ikats.manager.generic_mgr_ import IkatsGenericApiEndPoint from ikats.objects import Timeseries NON_INHERITABLE_PATTERN = re.compile(\"^qual(.)*|ikats(.)*|funcId\") class IkatsTimeseriesMgr(IkatsGenericApiEndPoint):", "fid, raise_exception=True): \"\"\" Retrieve the TSUID associated to the functional ID param. :param", ":returns: the Timeseries object :rtype: Timeseries :raises IkatsConflictError: if *fid* already present in", "to Timeseries management \"\"\" def __init__(self, *args, **kwargs): super(IkatsTimeseriesMgr, self).__init__(*args, **kwargs) if self.api.emulate:", "functional ID param. 
:param fid: the functional Identifier :param raise_exception: Allow to specify", "fid is not str :raises IkatsNotFoundError: no match \"\"\" check_is_fid_valid(fid=fid) # Check if", "be retrieved properly\") def inherit(self, ts, parent): \"\"\" Make a timeseries inherit of", "allowed_types=[str, Timeseries], var_name=\"ts\", raise_exception=True) tsuid = ts if isinstance(ts, Timeseries): if ts.tsuid is", "IkatsGenericApiEndPoint from ikats.objects import Timeseries NON_INHERITABLE_PATTERN = re.compile(\"^qual(.)*|ikats(.)*|funcId\") class IkatsTimeseriesMgr(IkatsGenericApiEndPoint): \"\"\" Ikats EndPoint", "on server :raises IkatsConflictError: if timeseries belongs to -at least- one dataset \"\"\"", "fid: str :type tsuid: str :returns: The Timeseries object :rtype: Timeseries :raises ValueError:", "existing timeseries with new points if *generate_metadata* is set or if no TSUID", "not None: tsuid = ts.tsuid elif ts.fid is not None: try: tsuid =", "or python 2D-list :type fid: str :type data: list or np.array :returns: the", "\"\"\" # Input checks check_type(ts, Timeseries, \"ts\", raise_exception=True) check_type(parent, [Timeseries, None], \"parent\", raise_exception=True)", "| frequency: [1, 2], | flight_phase: 8 | } will find the TS", "a str nor a Timeseries :raises IkatsNotFoundError: if timeseries is not found on", "for the specific language governing permissions and limitations under the License. \"\"\" import", "of the action (True means \"OK\", False means \"errors occurred\") :param ts: tsuid", "Object to remove :param raise_exception: (optional) Indicates if IKATS exceptions shall be raised", "overwritten by the first point date, last point date and number of points", "information about what to create :param parent: (optional) Timeseries object of inheritance parent", "metadata (set to False when doing partial import) (Default: True) :param raise_exception: Indicates", "of Timeseries object :rtype: list \"\"\" return [Timeseries(tsuid=x[\"tsuid\"], fid=x[\"funcId\"], api=self.api) for x in", "use. Shall be used before create method in case of parallel creation of", "of constraint: | { | frequency: [1, 2], | flight_phase: 8 | }", "generate_metadata: bool :type raise_exception: bool :returns: the status of the action :rtype: bool", "software distributed under the License is distributed on an \"AS IS\" BASIS, WITHOUT", "shall create the TSUID reference (if not provided) if ts.tsuid is None: ts.tsuid", "of all Timeseries from database .. note:: This action may take a while", "in IKATS (which will inherit) :param parent: TS object in IKATS of inheritance", "tsuid are mutually exclusive\") if fid is not None: tsuid = self.fid2tsuid(fid=fid, raise_exception=True)", "self.tsdb_client.assign_metric(metric=metric, tags=tags) # finally importing tsuid/fid pair in non temporal database self.dm_client.import_fid(tsuid=tsuid, fid=fid)", "Timeseries], var_name=\"ts\", raise_exception=True) tsuid = ts if isinstance(ts, Timeseries): if ts.tsuid is not", "the specific language governing permissions and limitations under the License. \"\"\" import re", "array .. 
note:: if omitted, *sd* (start date) and *ed* (end date) will", "the TS may be not completely gathered) :param ts: Timeseries object :param sd:", "remove :param raise_exception: (optional) Indicates if IKATS exceptions shall be raised (True, default)", "None: ts = Timeseries(api=self.api) else: ts = self._create_ref(fid=fid) ts.data = data return ts", "fid=fid) def save(self, ts, parent=None, generate_metadata=True, raise_exception=True): \"\"\" Import timeseries data points to", "check_is_fid_valid(fid, raise_exception=True) try: # Check if fid already associated to an existing tsuid", "object \"\"\" # Input checks check_type(ts, Timeseries, \"ts\", raise_exception=True) check_type(parent, [Timeseries, None], \"parent\",", "raise_exception: bool :returns: retrieved functional identifier value :rtype: str :raises TypeError: if tsuid", "object :rtype: Timeseries :raises ValueError: if both *fid* and *tsuid* are set (or", "date) will be retrieved from metadata if you want a fixed windowed range,", "parent: TS object in IKATS of inheritance parent :type ts: Timeseries :param parent:", "# Return the points return data_points except ValueError: raise IkatsNotFoundError(\"TS data points couldn't", "in writing, software distributed under the License is distributed on an \"AS IS\"", "matching the tsuid :raises ServerError: http answer with status : 500 <= status", "or TSUID (only one shall be provided) :param fid: FID of the Timeseries", "an int :raises IkatsNotFoundError: if TS data points couldn't be retrieved properly \"\"\"", "retrieved properly\") def inherit(self, ts, parent): \"\"\" Make a timeseries inherit of parent's", "bool :raises TypeError: if *ts* is not a str nor a Timeseries :raises", "matching the constraints :rtype: dict :raises TypeError: if *constraint* is not a dict", "= self.tsdb_client.assign_metric(metric=metric, tags=tags) # finally importing tsuid/fid pair in non temporal database self.dm_client.import_fid(tsuid=tsuid,", "or agreed to in writing, software distributed under the License is distributed on", "fid: str :type raise_exception: bool :returns: retrieved TSUID value or None if not", "inheritance parent :type ts: Timeseries :param parent: Timeseries \"\"\" try: result = self.dm_client.metadata_get_typed([parent.tsuid])[parent.tsuid]", "the timeseries is a new one (object has no tsuid defined), the computation", "dataset \"\"\" check_type(value=ts, allowed_types=[str, Timeseries], var_name=\"ts\", raise_exception=True) tsuid = ts if isinstance(ts, Timeseries):", "= ts.tsuid elif ts.fid is not None: try: tsuid = self.dm_client.get_tsuid_from_fid(fid=ts.fid) except IkatsException:", "\"\"\" Create a reference of timeseries in temporal data database and associate it", "timeseries with new points if *generate_metadata* is set or if no TSUID is", "properly\") def inherit(self, ts, parent): \"\"\" Make a timeseries inherit of parent's metadata", "OpenTSDBStub from ikats.exceptions import (IkatsConflictError, IkatsException, IkatsNotFoundError) from ikats.lib import (MDType, check_is_fid_valid, check_is_valid_epoch,", "(use `get` instead of `new`) \"\"\" check_is_fid_valid(fid, raise_exception=True) try: # Check if fid", "prepared Timeseries object :rtype: Timeseries :raises IkatsConflictError: if FID already present in database", "note:: if omitted, *sd* (start date) and *ed* (end date) will be retrieved", ":type data: list or np.array :returns: the Timeseries object :rtype: Timeseries :raises IkatsConflictError:", "tsuid = self.dm_client.get_tsuid_from_fid(fid=fid) # if 
fid already exists in database, raise a conflict", ":param parent: Timeseries \"\"\" try: result = self.dm_client.metadata_get_typed([parent.tsuid])[parent.tsuid] for meta_name in result: #", "if *sd* is not an int :raises TypeError: if *ed* is not an", "OpenTSDBClient(session=self.api.session) self.dm_client = DatamodelClient(session=self.api.session) def new(self, fid=None, data=None): \"\"\" Create an empty local", "manually (but be aware that the TS may be not completely gathered) :param", "EndPoint specific to Timeseries management \"\"\" def __init__(self, *args, **kwargs): super(IkatsTimeseriesMgr, self).__init__(*args, **kwargs)", "provided) If fid is set, the identifier will be created to database :param", "properly \"\"\" check_type(value=ts, allowed_types=Timeseries, var_name=\"ts\", raise_exception=True) check_type(value=sd, allowed_types=[int, None], var_name=\"sd\", raise_exception=True) check_type(value=ed, allowed_types=[int,", "is not str :raises IkatsNotFoundError: no match \"\"\" check_is_fid_valid(fid=fid) # Check if fid", "(if fid not provided) If fid is set, the identifier will be created", "value=start_date, data_type=MDType.DATE, force_create=True) ts.metadata.set(name='ikats_start_date', value=start_date, dtype=MDType.DATE) # ikats_end_date self.dm_client.metadata_update(tsuid=ts.tsuid, name='ikats_end_date', value=end_date, data_type=MDType.DATE, force_create=True)", "data_type=MDType(result[meta_name][\"dtype\"]), force_update=True) except(ValueError, TypeError, SystemError) as exception: self.api.session.log.warning( \"Can't get metadata of parent", "List of data points as numpy array or python 2D-list :type fid: str", "meta_name in result: # Flag metadata as \"not deleted\" result[meta_name][\"deleted\"] = False if", ":raises IkatsConflictError: if *fid* already present in database (use `get` instead of `new`)", "aware that the TS may be not completely gathered) :param ts: Timeseries object", "tsuid value :param raise_exception: Allow to specify if the action shall assert if", "bool(fid) == bool(tsuid): raise ValueError(\"fid and tsuid are mutually exclusive\") if fid is", "try: return self.dm_client.get_func_id_from_tsuid(tsuid=tsuid) except IkatsException: if raise_exception: raise return None def fid2tsuid(self, fid,", "a *ts* object and all associated metadata Note that if timeseries belongs to", "existing tsuid try: return self.dm_client.get_tsuid_from_fid(fid=fid) except IkatsException: if raise_exception: raise return None def", "or not :type fid: str :type raise_exception: bool :returns: retrieved TSUID value or", "(use `get` instead of `new`) \"\"\" if fid is None: ts = Timeseries(api=self.api)", "this TSUID start_date, end_date, nb_points = self.tsdb_client.add_points(tsuid=ts.tsuid, data=ts.data) if generate_metadata: # ikats_start_date self.dm_client.metadata_update(tsuid=ts.tsuid,", "be inherited; \\nreason: %s\", parent, exception) def find_from_meta(self, constraint=None): \"\"\" From a metadata", "to a Timeseries object as a numpy array .. note:: if omitted, *sd*", "inherit of parent's metadata according to a pattern (not all metadata inherited) :param", "import DatamodelStub from ikats.client.opentsdb_client import OpenTSDBClient from ikats.client.opentsdb_stub import OpenTSDBStub from ikats.exceptions import", "this file except in compliance with the License. 
You may obtain a copy", "self.inherit(ts=ts, parent=parent) except IkatsException: if raise_exception: raise return False return True def delete(self,", "all associated metadata Note that if timeseries belongs to a dataset it will", "the License. \"\"\" import re from ikats.client.datamodel_client import DatamodelClient from ikats.client.datamodel_stub import DatamodelStub", "if fid already exists in database, raise a conflict exception raise IkatsConflictError(\"%s already", "the following metadata: | (frequency == 1 OR frequency == 2) | AND", "you may not use this file except in compliance with the License. You", "raise_exception=True) try: # Check if fid already associated to an existing tsuid tsuid", "import OpenTSDBClient from ikats.client.opentsdb_stub import OpenTSDBStub from ikats.exceptions import (IkatsConflictError, IkatsException, IkatsNotFoundError) from", "not :type tsuid: str :type raise_exception: bool :returns: retrieved functional identifier value :rtype:", "api=self.api) for x in self.dm_client.get_ts_list()] def fetch(self, ts, sd=None, ed=None): \"\"\" Retrieve the", "under the License. \"\"\" import re from ikats.client.datamodel_client import DatamodelClient from ikats.client.datamodel_stub import", "\"\"\" import re from ikats.client.datamodel_client import DatamodelClient from ikats.client.datamodel_stub import DatamodelStub from ikats.client.opentsdb_client", "may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable", "dict \"\"\" return self.dm_client.get_ts_from_metadata(constraint=constraint) def tsuid2fid(self, tsuid, raise_exception=True): \"\"\" Retrieve the functional ID", "or np.array :returns: the Timeseries object :rtype: Timeseries :raises IkatsConflictError: if *fid* already", "NON_INHERITABLE_PATTERN.match(meta_name): self.dm_client.metadata_create(tsuid=ts.tsuid, name=meta_name, value=result[meta_name][\"value\"], data_type=MDType(result[meta_name][\"dtype\"]), force_update=True) except(ValueError, TypeError, SystemError) as exception: self.api.session.log.warning( \"Can't", "action (True means \"OK\", False means \"errors occurred\") :param ts: Timeseries object containing", "not completely gathered) :param ts: Timeseries object :param sd: (optional) starting date (timestamp", "\"not deleted\" result[meta_name][\"deleted\"] = False if not NON_INHERITABLE_PATTERN.match(meta_name): self.dm_client.metadata_create(tsuid=ts.tsuid, name=meta_name, value=result[meta_name][\"value\"], data_type=MDType(result[meta_name][\"dtype\"]), force_update=True)", "is set, the identifier will be created to database :param fid: Identifier to", "means \"OK\", False means \"errors occurred\") :param ts: tsuid of the timeseries or", "# ikats_start_date self.dm_client.metadata_update(tsuid=ts.tsuid, name='ikats_start_date', value=start_date, data_type=MDType.DATE, force_create=True) ts.metadata.set(name='ikats_start_date', value=start_date, dtype=MDType.DATE) # ikats_end_date self.dm_client.metadata_update(tsuid=ts.tsuid,", "raise return None def _create_ref(self, fid): \"\"\" Create a reference of timeseries in", "check_type(value=ed, allowed_types=[int, None], var_name=\"ed\", raise_exception=True) if sd is None: sd = ts.metadata.get(name=\"ikats_start_date\") check_is_valid_epoch(value=sd,", "Timeseries NON_INHERITABLE_PATTERN = re.compile(\"^qual(.)*|ikats(.)*|funcId\") class IkatsTimeseriesMgr(IkatsGenericApiEndPoint): \"\"\" Ikats EndPoint specific to Timeseries management", "check_is_valid_epoch(value=sd, 
raise_exception=True) if ed is None: ed = ts.metadata.get(name=\"ikats_end_date\") check_is_valid_epoch(value=ed, raise_exception=True) try: data_points", "fresh, we force the creation of the metadata generate_metadata = True # Add", "ts: TS object in IKATS (which will inherit) :param parent: TS object in", "return self.dm_client.get_ts_from_metadata(constraint=constraint) def tsuid2fid(self, tsuid, raise_exception=True): \"\"\" Retrieve the functional ID associated to", "raise_exception=True) check_type(generate_metadata, bool, \"generate_metadata\", raise_exception=True) check_is_fid_valid(ts.fid, raise_exception=True) try: # First, we shall create", "Delete the data corresponding to a *ts* object and all associated metadata Note", "None: tsuid = self.fid2tsuid(fid=fid, raise_exception=True) return Timeseries(api=self.api, tsuid=tsuid, fid=fid) def save(self, ts, parent=None,", "that the TS may be not completely gathered) :param ts: Timeseries object :param", "is not an int :raises TypeError: if *ed* is not an int :raises", "-*- coding: utf-8 -*- \"\"\" Copyright 2019 CS Systèmes d'Information Licensed under the", "fid: str :type data: list or np.array :returns: the Timeseries object :rtype: Timeseries", "raise_exception: raise return False return True def delete(self, ts, raise_exception=True): \"\"\" Delete the", "= Timeseries(api=self.api) else: ts = self._create_ref(fid=fid) ts.data = data return ts def get(self,", "None], \"parent\", raise_exception=True) check_type(generate_metadata, bool, \"generate_metadata\", raise_exception=True) check_is_fid_valid(ts.fid, raise_exception=True) try: # First, we", "is a new one (object has no tsuid defined), the computation of the", "# Input checks check_type(ts, Timeseries, \"ts\", raise_exception=True) check_type(parent, [Timeseries, None], \"parent\", raise_exception=True) check_type(generate_metadata,", "fid: Identifier to create (if provided) :param data: List of data points as", "TS (%s), nothing will be inherited; \\nreason: %s\", parent, exception) def find_from_meta(self, constraint=None):", "= self.tsdb_client.gen_metric_tags() tsuid = self.tsdb_client.assign_metric(metric=metric, tags=tags) # finally importing tsuid/fid pair in non", "return None def fid2tsuid(self, fid, raise_exception=True): \"\"\" Retrieve the TSUID associated to the", "*ts* object and all associated metadata Note that if timeseries belongs to a", "return True def delete(self, ts, raise_exception=True): \"\"\" Delete the data corresponding to a", "False means \"errors occurred\") :param ts: Timeseries object containing information about what to", "dtype=MDType.NUMBER) # Inherit from parent when it is defined if parent is not", "flight_phase == 8 :param constraint: constraint definition :type constraint: dict :returns: list of", "of the action (True means \"OK\", False means \"errors occurred\") :param ts: Timeseries", "Retrieve the data corresponding to a Timeseries object as a numpy array ..", "Timeseries object as a numpy array .. 
note:: if omitted, *sd* (start date)", "*parent* is the original timeseries where metadata shall be taken from (except intrinsic", "raise_exception=True) check_type(value=sd, allowed_types=[int, None], var_name=\"sd\", raise_exception=True) check_type(value=ed, allowed_types=[int, None], var_name=\"ed\", raise_exception=True) if sd", "bool :raises TypeError: if *ts* is not a valid Timeseries object \"\"\" #", "associated to an existing tsuid try: return self.dm_client.get_tsuid_from_fid(fid=fid) except IkatsException: if raise_exception: raise", "frequency: [1, 2], | flight_phase: 8 | } will find the TS having", "Check if fid already associated to an existing tsuid tsuid = self.dm_client.get_tsuid_from_fid(fid=fid) #", "| { | frequency: [1, 2], | flight_phase: 8 | } will find", "\"\"\" return self.dm_client.get_ts_from_metadata(constraint=constraint) def tsuid2fid(self, tsuid, raise_exception=True): \"\"\" Retrieve the functional ID associated", "instead of `new`) \"\"\" check_is_fid_valid(fid, raise_exception=True) try: # Check if fid already associated", "TypeError: if *constraint* is not a dict \"\"\" return self.dm_client.get_ts_from_metadata(constraint=constraint) def tsuid2fid(self, tsuid,", "Timeseries(api=self.api) else: ts = self._create_ref(fid=fid) ts.data = data return ts def get(self, fid=None,", "IkatsNotFoundError: # Creation of a new tsuid metric, tags = self.tsdb_client.gen_metric_tags() tsuid =", "will not be removed Returns a boolean status of the action (True means", "| } will find the TS having the following metadata: | (frequency ==", "ts, parent=None, generate_metadata=True, raise_exception=True): \"\"\" Import timeseries data points to database or update", "var_name=\"ed\", raise_exception=True) if sd is None: sd = ts.metadata.get(name=\"ikats_start_date\") check_is_valid_epoch(value=sd, raise_exception=True) if ed", "= self.tsdb_client.get_ts_by_tsuid(tsuid=ts.tsuid, sd=sd, ed=ed) # Return the points return data_points except ValueError: raise", "to the functional ID param. :param fid: the functional Identifier :param raise_exception: Allow", "-*- \"\"\" Copyright 2019 CS Systèmes d'Information Licensed under the Apache License, Version", "the identifier was not found in database \"\"\" if bool(fid) == bool(tsuid): raise", "of the action :rtype: bool :raises TypeError: if *ts* is not a valid", "get a TS list matching these constraints Example of constraint: | { |", "management \"\"\" def __init__(self, *args, **kwargs): super(IkatsTimeseriesMgr, self).__init__(*args, **kwargs) if self.api.emulate: self.tsdb_client =", "file except in compliance with the License. 
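    # Illustrative usage sketch (not from the original module; assumes the host
    # application exposes this manager as `api.ts`, and the FID below is a
    # hypothetical example value):
    #
    #     ts = api.ts.new(fid="sensor_42", data=[[1000, 1.0], [2000, 2.0]])
    #     same_ts = api.ts.get(fid="sensor_42")     # lookup by functional ID
    #     same_ts = api.ts.get(tsuid=ts.tsuid)      # or by TSUID, never both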
    def save(self, ts, parent=None, generate_metadata=True, raise_exception=True):
        """
        Import timeseries data points to database or update an existing timeseries
        with new points.

        If *generate_metadata* is set, or if no TSUID is present in the *ts* object,
        the *ikats_start_date*, *ikats_end_date* and *qual_nb_points* metadata will be
        overwritten by the first point date, last point date and number of points
        in *ts.data*.

        *parent* is the original timeseries where metadata shall be taken from
        (except intrinsic ones, e.g. *qual_nb_points*).

        If the timeseries is a new one (object has no tsuid defined), the computation
        of the metadata is forced.

        Returns a boolean status of the action (True means "OK", False means "errors occurred")

        :param ts: Timeseries object containing information about what to create
        :param parent: (optional) Timeseries object of inheritance parent
        :param generate_metadata: Generate metadata (set to False when doing partial import) (Default: True)
        :param raise_exception: Indicates if exceptions shall be raised (True, default) or not (False)

        :type ts: Timeseries
        :type parent: Timeseries
        :type generate_metadata: bool
        :type raise_exception: bool

        :returns: the status of the action
        :rtype: bool

        :raises TypeError: if *ts* is not a valid Timeseries object
        """
        # Input checks
        check_type(ts, Timeseries, "ts", raise_exception=True)
        check_type(parent, [Timeseries, None], "parent", raise_exception=True)
        check_type(generate_metadata, bool, "generate_metadata", raise_exception=True)
        check_is_fid_valid(ts.fid, raise_exception=True)

        try:
            # First, we shall create the TSUID reference (if not provided)
            if ts.tsuid is None:
                ts.tsuid = self._create_ref(ts.fid).tsuid
                # If the TS is fresh, we force the creation of the metadata
                generate_metadata = True

            # Add points to this TSUID
            start_date, end_date, nb_points = self.tsdb_client.add_points(tsuid=ts.tsuid, data=ts.data)

            if generate_metadata:
                # ikats_start_date
                self.dm_client.metadata_update(tsuid=ts.tsuid, name='ikats_start_date', value=start_date,
                                               data_type=MDType.DATE, force_create=True)
                ts.metadata.set(name='ikats_start_date', value=start_date, dtype=MDType.DATE)

                # ikats_end_date
                self.dm_client.metadata_update(tsuid=ts.tsuid, name='ikats_end_date', value=end_date,
                                               data_type=MDType.DATE, force_create=True)
                ts.metadata.set(name='ikats_end_date', value=end_date, dtype=MDType.DATE)

                # qual_nb_points
                self.dm_client.metadata_update(tsuid=ts.tsuid, name='qual_nb_points', value=nb_points,
                                               data_type=MDType.NUMBER, force_create=True)
                ts.metadata.set(name='qual_nb_points', value=nb_points, dtype=MDType.NUMBER)

            # Inherit from parent when it is defined
            if parent is not None:
                self.inherit(ts=ts, parent=parent)
        except IkatsException:
            if raise_exception:
                raise
            return False
        return True
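    # Illustrative sketch (hypothetical handles and FIDs; `api.ts` is assumed
    # to be this manager). A typical save with metadata inheritance:
    #
    #     child = api.ts.new(fid="flight_42_resampled", data=points)
    #     parent = api.ts.get(fid="flight_42")
    #     ok = api.ts.save(ts=child, parent=parent)   # True on success
    #
    # Passing generate_metadata=False is meant for partial/chunked imports of
    # an existing TSUID, where start/end dates and point count are left as-is.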
    def delete(self, ts, raise_exception=True):
        """
        Delete the data corresponding to a *ts* object and all associated metadata.

        Note that if the timeseries belongs to a dataset it will not be removed.

        Returns a boolean status of the action (True means "OK", False means "errors occurred")

        :param ts: tsuid of the timeseries or Timeseries object to remove
        :param raise_exception: (optional) Indicates if IKATS exceptions shall be raised (True, default) or not (False)

        :type ts: str or Timeseries
        :type raise_exception: bool

        :returns: the status of the action
        :rtype: bool

        :raises TypeError: if *ts* is not a str nor a Timeseries
        :raises IkatsNotFoundError: if timeseries is not found on server
        :raises IkatsConflictError: if timeseries belongs to -at least- one dataset
        """
        check_type(value=ts, allowed_types=[str, Timeseries], var_name="ts", raise_exception=True)

        tsuid = ts

        if isinstance(ts, Timeseries):
            if ts.tsuid is not None:
                tsuid = ts.tsuid
            elif ts.fid is not None:
                try:
                    tsuid = self.dm_client.get_tsuid_from_fid(fid=ts.fid)
                except IkatsException:
                    if raise_exception:
                        raise
                    return False
            else:
                raise ValueError("Timeseries object shall have set at least tsuid or fid")

        return self.dm_client.ts_delete(tsuid=tsuid, raise_exception=raise_exception)

    def list(self):
        """
        Get the list of all Timeseries from database

        .. note::
           This action may take a while

        :returns: the list of Timeseries objects
        :rtype: list
        """
        return [Timeseries(tsuid=x["tsuid"], fid=x["funcId"], api=self.api)
                for x in self.dm_client.get_ts_list()]
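    # Illustrative sketch (hypothetical FID prefix): delete() accepts either a
    # tsuid string or a Timeseries object, and list() yields Timeseries
    # objects, so a best-effort cleanup could look like:
    #
    #     for ts in api.ts.list():
    #         if ts.fid.startswith("tmp_"):
    #             api.ts.delete(ts, raise_exception=False)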
    def fetch(self, ts, sd=None, ed=None):
        """
        Retrieve the data corresponding to a Timeseries object as a numpy array

        .. note::
           if omitted, *sd* (start date) and *ed* (end date) will be retrieved from metadata.
           If you want a fixed windowed range, set *sd* and *ed* manually
           (but be aware that the TS may not be completely gathered)

        :param ts: Timeseries object
        :param sd: (optional) starting date (timestamp in ms from epoch)
        :param ed: (optional) ending date (timestamp in ms from epoch)

        :type ts: Timeseries
        :type sd: int or None
        :type ed: int or None

        :returns: The data points
        :rtype: np.array

        :raises TypeError: if *ts* is not a Timeseries object
        :raises TypeError: if *sd* is not an int
        :raises TypeError: if *ed* is not an int
        :raises IkatsNotFoundError: if TS data points couldn't be retrieved properly
        """
        check_type(value=ts, allowed_types=Timeseries, var_name="ts", raise_exception=True)
        check_type(value=sd, allowed_types=[int, None], var_name="sd", raise_exception=True)
        check_type(value=ed, allowed_types=[int, None], var_name="ed", raise_exception=True)

        if sd is None:
            sd = ts.metadata.get(name="ikats_start_date")
        check_is_valid_epoch(value=sd, raise_exception=True)

        if ed is None:
            ed = ts.metadata.get(name="ikats_end_date")
        check_is_valid_epoch(value=ed, raise_exception=True)

        try:
            data_points = self.tsdb_client.get_ts_by_tsuid(tsuid=ts.tsuid, sd=sd, ed=ed)
            # Return the points
            return data_points
        except ValueError:
            raise IkatsNotFoundError("TS data points couldn't be retrieved properly")
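    # Illustrative sketch (timestamps are hypothetical, in ms since epoch):
    #
    #     points = api.ts.fetch(ts)   # range taken from ikats_start/end_date
    #     window = api.ts.fetch(ts, sd=1500000000000, ed=1500003600000)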
    def inherit(self, ts, parent):
        """
        Make a timeseries inherit parent's metadata according to a pattern
        (not all metadata are inherited)

        :param ts: TS object in IKATS (which will inherit)
        :param parent: TS object in IKATS of inheritance parent

        :type ts: Timeseries
        :type parent: Timeseries
        """
        try:
            result = self.dm_client.metadata_get_typed([parent.tsuid])[parent.tsuid]
            for meta_name in result:
                # Flag metadata as "not deleted"
                result[meta_name]["deleted"] = False

                if not NON_INHERITABLE_PATTERN.match(meta_name):
                    self.dm_client.metadata_create(tsuid=ts.tsuid, name=meta_name,
                                                   value=result[meta_name]["value"],
                                                   data_type=MDType(result[meta_name]["dtype"]),
                                                   force_update=True)
        except (ValueError, TypeError, SystemError) as exception:
            self.api.session.log.warning(
                "Can't get metadata of parent TS (%s), nothing will be inherited;\nreason: %s",
                parent, exception)
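    # Illustrative note on the pattern above: with NON_INHERITABLE_PATTERN, a
    # parent metadata such as "flight_phase" is copied to the child, while
    # "qual_nb_points", "ikats_start_date" or "funcId" are skipped.
    #
    #     api.ts.inherit(ts=child, parent=parent)   # hypothetical handles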
    def find_from_meta(self, constraint=None):
        """
        From a metadata constraint provided in parameter, the method gets a TS list
        matching these constraints.

        Example of constraint:
            | {
            |     frequency: [1, 2],
            |     flight_phase: 8
            | }
        will find the TS having the following metadata:
            | (frequency == 1 OR frequency == 2)
            | AND
            | flight_phase == 8

        :param constraint: constraint definition
        :type constraint: dict

        :returns: list of TSUID matching the constraints
        :rtype: dict

        :raises TypeError: if *constraint* is not a dict
        """
        return self.dm_client.get_ts_from_metadata(constraint=constraint)
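    # Illustrative sketch mirroring the docstring example above (metadata names
    # and values are hypothetical):
    #
    #     matching = api.ts.find_from_meta(constraint={
    #         "frequency": [1, 2],    # frequency == 1 OR frequency == 2
    #         "flight_phase": 8,      # AND flight_phase == 8
    #     })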
    def tsuid2fid(self, tsuid, raise_exception=True):
        """
        Retrieve the functional ID associated to the tsuid param.

        :param tsuid: one tsuid value
        :param raise_exception: Allows specifying whether the action shall raise if not found

        :type tsuid: str
        :type raise_exception: bool

        :returns: retrieved functional identifier value
        :rtype: str

        :raises TypeError: if tsuid is not a defined str
        :raises ValueError: no functional ID matching the tsuid
        :raises ServerError: http answer with status: 500 <= status < 600
        """
        try:
            return self.dm_client.get_func_id_from_tsuid(tsuid=tsuid)
        except IkatsException:
            if raise_exception:
                raise
            return None

    def fid2tsuid(self, fid, raise_exception=True):
        """
        Retrieve the TSUID associated to the functional ID param.

        :param fid: the functional Identifier
        :param raise_exception: Allows specifying whether the action shall raise if not found

        :type fid: str
        :type raise_exception: bool

        :returns: retrieved TSUID value, or None if not found
        :rtype: str

        :raises TypeError: if fid is not str
        :raises IkatsNotFoundError: no match
        """
        check_is_fid_valid(fid=fid)

        # Check if fid already associated to an existing tsuid
        try:
            return self.dm_client.get_tsuid_from_fid(fid=fid)
        except IkatsException:
            if raise_exception:
                raise
            return None
*ts* is not a str", "tsuid: str :returns: The Timeseries object :rtype: Timeseries :raises ValueError: if both *fid*", "self.dm_client.get_tsuid_from_fid(fid=fid) except IkatsException: if raise_exception: raise return None def _create_ref(self, fid): \"\"\" Create", "\"parent\", raise_exception=True) check_type(generate_metadata, bool, \"generate_metadata\", raise_exception=True) check_is_fid_valid(ts.fid, raise_exception=True) try: # First, we shall", "the TS having the following metadata: | (frequency == 1 OR frequency ==", "update an existing timeseries with new points if *generate_metadata* is set or if", "is not None: self.inherit(ts=ts, parent=parent) except IkatsException: if raise_exception: raise return False return", "to in writing, software distributed under the License is distributed on an \"AS", "in database (use `get` instead of `new`) \"\"\" if fid is None: ts", "data points couldn't be retrieved properly\") def inherit(self, ts, parent): \"\"\" Make a", "TSUID value or None if not found :rtype: str :raises TypeError: if fid", "ts: Timeseries :type parent: Timeseries :type generate_metadata: bool :type raise_exception: bool :returns: the", "identifier will be created to database :param fid: Identifier to create (if provided)", "def tsuid2fid(self, tsuid, raise_exception=True): \"\"\" Retrieve the functional ID associated to the tsuid", "`get` instead of `new`) \"\"\" check_is_fid_valid(fid, raise_exception=True) try: # Check if fid already", "def fid2tsuid(self, fid, raise_exception=True): \"\"\" Retrieve the TSUID associated to the functional ID", "the constraints :rtype: dict :raises TypeError: if *constraint* is not a dict \"\"\"", "of the metadata is forced Returns a boolean status of the action (True", ":rtype: Timeseries :raises ValueError: if both *fid* and *tsuid* are set (or none", "new one (object has no tsuid defined), the computation of the metadata is", "def __init__(self, *args, **kwargs): super(IkatsTimeseriesMgr, self).__init__(*args, **kwargs) if self.api.emulate: self.tsdb_client = OpenTSDBStub(session=self.api.session) self.dm_client", "data points as numpy array or python 2D-list :type fid: str :type data:", "fid already associated to an existing tsuid try: return self.dm_client.get_tsuid_from_fid(fid=fid) except IkatsException: if", "completely gathered) :param ts: Timeseries object :param sd: (optional) starting date (timestamp in", ":type raise_exception: bool :returns: retrieved TSUID value or None if not found :rtype:", "if not found or not :type fid: str :type raise_exception: bool :returns: retrieved", "from epoch) :type ts: Timeseries :type sd: int or None :type ed: int", "name=meta_name, value=result[meta_name][\"value\"], data_type=MDType(result[meta_name][\"dtype\"]), force_update=True) except(ValueError, TypeError, SystemError) as exception: self.api.session.log.warning( \"Can't get metadata", "timeseries where metadata shall be taken from (except intrinsic ones, eg. 
*qual_nb_points*) If", "creation of the metadata generate_metadata = True # Add points to this TSUID", "500 <= status < 600 \"\"\" try: return self.dm_client.get_func_id_from_tsuid(tsuid=tsuid) except IkatsException: if raise_exception:", "provided) :param fid: FID of the Timeseries :param tsuid: TSUID of the Timeseries", "and all associated metadata Note that if timeseries belongs to a dataset it", "check_is_fid_valid(ts.fid, raise_exception=True) try: # First, we shall create the TSUID reference (if not", "TypeError: if *sd* is not an int :raises TypeError: if *ed* is not", "points :rtype: np.array :raises TypeError: if *ts* is not a Timeseries object :raises", "shall be taken from (except intrinsic ones, eg. *qual_nb_points*) If the timeseries is", "License. \"\"\" import re from ikats.client.datamodel_client import DatamodelClient from ikats.client.datamodel_stub import DatamodelStub from", "in temporal data database and associate it to fid in temporal database for", "\"\"\" def __init__(self, *args, **kwargs): super(IkatsTimeseriesMgr, self).__init__(*args, **kwargs) if self.api.emulate: self.tsdb_client = OpenTSDBStub(session=self.api.session)", "raise return False else: raise ValueError(\"Timeseries object shall have set at least tsuid", "tsuid, raise_exception=True): \"\"\" Retrieve the functional ID associated to the tsuid param. :param", "TSUID of the Timeseries :type fid: str :type tsuid: str :returns: The Timeseries", "date and number of points in *ts.data* *parent* is the original timeseries where", "a TS list matching these constraints Example of constraint: | { | frequency:", "self.api.emulate: self.tsdb_client = OpenTSDBStub(session=self.api.session) self.dm_client = DatamodelStub(session=self.api.session) else: self.tsdb_client = OpenTSDBClient(session=self.api.session) self.dm_client =", "of the Timeseries :type fid: str :type tsuid: str :returns: The Timeseries object", "of inheritance parent :param generate_metadata: Generate metadata (set to False when doing partial", "found in database \"\"\" if bool(fid) == bool(tsuid): raise ValueError(\"fid and tsuid are", "Timeseries :type fid: str :type tsuid: str :returns: The Timeseries object :rtype: Timeseries", "no functional ID matching the tsuid :raises ServerError: http answer with status :", "data (import data via spark for example) :param fid: Functional Identifier of the", "found :rtype: str :raises TypeError: if fid is not str :raises IkatsNotFoundError: no", "inherit(self, ts, parent): \"\"\" Make a timeseries inherit of parent's metadata according to", "object of inheritance parent :param generate_metadata: Generate metadata (set to False when doing", "data_points = self.tsdb_client.get_ts_by_tsuid(tsuid=ts.tsuid, sd=sd, ed=ed) # Return the points return data_points except ValueError:", "Timeseries management \"\"\" def __init__(self, *args, **kwargs): super(IkatsTimeseriesMgr, self).__init__(*args, **kwargs) if self.api.emulate: self.tsdb_client", "Allow to specify if the action shall assert if not found or not", "str :returns: A prepared Timeseries object :rtype: Timeseries :raises IkatsConflictError: if FID already", ":type ts: Timeseries :type sd: int or None :type ed: int or None", "DatamodelStub from ikats.client.opentsdb_client import OpenTSDBClient from ikats.client.opentsdb_stub import OpenTSDBStub from ikats.exceptions import (IkatsConflictError,", "Timeseries(api=self.api, tsuid=tsuid, fid=fid) def save(self, ts, parent=None, generate_metadata=True, raise_exception=True): \"\"\" Import timeseries data", 
"retrieved functional identifier value :rtype: str :raises TypeError: if tsuid is not a", "for future use. Shall be used before create method in case of parallel", "identifier value :rtype: str :raises TypeError: if tsuid is not a defined str", "in self.dm_client.get_ts_list()] def fetch(self, ts, sd=None, ed=None): \"\"\" Retrieve the data corresponding to", "in IKATS of inheritance parent :type ts: Timeseries :param parent: Timeseries \"\"\" try:", "(MDType, check_is_fid_valid, check_is_valid_epoch, check_type) from ikats.manager.generic_mgr_ import IkatsGenericApiEndPoint from ikats.objects import Timeseries NON_INHERITABLE_PATTERN", "shall be raised (True, default) or not (False) :type ts: str or Timeseries", "ts.metadata.get(name=\"ikats_end_date\") check_is_valid_epoch(value=ed, raise_exception=True) try: data_points = self.tsdb_client.get_ts_by_tsuid(tsuid=ts.tsuid, sd=sd, ed=ed) # Return the points", "points return data_points except ValueError: raise IkatsNotFoundError(\"TS data points couldn't be retrieved properly\")", "tsuid: %s\" % (fid, tsuid)) except IkatsNotFoundError: # Creation of a new tsuid", "self.dm_client.get_tsuid_from_fid(fid=fid) # if fid already exists in database, raise a conflict exception raise", "tsuid=tsuid, fid=fid) def save(self, ts, parent=None, generate_metadata=True, raise_exception=True): \"\"\" Import timeseries data points", "Timeseries :raises IkatsConflictError: if *fid* already present in database (use `get` instead of", "Timeseries object containing information about what to create :param parent: (optional) Timeseries object", "constraints :rtype: dict :raises TypeError: if *constraint* is not a dict \"\"\" return", "in ms from epoch) :param ed: (optional) ending date (timestamp in ms from", "self._create_ref(ts.fid).tsuid # If the TS is fresh, we force the creation of the", "IkatsNotFoundError: if timeseries is not found on server :raises IkatsConflictError: if timeseries belongs", "both *fid* and *tsuid* are set (or none of them) :raises IkatsNotFoundError: if", ":type tsuid: str :returns: The Timeseries object :rtype: Timeseries :raises ValueError: if both", "Example of constraint: | { | frequency: [1, 2], | flight_phase: 8 |", "list matching these constraints Example of constraint: | { | frequency: [1, 2],", "the metadata is forced Returns a boolean status of the action (True means", ":param ts: tsuid of the timeseries or Timeseries Object to remove :param raise_exception:", "will be retrieved from metadata if you want a fixed windowed range, set", "is present in *ts* object, the *ikats_start_date*, *ikats_end_date* and *qual_nb_points* will be overwritten", "method get a TS list matching these constraints Example of constraint: | {", "timeseries data points to database or update an existing timeseries with new points", "if IKATS exceptions shall be raised (True, default) or not (False) :type ts:", "metric, tags = self.tsdb_client.gen_metric_tags() tsuid = self.tsdb_client.assign_metric(metric=metric, tags=tags) # finally importing tsuid/fid pair", "be aware that the TS may be not completely gathered) :param ts: Timeseries", "at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software", "an empty local Timeseries (if fid not provided) If fid is set, the", ":returns: The Timeseries object :rtype: Timeseries :raises ValueError: if both *fid* and *tsuid*", "ikats.client.datamodel_client import DatamodelClient from ikats.client.datamodel_stub import DatamodelStub from 
ikats.client.opentsdb_client import OpenTSDBClient from ikats.client.opentsdb_stub", "of the Timeseries :param tsuid: TSUID of the Timeseries :type fid: str :type", "associated to the functional ID param. :param fid: the functional Identifier :param raise_exception:", "[Timeseries, None], \"parent\", raise_exception=True) check_type(generate_metadata, bool, \"generate_metadata\", raise_exception=True) check_is_fid_valid(ts.fid, raise_exception=True) try: # First,", "at least tsuid or fid\") return self.dm_client.ts_delete(tsuid=tsuid, raise_exception=raise_exception) def list(self): \"\"\" Get the", ":param parent: TS object in IKATS of inheritance parent :type ts: Timeseries :param", "(end date) will be retrieved from metadata if you want a fixed windowed", "is None: ts.tsuid = self._create_ref(ts.fid).tsuid # If the TS is fresh, we force", "= DatamodelStub(session=self.api.session) else: self.tsdb_client = OpenTSDBClient(session=self.api.session) self.dm_client = DatamodelClient(session=self.api.session) def new(self, fid=None, data=None):", "flight_phase: 8 | } will find the TS having the following metadata: |", "the functional ID associated to the tsuid param. :param tsuid: one tsuid value", "of parent TS (%s), nothing will be inherited; \\nreason: %s\", parent, exception) def", "(frequency == 1 OR frequency == 2) | AND | flight_phase == 8", "raise a conflict exception raise IkatsConflictError(\"%s already associated to an existing tsuid: %s\"", "self).__init__(*args, **kwargs) if self.api.emulate: self.tsdb_client = OpenTSDBStub(session=self.api.session) self.dm_client = DatamodelStub(session=self.api.session) else: self.tsdb_client =", ":raises IkatsConflictError: if timeseries belongs to -at least- one dataset \"\"\" check_type(value=ts, allowed_types=[str,", "inherit) :param parent: TS object in IKATS of inheritance parent :type ts: Timeseries", "Timeseries :param tsuid: TSUID of the Timeseries :type fid: str :type tsuid: str", "\"\"\" Import timeseries data points to database or update an existing timeseries with", "Timeseries :type raise_exception: bool :returns: the status of the action :rtype: bool :raises", "shall have set at least tsuid or fid\") return self.dm_client.ts_delete(tsuid=tsuid, raise_exception=raise_exception) def list(self):", "match \"\"\" check_is_fid_valid(fid=fid) # Check if fid already associated to an existing tsuid", "or if no TSUID is present in *ts* object, the *ikats_start_date*, *ikats_end_date* and", "is not a dict \"\"\" return self.dm_client.get_ts_from_metadata(constraint=constraint) def tsuid2fid(self, tsuid, raise_exception=True): \"\"\" Retrieve", "mutually exclusive\") if fid is not None: tsuid = self.fid2tsuid(fid=fid, raise_exception=True) return Timeseries(api=self.api,", "the License for the specific language governing permissions and limitations under the License.", "limitations under the License. \"\"\" import re from ikats.client.datamodel_client import DatamodelClient from ikats.client.datamodel_stub", "provided in parameter, the method get a TS list matching these constraints Example", "from (except intrinsic ones, eg. 
*qual_nb_points*) If the timeseries is a new one", "save(self, ts, parent=None, generate_metadata=True, raise_exception=True): \"\"\" Import timeseries data points to database or", "if raise_exception: raise return None def fid2tsuid(self, fid, raise_exception=True): \"\"\" Retrieve the TSUID", "if fid already associated to an existing tsuid tsuid = self.dm_client.get_tsuid_from_fid(fid=fid) # if", "the action shall assert if not found or not :type tsuid: str :type", "be retrieved from metadata if you want a fixed windowed range, set *sd*", "in *ts.data* *parent* is the original timeseries where metadata shall be taken from", ":type parent: Timeseries :type generate_metadata: bool :type raise_exception: bool :returns: the status of", "str :raises IkatsNotFoundError: no match \"\"\" check_is_fid_valid(fid=fid) # Check if fid already associated", "\"\"\" Delete the data corresponding to a *ts* object and all associated metadata", "for example) :param fid: Functional Identifier of the TS in Ikats :type fid:", "object in IKATS of inheritance parent :type ts: Timeseries :param parent: Timeseries \"\"\"", "(False) :type ts: Timeseries :type parent: Timeseries :type generate_metadata: bool :type raise_exception: bool", "import OpenTSDBStub from ikats.exceptions import (IkatsConflictError, IkatsException, IkatsNotFoundError) from ikats.lib import (MDType, check_is_fid_valid,", "as exception: self.api.session.log.warning( \"Can't get metadata of parent TS (%s), nothing will be", "not None: tsuid = self.fid2tsuid(fid=fid, raise_exception=True) return Timeseries(api=self.api, tsuid=tsuid, fid=fid) def save(self, ts,", "the data corresponding to a Timeseries object as a numpy array .. note::", "Create a reference of timeseries in temporal data database and associate it to", "License, Version 2.0 (the \"License\"); you may not use this file except in", ":rtype: Timeseries :raises IkatsConflictError: if FID already present in database (use `get` instead", "not provided) if ts.tsuid is None: ts.tsuid = self._create_ref(ts.fid).tsuid # If the TS", "one (object has no tsuid defined), the computation of the metadata is forced", "in *ts* object, the *ikats_start_date*, *ikats_end_date* and *qual_nb_points* will be overwritten by the", "fid: str :returns: A prepared Timeseries object :rtype: Timeseries :raises IkatsConflictError: if FID", "frequency == 2) | AND | flight_phase == 8 :param constraint: constraint definition", "(or none of them) :raises IkatsNotFoundError: if the identifier was not found in", "elif ts.fid is not None: try: tsuid = self.dm_client.get_tsuid_from_fid(fid=ts.fid) except IkatsException: if raise_exception:", "sd=None, ed=None): \"\"\" Retrieve the data corresponding to a Timeseries object as a", "raise return None def fid2tsuid(self, fid, raise_exception=True): \"\"\" Retrieve the TSUID associated to", "None: tsuid = ts.tsuid elif ts.fid is not None: try: tsuid = self.dm_client.get_tsuid_from_fid(fid=ts.fid)", ":rtype: dict :raises TypeError: if *constraint* is not a dict \"\"\" return self.dm_client.get_ts_from_metadata(constraint=constraint)", "ts = Timeseries(api=self.api) else: ts = self._create_ref(fid=fid) ts.data = data return ts def", "inherited) :param ts: TS object in IKATS (which will inherit) :param parent: TS", "parent: (optional) Timeseries object of inheritance parent :param generate_metadata: Generate metadata (set to", "CS Systèmes d'Information Licensed under the Apache License, Version 2.0 (the \"License\"); you", "taken from (except intrinsic ones, eg. 
*qual_nb_points*) If the timeseries is a new", "fid\") return self.dm_client.ts_delete(tsuid=tsuid, raise_exception=raise_exception) def list(self): \"\"\" Get the list of all Timeseries", "Timeseries object :rtype: Timeseries :raises ValueError: if both *fid* and *tsuid* are set", "while :returns: the list of Timeseries object :rtype: list \"\"\" return [Timeseries(tsuid=x[\"tsuid\"], fid=x[\"funcId\"],", "is not None: tsuid = ts.tsuid elif ts.fid is not None: try: tsuid", "None: ed = ts.metadata.get(name=\"ikats_end_date\") check_is_valid_epoch(value=ed, raise_exception=True) try: data_points = self.tsdb_client.get_ts_by_tsuid(tsuid=ts.tsuid, sd=sd, ed=ed) #", "from epoch) :param ed: (optional) ending date (timestamp in ms from epoch) :type", "a conflict exception raise IkatsConflictError(\"%s already associated to an existing tsuid: %s\" %", "allowed_types=[int, None], var_name=\"ed\", raise_exception=True) if sd is None: sd = ts.metadata.get(name=\"ikats_start_date\") check_is_valid_epoch(value=sd, raise_exception=True)", "WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License", "ikats.objects import Timeseries NON_INHERITABLE_PATTERN = re.compile(\"^qual(.)*|ikats(.)*|funcId\") class IkatsTimeseriesMgr(IkatsGenericApiEndPoint): \"\"\" Ikats EndPoint specific to", "already present in database (use `get` instead of `new`) \"\"\" if fid is", "name='qual_nb_points', value=nb_points, data_type=MDType.NUMBER, force_create=True) ts.metadata.set(name='qual_nb_points', value=nb_points, dtype=MDType.NUMBER) # Inherit from parent when it", "if fid already associated to an existing tsuid try: return self.dm_client.get_tsuid_from_fid(fid=fid) except IkatsException:", "check_type) from ikats.manager.generic_mgr_ import IkatsGenericApiEndPoint from ikats.objects import Timeseries NON_INHERITABLE_PATTERN = re.compile(\"^qual(.)*|ikats(.)*|funcId\") class", "data via spark for example) :param fid: Functional Identifier of the TS in", "according to a pattern (not all metadata inherited) :param ts: TS object in", "the tsuid :raises ServerError: http answer with status : 500 <= status <", "None: ts.tsuid = self._create_ref(ts.fid).tsuid # If the TS is fresh, we force the", "functional identifier value :rtype: str :raises TypeError: if tsuid is not a defined", "# If the TS is fresh, we force the creation of the metadata", "8 :param constraint: constraint definition :type constraint: dict :returns: list of TSUID matching", "ts, sd=None, ed=None): \"\"\" Retrieve the data corresponding to a Timeseries object as", "None], var_name=\"ed\", raise_exception=True) if sd is None: sd = ts.metadata.get(name=\"ikats_start_date\") check_is_valid_epoch(value=sd, raise_exception=True) if", "TypeError: if fid is not str :raises IkatsNotFoundError: no match \"\"\" check_is_fid_valid(fid=fid) #", "functional ID associated to the tsuid param. :param tsuid: one tsuid value :param", "functional ID matching the tsuid :raises ServerError: http answer with status : 500", "def _create_ref(self, fid): \"\"\" Create a reference of timeseries in temporal data database", "a dict \"\"\" return self.dm_client.get_ts_from_metadata(constraint=constraint) def tsuid2fid(self, tsuid, raise_exception=True): \"\"\" Retrieve the functional", "= True # Add points to this TSUID start_date, end_date, nb_points = self.tsdb_client.add_points(tsuid=ts.tsuid,", "database for future use. 
Shall be used before create method in case of", "it is defined if parent is not None: self.inherit(ts=ts, parent=parent) except IkatsException: if", "parent TS (%s), nothing will be inherited; \\nreason: %s\", parent, exception) def find_from_meta(self,", "ts.metadata.set(name='ikats_start_date', value=start_date, dtype=MDType.DATE) # ikats_end_date self.dm_client.metadata_update(tsuid=ts.tsuid, name='ikats_end_date', value=end_date, data_type=MDType.DATE, force_create=True) ts.metadata.set(name='ikats_end_date', value=end_date, dtype=MDType.DATE)", ":raises TypeError: if *constraint* is not a dict \"\"\" return self.dm_client.get_ts_from_metadata(constraint=constraint) def tsuid2fid(self,", "all metadata inherited) :param ts: TS object in IKATS (which will inherit) :param", "*fid* and *tsuid* are set (or none of them) :raises IkatsNotFoundError: if the", "raise return False return True def delete(self, ts, raise_exception=True): \"\"\" Delete the data", "either its FID or TSUID (only one shall be provided) :param fid: FID", "ikats_end_date self.dm_client.metadata_update(tsuid=ts.tsuid, name='ikats_end_date', value=end_date, data_type=MDType.DATE, force_create=True) ts.metadata.set(name='ikats_end_date', value=end_date, dtype=MDType.DATE) # qual_nb_points self.dm_client.metadata_update(tsuid=ts.tsuid, name='qual_nb_points',", "Timeseries :raises ValueError: if both *fid* and *tsuid* are set (or none of", "Generate metadata (set to False when doing partial import) (Default: True) :param raise_exception:", "if *ts* is not a str nor a Timeseries :raises IkatsNotFoundError: if timeseries", "Creation of a new tsuid metric, tags = self.tsdb_client.gen_metric_tags() tsuid = self.tsdb_client.assign_metric(metric=metric, tags=tags)", "constraints Example of constraint: | { | frequency: [1, 2], | flight_phase: 8", "ms from epoch) :type ts: Timeseries :type sd: int or None :type ed:", "\"\"\" Returns an existing Timeseries object by providing either its FID or TSUID", "following metadata: | (frequency == 1 OR frequency == 2) | AND |", "(optional) Indicates if IKATS exceptions shall be raised (True, default) or not (False)", "Identifier :param raise_exception: Allow to specify if the action shall assert if not", "The Timeseries object :rtype: Timeseries :raises ValueError: if both *fid* and *tsuid* are", "[1, 2], | flight_phase: 8 | } will find the TS having the", "# Add points to this TSUID start_date, end_date, nb_points = self.tsdb_client.add_points(tsuid=ts.tsuid, data=ts.data) if", "on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either", "\"\"\" check_is_fid_valid(fid, raise_exception=True) try: # Check if fid already associated to an existing", "TS may be not completely gathered) :param ts: Timeseries object :param sd: (optional)", "# Creation of a new tsuid metric, tags = self.tsdb_client.gen_metric_tags() tsuid = self.tsdb_client.assign_metric(metric=metric,", "an existing timeseries with new points if *generate_metadata* is set or if no", "already associated to an existing tsuid tsuid = self.dm_client.get_tsuid_from_fid(fid=fid) # if fid already", "tsuid or fid\") return self.dm_client.ts_delete(tsuid=tsuid, raise_exception=raise_exception) def list(self): \"\"\" Get the list of", "data points couldn't be retrieved properly \"\"\" check_type(value=ts, allowed_types=Timeseries, var_name=\"ts\", raise_exception=True) check_type(value=sd, allowed_types=[int,", "allowed_types=Timeseries, var_name=\"ts\", raise_exception=True) check_type(value=sd, allowed_types=[int, None], 
var_name=\"sd\", raise_exception=True) check_type(value=ed, allowed_types=[int, None], var_name=\"ed\", raise_exception=True)", "points to this TSUID start_date, end_date, nb_points = self.tsdb_client.add_points(tsuid=ts.tsuid, data=ts.data) if generate_metadata: #", "Timeseries object of inheritance parent :param generate_metadata: Generate metadata (set to False when", "IkatsException, IkatsNotFoundError) from ikats.lib import (MDType, check_is_fid_valid, check_is_valid_epoch, check_type) from ikats.manager.generic_mgr_ import IkatsGenericApiEndPoint", "ValueError: if both *fid* and *tsuid* are set (or none of them) :raises", "it to fid in temporal database for future use. Shall be used before", "containing information about what to create :param parent: (optional) Timeseries object of inheritance", "= self._create_ref(fid=fid) ts.data = data return ts def get(self, fid=None, tsuid=None): \"\"\" Returns", "dataset it will not be removed Returns a boolean status of the action", "return [Timeseries(tsuid=x[\"tsuid\"], fid=x[\"funcId\"], api=self.api) for x in self.dm_client.get_ts_list()] def fetch(self, ts, sd=None, ed=None):", "not an int :raises TypeError: if *ed* is not an int :raises IkatsNotFoundError:", "of timeseries in temporal data database and associate it to fid in temporal", "\"\"\" Copyright 2019 CS Systèmes d'Information Licensed under the Apache License, Version 2.0", "IkatsNotFoundError: no match \"\"\" check_is_fid_valid(fid=fid) # Check if fid already associated to an", "Functional Identifier of the TS in Ikats :type fid: str :returns: A prepared", "last point date and number of points in *ts.data* *parent* is the original", "exception: self.api.session.log.warning( \"Can't get metadata of parent TS (%s), nothing will be inherited;", ":rtype: bool :raises TypeError: if *ts* is not a str nor a Timeseries", "sd is None: sd = ts.metadata.get(name=\"ikats_start_date\") check_is_valid_epoch(value=sd, raise_exception=True) if ed is None: ed", "instead of `new`) \"\"\" if fid is None: ts = Timeseries(api=self.api) else: ts", "epoch) :param ed: (optional) ending date (timestamp in ms from epoch) :type ts:", "*ed* (end date) will be retrieved from metadata if you want a fixed", "Add points to this TSUID start_date, end_date, nb_points = self.tsdb_client.add_points(tsuid=ts.tsuid, data=ts.data) if generate_metadata:", ":raises TypeError: if *ts* is not a valid Timeseries object \"\"\" # Input", "Make a timeseries inherit of parent's metadata according to a pattern (not all", "ts.data = data return ts def get(self, fid=None, tsuid=None): \"\"\" Returns an existing", "intrinsic ones, eg. 
*qual_nb_points*) If the timeseries is a new one (object has", "parent when it is defined if parent is not None: self.inherit(ts=ts, parent=parent) except", "removed Returns a boolean status of the action (True means \"OK\", False means", ":param constraint: constraint definition :type constraint: dict :returns: list of TSUID matching the", "constraint: dict :returns: list of TSUID matching the constraints :rtype: dict :raises TypeError:", "distributed under the License is distributed on an \"AS IS\" BASIS, WITHOUT WARRANTIES", "Timeseries Object to remove :param raise_exception: (optional) Indicates if IKATS exceptions shall be", ":type raise_exception: bool :returns: the status of the action :rtype: bool :raises TypeError:", "database \"\"\" if bool(fid) == bool(tsuid): raise ValueError(\"fid and tsuid are mutually exclusive\")", "and *qual_nb_points* will be overwritten by the first point date, last point date", "(timestamp in ms from epoch) :param ed: (optional) ending date (timestamp in ms", "it will not be removed Returns a boolean status of the action (True", "to an existing tsuid: %s\" % (fid, tsuid)) except IkatsNotFoundError: # Creation of", "range, set *sd* and *ed* manually (but be aware that the TS may", "list \"\"\" return [Timeseries(tsuid=x[\"tsuid\"], fid=x[\"funcId\"], api=self.api) for x in self.dm_client.get_ts_list()] def fetch(self, ts,", "raise_exception: bool :returns: the status of the action :rtype: bool :raises TypeError: if", "no tsuid defined), the computation of the metadata is forced Returns a boolean", "are mutually exclusive\") if fid is not None: tsuid = self.fid2tsuid(fid=fid, raise_exception=True) return", "use this file except in compliance with the License. You may obtain a", "or Timeseries :type raise_exception: bool :returns: the status of the action :rtype: bool", "| flight_phase == 8 :param constraint: constraint definition :type constraint: dict :returns: list", "parent: Timeseries \"\"\" try: result = self.dm_client.metadata_get_typed([parent.tsuid])[parent.tsuid] for meta_name in result: # Flag", "an int :raises TypeError: if *ed* is not an int :raises IkatsNotFoundError: if", "least tsuid or fid\") return self.dm_client.ts_delete(tsuid=tsuid, raise_exception=raise_exception) def list(self): \"\"\" Get the list", "date, last point date and number of points in *ts.data* *parent* is the", "self.dm_client.metadata_get_typed([parent.tsuid])[parent.tsuid] for meta_name in result: # Flag metadata as \"not deleted\" result[meta_name][\"deleted\"] =", "except(ValueError, TypeError, SystemError) as exception: self.api.session.log.warning( \"Can't get metadata of parent TS (%s),", "import DatamodelClient from ikats.client.datamodel_stub import DatamodelStub from ikats.client.opentsdb_client import OpenTSDBClient from ikats.client.opentsdb_stub import", "a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or", "(True, default) or not (False) :type ts: str or Timeseries :type raise_exception: bool", "self.tsdb_client = OpenTSDBClient(session=self.api.session) self.dm_client = DatamodelClient(session=self.api.session) def new(self, fid=None, data=None): \"\"\" Create an", "import Timeseries NON_INHERITABLE_PATTERN = re.compile(\"^qual(.)*|ikats(.)*|funcId\") class IkatsTimeseriesMgr(IkatsGenericApiEndPoint): \"\"\" Ikats EndPoint specific to Timeseries", "str :type raise_exception: bool :returns: retrieved functional identifier value :rtype: str :raises TypeError:", "means \"errors occurred\") :param ts: 
tsuid of the timeseries or Timeseries Object to", "ones, eg. *qual_nb_points*) If the timeseries is a new one (object has no", "a Timeseries object as a numpy array .. note:: if omitted, *sd* (start", "int :raises TypeError: if *ed* is not an int :raises IkatsNotFoundError: if TS", "in temporal database for future use. Shall be used before create method in", "ts.metadata.get(name=\"ikats_start_date\") check_is_valid_epoch(value=sd, raise_exception=True) if ed is None: ed = ts.metadata.get(name=\"ikats_end_date\") check_is_valid_epoch(value=ed, raise_exception=True) try:", "try: # First, we shall create the TSUID reference (if not provided) if", "(IkatsConflictError, IkatsException, IkatsNotFoundError) from ikats.lib import (MDType, check_is_fid_valid, check_is_valid_epoch, check_type) from ikats.manager.generic_mgr_ import", "provided) :param data: List of data points as numpy array or python 2D-list", "str nor a Timeseries :raises IkatsNotFoundError: if timeseries is not found on server", "self.dm_client = DatamodelStub(session=self.api.session) else: self.tsdb_client = OpenTSDBClient(session=self.api.session) self.dm_client = DatamodelClient(session=self.api.session) def new(self, fid=None,", "database (use `get` instead of `new`) \"\"\" check_is_fid_valid(fid, raise_exception=True) try: # Check if", "(Default: True) :param raise_exception: Indicates if exceptions shall be raised (True, default) or", "dtype=MDType.DATE) # ikats_end_date self.dm_client.metadata_update(tsuid=ts.tsuid, name='ikats_end_date', value=end_date, data_type=MDType.DATE, force_create=True) ts.metadata.set(name='ikats_end_date', value=end_date, dtype=MDType.DATE) # qual_nb_points", "return self.dm_client.ts_delete(tsuid=tsuid, raise_exception=raise_exception) def list(self): \"\"\" Get the list of all Timeseries from", "defined), the computation of the metadata is forced Returns a boolean status of", "is not a Timeseries object :raises TypeError: if *sd* is not an int", "distributed on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,", "for x in self.dm_client.get_ts_list()] def fetch(self, ts, sd=None, ed=None): \"\"\" Retrieve the data", "*ts* is not a valid Timeseries object \"\"\" # Input checks check_type(ts, Timeseries,", "tsuid try: return self.dm_client.get_tsuid_from_fid(fid=fid) except IkatsException: if raise_exception: raise return None def _create_ref(self,", "parallel creation of data (import data via spark for example) :param fid: Functional", ".. 
note:: This action may take a while :returns: the list of Timeseries", "object :rtype: Timeseries :raises IkatsConflictError: if *fid* already present in database (use `get`", "raise_exception=True) check_type(value=ed, allowed_types=[int, None], var_name=\"ed\", raise_exception=True) if sd is None: sd = ts.metadata.get(name=\"ikats_start_date\")", "of a new tsuid metric, tags = self.tsdb_client.gen_metric_tags() tsuid = self.tsdb_client.assign_metric(metric=metric, tags=tags) #", "of parent's metadata according to a pattern (not all metadata inherited) :param ts:", "try: # Check if fid already associated to an existing tsuid tsuid =", "start_date, end_date, nb_points = self.tsdb_client.add_points(tsuid=ts.tsuid, data=ts.data) if generate_metadata: # ikats_start_date self.dm_client.metadata_update(tsuid=ts.tsuid, name='ikats_start_date', value=start_date,", "str :type data: list or np.array :returns: the Timeseries object :rtype: Timeseries :raises", "data database and associate it to fid in temporal database for future use.", "tsuid: one tsuid value :param raise_exception: Allow to specify if the action shall", "timeseries is a new one (object has no tsuid defined), the computation of", "metadata if you want a fixed windowed range, set *sd* and *ed* manually", "Returns an existing Timeseries object by providing either its FID or TSUID (only", "partial import) (Default: True) :param raise_exception: Indicates if exceptions shall be raised (True,", "a numpy array .. note:: if omitted, *sd* (start date) and *ed* (end", "one dataset \"\"\" check_type(value=ts, allowed_types=[str, Timeseries], var_name=\"ts\", raise_exception=True) tsuid = ts if isinstance(ts,", "(set to False when doing partial import) (Default: True) :param raise_exception: Indicates if", "default) or not (False) :type ts: Timeseries :type parent: Timeseries :type generate_metadata: bool", "database and associate it to fid in temporal database for future use. 
Shall", "Ikats EndPoint specific to Timeseries management \"\"\" def __init__(self, *args, **kwargs): super(IkatsTimeseriesMgr, self).__init__(*args,", "timeseries inherit of parent's metadata according to a pattern (not all metadata inherited)", "return None def _create_ref(self, fid): \"\"\" Create a reference of timeseries in temporal", "IkatsException: if raise_exception: raise return None def _create_ref(self, fid): \"\"\" Create a reference", "be raised (True, default) or not (False) :type ts: str or Timeseries :type", "status of the action (True means \"OK\", False means \"errors occurred\") :param ts:", "\"\"\" try: return self.dm_client.get_func_id_from_tsuid(tsuid=tsuid) except IkatsException: if raise_exception: raise return None def fid2tsuid(self,", "metadata according to a pattern (not all metadata inherited) :param ts: TS object", "is not found on server :raises IkatsConflictError: if timeseries belongs to -at least-", "or None if not found :rtype: str :raises TypeError: if fid is not", ":param raise_exception: Allow to specify if the action shall assert if not found", "(fid, tsuid)) except IkatsNotFoundError: # Creation of a new tsuid metric, tags =", ":rtype: bool :raises TypeError: if *ts* is not a valid Timeseries object \"\"\"", "fid not provided) If fid is set, the identifier will be created to", ":param sd: (optional) starting date (timestamp in ms from epoch) :param ed: (optional)", "`get` instead of `new`) \"\"\" if fid is None: ts = Timeseries(api=self.api) else:", ":returns: retrieved functional identifier value :rtype: str :raises TypeError: if tsuid is not", "# First, we shall create the TSUID reference (if not provided) if ts.tsuid", "dtype=MDType.DATE) # qual_nb_points self.dm_client.metadata_update(tsuid=ts.tsuid, name='qual_nb_points', value=nb_points, data_type=MDType.NUMBER, force_create=True) ts.metadata.set(name='qual_nb_points', value=nb_points, dtype=MDType.NUMBER) # Inherit", "of `new`) \"\"\" check_is_fid_valid(fid, raise_exception=True) try: # Check if fid already associated to", "the data corresponding to a *ts* object and all associated metadata Note that", "Timeseries :type sd: int or None :type ed: int or None :returns: The", "raise_exception=True) if ed is None: ed = ts.metadata.get(name=\"ikats_end_date\") check_is_valid_epoch(value=ed, raise_exception=True) try: data_points =", "str :returns: The Timeseries object :rtype: Timeseries :raises ValueError: if both *fid* and", "answer with status : 500 <= status < 600 \"\"\" try: return self.dm_client.get_func_id_from_tsuid(tsuid=tsuid)", "present in *ts* object, the *ikats_start_date*, *ikats_end_date* and *qual_nb_points* will be overwritten by", "= re.compile(\"^qual(.)*|ikats(.)*|funcId\") class IkatsTimeseriesMgr(IkatsGenericApiEndPoint): \"\"\" Ikats EndPoint specific to Timeseries management \"\"\" def", "as \"not deleted\" result[meta_name][\"deleted\"] = False if not NON_INHERITABLE_PATTERN.match(meta_name): self.dm_client.metadata_create(tsuid=ts.tsuid, name=meta_name, value=result[meta_name][\"value\"], data_type=MDType(result[meta_name][\"dtype\"]),", "%s\" % (fid, tsuid)) except IkatsNotFoundError: # Creation of a new tsuid metric,", "will inherit) :param parent: TS object in IKATS of inheritance parent :type ts:", "valid Timeseries object \"\"\" # Input checks check_type(ts, Timeseries, \"ts\", raise_exception=True) check_type(parent, [Timeseries,", "the method get a TS list matching these constraints Example of constraint: |", "to False when doing partial import) (Default: True) 
:param raise_exception: Indicates if exceptions", "} will find the TS having the following metadata: | (frequency == 1", "set (or none of them) :raises IkatsNotFoundError: if the identifier was not found", "will find the TS having the following metadata: | (frequency == 1 OR", "import (IkatsConflictError, IkatsException, IkatsNotFoundError) from ikats.lib import (MDType, check_is_fid_valid, check_is_valid_epoch, check_type) from ikats.manager.generic_mgr_", "to fid in temporal database for future use. Shall be used before create", "2) | AND | flight_phase == 8 :param constraint: constraint definition :type constraint:", "example) :param fid: Functional Identifier of the TS in Ikats :type fid: str", "or None :returns: The data points :rtype: np.array :raises TypeError: if *ts* is", "from ikats.exceptions import (IkatsConflictError, IkatsException, IkatsNotFoundError) from ikats.lib import (MDType, check_is_fid_valid, check_is_valid_epoch, check_type)", "retrieved properly \"\"\" check_type(value=ts, allowed_types=Timeseries, var_name=\"ts\", raise_exception=True) check_type(value=sd, allowed_types=[int, None], var_name=\"sd\", raise_exception=True) check_type(value=ed,", "the Apache License, Version 2.0 (the \"License\"); you may not use this file", "fixed windowed range, set *sd* and *ed* manually (but be aware that the", "*constraint* is not a dict \"\"\" return self.dm_client.get_ts_from_metadata(constraint=constraint) def tsuid2fid(self, tsuid, raise_exception=True): \"\"\"", "check_is_valid_epoch(value=ed, raise_exception=True) try: data_points = self.tsdb_client.get_ts_by_tsuid(tsuid=ts.tsuid, sd=sd, ed=ed) # Return the points return", "find the TS having the following metadata: | (frequency == 1 OR frequency", "eg. *qual_nb_points*) If the timeseries is a new one (object has no tsuid", "with new points if *generate_metadata* is set or if no TSUID is present", "check_type(parent, [Timeseries, None], \"parent\", raise_exception=True) check_type(generate_metadata, bool, \"generate_metadata\", raise_exception=True) check_is_fid_valid(ts.fid, raise_exception=True) try: #", "coding: utf-8 -*- \"\"\" Copyright 2019 CS Systèmes d'Information Licensed under the Apache", "self._create_ref(fid=fid) ts.data = data return ts def get(self, fid=None, tsuid=None): \"\"\" Returns an", "finally importing tsuid/fid pair in non temporal database self.dm_client.import_fid(tsuid=tsuid, fid=fid) return Timeseries(tsuid=tsuid, fid=fid,", "None: try: tsuid = self.dm_client.get_tsuid_from_fid(fid=ts.fid) except IkatsException: if raise_exception: raise return False else:", "already present in database (use `get` instead of `new`) \"\"\" check_is_fid_valid(fid, raise_exception=True) try:", "DatamodelClient(session=self.api.session) def new(self, fid=None, data=None): \"\"\" Create an empty local Timeseries (if fid", "result: # Flag metadata as \"not deleted\" result[meta_name][\"deleted\"] = False if not NON_INHERITABLE_PATTERN.match(meta_name):", "bool(tsuid): raise ValueError(\"fid and tsuid are mutually exclusive\") if fid is not None:", "check_is_valid_epoch, check_type) from ikats.manager.generic_mgr_ import IkatsGenericApiEndPoint from ikats.objects import Timeseries NON_INHERITABLE_PATTERN = re.compile(\"^qual(.)*|ikats(.)*|funcId\")", "to database or update an existing timeseries with new points if *generate_metadata* is", "= data return ts def get(self, fid=None, tsuid=None): \"\"\" Returns an existing Timeseries", "functional Identifier :param raise_exception: Allow to specify if the action shall 
assert if", "bool :returns: the status of the action :rtype: bool :raises TypeError: if *ts*", "not found or not :type fid: str :type raise_exception: bool :returns: retrieved TSUID", "if bool(fid) == bool(tsuid): raise ValueError(\"fid and tsuid are mutually exclusive\") if fid", "the action (True means \"OK\", False means \"errors occurred\") :param ts: tsuid of", "\"\"\" check_type(value=ts, allowed_types=Timeseries, var_name=\"ts\", raise_exception=True) check_type(value=sd, allowed_types=[int, None], var_name=\"sd\", raise_exception=True) check_type(value=ed, allowed_types=[int, None],", "from ikats.objects import Timeseries NON_INHERITABLE_PATTERN = re.compile(\"^qual(.)*|ikats(.)*|funcId\") class IkatsTimeseriesMgr(IkatsGenericApiEndPoint): \"\"\" Ikats EndPoint specific", "= self.fid2tsuid(fid=fid, raise_exception=True) return Timeseries(api=self.api, tsuid=tsuid, fid=fid) def save(self, ts, parent=None, generate_metadata=True, raise_exception=True):", "raise_exception=True) try: data_points = self.tsdb_client.get_ts_by_tsuid(tsuid=ts.tsuid, sd=sd, ed=ed) # Return the points return data_points", "constraint=None): \"\"\" From a metadata constraint provided in parameter, the method get a", "ed=None): \"\"\" Retrieve the data corresponding to a Timeseries object as a numpy", "ValueError(\"fid and tsuid are mutually exclusive\") if fid is not None: tsuid =", "spark for example) :param fid: Functional Identifier of the TS in Ikats :type", "var_name=\"sd\", raise_exception=True) check_type(value=ed, allowed_types=[int, None], var_name=\"ed\", raise_exception=True) if sd is None: sd =", "or not :type tsuid: str :type raise_exception: bool :returns: retrieved functional identifier value", "None], var_name=\"sd\", raise_exception=True) check_type(value=ed, allowed_types=[int, None], var_name=\"ed\", raise_exception=True) if sd is None: sd", "None: sd = ts.metadata.get(name=\"ikats_start_date\") check_is_valid_epoch(value=sd, raise_exception=True) if ed is None: ed = ts.metadata.get(name=\"ikats_end_date\")", "Timeseries :type parent: Timeseries :type generate_metadata: bool :type raise_exception: bool :returns: the status", "raise_exception=True) return Timeseries(api=self.api, tsuid=tsuid, fid=fid) def save(self, ts, parent=None, generate_metadata=True, raise_exception=True): \"\"\" Import", "raise_exception: raise return False else: raise ValueError(\"Timeseries object shall have set at least", "to an existing tsuid try: return self.dm_client.get_tsuid_from_fid(fid=fid) except IkatsException: if raise_exception: raise return", "method in case of parallel creation of data (import data via spark for", "means \"errors occurred\") :param ts: Timeseries object containing information about what to create", "str :type raise_exception: bool :returns: retrieved TSUID value or None if not found", "in database \"\"\" if bool(fid) == bool(tsuid): raise ValueError(\"fid and tsuid are mutually", "parent is not None: self.inherit(ts=ts, parent=parent) except IkatsException: if raise_exception: raise return False", "else: ts = self._create_ref(fid=fid) ts.data = data return ts def get(self, fid=None, tsuid=None):", "Timeseries object :raises TypeError: if *sd* is not an int :raises TypeError: if", "of data (import data via spark for example) :param fid: Functional Identifier of", "associated metadata Note that if timeseries belongs to a dataset it will not", "False if not NON_INHERITABLE_PATTERN.match(meta_name): self.dm_client.metadata_create(tsuid=ts.tsuid, name=meta_name, 
value=result[meta_name][\"value\"], data_type=MDType(result[meta_name][\"dtype\"]), force_update=True) except(ValueError, TypeError, SystemError) as", "get metadata of parent TS (%s), nothing will be inherited; \\nreason: %s\", parent,", "inheritance parent :param generate_metadata: Generate metadata (set to False when doing partial import)", "\"\"\" Get the list of all Timeseries from database .. note:: This action", "associated to the tsuid param. :param tsuid: one tsuid value :param raise_exception: Allow", "FID already present in database (use `get` instead of `new`) \"\"\" check_is_fid_valid(fid, raise_exception=True)", "to the tsuid param. :param tsuid: one tsuid value :param raise_exception: Allow to", "result[meta_name][\"deleted\"] = False if not NON_INHERITABLE_PATTERN.match(meta_name): self.dm_client.metadata_create(tsuid=ts.tsuid, name=meta_name, value=result[meta_name][\"value\"], data_type=MDType(result[meta_name][\"dtype\"]), force_update=True) except(ValueError, TypeError,", "existing Timeseries object by providing either its FID or TSUID (only one shall", "definition :type constraint: dict :returns: list of TSUID matching the constraints :rtype: dict", "if FID already present in database (use `get` instead of `new`) \"\"\" check_is_fid_valid(fid,", "== 2) | AND | flight_phase == 8 :param constraint: constraint definition :type", ":param raise_exception: Indicates if exceptions shall be raised (True, default) or not (False)", "TypeError: if *ts* is not a valid Timeseries object \"\"\" # Input checks", "raise_exception: Indicates if exceptions shall be raised (True, default) or not (False) :type", "exclusive\") if fid is not None: tsuid = self.fid2tsuid(fid=fid, raise_exception=True) return Timeseries(api=self.api, tsuid=tsuid,", "From a metadata constraint provided in parameter, the method get a TS list", "IkatsException: if raise_exception: raise return False return True def delete(self, ts, raise_exception=True): \"\"\"", "compliance with the License. 
You may obtain a copy of the License at", "*sd* (start date) and *ed* (end date) will be retrieved from metadata if", "not :type fid: str :type raise_exception: bool :returns: retrieved TSUID value or None", "couldn't be retrieved properly \"\"\" check_type(value=ts, allowed_types=Timeseries, var_name=\"ts\", raise_exception=True) check_type(value=sd, allowed_types=[int, None], var_name=\"sd\",", "1 OR frequency == 2) | AND | flight_phase == 8 :param constraint:", "\"errors occurred\") :param ts: Timeseries object containing information about what to create :param", "str :raises TypeError: if tsuid is not a defined str :raises ValueError: no", "if *ts* is not a valid Timeseries object \"\"\" # Input checks check_type(ts,", "ts: Timeseries :type sd: int or None :type ed: int or None :returns:", "the identifier will be created to database :param fid: Identifier to create (if", "from ikats.client.opentsdb_stub import OpenTSDBStub from ikats.exceptions import (IkatsConflictError, IkatsException, IkatsNotFoundError) from ikats.lib import", "ed: int or None :returns: The data points :rtype: np.array :raises TypeError: if", "tsuid tsuid = self.dm_client.get_tsuid_from_fid(fid=fid) # if fid already exists in database, raise a", "True def delete(self, ts, raise_exception=True): \"\"\" Delete the data corresponding to a *ts*", "parent :type ts: Timeseries :param parent: Timeseries \"\"\" try: result = self.dm_client.metadata_get_typed([parent.tsuid])[parent.tsuid] for", "exception) def find_from_meta(self, constraint=None): \"\"\" From a metadata constraint provided in parameter, the", "If fid is set, the identifier will be created to database :param fid:", "You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by", ":rtype: str :raises TypeError: if tsuid is not a defined str :raises ValueError:", "in ms from epoch) :type ts: Timeseries :type sd: int or None :type", "the Timeseries :param tsuid: TSUID of the Timeseries :type fid: str :type tsuid:", "ts: Timeseries object containing information about what to create :param parent: (optional) Timeseries", "is not None: tsuid = self.fid2tsuid(fid=fid, raise_exception=True) return Timeseries(api=self.api, tsuid=tsuid, fid=fid) def save(self,", "applicable law or agreed to in writing, software distributed under the License is", ":param tsuid: one tsuid value :param raise_exception: Allow to specify if the action", "ed=ed) # Return the points return data_points except ValueError: raise IkatsNotFoundError(\"TS data points", "be created to database :param fid: Identifier to create (if provided) :param data:", "specify if the action shall assert if not found or not :type fid:", "value :param raise_exception: Allow to specify if the action shall assert if not", "x in self.dm_client.get_ts_list()] def fetch(self, ts, sd=None, ed=None): \"\"\" Retrieve the data corresponding", "try: tsuid = self.dm_client.get_tsuid_from_fid(fid=ts.fid) except IkatsException: if raise_exception: raise return False else: raise", "these constraints Example of constraint: | { | frequency: [1, 2], | flight_phase:", "inherited; \\nreason: %s\", parent, exception) def find_from_meta(self, constraint=None): \"\"\" From a metadata constraint", "tsuid = self.dm_client.get_tsuid_from_fid(fid=ts.fid) except IkatsException: if raise_exception: raise return False else: raise ValueError(\"Timeseries", "AND | flight_phase == 8 :param constraint: constraint definition :type constraint: dict :returns:", "IkatsConflictError: if *fid* already present in 
database (use `get` instead of `new`) \"\"\"", ":raises TypeError: if *ts* is not a Timeseries object :raises TypeError: if *sd*", ":param generate_metadata: Generate metadata (set to False when doing partial import) (Default: True)", "the status of the action :rtype: bool :raises TypeError: if *ts* is not", "check_type(value=sd, allowed_types=[int, None], var_name=\"sd\", raise_exception=True) check_type(value=ed, allowed_types=[int, None], var_name=\"ed\", raise_exception=True) if sd is", "fid=None, tsuid=None): \"\"\" Returns an existing Timeseries object by providing either its FID", "shall be provided) :param fid: FID of the Timeseries :param tsuid: TSUID of", "fid): \"\"\" Create a reference of timeseries in temporal data database and associate", ":raises TypeError: if *ed* is not an int :raises IkatsNotFoundError: if TS data", "Import timeseries data points to database or update an existing timeseries with new", "a fixed windowed range, set *sd* and *ed* manually (but be aware that", "Timeseries): if ts.tsuid is not None: tsuid = ts.tsuid elif ts.fid is not", "with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0", "not None: try: tsuid = self.dm_client.get_tsuid_from_fid(fid=ts.fid) except IkatsException: if raise_exception: raise return False", "constraint: constraint definition :type constraint: dict :returns: list of TSUID matching the constraints", "Identifier of the TS in Ikats :type fid: str :returns: A prepared Timeseries" ]
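
# A minimal usage sketch for the manager above. The `IkatsAPI` entry point, its
# `emulate` flag, and the `api.ts` mount point are assumptions made here for
# illustration (only the manager's own method signatures come from this module);
# adjust these names to the actual package layout.
if __name__ == '__main__':
    import numpy as np
    from ikats import IkatsAPI  # assumed entry point exposing IkatsTimeseriesMgr as `api.ts`

    api = IkatsAPI(emulate=True)  # assumed flag: use the OpenTSDB/Datamodel stubs

    # create a local Timeseries with a fresh FID and two [timestamp_ms, value] points
    ts = api.ts.new(fid="demo_fid_1",
                    data=np.array([[1500000000000, 42.0],
                                   [1500000001000, 43.5]]))

    api.ts.save(ts)            # push points and generate the ikats_* metadata
    points = api.ts.fetch(ts)  # sd/ed default to ikats_start_date/ikats_end_date
    api.ts.delete(ts, raise_exception=False)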
[ "lw=lw, label=f'{model_name} curve (area = {roc_auc:.5f})') # plot thresholds for it, thr in", "= 0 for cfi, c_family in enumerate(c_families): project_id = c_families[c_family] print(c_family, project_id) #", "= './data' # mpl colors: colors = plt.rcParams['axes.prop_cycle'].by_key()['color'] # [u'#1f77b4', u'#ff7f0e', u'#2ca02c', u'#d62728',", "roc_curve(y_test, y) roc_auc = auc(fpr, tpr) ax.plot(fpr, tpr, line_styles[ii], color=colors[cfi], lw=lw) ax2.plot(fpr, tpr,", "ax2.set_xlim([0.0, .2]) ax2.set_ylim([0.8, 1.0]) ax2.set_xlabel('False Positive Rate (Contamination)') ax2.set_ylabel('True Positive Rate (Sensitivity)') #", "thres(v, thr: float = 0.5): v_ = np.array(deepcopy(v)) v_[v_ >= thr] = 1", "/ 2. for i, j in itertools.product(range(confusion_matr.shape[0]), range(confusion_matr.shape[1])): ax_.text(j, i, format(confusion_matr[i, j], 'd'),", "ax2_.set_xticks(tick_marks, tick_marks) # ax2_.set_yticks(tick_marks, tick_marks) # # ax_.xaxis.set_visible(False) # ax_.yaxis.set_visible(False) # ax2_.xaxis.set_visible(False) #", "model_names if c_family in m_] n_mn = len(mn) for ii, model_name in enumerate(mn):", "fig2.add_subplot(3, 2 * len(c_families), ii * 8 + cfi * 2 + 2)", "# c_families = {'rb': '5b96af9c0354c9000b0aea36', # 'sl': '5b99b2c6aec3c500103a14de', # 'kd': '5be0ae7958830a0018821794'} # c_families", "left=0.05, right=0.70, top=0.98, wspace=0.2, hspace=0.2) lw = 1.6 # ROCs ax = fig.add_subplot(1,", "import tensorflow as tf from keras.models import model_from_json import json from sklearn.metrics import", "matrix:') print(confusion_matr_normalized) fpr, tpr, thresholds = roc_curve(y_test, y) roc_auc = auc(fpr, tpr) ax.plot(fpr,", "os.path.join(path_base, 'service/models') c_families = {'rb': '5b96af9c0354c9000b0aea36', 'sl': '5b99b2c6aec3c500103a14de', 'kd': '5be0ae7958830a0018821794', 'os': '5c05bbdc826480000a95c0bf'} #", "= confusion_matrix(y_test, labels_pred) confusion_matr_normalized = confusion_matr.astype('float') / confusion_matr.sum(axis=1)[:, np.newaxis] print(f'Threshold: {thr}') print('Confusion matrix:')", "2. thresh_norm = confusion_matr_normalized.max() / 2. 
for i, j in itertools.product(range(confusion_matr.shape[0]), range(confusion_matr.shape[1])): ax_.text(j,", "# break ax.legend(loc='lower right') ax2.legend(bbox_to_anchor=(1.04, 1), loc=\"upper left\") fig.savefig(f'./roc_rb_sl_kd.png', dpi=300) fig2.savefig(f'./cm_rb_sl_kd.png', dpi=300) plt.show()", "= plt.figure() fig2.subplots_adjust(bottom=0.06, left=0.01, right=1.0, top=0.93, wspace=0.0, hspace=0.12) cn = 0 for cfi,", "cfi == 0 and ii == 0: ax.plot(x_, y_, '.', markersize=8, color=colors[-(it +", "os os.environ[\"CUDA_DEVICE_ORDER\"] = \"PCI_BUS_ID\" # see issue #152 os.environ[\"CUDA_VISIBLE_DEVICES\"] = \"1\" import tensorflow", "if ii == 0: # break ax.legend(loc='lower right') ax2.legend(bbox_to_anchor=(1.04, 1), loc=\"upper left\") fig.savefig(f'./roc_rb_sl_kd.png',", "markersize=8, color=colors[-(it + 1)], label=f'Threshold: {1-thr:.2f}') else: ax.plot(x_, y_, '.', markersize=8, color=colors[-(it +", "'5c05bbdc826480000a95c0bf'} # c_families = {'rb': '5b96af9c0354c9000b0aea36', # 'sl': '5b99b2c6aec3c500103a14de', # 'kd': '5be0ae7958830a0018821794'} #", "roc_curve, auc, confusion_matrix import numpy as np import pandas as pd from copy", "from keras.models import model_from_json import json from sklearn.metrics import roc_curve, auc, confusion_matrix import", "1.6 # ROCs ax = fig.add_subplot(1, 2, 1) # zoomed ROCs ax2 =", "# u'#8c564b', u'#e377c2', u'#7f7f7f', u'#bcbd22', u'#17becf'] # line styles: line_styles = ['-', '--',", "wspace=0.0, hspace=0.12) cn = 0 for cfi, c_family in enumerate(c_families): project_id = c_families[c_family]", "top=0.93, wspace=0.0, hspace=0.12) cn = 0 for cfi, c_family in enumerate(c_families): project_id =", "# return load_model(path) with open(os.path.join(path, f'{model_base_name}.architecture.json'), 'r') as json_file: loaded_model_json = json_file.read() m", "Positive Rate (Contamination)') ax.set_ylabel('True Positive Rate (Sensitivity)') # ax.legend(loc=\"lower right\") # ax.legend(loc=\"best\") ax.grid(True)", "range(confusion_matr.shape[1])): ax_.text(j, i, format(confusion_matr[i, j], 'd'), horizontalalignment=\"center\", color=\"white\" if confusion_matr[i, j] > thresh", "= os.path.join(path_base, 'service/models') c_families = {'rb': '5b96af9c0354c9000b0aea36', 'sl': '5b99b2c6aec3c500103a14de', 'kd': '5be0ae7958830a0018821794', 'os': '5c05bbdc826480000a95c0bf'}", "in m_] n_mn = len(mn) for ii, model_name in enumerate(mn): print(f'loading model {model_name}:", "y_, '.', markersize=8, color=colors[-(it + 1)]) ax2.plot(x_, y_, 'o', markersize=8, color=colors[-(it + 1)])", "labels_pred) confusion_matr_normalized = confusion_matr.astype('float') / confusion_matr.sum(axis=1)[:, np.newaxis] print(f'Threshold: {thr}') print('Confusion matrix:') print(confusion_matr) print('Normalized", "# if ii == 0: # break ax.legend(loc='lower right') ax2.legend(bbox_to_anchor=(1.04, 1), loc=\"upper left\")", "os.environ[\"CUDA_DEVICE_ORDER\"] = \"PCI_BUS_ID\" # see issue #152 os.environ[\"CUDA_VISIBLE_DEVICES\"] = \"1\" import tensorflow as", "= './' with open(os.path.join(path_base, 'service/code/config.json')) as f: config = json.load(f) # models =", "+ cfi * 2 + 2) ax_.imshow(confusion_matr, interpolation='nearest', cmap=plt.cm.Blues) ax2_.imshow(confusion_matr_normalized, interpolation='nearest', cmap=plt.cm.Blues) tick_marks", "ax2_.yaxis.set_visible(False) ax_.axis('off') ax2_.axis('off') thresh = confusion_matr.max() / 2. 


def thres(v, thr: float = 0.5):
    v_ = np.array(deepcopy(v))
    v_[v_ >= thr] = 1
    v_[v_ < thr] = 0
    return v_
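
# A minimal sanity check of thres() (hypothetical inputs, illustrative only):
#   thres([0.1, 0.5, 0.7], thr=0.5)  ->  array([0., 1., 1.])
# Scores >= thr map to the positive class; everything below maps to 0.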


if __name__ == '__main__':
    tf.keras.backend.clear_session()

    # path_base = '/Users/dmitryduev/_caltech/python/deep-asteroids/'
    path_base = './'

    with open(os.path.join(path_base, 'service/code/config.json')) as f:
        config = json.load(f)

    # models = config['models']
    models = config['models_201901']
    model_names = list(models.keys())

    path_models = os.path.join(path_base, 'service/models')
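
    # The schema of config.json is not shown in this file; from the usage above
    # and in load_model_helper(), config['models_201901'] is assumed to map a
    # model name to the base name of its files under service/models, e.g.
    # (hypothetical values):
    #   {"models_201901": {"rb_vgg6": "rb_vgg6_20190101_binary", ...}}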
"2 * len(c_families), ii * 8 + cfi * 2 + 2) ax_.imshow(confusion_matr,", "# ax_.xaxis.set_visible(False) # ax_.yaxis.set_visible(False) # ax2_.xaxis.set_visible(False) # ax2_.yaxis.set_visible(False) ax_.axis('off') ax2_.axis('off') thresh = confusion_matr.max()", "plt.figure() fig2.subplots_adjust(bottom=0.06, left=0.01, right=1.0, top=0.93, wspace=0.0, hspace=0.12) cn = 0 for cfi, c_family", "itertools from utils import load_data # import matplotlib # matplotlib.use('agg') import matplotlib.pyplot as", "color=colors[cfi], lw=lw, label=f'{model_name} curve (area = {roc_auc:.5f})') # plot thresholds for it, thr", "== 0: # break ax.legend(loc='lower right') ax2.legend(bbox_to_anchor=(1.04, 1), loc=\"upper left\") fig.savefig(f'./roc_rb_sl_kd.png', dpi=300) fig2.savefig(f'./cm_rb_sl_kd.png',", "v_[v_ >= thr] = 1 v_[v_ < thr] = 0 return v_ if", "ax.set_ylim([0.0, 1.05]) ax.set_xlabel('False Positive Rate (Contamination)') ax.set_ylabel('True Positive Rate (Sensitivity)') # ax.legend(loc=\"lower right\")", "# 'kd': '5be0ae7958830a0018821794'} # c_families = {'rb': '5b96af9c0354c9000b0aea36'} path_data = './data' # mpl", "m.load_weights(os.path.join(path, f'{model_base_name}.weights.h5')) return m def thres(v, thr: float = 0.5): v_ = np.array(deepcopy(v))", "= np.arange(2) # ax_.set_xticks(tick_marks, tick_marks) # ax_.set_yticks(tick_marks, tick_marks) # ax2_.set_xticks(tick_marks, tick_marks) # ax2_.set_yticks(tick_marks,", "1.0]) ax.set_ylim([0.0, 1.05]) ax.set_xlabel('False Positive Rate (Contamination)') ax.set_ylabel('True Positive Rate (Sensitivity)') # ax.legend(loc=\"lower", "confusion_matrix import numpy as np import pandas as pd from copy import deepcopy", "fig.add_subplot(1, 2, 1) # zoomed ROCs ax2 = fig.add_subplot(1, 2, 2) ax.plot([0, 1],", "Positive Rate (Sensitivity)') # ax.legend(loc=\"lower right\") # ax.legend(loc=\"best\") ax.grid(True) ax2.set_xlim([0.0, .2]) ax2.set_ylim([0.8, 1.0])", "u'#e377c2', u'#7f7f7f', u'#bcbd22', u'#17becf'] # line styles: line_styles = ['-', '--', ':'] #", "horizontalalignment=\"center\", color=\"white\" if confusion_matr_normalized[i, j] > thresh_norm else \"black\") # if ii ==", "\"PCI_BUS_ID\" # see issue #152 os.environ[\"CUDA_VISIBLE_DEVICES\"] = \"1\" import tensorflow as tf from", "m def thres(v, thr: float = 0.5): v_ = np.array(deepcopy(v)) v_[v_ >= thr]", "enumerate(mn): print(f'loading model {model_name}: {models[model_name]}') m = load_model_helper(path_models, models[model_name]) y = m.predict(x_test, batch_size=32,", "fig = plt.figure(figsize=(14, 5)) fig.subplots_adjust(bottom=0.09, left=0.05, right=0.70, top=0.98, wspace=0.2, hspace=0.2) lw = 1.6", "model_name in enumerate(mn): print(f'loading model {model_name}: {models[model_name]}') m = load_model_helper(path_models, models[model_name]) y =", "u'#17becf'] # line styles: line_styles = ['-', '--', ':'] # thresholds score_thresholds =", "len(c_families), ii * 8 + cfi * 2 + 1) ax2_ = fig2.add_subplot(3,", "grayscale=True, resize=(144, 144), test_size=0.1, verbose=True, random_state=42) mn = [m_ for m_ in model_names", "# load data x_train, y_train, x_test, y_test, classes = load_data(path=path_data, project_id=project_id, binary=True, grayscale=True,", "'./' with open(os.path.join(path_base, 'service/code/config.json')) as f: config = json.load(f) # models = config['models']", "ax.plot([0, 1], [0, 1], color='#333333', lw=lw, linestyle='--') ax.set_xlim([0.0, 1.0]) ax.set_ylim([0.0, 1.05]) ax.set_xlabel('False Positive", "= {'rb': '5b96af9c0354c9000b0aea36', # 'sl': 

    # mpl colors:
    colors = plt.rcParams['axes.prop_cycle'].by_key()['color']
    # [u'#1f77b4', u'#ff7f0e', u'#2ca02c', u'#d62728', u'#9467bd',
    #  u'#8c564b', u'#e377c2', u'#7f7f7f', u'#bcbd22', u'#17becf']

    # line styles:
    line_styles = ['-', '--', ':']

    # score thresholds to mark on the ROC curves:
    score_thresholds = [0.99, 0.9, 0.5, 0.1, 0.01]

    # ROC
    fig = plt.figure(figsize=(14, 5))
    fig.subplots_adjust(bottom=0.09, left=0.05, right=0.70, top=0.98, wspace=0.2, hspace=0.2)
    lw = 1.6

    # ROCs
    ax = fig.add_subplot(1, 2, 1)
    # zoomed ROCs
    ax2 = fig.add_subplot(1, 2, 2)

    ax.plot([0, 1], [0, 1], color='#333333', lw=lw, linestyle='--')
    ax.set_xlim([0.0, 1.0])
    ax.set_ylim([0.0, 1.05])
    ax.set_xlabel('False Positive Rate (Contamination)')
    ax.set_ylabel('True Positive Rate (Sensitivity)')
    # ax.legend(loc="lower right")
    # ax.legend(loc="best")
    ax.grid(True)

    ax2.set_xlim([0.0, .2])
    ax2.set_ylim([0.8, 1.0])
    ax2.set_xlabel('False Positive Rate (Contamination)')
    ax2.set_ylabel('True Positive Rate (Sensitivity)')
    # ax2.legend(loc="best")
    ax2.grid(True)

    # Confusion matrices
    fig2 = plt.figure()
    fig2.subplots_adjust(bottom=0.06, left=0.01, right=1.0, top=0.93, wspace=0.0, hspace=0.12)

    cn = 0
    for cfi, c_family in enumerate(c_families):
        project_id = c_families[c_family]
        print(c_family, project_id)

        # load data
        x_train, y_train, x_test, y_test, classes = load_data(path=path_data,
                                                              project_id=project_id,
                                                              binary=True,
                                                              grayscale=True,
                                                              resize=(144, 144),
                                                              test_size=0.1,
                                                              verbose=True,
                                                              random_state=42)

        # models belonging to this classifier family
        mn = [m_ for m_ in model_names if c_family in m_]
        n_mn = len(mn)

        for ii, model_name in enumerate(mn):
            print(f'loading model {model_name}: {models[model_name]}')
            m = load_model_helper(path_models, models[model_name])

            y = m.predict(x_test, batch_size=32, verbose=True)

            # for thr in (0.5, 0.9):
            for thr in (0.5,):
                labels_pred = thres(y, thr=thr)
                confusion_matr = confusion_matrix(y_test, labels_pred)
                confusion_matr_normalized = \
                    confusion_matr.astype('float') / confusion_matr.sum(axis=1)[:, np.newaxis]

                print(f'Threshold: {thr}')
                print('Confusion matrix:')
                print(confusion_matr)
                print('Normalized confusion matrix:')
                print(confusion_matr_normalized)
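
            # Row normalization above divides each row of the confusion matrix
            # by its true-class total, so every row sums to 1. E.g. with
            # hypothetical counts:
            #   [[90, 10],        [[0.90, 0.10],
            #    [ 5, 95]]   ->    [0.05, 0.95]]
            # entry (i, j) is then the fraction of true class i predicted as j.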

            fpr, tpr, thresholds = roc_curve(y_test, y)
            roc_auc = auc(fpr, tpr)

            ax.plot(fpr, tpr, line_styles[ii], color=colors[cfi], lw=lw)
            ax2.plot(fpr, tpr, line_styles[ii], color=colors[cfi], lw=lw,
                     label=f'{model_name} curve (area = {roc_auc:.5f})')

            # mark the chosen score thresholds on the curves; roc_curve returns
            # thresholds in decreasing order, so all three arrays are reversed
            # to give np.interp the increasing xp it requires
            for it, thr in enumerate(score_thresholds):
                x_ = np.interp(thr, thresholds[::-1], fpr[::-1])
                y_ = np.interp(thr, thresholds[::-1], tpr[::-1])
                # print(thr, x_, y_)
                if cfi == 0 and ii == 0:
                    ax.plot(x_, y_, '.', markersize=8, color=colors[-(it + 1)],
                            label=f'Threshold: {1 - thr:.2f}')
                else:
                    ax.plot(x_, y_, '.', markersize=8, color=colors[-(it + 1)])
                ax2.plot(x_, y_, 'o', markersize=8, color=colors[-(it + 1)])
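
            # Interpolation example (hypothetical roc_curve output): with
            #   thresholds = [1.99, 0.9, 0.6, 0.2]  (decreasing),
            #   fpr = [0.0, 0.0, 0.1, 1.0], tpr = [0.0, 0.5, 0.9, 1.0],
            # np.interp(0.5, thresholds[::-1], fpr[::-1]) returns the
            # false-positive rate at score threshold 0.5, linearly
            # interpolated between the samples at thresholds 0.6 and 0.2.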

            # plot confusion matrices
            ax_ = fig2.add_subplot(3, 2 * len(c_families), ii * 8 + cfi * 2 + 1)
            ax2_ = fig2.add_subplot(3, 2 * len(c_families), ii * 8 + cfi * 2 + 2)
            ax_.imshow(confusion_matr, interpolation='nearest', cmap=plt.cm.Blues)
            ax2_.imshow(confusion_matr_normalized, interpolation='nearest', cmap=plt.cm.Blues)
            tick_marks = np.arange(2)
            # ax_.set_xticks(tick_marks, tick_marks)
            # ax_.set_yticks(tick_marks, tick_marks)
            # ax2_.set_xticks(tick_marks, tick_marks)
            # ax2_.set_yticks(tick_marks, tick_marks)
            # ax_.xaxis.set_visible(False)
            # ax_.yaxis.set_visible(False)
            # ax2_.xaxis.set_visible(False)
            # ax2_.yaxis.set_visible(False)
            ax_.axis('off')
            ax2_.axis('off')

            thresh = confusion_matr.max() / 2.
            thresh_norm = confusion_matr_normalized.max() / 2.
            for i, j in itertools.product(range(confusion_matr.shape[0]),
                                          range(confusion_matr.shape[1])):
                ax_.text(j, i, format(confusion_matr[i, j], 'd'),
                         horizontalalignment="center",
                         color="white" if confusion_matr[i, j] > thresh else "black")
                ax2_.text(j, i, format(confusion_matr_normalized[i, j], '.2f'),
                          horizontalalignment="center",
                          color="white" if confusion_matr_normalized[i, j] > thresh_norm else "black")

        # if ii == 0:
        #     break

    ax.legend(loc='lower right')
    ax2.legend(bbox_to_anchor=(1.04, 1), loc="upper left")

    fig.savefig('./roc_rb_sl_kd.png', dpi=300)
    fig2.savefig('./cm_rb_sl_kd.png', dpi=300)

    plt.show()
"matrices ax_ = fig2.add_subplot(3, 2 * len(c_families), ii * 8 + cfi *", "len(c_families), ii * 8 + cfi * 2 + 2) ax_.imshow(confusion_matr, interpolation='nearest', cmap=plt.cm.Blues)", "else: ax.plot(x_, y_, '.', markersize=8, color=colors[-(it + 1)]) ax2.plot(x_, y_, 'o', markersize=8, color=colors[-(it", "fig2.add_subplot(3, 2 * len(c_families), ii * 8 + cfi * 2 + 1)", "ax2.set_ylabel('True Positive Rate (Sensitivity)') # ax.legend(loc=\"lower right\") # ax2.legend(loc=\"best\") ax2.grid(True) # Confusion matrices", "u'#d62728', u'#9467bd', # u'#8c564b', u'#e377c2', u'#7f7f7f', u'#bcbd22', u'#17becf'] # line styles: line_styles =", "{'rb': '5b96af9c0354c9000b0aea36', # 'sl': '5b99b2c6aec3c500103a14de', # 'kd': '5be0ae7958830a0018821794'} # c_families = {'rb': '5b96af9c0354c9000b0aea36'}", "labels_pred = thres(y, thr=thr) confusion_matr = confusion_matrix(y_test, labels_pred) confusion_matr_normalized = confusion_matr.astype('float') / confusion_matr.sum(axis=1)[:,", "1 v_[v_ < thr] = 0 return v_ if __name__ == '__main__': tf.keras.backend.clear_session()", "thr: float = 0.5): v_ = np.array(deepcopy(v)) v_[v_ >= thr] = 1 v_[v_", "model_names = list(models.keys()) path_models = os.path.join(path_base, 'service/models') c_families = {'rb': '5b96af9c0354c9000b0aea36', 'sl': '5b99b2c6aec3c500103a14de',", "= plt.rcParams['axes.prop_cycle'].by_key()['color'] # [u'#1f77b4', u'#ff7f0e', u'#2ca02c', u'#d62728', u'#9467bd', # u'#8c564b', u'#e377c2', u'#7f7f7f', u'#bcbd22',", "= model_from_json(loaded_model_json) m.load_weights(os.path.join(path, f'{model_base_name}.weights.h5')) return m def thres(v, thr: float = 0.5): v_", "+ 2) ax_.imshow(confusion_matr, interpolation='nearest', cmap=plt.cm.Blues) ax2_.imshow(confusion_matr_normalized, interpolation='nearest', cmap=plt.cm.Blues) tick_marks = np.arange(2) # ax_.set_xticks(tick_marks,", "/ 2. thresh_norm = confusion_matr_normalized.max() / 2. for i, j in itertools.product(range(confusion_matr.shape[0]), range(confusion_matr.shape[1])):", "0.5, 0.1, 0.01] # ROC fig = plt.figure(figsize=(14, 5)) fig.subplots_adjust(bottom=0.09, left=0.05, right=0.70, top=0.98,", "color='#333333', lw=lw, linestyle='--') ax.set_xlim([0.0, 1.0]) ax.set_ylim([0.0, 1.05]) ax.set_xlabel('False Positive Rate (Contamination)') ax.set_ylabel('True Positive", "j], '.2f'), horizontalalignment=\"center\", color=\"white\" if confusion_matr_normalized[i, j] > thresh_norm else \"black\") # if", "import pandas as pd from copy import deepcopy import itertools from utils import", "np.interp(thr, thresholds[::-1], tpr) # print(thr, x_, y_) if cfi == 0 and ii" ]
[ "f = Field(self.team_a, self.team_b) f.possession = Possession.AWAY_TEAM f.set_position(Position(FIELD_CENTER_X, FIELD_MAX_Y)) f.move_laterally(LateralDirection.RIGHT) self.assertEqual(f.position.x, FIELD_CENTER_X) self.assertEqual(f.position.y,", "1)) zone = f.get_field_zone() self.assertEqual(zone, FieldZone.RUCK) def test_field_status_returns_possession_and_ball_status(self): f = Field(self.team_a, self.team_b) fs", "self.assertEqual(f.ball_status, BallStatus.BOUNCE) self.assertEqual(f.possession, Possession.IN_CONTENTION) def test_get_field_zone_at_bounce(self): f = Field(self.team_a, self.team_b) zone = f.get_field_zone()", "0.5), # backs Skills(0.5, 0.5, 0.5), # ruck Skills(0.5, 0.5, 0.5), ) self.team_b", "f = Field(self.team_a, self.team_b) f.ball_status = BallStatus.MOVING f.set_position(Position(1, 1)) zone = f.get_field_zone() self.assertEqual(zone,", "f = Field(self.team_a, self.team_b) f.set_position(Position(3, 0)) self.assertEqual(f.position.y, FIELD_MIN_Y) def test_set_position_x_greater_than_maximum(self): f = Field(self.team_a,", "- 1) def test_move_laterally_when_possession_is_home_team_and_at_left_side_and_move_left(self): f = Field(self.team_a, self.team_b) f.possession = Possession.HOME_TEAM f.set_position(Position(FIELD_CENTER_X, FIELD_MAX_Y))", "FIELD_MAX_Y) def test_move_laterally_when_possession_is_home_team_and_at_right_side_and_move_right(self): f = Field(self.team_a, self.team_b) f.possession = Possession.HOME_TEAM f.set_position(Position(FIELD_CENTER_X, FIELD_MIN_Y)) f.move_laterally(LateralDirection.RIGHT)", "zone = f.get_field_zone() self.assertEqual(zone, FieldZone.MID_FIELD) f.set_position(Position(6, 4)) zone = f.get_field_zone() self.assertEqual(zone, FieldZone.MID_FIELD) f.set_position(Position(7,", "0.5), ) def test_init(self): f = Field(self.team_a, self.team_b) self.assertEqual(f.position, Position(FIELD_CENTER_X, FIELD_CENTER_Y)) self.assertEqual(f.ball_status, BallStatus.BOUNCE)", "f.move_forward() self.assertEqual(f.position.x, FIELD_MAX_X) self.assertEqual(f.position.y, FIELD_CENTER_Y) def test_move_forward_when_possession_is_away_team_and_in_back_field(self): f = Field(self.team_a, self.team_b) f.possession =", "FIELD_MAX_X) def test_set_position_x_less_than_minimum(self): f = Field(self.team_a, self.team_b) f.set_position(Position(3, 0)) self.assertEqual(f.position.y, FIELD_MIN_Y) def test_set_position_x_greater_than_maximum(self):", ") def test_init(self): f = Field(self.team_a, self.team_b) self.assertEqual(f.position, Position(FIELD_CENTER_X, FIELD_CENTER_Y)) self.assertEqual(f.ball_status, BallStatus.BOUNCE) self.assertEqual(f.possession,", "test_get_field_zone_at_bounce(self): f = Field(self.team_a, self.team_b) zone = f.get_field_zone() self.assertEqual(zone, FieldZone.RUCK) def test_get_field_zone_when_moving(self): f", "self.assertEqual(f.position.y, FIELD_CENTER_Y) # Move laterally - HOME_TEAM def test_move_laterally_when_possession_is_home_team_and_in_field_center_and_move_left(self): f = Field(self.team_a, self.team_b)", "f.field_status self.assertEqual(fs.possession, Possession.IN_CONTENTION) self.assertEqual(fs.ball_status, BallStatus.BOUNCE) # Move forwards def test_move_forward_when_possession_is_home_team_and_in_field_center(self): f = Field(self.team_a,", "= f.get_field_zone() self.assertEqual(zone, FieldZone.FORWARDS) f.set_position(Position(4, 3)) zone = f.get_field_zone() self.assertEqual(zone, FieldZone.MID_FIELD) f.set_position(Position(6, 4))", "FieldZone.MID_FIELD) f.set_position(Position(7, 5)) zone = 
f.get_field_zone() self.assertEqual(zone, FieldZone.BACKS) f.set_position(Position(9, 4)) zone = f.get_field_zone()", "def test_move_forward_when_possession_is_away_team_and_in_back_field(self): f = Field(self.team_a, self.team_b) f.possession = Possession.AWAY_TEAM f.set_position(Position(FIELD_MIN_X, FIELD_CENTER_Y)) f.move_forward() self.assertEqual(f.position.x,", "self.assertEqual(f.position, Position(FIELD_CENTER_X, FIELD_CENTER_Y)) self.assertEqual(f.ball_status, BallStatus.BOUNCE) self.assertEqual(f.possession, Possession.IN_CONTENTION) def test_get_field_zone_at_bounce(self): f = Field(self.team_a, self.team_b)", "f = Field(self.team_a, self.team_b) zone = f.get_field_zone() self.assertEqual(zone, FieldZone.RUCK) def test_get_field_zone_when_moving(self): f =", "Field(self.team_a, self.team_b) f.possession = Possession.HOME_TEAM f.move_forward() self.assertEqual(f.position.x, FIELD_CENTER_X - 1) self.assertEqual(f.position.y, FIELD_CENTER_Y) def", "self.team_b) f.centre_ball() self.assertEqual(f.position, Position(FIELD_CENTER_X, FIELD_CENTER_Y)) self.assertEqual(f.ball_status, BallStatus.BOUNCE) self.assertEqual(f.possession, Possession.IN_CONTENTION) def test_get_field_zone_at_bounce(self): f =", "BallStatus.BOUNCE) self.assertEqual(f.possession, Possession.IN_CONTENTION) self.assertEqual(f.teams[0], self.team_a) self.assertEqual(f.teams[1], self.team_b) def test_set_position(self): f = Field(self.team_a, self.team_b)", "self.assertEqual(f.possession, Possession.IN_CONTENTION) self.assertEqual(f.teams[0], self.team_a) self.assertEqual(f.teams[1], self.team_b) def test_set_position(self): f = Field(self.team_a, self.team_b) f.set_position(Position(4,", "# Move laterally - HOME_TEAM def test_move_laterally_when_possession_is_home_team_and_in_field_center_and_move_left(self): f = Field(self.team_a, self.team_b) f.possession =", "Possession.AWAY_TEAM f.set_position(Position(FIELD_MIN_X, FIELD_CENTER_Y)) f.move_backward() self.assertEqual(f.position.x, FIELD_MIN_X) self.assertEqual(f.position.y, FIELD_CENTER_Y) def test_move_backward_when_possession_is_away_team_and_in_forward_field(self): f = Field(self.team_a,", "ruck Skills(0.5, 0.5, 0.5), ) self.team_b = Team(\"BBB\", Skills(0.5, 0.5, 0.5), Skills(0.5, 0.5,", "self.assertEqual(f.position.x, FIELD_CENTER_X) self.assertEqual(f.position.y, FIELD_CENTER_Y - 1) def test_move_laterally_when_possession_is_home_team_and_at_left_side_and_move_left(self): f = Field(self.team_a, self.team_b) f.possession", "Possession class TestField(unittest.TestCase): def setUp(self): self.team_a = Team( # name \"AAA\", # forwards", "= Field(self.team_a, self.team_b) f.possession = Possession.AWAY_TEAM f.set_position(Position(FIELD_MIN_X, FIELD_CENTER_Y)) f.move_backward() self.assertEqual(f.position.x, FIELD_MIN_X) self.assertEqual(f.position.y, FIELD_CENTER_Y)", "Team(\"BBB\", Skills(0.5, 0.5, 0.5), Skills(0.5, 0.5, 0.5), Skills(0.5, 0.5, 0.5), Skills(0.5, 0.5, 0.5),", "0.5, 0.5), # backs Skills(0.5, 0.5, 0.5), # ruck Skills(0.5, 0.5, 0.5), )", "def test_init(self): f = Field(self.team_a, self.team_b) self.assertEqual(f.position, Position(FIELD_CENTER_X, FIELD_CENTER_Y)) self.assertEqual(f.ball_status, BallStatus.BOUNCE) self.assertEqual(f.possession, Possession.IN_CONTENTION)", "Possession.AWAY_TEAM f.move_laterally(LateralDirection.RIGHT) self.assertEqual(f.position.x, FIELD_CENTER_X) self.assertEqual(f.position.y, FIELD_CENTER_Y + 1) def test_move_laterally_when_possession_is_away_team_and_at_left_side_and_move_left(self): f = Field(self.team_a,", 
"FIELD_CENTER_Y) def test_move_backward_when_possession_is_away_team_and_at_back_limit(self): f = Field(self.team_a, self.team_b) f.possession = Possession.AWAY_TEAM f.set_position(Position(FIELD_MIN_X, FIELD_CENTER_Y)) f.move_backward()", "= Possession.AWAY_TEAM f.move_laterally(LateralDirection.LEFT) self.assertEqual(f.position.x, FIELD_CENTER_X) self.assertEqual(f.position.y, FIELD_CENTER_Y - 1) def test_move_laterally_when_possession_is_away_team_and_in_field_center_and_move_right(self): f =", "= Possession.AWAY_TEAM f.set_position(Position(FIELD_CENTER_X, FIELD_MIN_Y)) f.move_laterally(LateralDirection.LEFT) self.assertEqual(f.position.x, FIELD_CENTER_X) self.assertEqual(f.position.y, FIELD_MIN_Y) def test_move_laterally_when_possession_is_away_team_and_at_right_side_and_move_right(self): f =", "FIELD_MAX_Y)) f.move_laterally(LateralDirection.LEFT) self.assertEqual(f.position.x, FIELD_CENTER_X) self.assertEqual(f.position.y, FIELD_MAX_Y) def test_move_laterally_when_possession_is_home_team_and_at_right_side_and_move_right(self): f = Field(self.team_a, self.team_b) f.possession", "FieldZone.FORWARDS) f.set_position(Position(4, 3)) zone = f.get_field_zone() self.assertEqual(zone, FieldZone.MID_FIELD) f.set_position(Position(6, 4)) zone = f.get_field_zone()", "f = Field(self.team_a, self.team_b) f.set_position(Position(3, 6)) self.assertEqual(f.position.y, FIELD_MAX_Y) def test_centre_ball(self): f = Field(self.team_a,", "FIELD_MIN_X + 1) self.assertEqual(f.position.y, FIELD_CENTER_Y) def test_move_backward_when_possession_is_away_team_and_in_field_center(self): f = Field(self.team_a, self.team_b) f.possession =", "FIELD_CENTER_Y)) f.move_forward() self.assertEqual(f.position.x, FIELD_MIN_X) self.assertEqual(f.position.y, FIELD_CENTER_Y) def test_move_forward_when_possession_is_home_team_and_in_back_field(self): f = Field(self.team_a, self.team_b) f.possession", "f.possession = Possession.AWAY_TEAM f.set_position(Position(FIELD_MAX_X, FIELD_CENTER_Y)) f.move_backward() self.assertEqual(f.position.x, FIELD_MAX_X - 1) self.assertEqual(f.position.y, FIELD_CENTER_Y) #", "= Field(self.team_a, self.team_b) f.possession = Possession.HOME_TEAM f.move_laterally(LateralDirection.RIGHT) self.assertEqual(f.position.x, FIELD_CENTER_X) self.assertEqual(f.position.y, FIELD_CENTER_Y - 1)", "= Field(self.team_a, self.team_b) f.possession = Possession.HOME_TEAM f.move_forward() self.assertEqual(f.position.x, FIELD_CENTER_X - 1) self.assertEqual(f.position.y, FIELD_CENTER_Y)", "self.assertEqual(f.position.y, FIELD_CENTER_Y) def test_move_forward_when_possession_is_away_team_and_at_forward_limit(self): f = Field(self.team_a, self.team_b) f.possession = Possession.AWAY_TEAM f.set_position(Position(FIELD_MAX_X, FIELD_CENTER_Y))", "unittest from field import * from data import Team, Skills from status import", "= Possession.HOME_TEAM f.set_position(Position(FIELD_MIN_X, FIELD_CENTER_Y)) f.move_backward() self.assertEqual(f.position.x, FIELD_MIN_X + 1) self.assertEqual(f.position.y, FIELD_CENTER_Y) def test_move_backward_when_possession_is_away_team_and_in_field_center(self):", "f = Field(self.team_a, self.team_b) self.assertEqual(f.position, Position(FIELD_CENTER_X, FIELD_CENTER_Y)) self.assertEqual(f.ball_status, BallStatus.BOUNCE) self.assertEqual(f.possession, Possession.IN_CONTENTION) self.assertEqual(f.teams[0], self.team_a)", "# Move laterally - AWAY_TEAM def test_move_laterally_when_possession_is_away_team_and_in_field_center_and_move_left(self): f = Field(self.team_a, self.team_b) f.possession =", "def 
test_move_backward_when_possession_is_away_team_and_at_back_limit(self): f = Field(self.team_a, self.team_b) f.possession = Possession.AWAY_TEAM f.set_position(Position(FIELD_MIN_X, FIELD_CENTER_Y)) f.move_backward() self.assertEqual(f.position.x,", ") self.team_b = Team(\"BBB\", Skills(0.5, 0.5, 0.5), Skills(0.5, 0.5, 0.5), Skills(0.5, 0.5, 0.5),", "self.team_b) def test_set_position(self): f = Field(self.team_a, self.team_b) f.set_position(Position(4, 3)) self.assertEqual(f.position.x, 4) self.assertEqual(f.position.y, 3)", "0.5, 0.5), # mid_field Skills(0.5, 0.5, 0.5), # backs Skills(0.5, 0.5, 0.5), #", "f = Field(self.team_a, self.team_b) f.possession = Possession.AWAY_TEAM f.set_position(Position(FIELD_MAX_X, FIELD_CENTER_Y)) f.move_backward() self.assertEqual(f.position.x, FIELD_MAX_X -", "self.assertEqual(f.position.x, FIELD_CENTER_X - 1) self.assertEqual(f.position.y, FIELD_CENTER_Y) def test_move_backward_when_possession_is_away_team_and_at_back_limit(self): f = Field(self.team_a, self.team_b) f.possession", "= Possession.HOME_TEAM f.move_forward() self.assertEqual(f.position.x, FIELD_CENTER_X - 1) self.assertEqual(f.position.y, FIELD_CENTER_Y) def test_move_forward_when_possession_is_home_team_and_at_forward_limit(self): f =", "self.team_b) f.possession = Possession.AWAY_TEAM f.set_position(Position(FIELD_MAX_X, FIELD_CENTER_Y)) f.move_forward() self.assertEqual(f.position.x, FIELD_MAX_X) self.assertEqual(f.position.y, FIELD_CENTER_Y) def test_move_forward_when_possession_is_away_team_and_in_back_field(self):", "f = Field(self.team_a, self.team_b) f.possession = Possession.HOME_TEAM f.set_position(Position(FIELD_MAX_X, FIELD_CENTER_Y)) f.move_backward() self.assertEqual(f.position.x, FIELD_MAX_X) self.assertEqual(f.position.y,", "1) self.assertEqual(f.position.y, FIELD_CENTER_Y) def test_move_backward_when_possession_is_home_team_and_at_back_limit(self): f = Field(self.team_a, self.team_b) f.possession = Possession.HOME_TEAM f.set_position(Position(FIELD_MAX_X,", "# mid_field Skills(0.5, 0.5, 0.5), # backs Skills(0.5, 0.5, 0.5), # ruck Skills(0.5,", "class TestField(unittest.TestCase): def setUp(self): self.team_a = Team( # name \"AAA\", # forwards Skills(0.5,", "= Field(self.team_a, self.team_b) f.possession = Possession.HOME_TEAM f.set_position(Position(FIELD_MIN_X, FIELD_CENTER_Y)) f.move_forward() self.assertEqual(f.position.x, FIELD_MIN_X) self.assertEqual(f.position.y, FIELD_CENTER_Y)", "def setUp(self): self.team_a = Team( # name \"AAA\", # forwards Skills(0.5, 0.5, 0.5),", "name \"AAA\", # forwards Skills(0.5, 0.5, 0.5), # mid_field Skills(0.5, 0.5, 0.5), #", "def test_field_status_returns_possession_and_ball_status(self): f = Field(self.team_a, self.team_b) fs = f.field_status self.assertEqual(fs.possession, Possession.IN_CONTENTION) self.assertEqual(fs.ball_status, BallStatus.BOUNCE)", "= Possession.HOME_TEAM f.set_position(Position(FIELD_CENTER_X, FIELD_MIN_Y)) f.move_laterally(LateralDirection.RIGHT) self.assertEqual(f.position.x, FIELD_CENTER_X) self.assertEqual(f.position.y, FIELD_MIN_Y) # Move laterally -", "f.move_laterally(LateralDirection.RIGHT) self.assertEqual(f.position.x, FIELD_CENTER_X) self.assertEqual(f.position.y, FIELD_CENTER_Y + 1) def test_move_laterally_when_possession_is_away_team_and_at_left_side_and_move_left(self): f = Field(self.team_a, self.team_b)", "Skills from status import BallStatus, FieldZone, LateralDirection, Possession class TestField(unittest.TestCase): def setUp(self): self.team_a", "FIELD_CENTER_X) self.assertEqual(f.position.y, 
FIELD_MAX_Y) def test_move_laterally_when_possession_is_home_team_and_at_right_side_and_move_right(self): f = Field(self.team_a, self.team_b) f.possession = Possession.HOME_TEAM f.set_position(Position(FIELD_CENTER_X,", "data import Team, Skills from status import BallStatus, FieldZone, LateralDirection, Possession class TestField(unittest.TestCase):", "# name \"AAA\", # forwards Skills(0.5, 0.5, 0.5), # mid_field Skills(0.5, 0.5, 0.5),", "3)) self.assertEqual(f.position.x, 4) self.assertEqual(f.position.y, 3) def test_set_position_x_less_than_minimum(self): f = Field(self.team_a, self.team_b) f.set_position(Position(-1, 3))", "def test_centre_ball(self): f = Field(self.team_a, self.team_b) f.centre_ball() self.assertEqual(f.position, Position(FIELD_CENTER_X, FIELD_CENTER_Y)) self.assertEqual(f.ball_status, BallStatus.BOUNCE) self.assertEqual(f.possession,", "f.move_backward() self.assertEqual(f.position.x, FIELD_CENTER_X + 1) self.assertEqual(f.position.y, FIELD_CENTER_Y) def test_move_backward_when_possession_is_home_team_and_at_back_limit(self): f = Field(self.team_a, self.team_b)", "f.possession = Possession.HOME_TEAM f.set_position(Position(FIELD_MAX_X, FIELD_CENTER_Y)) f.move_backward() self.assertEqual(f.position.x, FIELD_MAX_X) self.assertEqual(f.position.y, FIELD_CENTER_Y) def test_move_backward_when_possession_is_home_team_and_in_forward_field(self): f", "f.get_field_zone() self.assertEqual(zone, FieldZone.MID_FIELD) f.set_position(Position(6, 4)) zone = f.get_field_zone() self.assertEqual(zone, FieldZone.MID_FIELD) f.set_position(Position(7, 5)) zone", "= Field(self.team_a, self.team_b) f.possession = Possession.HOME_TEAM f.set_position(Position(FIELD_CENTER_X, FIELD_MAX_Y)) f.move_laterally(LateralDirection.LEFT) self.assertEqual(f.position.x, FIELD_CENTER_X) self.assertEqual(f.position.y, FIELD_MAX_Y)", "f = Field(self.team_a, self.team_b) f.possession = Possession.HOME_TEAM f.move_laterally(LateralDirection.RIGHT) self.assertEqual(f.position.x, FIELD_CENTER_X) self.assertEqual(f.position.y, FIELD_CENTER_Y -", "= Possession.AWAY_TEAM f.move_backward() self.assertEqual(f.position.x, FIELD_CENTER_X - 1) self.assertEqual(f.position.y, FIELD_CENTER_Y) def test_move_backward_when_possession_is_away_team_and_at_back_limit(self): f =", "f = Field(self.team_a, self.team_b) f.possession = Possession.HOME_TEAM f.set_position(Position(FIELD_CENTER_X, FIELD_MIN_Y)) f.move_laterally(LateralDirection.RIGHT) self.assertEqual(f.position.x, FIELD_CENTER_X) self.assertEqual(f.position.y,", "# Move forwards def test_move_forward_when_possession_is_home_team_and_in_field_center(self): f = Field(self.team_a, self.team_b) f.possession = Possession.HOME_TEAM f.move_forward()", "test_move_forward_when_possession_is_away_team_and_in_field_center(self): f = Field(self.team_a, self.team_b) f.possession = Possession.AWAY_TEAM f.move_forward() self.assertEqual(f.position.x, FIELD_CENTER_X + 1)", "test_move_forward_when_possession_is_away_team_and_at_forward_limit(self): f = Field(self.team_a, self.team_b) f.possession = Possession.AWAY_TEAM f.set_position(Position(FIELD_MAX_X, FIELD_CENTER_Y)) f.move_forward() self.assertEqual(f.position.x, FIELD_MAX_X)", "f.move_laterally(LateralDirection.LEFT) self.assertEqual(f.position.x, FIELD_CENTER_X) self.assertEqual(f.position.y, FIELD_MAX_Y) def test_move_laterally_when_possession_is_home_team_and_at_right_side_and_move_right(self): f = Field(self.team_a, self.team_b) f.possession =", "0.5, 0.5), Skills(0.5, 0.5, 0.5), Skills(0.5, 0.5, 0.5), Skills(0.5, 0.5, 
0.5), ) def", "1) self.assertEqual(f.position.y, FIELD_CENTER_Y) def test_move_forward_when_possession_is_home_team_and_at_forward_limit(self): f = Field(self.team_a, self.team_b) f.possession = Possession.HOME_TEAM f.set_position(Position(FIELD_MIN_X,", "self.assertEqual(f.position.y, 3) def test_set_position_x_less_than_minimum(self): f = Field(self.team_a, self.team_b) f.set_position(Position(-1, 3)) self.assertEqual(f.position.x, FIELD_MIN_X) def", "self.assertEqual(f.position.y, FIELD_CENTER_Y + 1) def test_move_laterally_when_possession_is_home_team_and_in_field_center_and_move_right(self): f = Field(self.team_a, self.team_b) f.possession = Possession.HOME_TEAM", "FIELD_CENTER_Y) def test_move_backward_when_possession_is_home_team_and_at_back_limit(self): f = Field(self.team_a, self.team_b) f.possession = Possession.HOME_TEAM f.set_position(Position(FIELD_MAX_X, FIELD_CENTER_Y)) f.move_backward()", "FIELD_MIN_Y)) f.move_laterally(LateralDirection.RIGHT) self.assertEqual(f.position.x, FIELD_CENTER_X) self.assertEqual(f.position.y, FIELD_MIN_Y) # Move laterally - AWAY_TEAM def test_move_laterally_when_possession_is_away_team_and_in_field_center_and_move_left(self):", "Field(self.team_a, self.team_b) f.possession = Possession.HOME_TEAM f.set_position(Position(FIELD_CENTER_X, FIELD_MIN_Y)) f.move_laterally(LateralDirection.RIGHT) self.assertEqual(f.position.x, FIELD_CENTER_X) self.assertEqual(f.position.y, FIELD_MIN_Y) #", "f.get_field_zone() self.assertEqual(zone, FieldZone.FORWARDS) f.set_position(Position(3, 2)) zone = f.get_field_zone() self.assertEqual(zone, FieldZone.FORWARDS) f.set_position(Position(4, 3)) zone", "self.team_b) self.assertEqual(f.position, Position(FIELD_CENTER_X, FIELD_CENTER_Y)) self.assertEqual(f.ball_status, BallStatus.BOUNCE) self.assertEqual(f.possession, Possession.IN_CONTENTION) self.assertEqual(f.teams[0], self.team_a) self.assertEqual(f.teams[1], self.team_b) def", "f.move_laterally(LateralDirection.LEFT) self.assertEqual(f.position.x, FIELD_CENTER_X) self.assertEqual(f.position.y, FIELD_MIN_Y) def test_move_laterally_when_possession_is_away_team_and_at_right_side_and_move_right(self): f = Field(self.team_a, self.team_b) f.possession =", "Field(self.team_a, self.team_b) f.possession = Possession.AWAY_TEAM f.set_position(Position(FIELD_CENTER_X, FIELD_MAX_Y)) f.move_laterally(LateralDirection.RIGHT) self.assertEqual(f.position.x, FIELD_CENTER_X) self.assertEqual(f.position.y, FIELD_MAX_Y) if", "FIELD_MIN_X) def test_set_position_x_greater_than_maximum(self): f = Field(self.team_a, self.team_b) f.set_position(Position(10, 3)) self.assertEqual(f.position.x, FIELD_MAX_X) def test_set_position_x_less_than_minimum(self):", "self.assertEqual(f.position.x, FIELD_MIN_X) self.assertEqual(f.position.y, FIELD_CENTER_Y) def test_move_backward_when_possession_is_away_team_and_in_forward_field(self): f = Field(self.team_a, self.team_b) f.possession = Possession.AWAY_TEAM", "f.set_position(Position(FIELD_MAX_X, FIELD_CENTER_Y)) f.move_backward() self.assertEqual(f.position.x, FIELD_MAX_X - 1) self.assertEqual(f.position.y, FIELD_CENTER_Y) # Move laterally -", "Field(self.team_a, self.team_b) f.possession = Possession.AWAY_TEAM f.set_position(Position(FIELD_MIN_X, FIELD_CENTER_Y)) f.move_forward() self.assertEqual(f.position.x, FIELD_MIN_X + 1) self.assertEqual(f.position.y,", "test_move_backward_when_possession_is_away_team_and_at_back_limit(self): f = Field(self.team_a, self.team_b) f.possession = Possession.AWAY_TEAM f.set_position(Position(FIELD_MIN_X, 
FIELD_CENTER_Y)) f.move_backward() self.assertEqual(f.position.x, FIELD_MIN_X)", "self.assertEqual(f.position.y, FIELD_CENTER_Y) def test_move_forward_when_possession_is_home_team_and_in_back_field(self): f = Field(self.team_a, self.team_b) f.possession = Possession.HOME_TEAM f.set_position(Position(FIELD_MAX_X, FIELD_CENTER_Y))", "self.assertEqual(f.position.x, FIELD_MIN_X + 1) self.assertEqual(f.position.y, FIELD_CENTER_Y) ## Move backwards def test_move_backward_when_possession_is_home_team_and_in_field_center(self): f =", "FIELD_CENTER_X - 1) self.assertEqual(f.position.y, FIELD_CENTER_Y) def test_move_backward_when_possession_is_away_team_and_at_back_limit(self): f = Field(self.team_a, self.team_b) f.possession =", "Skills(0.5, 0.5, 0.5), Skills(0.5, 0.5, 0.5), Skills(0.5, 0.5, 0.5), ) def test_init(self): f", "Field(self.team_a, self.team_b) f.possession = Possession.AWAY_TEAM f.set_position(Position(FIELD_MAX_X, FIELD_CENTER_Y)) f.move_forward() self.assertEqual(f.position.x, FIELD_MAX_X) self.assertEqual(f.position.y, FIELD_CENTER_Y) def", "def test_move_backward_when_possession_is_away_team_and_in_field_center(self): f = Field(self.team_a, self.team_b) f.possession = Possession.AWAY_TEAM f.move_backward() self.assertEqual(f.position.x, FIELD_CENTER_X -", "def test_move_forward_when_possession_is_away_team_and_at_forward_limit(self): f = Field(self.team_a, self.team_b) f.possession = Possession.AWAY_TEAM f.set_position(Position(FIELD_MAX_X, FIELD_CENTER_Y)) f.move_forward() self.assertEqual(f.position.x,", "self.assertEqual(f.position.y, FIELD_CENTER_Y) def test_move_backward_when_possession_is_home_team_and_in_forward_field(self): f = Field(self.team_a, self.team_b) f.possession = Possession.HOME_TEAM f.set_position(Position(FIELD_MIN_X, FIELD_CENTER_Y))", "from field import * from data import Team, Skills from status import BallStatus,", "BallStatus.BOUNCE) # Move forwards def test_move_forward_when_possession_is_home_team_and_in_field_center(self): f = Field(self.team_a, self.team_b) f.possession = Possession.HOME_TEAM", "zone = f.get_field_zone() self.assertEqual(zone, FieldZone.FORWARDS) f.set_position(Position(4, 3)) zone = f.get_field_zone() self.assertEqual(zone, FieldZone.MID_FIELD) f.set_position(Position(6,", "1) def test_move_laterally_when_possession_is_home_team_and_in_field_center_and_move_right(self): f = Field(self.team_a, self.team_b) f.possession = Possession.HOME_TEAM f.move_laterally(LateralDirection.RIGHT) self.assertEqual(f.position.x, FIELD_CENTER_X)", "= Possession.HOME_TEAM f.move_laterally(LateralDirection.LEFT) self.assertEqual(f.position.x, FIELD_CENTER_X) self.assertEqual(f.position.y, FIELD_CENTER_Y + 1) def test_move_laterally_when_possession_is_home_team_and_in_field_center_and_move_right(self): f =", "FieldZone.RUCK) def test_get_field_zone_when_moving(self): f = Field(self.team_a, self.team_b) f.ball_status = BallStatus.MOVING f.set_position(Position(1, 1)) zone", "Possession.AWAY_TEAM f.move_forward() self.assertEqual(f.position.x, FIELD_CENTER_X + 1) self.assertEqual(f.position.y, FIELD_CENTER_Y) def test_move_forward_when_possession_is_away_team_and_at_forward_limit(self): f = Field(self.team_a,", "Possession.IN_CONTENTION) def test_get_field_zone_at_bounce(self): f = Field(self.team_a, self.team_b) zone = f.get_field_zone() self.assertEqual(zone, FieldZone.RUCK) def", "self.team_b) f.possession = Possession.AWAY_TEAM f.set_position(Position(FIELD_MIN_X, FIELD_CENTER_Y)) f.move_backward() self.assertEqual(f.position.x, FIELD_MIN_X) 
self.assertEqual(f.position.y, FIELD_CENTER_Y) def test_move_backward_when_possession_is_away_team_and_in_forward_field(self):", "import * from data import Team, Skills from status import BallStatus, FieldZone, LateralDirection,", "FIELD_CENTER_Y)) self.assertEqual(f.ball_status, BallStatus.BOUNCE) self.assertEqual(f.possession, Possession.IN_CONTENTION) self.assertEqual(f.teams[0], self.team_a) self.assertEqual(f.teams[1], self.team_b) def test_set_position(self): f =", "= Possession.HOME_TEAM f.move_backward() self.assertEqual(f.position.x, FIELD_CENTER_X + 1) self.assertEqual(f.position.y, FIELD_CENTER_Y) def test_move_backward_when_possession_is_home_team_and_at_back_limit(self): f =", "import BallStatus, FieldZone, LateralDirection, Possession class TestField(unittest.TestCase): def setUp(self): self.team_a = Team( #", "= Field(self.team_a, self.team_b) fs = f.field_status self.assertEqual(fs.possession, Possession.IN_CONTENTION) self.assertEqual(fs.ball_status, BallStatus.BOUNCE) # Move forwards", "1) def test_move_laterally_when_possession_is_home_team_and_at_left_side_and_move_left(self): f = Field(self.team_a, self.team_b) f.possession = Possession.HOME_TEAM f.set_position(Position(FIELD_CENTER_X, FIELD_MAX_Y)) f.move_laterally(LateralDirection.LEFT)", "Possession.HOME_TEAM f.move_backward() self.assertEqual(f.position.x, FIELD_CENTER_X + 1) self.assertEqual(f.position.y, FIELD_CENTER_Y) def test_move_backward_when_possession_is_home_team_and_at_back_limit(self): f = Field(self.team_a,", "AWAY_TEAM def test_move_laterally_when_possession_is_away_team_and_in_field_center_and_move_left(self): f = Field(self.team_a, self.team_b) f.possession = Possession.AWAY_TEAM f.move_laterally(LateralDirection.LEFT) self.assertEqual(f.position.x, FIELD_CENTER_X)", "0.5), # mid_field Skills(0.5, 0.5, 0.5), # backs Skills(0.5, 0.5, 0.5), # ruck", "f.set_position(Position(7, 5)) zone = f.get_field_zone() self.assertEqual(zone, FieldZone.BACKS) f.set_position(Position(9, 4)) zone = f.get_field_zone() self.assertEqual(zone,", "f = Field(self.team_a, self.team_b) f.possession = Possession.AWAY_TEAM f.move_backward() self.assertEqual(f.position.x, FIELD_CENTER_X - 1) self.assertEqual(f.position.y,", "= Field(self.team_a, self.team_b) f.possession = Possession.HOME_TEAM f.move_backward() self.assertEqual(f.position.x, FIELD_CENTER_X + 1) self.assertEqual(f.position.y, FIELD_CENTER_Y)", "Field(self.team_a, self.team_b) f.possession = Possession.AWAY_TEAM f.set_position(Position(FIELD_CENTER_X, FIELD_MIN_Y)) f.move_laterally(LateralDirection.LEFT) self.assertEqual(f.position.x, FIELD_CENTER_X) self.assertEqual(f.position.y, FIELD_MIN_Y) def", "self.assertEqual(f.ball_status, BallStatus.BOUNCE) self.assertEqual(f.possession, Possession.IN_CONTENTION) self.assertEqual(f.teams[0], self.team_a) self.assertEqual(f.teams[1], self.team_b) def test_set_position(self): f = Field(self.team_a,", "def test_set_position_x_greater_than_maximum(self): f = Field(self.team_a, self.team_b) f.set_position(Position(3, 6)) self.assertEqual(f.position.y, FIELD_MAX_Y) def test_centre_ball(self): f", "4)) zone = f.get_field_zone() self.assertEqual(zone, FieldZone.MID_FIELD) f.set_position(Position(7, 5)) zone = f.get_field_zone() self.assertEqual(zone, FieldZone.BACKS)", "0.5, 0.5), Skills(0.5, 0.5, 0.5), ) def test_init(self): f = Field(self.team_a, self.team_b) self.assertEqual(f.position,", "import unittest from field import * from data import Team, Skills from status", "import Team, Skills from status import BallStatus, 
FieldZone, LateralDirection, Possession class TestField(unittest.TestCase): def", "0.5), Skills(0.5, 0.5, 0.5), Skills(0.5, 0.5, 0.5), Skills(0.5, 0.5, 0.5), ) def test_init(self):", "Field(self.team_a, self.team_b) f.set_position(Position(10, 3)) self.assertEqual(f.position.x, FIELD_MAX_X) def test_set_position_x_less_than_minimum(self): f = Field(self.team_a, self.team_b) f.set_position(Position(3,", "= f.get_field_zone() self.assertEqual(zone, FieldZone.MID_FIELD) f.set_position(Position(6, 4)) zone = f.get_field_zone() self.assertEqual(zone, FieldZone.MID_FIELD) f.set_position(Position(7, 5))", "forwards def test_move_forward_when_possession_is_home_team_and_in_field_center(self): f = Field(self.team_a, self.team_b) f.possession = Possession.HOME_TEAM f.move_forward() self.assertEqual(f.position.x, FIELD_CENTER_X", "3)) self.assertEqual(f.position.x, FIELD_MAX_X) def test_set_position_x_less_than_minimum(self): f = Field(self.team_a, self.team_b) f.set_position(Position(3, 0)) self.assertEqual(f.position.y, FIELD_MIN_Y)", "self.assertEqual(f.position.x, FIELD_CENTER_X) self.assertEqual(f.position.y, FIELD_CENTER_Y + 1) def test_move_laterally_when_possession_is_away_team_and_at_left_side_and_move_left(self): f = Field(self.team_a, self.team_b) f.possession", "f = Field(self.team_a, self.team_b) f.possession = Possession.AWAY_TEAM f.move_laterally(LateralDirection.RIGHT) self.assertEqual(f.position.x, FIELD_CENTER_X) self.assertEqual(f.position.y, FIELD_CENTER_Y +", "Skills(0.5, 0.5, 0.5), # mid_field Skills(0.5, 0.5, 0.5), # backs Skills(0.5, 0.5, 0.5),", "Skills(0.5, 0.5, 0.5), # backs Skills(0.5, 0.5, 0.5), # ruck Skills(0.5, 0.5, 0.5),", "test_init(self): f = Field(self.team_a, self.team_b) self.assertEqual(f.position, Position(FIELD_CENTER_X, FIELD_CENTER_Y)) self.assertEqual(f.ball_status, BallStatus.BOUNCE) self.assertEqual(f.possession, Possession.IN_CONTENTION) self.assertEqual(f.teams[0],", "status import BallStatus, FieldZone, LateralDirection, Possession class TestField(unittest.TestCase): def setUp(self): self.team_a = Team(", "mid_field Skills(0.5, 0.5, 0.5), # backs Skills(0.5, 0.5, 0.5), # ruck Skills(0.5, 0.5,", "f.move_forward() self.assertEqual(f.position.x, FIELD_CENTER_X - 1) self.assertEqual(f.position.y, FIELD_CENTER_Y) def test_move_forward_when_possession_is_home_team_and_at_forward_limit(self): f = Field(self.team_a, self.team_b)", "+ 1) self.assertEqual(f.position.y, FIELD_CENTER_Y) def test_move_forward_when_possession_is_away_team_and_at_forward_limit(self): f = Field(self.team_a, self.team_b) f.possession = Possession.AWAY_TEAM", "= Possession.AWAY_TEAM f.set_position(Position(FIELD_MAX_X, FIELD_CENTER_Y)) f.move_backward() self.assertEqual(f.position.x, FIELD_MAX_X - 1) self.assertEqual(f.position.y, FIELD_CENTER_Y) # Move", "self.assertEqual(f.position.y, FIELD_CENTER_Y) ## Move backwards def test_move_backward_when_possession_is_home_team_and_in_field_center(self): f = Field(self.team_a, self.team_b) f.possession =", "def test_move_forward_when_possession_is_home_team_and_at_forward_limit(self): f = Field(self.team_a, self.team_b) f.possession = Possession.HOME_TEAM f.set_position(Position(FIELD_MIN_X, FIELD_CENTER_Y)) f.move_forward() self.assertEqual(f.position.x,", "Field(self.team_a, self.team_b) f.possession = Possession.HOME_TEAM f.set_position(Position(FIELD_MIN_X, FIELD_CENTER_Y)) f.move_backward() self.assertEqual(f.position.x, FIELD_MIN_X + 1) self.assertEqual(f.position.y,", "FIELD_MIN_X) self.assertEqual(f.position.y, FIELD_CENTER_Y) def 
test_move_backward_when_possession_is_away_team_and_in_forward_field(self): f = Field(self.team_a, self.team_b) f.possession = Possession.AWAY_TEAM f.set_position(Position(FIELD_MAX_X,", "test_set_position_x_less_than_minimum(self): f = Field(self.team_a, self.team_b) f.set_position(Position(-1, 3)) self.assertEqual(f.position.x, FIELD_MIN_X) def test_set_position_x_greater_than_maximum(self): f =", "0.5), Skills(0.5, 0.5, 0.5), ) def test_init(self): f = Field(self.team_a, self.team_b) self.assertEqual(f.position, Position(FIELD_CENTER_X,", "FIELD_CENTER_Y) def test_move_backward_when_possession_is_away_team_and_in_forward_field(self): f = Field(self.team_a, self.team_b) f.possession = Possession.AWAY_TEAM f.set_position(Position(FIELD_MAX_X, FIELD_CENTER_Y)) f.move_backward()", "= f.get_field_zone() self.assertEqual(zone, FieldZone.MID_FIELD) f.set_position(Position(7, 5)) zone = f.get_field_zone() self.assertEqual(zone, FieldZone.BACKS) f.set_position(Position(9, 4))", "self.team_b) f.possession = Possession.HOME_TEAM f.move_laterally(LateralDirection.RIGHT) self.assertEqual(f.position.x, FIELD_CENTER_X) self.assertEqual(f.position.y, FIELD_CENTER_Y - 1) def test_move_laterally_when_possession_is_home_team_and_at_left_side_and_move_left(self):", "Possession.AWAY_TEAM f.set_position(Position(FIELD_MAX_X, FIELD_CENTER_Y)) f.move_backward() self.assertEqual(f.position.x, FIELD_MAX_X - 1) self.assertEqual(f.position.y, FIELD_CENTER_Y) # Move laterally", "# backs Skills(0.5, 0.5, 0.5), # ruck Skills(0.5, 0.5, 0.5), ) self.team_b =", "f.possession = Possession.AWAY_TEAM f.move_forward() self.assertEqual(f.position.x, FIELD_CENTER_X + 1) self.assertEqual(f.position.y, FIELD_CENTER_Y) def test_move_forward_when_possession_is_away_team_and_at_forward_limit(self): f", "Skills(0.5, 0.5, 0.5), ) self.team_b = Team(\"BBB\", Skills(0.5, 0.5, 0.5), Skills(0.5, 0.5, 0.5),", "FIELD_MAX_X - 1) self.assertEqual(f.position.y, FIELD_CENTER_Y) def test_move_forward_when_possession_is_away_team_and_in_field_center(self): f = Field(self.team_a, self.team_b) f.possession =", "+ 1) self.assertEqual(f.position.y, FIELD_CENTER_Y) def test_move_backward_when_possession_is_away_team_and_in_field_center(self): f = Field(self.team_a, self.team_b) f.possession = Possession.AWAY_TEAM", "FIELD_CENTER_Y)) f.move_backward() self.assertEqual(f.position.x, FIELD_MIN_X + 1) self.assertEqual(f.position.y, FIELD_CENTER_Y) def test_move_backward_when_possession_is_away_team_and_in_field_center(self): f = Field(self.team_a,", "f.possession = Possession.HOME_TEAM f.move_backward() self.assertEqual(f.position.x, FIELD_CENTER_X + 1) self.assertEqual(f.position.y, FIELD_CENTER_Y) def test_move_backward_when_possession_is_home_team_and_at_back_limit(self): f", "self.assertEqual(f.position.y, FIELD_CENTER_Y) def test_move_forward_when_possession_is_away_team_and_in_back_field(self): f = Field(self.team_a, self.team_b) f.possession = Possession.AWAY_TEAM f.set_position(Position(FIELD_MIN_X, FIELD_CENTER_Y))", "f.get_field_zone() self.assertEqual(zone, FieldZone.BACKS) f.set_position(Position(9, 4)) zone = f.get_field_zone() self.assertEqual(zone, FieldZone.BACKS) def test_get_field_zone_when_ball_is_thrown_in(self): f", "def test_move_laterally_when_possession_is_away_team_and_at_left_side_and_move_left(self): f = Field(self.team_a, self.team_b) f.possession = Possession.AWAY_TEAM f.set_position(Position(FIELD_CENTER_X, FIELD_MIN_Y)) f.move_laterally(LateralDirection.LEFT) self.assertEqual(f.position.x,", "Field(self.team_a, 
self.team_b) f.possession = Possession.AWAY_TEAM f.set_position(Position(FIELD_MIN_X, FIELD_CENTER_Y)) f.move_backward() self.assertEqual(f.position.x, FIELD_MIN_X) self.assertEqual(f.position.y, FIELD_CENTER_Y) def", "self.team_b) f.set_position(Position(4, 3)) self.assertEqual(f.position.x, 4) self.assertEqual(f.position.y, 3) def test_set_position_x_less_than_minimum(self): f = Field(self.team_a, self.team_b)", "f.possession = Possession.AWAY_TEAM f.set_position(Position(FIELD_MAX_X, FIELD_CENTER_Y)) f.move_forward() self.assertEqual(f.position.x, FIELD_MAX_X) self.assertEqual(f.position.y, FIELD_CENTER_Y) def test_move_forward_when_possession_is_away_team_and_in_back_field(self): f", "f.move_backward() self.assertEqual(f.position.x, FIELD_MAX_X) self.assertEqual(f.position.y, FIELD_CENTER_Y) def test_move_backward_when_possession_is_home_team_and_in_forward_field(self): f = Field(self.team_a, self.team_b) f.possession =", "f = Field(self.team_a, self.team_b) f.possession = Possession.AWAY_TEAM f.move_forward() self.assertEqual(f.position.x, FIELD_CENTER_X + 1) self.assertEqual(f.position.y,", "= Team( # name \"AAA\", # forwards Skills(0.5, 0.5, 0.5), # mid_field Skills(0.5,", "f.get_field_zone() self.assertEqual(zone, FieldZone.RUCK) def test_get_field_zone_when_moving(self): f = Field(self.team_a, self.team_b) f.ball_status = BallStatus.MOVING f.set_position(Position(1,", "from status import BallStatus, FieldZone, LateralDirection, Possession class TestField(unittest.TestCase): def setUp(self): self.team_a =", "Skills(0.5, 0.5, 0.5), Skills(0.5, 0.5, 0.5), Skills(0.5, 0.5, 0.5), Skills(0.5, 0.5, 0.5), )", "FIELD_CENTER_X) self.assertEqual(f.position.y, FIELD_MIN_Y) def test_move_laterally_when_possession_is_away_team_and_at_right_side_and_move_right(self): f = Field(self.team_a, self.team_b) f.possession = Possession.AWAY_TEAM f.set_position(Position(FIELD_CENTER_X,", "Field(self.team_a, self.team_b) f.ball_status = BallStatus.MOVING f.set_position(Position(1, 1)) zone = f.get_field_zone() self.assertEqual(zone, FieldZone.FORWARDS) f.set_position(Position(3,", "f.get_field_zone() self.assertEqual(zone, FieldZone.RUCK) def test_field_status_returns_possession_and_ball_status(self): f = Field(self.team_a, self.team_b) fs = f.field_status self.assertEqual(fs.possession,", "FIELD_MIN_Y) def test_move_laterally_when_possession_is_away_team_and_at_right_side_and_move_right(self): f = Field(self.team_a, self.team_b) f.possession = Possession.AWAY_TEAM f.set_position(Position(FIELD_CENTER_X, FIELD_MAX_Y)) f.move_laterally(LateralDirection.RIGHT)", "f.set_position(Position(6, 4)) zone = f.get_field_zone() self.assertEqual(zone, FieldZone.MID_FIELD) f.set_position(Position(7, 5)) zone = f.get_field_zone() self.assertEqual(zone,", "self.team_b) zone = f.get_field_zone() self.assertEqual(zone, FieldZone.RUCK) def test_get_field_zone_when_moving(self): f = Field(self.team_a, self.team_b) f.ball_status", "self.assertEqual(zone, FieldZone.FORWARDS) f.set_position(Position(4, 3)) zone = f.get_field_zone() self.assertEqual(zone, FieldZone.MID_FIELD) f.set_position(Position(6, 4)) zone =", "f.move_forward() self.assertEqual(f.position.x, FIELD_CENTER_X + 1) self.assertEqual(f.position.y, FIELD_CENTER_Y) def test_move_forward_when_possession_is_away_team_and_at_forward_limit(self): f = Field(self.team_a, self.team_b)", "f.move_laterally(LateralDirection.RIGHT) self.assertEqual(f.position.x, FIELD_CENTER_X) self.assertEqual(f.position.y, FIELD_MIN_Y) # Move laterally - AWAY_TEAM def 
test_move_laterally_when_possession_is_away_team_and_in_field_center_and_move_left(self): f", "= Field(self.team_a, self.team_b) f.possession = Possession.AWAY_TEAM f.move_backward() self.assertEqual(f.position.x, FIELD_CENTER_X - 1) self.assertEqual(f.position.y, FIELD_CENTER_Y)", "test_move_forward_when_possession_is_home_team_and_at_forward_limit(self): f = Field(self.team_a, self.team_b) f.possession = Possession.HOME_TEAM f.set_position(Position(FIELD_MIN_X, FIELD_CENTER_Y)) f.move_forward() self.assertEqual(f.position.x, FIELD_MIN_X)", "test_move_laterally_when_possession_is_home_team_and_at_left_side_and_move_left(self): f = Field(self.team_a, self.team_b) f.possession = Possession.HOME_TEAM f.set_position(Position(FIELD_CENTER_X, FIELD_MAX_Y)) f.move_laterally(LateralDirection.LEFT) self.assertEqual(f.position.x, FIELD_CENTER_X)", "= Field(self.team_a, self.team_b) f.set_position(Position(3, 6)) self.assertEqual(f.position.y, FIELD_MAX_Y) def test_centre_ball(self): f = Field(self.team_a, self.team_b)", "1)) zone = f.get_field_zone() self.assertEqual(zone, FieldZone.FORWARDS) f.set_position(Position(3, 2)) zone = f.get_field_zone() self.assertEqual(zone, FieldZone.FORWARDS)", "FIELD_CENTER_Y)) f.move_forward() self.assertEqual(f.position.x, FIELD_MIN_X + 1) self.assertEqual(f.position.y, FIELD_CENTER_Y) ## Move backwards def test_move_backward_when_possession_is_home_team_and_in_field_center(self):", "self.assertEqual(zone, FieldZone.RUCK) def test_get_field_zone_when_moving(self): f = Field(self.team_a, self.team_b) f.ball_status = BallStatus.MOVING f.set_position(Position(1, 1))", "TestField(unittest.TestCase): def setUp(self): self.team_a = Team( # name \"AAA\", # forwards Skills(0.5, 0.5,", "Possession.HOME_TEAM f.set_position(Position(FIELD_CENTER_X, FIELD_MAX_Y)) f.move_laterally(LateralDirection.LEFT) self.assertEqual(f.position.x, FIELD_CENTER_X) self.assertEqual(f.position.y, FIELD_MAX_Y) def test_move_laterally_when_possession_is_home_team_and_at_right_side_and_move_right(self): f = Field(self.team_a,", "self.assertEqual(f.position.x, FIELD_CENTER_X) self.assertEqual(f.position.y, FIELD_MIN_Y) # Move laterally - AWAY_TEAM def test_move_laterally_when_possession_is_away_team_and_in_field_center_and_move_left(self): f =", "self.assertEqual(f.position.x, FIELD_MAX_X) self.assertEqual(f.position.y, FIELD_CENTER_Y) def test_move_forward_when_possession_is_away_team_and_in_back_field(self): f = Field(self.team_a, self.team_b) f.possession = Possession.AWAY_TEAM", "Field(self.team_a, self.team_b) fs = f.field_status self.assertEqual(fs.possession, Possession.IN_CONTENTION) self.assertEqual(fs.ball_status, BallStatus.BOUNCE) # Move forwards def", "= Field(self.team_a, self.team_b) f.possession = Possession.AWAY_TEAM f.set_position(Position(FIELD_MIN_X, FIELD_CENTER_Y)) f.move_forward() self.assertEqual(f.position.x, FIELD_MIN_X + 1)", "def test_move_laterally_when_possession_is_away_team_and_at_right_side_and_move_right(self): f = Field(self.team_a, self.team_b) f.possession = Possession.AWAY_TEAM f.set_position(Position(FIELD_CENTER_X, FIELD_MAX_Y)) f.move_laterally(LateralDirection.RIGHT) self.assertEqual(f.position.x,", "test_get_field_zone_when_ball_is_thrown_in(self): f = Field(self.team_a, self.team_b) f.ball_status = BallStatus.THROW_IN f.set_position(Position(6, 1)) zone = f.get_field_zone()", "self.team_b) f.possession = Possession.AWAY_TEAM f.move_backward() self.assertEqual(f.position.x, FIELD_CENTER_X - 1) self.assertEqual(f.position.y, FIELD_CENTER_Y) def 
test_move_backward_when_possession_is_away_team_and_at_back_limit(self):", "self.assertEqual(f.position.y, FIELD_CENTER_Y - 1) def test_move_laterally_when_possession_is_away_team_and_in_field_center_and_move_right(self): f = Field(self.team_a, self.team_b) f.possession = Possession.AWAY_TEAM", "FIELD_CENTER_Y) ## Move backwards def test_move_backward_when_possession_is_home_team_and_in_field_center(self): f = Field(self.team_a, self.team_b) f.possession = Possession.HOME_TEAM", "self.assertEqual(fs.possession, Possession.IN_CONTENTION) self.assertEqual(fs.ball_status, BallStatus.BOUNCE) # Move forwards def test_move_forward_when_possession_is_home_team_and_in_field_center(self): f = Field(self.team_a, self.team_b)", "zone = f.get_field_zone() self.assertEqual(zone, FieldZone.RUCK) def test_field_status_returns_possession_and_ball_status(self): f = Field(self.team_a, self.team_b) fs =", "Skills(0.5, 0.5, 0.5), Skills(0.5, 0.5, 0.5), ) def test_init(self): f = Field(self.team_a, self.team_b)", "f = Field(self.team_a, self.team_b) f.set_position(Position(4, 3)) self.assertEqual(f.position.x, 4) self.assertEqual(f.position.y, 3) def test_set_position_x_less_than_minimum(self): f", "self.team_b) f.ball_status = BallStatus.THROW_IN f.set_position(Position(6, 1)) zone = f.get_field_zone() self.assertEqual(zone, FieldZone.RUCK) def test_field_status_returns_possession_and_ball_status(self):", "f.set_position(Position(FIELD_MIN_X, FIELD_CENTER_Y)) f.move_forward() self.assertEqual(f.position.x, FIELD_MIN_X) self.assertEqual(f.position.y, FIELD_CENTER_Y) def test_move_forward_when_possession_is_home_team_and_in_back_field(self): f = Field(self.team_a, self.team_b)", "f.move_laterally(LateralDirection.LEFT) self.assertEqual(f.position.x, FIELD_CENTER_X) self.assertEqual(f.position.y, FIELD_CENTER_Y + 1) def test_move_laterally_when_possession_is_home_team_and_in_field_center_and_move_right(self): f = Field(self.team_a, self.team_b)", "FIELD_CENTER_X) self.assertEqual(f.position.y, FIELD_CENTER_Y + 1) def test_move_laterally_when_possession_is_away_team_and_at_left_side_and_move_left(self): f = Field(self.team_a, self.team_b) f.possession =", "self.assertEqual(f.position.x, FIELD_CENTER_X) self.assertEqual(f.position.y, FIELD_MAX_Y) def test_move_laterally_when_possession_is_home_team_and_at_right_side_and_move_right(self): f = Field(self.team_a, self.team_b) f.possession = Possession.HOME_TEAM", "1) self.assertEqual(f.position.y, FIELD_CENTER_Y) def test_move_backward_when_possession_is_away_team_and_at_back_limit(self): f = Field(self.team_a, self.team_b) f.possession = Possession.AWAY_TEAM f.set_position(Position(FIELD_MIN_X,", "self.assertEqual(zone, FieldZone.BACKS) def test_get_field_zone_when_ball_is_thrown_in(self): f = Field(self.team_a, self.team_b) f.ball_status = BallStatus.THROW_IN f.set_position(Position(6, 1))", "f.set_position(Position(FIELD_MIN_X, FIELD_CENTER_Y)) f.move_backward() self.assertEqual(f.position.x, FIELD_MIN_X + 1) self.assertEqual(f.position.y, FIELD_CENTER_Y) def test_move_backward_when_possession_is_away_team_and_in_field_center(self): f =", "f.possession = Possession.AWAY_TEAM f.move_backward() self.assertEqual(f.position.x, FIELD_CENTER_X - 1) self.assertEqual(f.position.y, FIELD_CENTER_Y) def test_move_backward_when_possession_is_away_team_and_at_back_limit(self): f", "test_get_field_zone_when_moving(self): f = Field(self.team_a, self.team_b) f.ball_status = BallStatus.MOVING f.set_position(Position(1, 1)) zone = f.get_field_zone()", 
"self.assertEqual(f.position.y, FIELD_CENTER_Y) def test_move_backward_when_possession_is_home_team_and_at_back_limit(self): f = Field(self.team_a, self.team_b) f.possession = Possession.HOME_TEAM f.set_position(Position(FIELD_MAX_X, FIELD_CENTER_Y))", "- 1) def test_move_laterally_when_possession_is_away_team_and_in_field_center_and_move_right(self): f = Field(self.team_a, self.team_b) f.possession = Possession.AWAY_TEAM f.move_laterally(LateralDirection.RIGHT) self.assertEqual(f.position.x,", "self.assertEqual(f.position.x, FIELD_CENTER_X + 1) self.assertEqual(f.position.y, FIELD_CENTER_Y) def test_move_forward_when_possession_is_away_team_and_at_forward_limit(self): f = Field(self.team_a, self.team_b) f.possession", "0.5), ) self.team_b = Team(\"BBB\", Skills(0.5, 0.5, 0.5), Skills(0.5, 0.5, 0.5), Skills(0.5, 0.5,", "0.5), # ruck Skills(0.5, 0.5, 0.5), ) self.team_b = Team(\"BBB\", Skills(0.5, 0.5, 0.5),", "Move backwards def test_move_backward_when_possession_is_home_team_and_in_field_center(self): f = Field(self.team_a, self.team_b) f.possession = Possession.HOME_TEAM f.move_backward() self.assertEqual(f.position.x,", "def test_set_position(self): f = Field(self.team_a, self.team_b) f.set_position(Position(4, 3)) self.assertEqual(f.position.x, 4) self.assertEqual(f.position.y, 3) def", "laterally - HOME_TEAM def test_move_laterally_when_possession_is_home_team_and_in_field_center_and_move_left(self): f = Field(self.team_a, self.team_b) f.possession = Possession.HOME_TEAM f.move_laterally(LateralDirection.LEFT)", "\"AAA\", # forwards Skills(0.5, 0.5, 0.5), # mid_field Skills(0.5, 0.5, 0.5), # backs", "f.move_forward() self.assertEqual(f.position.x, FIELD_MIN_X + 1) self.assertEqual(f.position.y, FIELD_CENTER_Y) ## Move backwards def test_move_backward_when_possession_is_home_team_and_in_field_center(self): f", "- HOME_TEAM def test_move_laterally_when_possession_is_home_team_and_in_field_center_and_move_left(self): f = Field(self.team_a, self.team_b) f.possession = Possession.HOME_TEAM f.move_laterally(LateralDirection.LEFT) self.assertEqual(f.position.x,", "f.set_position(Position(9, 4)) zone = f.get_field_zone() self.assertEqual(zone, FieldZone.BACKS) def test_get_field_zone_when_ball_is_thrown_in(self): f = Field(self.team_a, self.team_b)", "= Field(self.team_a, self.team_b) f.possession = Possession.AWAY_TEAM f.move_laterally(LateralDirection.RIGHT) self.assertEqual(f.position.x, FIELD_CENTER_X) self.assertEqual(f.position.y, FIELD_CENTER_Y + 1)", "self.team_b) f.ball_status = BallStatus.MOVING f.set_position(Position(1, 1)) zone = f.get_field_zone() self.assertEqual(zone, FieldZone.FORWARDS) f.set_position(Position(3, 2))", "= Field(self.team_a, self.team_b) f.set_position(Position(-1, 3)) self.assertEqual(f.position.x, FIELD_MIN_X) def test_set_position_x_greater_than_maximum(self): f = Field(self.team_a, self.team_b)", "f.set_position(Position(10, 3)) self.assertEqual(f.position.x, FIELD_MAX_X) def test_set_position_x_less_than_minimum(self): f = Field(self.team_a, self.team_b) f.set_position(Position(3, 0)) self.assertEqual(f.position.y,", "5)) zone = f.get_field_zone() self.assertEqual(zone, FieldZone.BACKS) f.set_position(Position(9, 4)) zone = f.get_field_zone() self.assertEqual(zone, FieldZone.BACKS)", "self.assertEqual(zone, FieldZone.BACKS) f.set_position(Position(9, 4)) zone = f.get_field_zone() self.assertEqual(zone, FieldZone.BACKS) def test_get_field_zone_when_ball_is_thrown_in(self): f =", "= Possession.HOME_TEAM f.set_position(Position(FIELD_CENTER_X, FIELD_MAX_Y)) 
f.move_laterally(LateralDirection.LEFT) self.assertEqual(f.position.x, FIELD_CENTER_X) self.assertEqual(f.position.y, FIELD_MAX_Y) def test_move_laterally_when_possession_is_home_team_and_at_right_side_and_move_right(self): f =", "= Field(self.team_a, self.team_b) f.possession = Possession.HOME_TEAM f.set_position(Position(FIELD_MAX_X, FIELD_CENTER_Y)) f.move_forward() self.assertEqual(f.position.x, FIELD_MAX_X - 1)", "1) def test_move_laterally_when_possession_is_away_team_and_in_field_center_and_move_right(self): f = Field(self.team_a, self.team_b) f.possession = Possession.AWAY_TEAM f.move_laterally(LateralDirection.RIGHT) self.assertEqual(f.position.x, FIELD_CENTER_X)", "self.assertEqual(f.possession, Possession.IN_CONTENTION) def test_get_field_zone_at_bounce(self): f = Field(self.team_a, self.team_b) zone = f.get_field_zone() self.assertEqual(zone, FieldZone.RUCK)", "FIELD_CENTER_Y - 1) def test_move_laterally_when_possession_is_home_team_and_at_left_side_and_move_left(self): f = Field(self.team_a, self.team_b) f.possession = Possession.HOME_TEAM f.set_position(Position(FIELD_CENTER_X,", "- 1) self.assertEqual(f.position.y, FIELD_CENTER_Y) # Move laterally - HOME_TEAM def test_move_laterally_when_possession_is_home_team_and_in_field_center_and_move_left(self): f =", "f.set_position(Position(FIELD_CENTER_X, FIELD_MIN_Y)) f.move_laterally(LateralDirection.RIGHT) self.assertEqual(f.position.x, FIELD_CENTER_X) self.assertEqual(f.position.y, FIELD_MIN_Y) # Move laterally - AWAY_TEAM def", "Field(self.team_a, self.team_b) f.set_position(Position(-1, 3)) self.assertEqual(f.position.x, FIELD_MIN_X) def test_set_position_x_greater_than_maximum(self): f = Field(self.team_a, self.team_b) f.set_position(Position(10,", "Possession.AWAY_TEAM f.set_position(Position(FIELD_MAX_X, FIELD_CENTER_Y)) f.move_forward() self.assertEqual(f.position.x, FIELD_MAX_X) self.assertEqual(f.position.y, FIELD_CENTER_Y) def test_move_forward_when_possession_is_away_team_and_in_back_field(self): f = Field(self.team_a,", "FIELD_CENTER_Y)) f.move_forward() self.assertEqual(f.position.x, FIELD_MAX_X - 1) self.assertEqual(f.position.y, FIELD_CENTER_Y) def test_move_forward_when_possession_is_away_team_and_in_field_center(self): f = Field(self.team_a,", "self.assertEqual(f.position.x, FIELD_MAX_X - 1) self.assertEqual(f.position.y, FIELD_CENTER_Y) # Move laterally - HOME_TEAM def test_move_laterally_when_possession_is_home_team_and_in_field_center_and_move_left(self):", "fs = f.field_status self.assertEqual(fs.possession, Possession.IN_CONTENTION) self.assertEqual(fs.ball_status, BallStatus.BOUNCE) # Move forwards def test_move_forward_when_possession_is_home_team_and_in_field_center(self): f", "f = Field(self.team_a, self.team_b) f.possession = Possession.HOME_TEAM f.move_backward() self.assertEqual(f.position.x, FIELD_CENTER_X + 1) self.assertEqual(f.position.y,", "f.get_field_zone() self.assertEqual(zone, FieldZone.FORWARDS) f.set_position(Position(4, 3)) zone = f.get_field_zone() self.assertEqual(zone, FieldZone.MID_FIELD) f.set_position(Position(6, 4)) zone", "f = Field(self.team_a, self.team_b) f.possession = Possession.HOME_TEAM f.set_position(Position(FIELD_MIN_X, FIELD_CENTER_Y)) f.move_backward() self.assertEqual(f.position.x, FIELD_MIN_X +", "1) self.assertEqual(f.position.y, FIELD_CENTER_Y) def test_move_backward_when_possession_is_away_team_and_in_field_center(self): f = Field(self.team_a, self.team_b) f.possession = Possession.AWAY_TEAM f.move_backward()", "f.ball_status = BallStatus.MOVING 
f.set_position(Position(1, 1)) zone = f.get_field_zone() self.assertEqual(zone, FieldZone.FORWARDS) f.set_position(Position(3, 2)) zone", "test_set_position_x_less_than_minimum(self): f = Field(self.team_a, self.team_b) f.set_position(Position(3, 0)) self.assertEqual(f.position.y, FIELD_MIN_Y) def test_set_position_x_greater_than_maximum(self): f =", "f.possession = Possession.HOME_TEAM f.set_position(Position(FIELD_MIN_X, FIELD_CENTER_Y)) f.move_forward() self.assertEqual(f.position.x, FIELD_MIN_X) self.assertEqual(f.position.y, FIELD_CENTER_Y) def test_move_forward_when_possession_is_home_team_and_in_back_field(self): f", "0.5), Skills(0.5, 0.5, 0.5), Skills(0.5, 0.5, 0.5), ) def test_init(self): f = Field(self.team_a,", "LateralDirection, Possession class TestField(unittest.TestCase): def setUp(self): self.team_a = Team( # name \"AAA\", #", "0.5, 0.5), ) def test_init(self): f = Field(self.team_a, self.team_b) self.assertEqual(f.position, Position(FIELD_CENTER_X, FIELD_CENTER_Y)) self.assertEqual(f.ball_status,", "def test_set_position_x_greater_than_maximum(self): f = Field(self.team_a, self.team_b) f.set_position(Position(10, 3)) self.assertEqual(f.position.x, FIELD_MAX_X) def test_set_position_x_less_than_minimum(self): f", "f.move_backward() self.assertEqual(f.position.x, FIELD_MIN_X + 1) self.assertEqual(f.position.y, FIELD_CENTER_Y) def test_move_backward_when_possession_is_away_team_and_in_field_center(self): f = Field(self.team_a, self.team_b)", "test_set_position_x_greater_than_maximum(self): f = Field(self.team_a, self.team_b) f.set_position(Position(10, 3)) self.assertEqual(f.position.x, FIELD_MAX_X) def test_set_position_x_less_than_minimum(self): f =", "f.move_laterally(LateralDirection.LEFT) self.assertEqual(f.position.x, FIELD_CENTER_X) self.assertEqual(f.position.y, FIELD_CENTER_Y - 1) def test_move_laterally_when_possession_is_away_team_and_in_field_center_and_move_right(self): f = Field(self.team_a, self.team_b)", "f.set_position(Position(FIELD_CENTER_X, FIELD_MAX_Y)) f.move_laterally(LateralDirection.LEFT) self.assertEqual(f.position.x, FIELD_CENTER_X) self.assertEqual(f.position.y, FIELD_MAX_Y) def test_move_laterally_when_possession_is_home_team_and_at_right_side_and_move_right(self): f = Field(self.team_a, self.team_b)", "Possession.HOME_TEAM f.set_position(Position(FIELD_MIN_X, FIELD_CENTER_Y)) f.move_backward() self.assertEqual(f.position.x, FIELD_MIN_X + 1) self.assertEqual(f.position.y, FIELD_CENTER_Y) def test_move_backward_when_possession_is_away_team_and_in_field_center(self): f", "BallStatus, FieldZone, LateralDirection, Possession class TestField(unittest.TestCase): def setUp(self): self.team_a = Team( # name", "= f.get_field_zone() self.assertEqual(zone, FieldZone.BACKS) f.set_position(Position(9, 4)) zone = f.get_field_zone() self.assertEqual(zone, FieldZone.BACKS) def test_get_field_zone_when_ball_is_thrown_in(self):", "Field(self.team_a, self.team_b) f.set_position(Position(4, 3)) self.assertEqual(f.position.x, 4) self.assertEqual(f.position.y, 3) def test_set_position_x_less_than_minimum(self): f = Field(self.team_a,", "= f.field_status self.assertEqual(fs.possession, Possession.IN_CONTENTION) self.assertEqual(fs.ball_status, BallStatus.BOUNCE) # Move forwards def test_move_forward_when_possession_is_home_team_and_in_field_center(self): f =", "f.possession = Possession.HOME_TEAM f.set_position(Position(FIELD_MAX_X, FIELD_CENTER_Y)) f.move_forward() self.assertEqual(f.position.x, FIELD_MAX_X - 1) self.assertEqual(f.position.y, FIELD_CENTER_Y) def", "f 
= Field(self.team_a, self.team_b) f.ball_status = BallStatus.THROW_IN f.set_position(Position(6, 1)) zone = f.get_field_zone() self.assertEqual(zone,", "Move laterally - AWAY_TEAM def test_move_laterally_when_possession_is_away_team_and_in_field_center_and_move_left(self): f = Field(self.team_a, self.team_b) f.possession = Possession.AWAY_TEAM", "f = Field(self.team_a, self.team_b) f.possession = Possession.AWAY_TEAM f.set_position(Position(FIELD_MAX_X, FIELD_CENTER_Y)) f.move_forward() self.assertEqual(f.position.x, FIELD_MAX_X) self.assertEqual(f.position.y,", "Field(self.team_a, self.team_b) f.possession = Possession.HOME_TEAM f.set_position(Position(FIELD_MIN_X, FIELD_CENTER_Y)) f.move_forward() self.assertEqual(f.position.x, FIELD_MIN_X) self.assertEqual(f.position.y, FIELD_CENTER_Y) def", "1) self.assertEqual(f.position.y, FIELD_CENTER_Y) def test_move_forward_when_possession_is_away_team_and_in_field_center(self): f = Field(self.team_a, self.team_b) f.possession = Possession.AWAY_TEAM f.move_forward()", "FieldZone.MID_FIELD) f.set_position(Position(6, 4)) zone = f.get_field_zone() self.assertEqual(zone, FieldZone.MID_FIELD) f.set_position(Position(7, 5)) zone = f.get_field_zone()", "= Field(self.team_a, self.team_b) f.set_position(Position(4, 3)) self.assertEqual(f.position.x, 4) self.assertEqual(f.position.y, 3) def test_set_position_x_less_than_minimum(self): f =", "1) self.assertEqual(f.position.y, FIELD_CENTER_Y) def test_move_forward_when_possession_is_away_team_and_at_forward_limit(self): f = Field(self.team_a, self.team_b) f.possession = Possession.AWAY_TEAM f.set_position(Position(FIELD_MAX_X,", "FieldZone, LateralDirection, Possession class TestField(unittest.TestCase): def setUp(self): self.team_a = Team( # name \"AAA\",", "f = Field(self.team_a, self.team_b) f.possession = Possession.AWAY_TEAM f.set_position(Position(FIELD_MIN_X, FIELD_CENTER_Y)) f.move_forward() self.assertEqual(f.position.x, FIELD_MIN_X +", "self.team_a = Team( # name \"AAA\", # forwards Skills(0.5, 0.5, 0.5), # mid_field", "self.assertEqual(f.position.y, FIELD_CENTER_Y) def test_move_forward_when_possession_is_home_team_and_at_forward_limit(self): f = Field(self.team_a, self.team_b) f.possession = Possession.HOME_TEAM f.set_position(Position(FIELD_MIN_X, FIELD_CENTER_Y))", "f.possession = Possession.AWAY_TEAM f.set_position(Position(FIELD_CENTER_X, FIELD_MAX_Y)) f.move_laterally(LateralDirection.RIGHT) self.assertEqual(f.position.x, FIELD_CENTER_X) self.assertEqual(f.position.y, FIELD_MAX_Y) if __name__ ==", "test_set_position_x_greater_than_maximum(self): f = Field(self.team_a, self.team_b) f.set_position(Position(3, 6)) self.assertEqual(f.position.y, FIELD_MAX_Y) def test_centre_ball(self): f =", "self.team_b) f.possession = Possession.HOME_TEAM f.set_position(Position(FIELD_CENTER_X, FIELD_MAX_Y)) f.move_laterally(LateralDirection.LEFT) self.assertEqual(f.position.x, FIELD_CENTER_X) self.assertEqual(f.position.y, FIELD_MAX_Y) def test_move_laterally_when_possession_is_home_team_and_at_right_side_and_move_right(self):", "self.assertEqual(f.position, Position(FIELD_CENTER_X, FIELD_CENTER_Y)) self.assertEqual(f.ball_status, BallStatus.BOUNCE) self.assertEqual(f.possession, Possession.IN_CONTENTION) self.assertEqual(f.teams[0], self.team_a) self.assertEqual(f.teams[1], self.team_b) def test_set_position(self):", "Possession.AWAY_TEAM f.set_position(Position(FIELD_MIN_X, FIELD_CENTER_Y)) f.move_forward() self.assertEqual(f.position.x, FIELD_MIN_X + 1) self.assertEqual(f.position.y, FIELD_CENTER_Y) ## 
Move backwards", "self.team_b) f.possession = Possession.HOME_TEAM f.set_position(Position(FIELD_CENTER_X, FIELD_MIN_Y)) f.move_laterally(LateralDirection.RIGHT) self.assertEqual(f.position.x, FIELD_CENTER_X) self.assertEqual(f.position.y, FIELD_MIN_Y) # Move", "= Possession.AWAY_TEAM f.move_laterally(LateralDirection.RIGHT) self.assertEqual(f.position.x, FIELD_CENTER_X) self.assertEqual(f.position.y, FIELD_CENTER_Y + 1) def test_move_laterally_when_possession_is_away_team_and_at_left_side_and_move_left(self): f =", "Possession.AWAY_TEAM f.move_backward() self.assertEqual(f.position.x, FIELD_CENTER_X - 1) self.assertEqual(f.position.y, FIELD_CENTER_Y) def test_move_backward_when_possession_is_away_team_and_at_back_limit(self): f = Field(self.team_a,", "f = Field(self.team_a, self.team_b) f.possession = Possession.HOME_TEAM f.set_position(Position(FIELD_MIN_X, FIELD_CENTER_Y)) f.move_forward() self.assertEqual(f.position.x, FIELD_MIN_X) self.assertEqual(f.position.y,", "= Field(self.team_a, self.team_b) f.possession = Possession.AWAY_TEAM f.move_forward() self.assertEqual(f.position.x, FIELD_CENTER_X + 1) self.assertEqual(f.position.y, FIELD_CENTER_Y)", "FIELD_MIN_Y) def test_set_position_x_greater_than_maximum(self): f = Field(self.team_a, self.team_b) f.set_position(Position(3, 6)) self.assertEqual(f.position.y, FIELD_MAX_Y) def test_centre_ball(self):", "test_move_forward_when_possession_is_home_team_and_in_field_center(self): f = Field(self.team_a, self.team_b) f.possession = Possession.HOME_TEAM f.move_forward() self.assertEqual(f.position.x, FIELD_CENTER_X - 1)", "Team( # name \"AAA\", # forwards Skills(0.5, 0.5, 0.5), # mid_field Skills(0.5, 0.5,", "self.assertEqual(zone, FieldZone.MID_FIELD) f.set_position(Position(7, 5)) zone = f.get_field_zone() self.assertEqual(zone, FieldZone.BACKS) f.set_position(Position(9, 4)) zone =", "Possession.HOME_TEAM f.set_position(Position(FIELD_MIN_X, FIELD_CENTER_Y)) f.move_forward() self.assertEqual(f.position.x, FIELD_MIN_X) self.assertEqual(f.position.y, FIELD_CENTER_Y) def test_move_forward_when_possession_is_home_team_and_in_back_field(self): f = Field(self.team_a,", "BallStatus.MOVING f.set_position(Position(1, 1)) zone = f.get_field_zone() self.assertEqual(zone, FieldZone.FORWARDS) f.set_position(Position(3, 2)) zone = f.get_field_zone()", "FIELD_CENTER_X + 1) self.assertEqual(f.position.y, FIELD_CENTER_Y) def test_move_backward_when_possession_is_home_team_and_at_back_limit(self): f = Field(self.team_a, self.team_b) f.possession =", "self.assertEqual(f.position.x, FIELD_MIN_X) self.assertEqual(f.position.y, FIELD_CENTER_Y) def test_move_forward_when_possession_is_home_team_and_in_back_field(self): f = Field(self.team_a, self.team_b) f.possession = Possession.HOME_TEAM", "Field(self.team_a, self.team_b) f.possession = Possession.AWAY_TEAM f.move_laterally(LateralDirection.LEFT) self.assertEqual(f.position.x, FIELD_CENTER_X) self.assertEqual(f.position.y, FIELD_CENTER_Y - 1) def", "self.assertEqual(f.position.x, FIELD_MIN_X) def test_set_position_x_greater_than_maximum(self): f = Field(self.team_a, self.team_b) f.set_position(Position(10, 3)) self.assertEqual(f.position.x, FIELD_MAX_X) def", "0.5, 0.5), Skills(0.5, 0.5, 0.5), Skills(0.5, 0.5, 0.5), ) def test_init(self): f =", "f = Field(self.team_a, self.team_b) f.set_position(Position(-1, 3)) self.assertEqual(f.position.x, FIELD_MIN_X) def test_set_position_x_greater_than_maximum(self): f = Field(self.team_a,", "self.assertEqual(f.position.y, FIELD_MAX_Y) def 
test_move_laterally_when_possession_is_home_team_and_at_right_side_and_move_right(self): f = Field(self.team_a, self.team_b) f.possession = Possession.HOME_TEAM f.set_position(Position(FIELD_CENTER_X, FIELD_MIN_Y))", "Possession.IN_CONTENTION) self.assertEqual(f.teams[0], self.team_a) self.assertEqual(f.teams[1], self.team_b) def test_set_position(self): f = Field(self.team_a, self.team_b) f.set_position(Position(4, 3))", "self.assertEqual(zone, FieldZone.RUCK) def test_field_status_returns_possession_and_ball_status(self): f = Field(self.team_a, self.team_b) fs = f.field_status self.assertEqual(fs.possession, Possession.IN_CONTENTION)", "self.assertEqual(f.position.x, FIELD_CENTER_X + 1) self.assertEqual(f.position.y, FIELD_CENTER_Y) def test_move_backward_when_possession_is_home_team_and_at_back_limit(self): f = Field(self.team_a, self.team_b) f.possession", "Field(self.team_a, self.team_b) f.possession = Possession.AWAY_TEAM f.move_backward() self.assertEqual(f.position.x, FIELD_CENTER_X - 1) self.assertEqual(f.position.y, FIELD_CENTER_Y) def", "Possession.IN_CONTENTION) self.assertEqual(fs.ball_status, BallStatus.BOUNCE) # Move forwards def test_move_forward_when_possession_is_home_team_and_in_field_center(self): f = Field(self.team_a, self.team_b) f.possession", "def test_move_backward_when_possession_is_home_team_and_at_back_limit(self): f = Field(self.team_a, self.team_b) f.possession = Possession.HOME_TEAM f.set_position(Position(FIELD_MAX_X, FIELD_CENTER_Y)) f.move_backward() self.assertEqual(f.position.x,", "self.team_b) f.possession = Possession.AWAY_TEAM f.set_position(Position(FIELD_MAX_X, FIELD_CENTER_Y)) f.move_backward() self.assertEqual(f.position.x, FIELD_MAX_X - 1) self.assertEqual(f.position.y, FIELD_CENTER_Y)", "FIELD_CENTER_Y + 1) def test_move_laterally_when_possession_is_away_team_and_at_left_side_and_move_left(self): f = Field(self.team_a, self.team_b) f.possession = Possession.AWAY_TEAM f.set_position(Position(FIELD_CENTER_X,", "Field(self.team_a, self.team_b) f.possession = Possession.HOME_TEAM f.set_position(Position(FIELD_MAX_X, FIELD_CENTER_Y)) f.move_forward() self.assertEqual(f.position.x, FIELD_MAX_X - 1) self.assertEqual(f.position.y,", "Possession.AWAY_TEAM f.move_laterally(LateralDirection.LEFT) self.assertEqual(f.position.x, FIELD_CENTER_X) self.assertEqual(f.position.y, FIELD_CENTER_Y - 1) def test_move_laterally_when_possession_is_away_team_and_in_field_center_and_move_right(self): f = Field(self.team_a,", "forwards Skills(0.5, 0.5, 0.5), # mid_field Skills(0.5, 0.5, 0.5), # backs Skills(0.5, 0.5,", "3)) self.assertEqual(f.position.x, FIELD_MIN_X) def test_set_position_x_greater_than_maximum(self): f = Field(self.team_a, self.team_b) f.set_position(Position(10, 3)) self.assertEqual(f.position.x, FIELD_MAX_X)", "self.assertEqual(f.position.y, FIELD_CENTER_Y) def test_move_forward_when_possession_is_away_team_and_in_field_center(self): f = Field(self.team_a, self.team_b) f.possession = Possession.AWAY_TEAM f.move_forward() self.assertEqual(f.position.x,", "Position(FIELD_CENTER_X, FIELD_CENTER_Y)) self.assertEqual(f.ball_status, BallStatus.BOUNCE) self.assertEqual(f.possession, Possession.IN_CONTENTION) self.assertEqual(f.teams[0], self.team_a) self.assertEqual(f.teams[1], self.team_b) def test_set_position(self): f", "test_field_status_returns_possession_and_ball_status(self): f = Field(self.team_a, self.team_b) fs = f.field_status self.assertEqual(fs.possession, Possession.IN_CONTENTION) self.assertEqual(fs.ball_status, BallStatus.BOUNCE) #", "def 
test_get_field_zone_at_bounce(self): f = Field(self.team_a, self.team_b) zone = f.get_field_zone() self.assertEqual(zone, FieldZone.RUCK) def test_get_field_zone_when_moving(self):", "self.team_b) f.possession = Possession.AWAY_TEAM f.set_position(Position(FIELD_MIN_X, FIELD_CENTER_Y)) f.move_forward() self.assertEqual(f.position.x, FIELD_MIN_X + 1) self.assertEqual(f.position.y, FIELD_CENTER_Y)", "<gh_stars>0 import unittest from field import * from data import Team, Skills from", "4)) zone = f.get_field_zone() self.assertEqual(zone, FieldZone.BACKS) def test_get_field_zone_when_ball_is_thrown_in(self): f = Field(self.team_a, self.team_b) f.ball_status", "self.team_b) f.possession = Possession.HOME_TEAM f.move_laterally(LateralDirection.LEFT) self.assertEqual(f.position.x, FIELD_CENTER_X) self.assertEqual(f.position.y, FIELD_CENTER_Y + 1) def test_move_laterally_when_possession_is_home_team_and_in_field_center_and_move_right(self):", "self.team_b) f.possession = Possession.HOME_TEAM f.move_forward() self.assertEqual(f.position.x, FIELD_CENTER_X - 1) self.assertEqual(f.position.y, FIELD_CENTER_Y) def test_move_forward_when_possession_is_home_team_and_at_forward_limit(self):", "f.set_position(Position(1, 1)) zone = f.get_field_zone() self.assertEqual(zone, FieldZone.FORWARDS) f.set_position(Position(3, 2)) zone = f.get_field_zone() self.assertEqual(zone,", "6)) self.assertEqual(f.position.y, FIELD_MAX_Y) def test_centre_ball(self): f = Field(self.team_a, self.team_b) f.centre_ball() self.assertEqual(f.position, Position(FIELD_CENTER_X, FIELD_CENTER_Y))", "HOME_TEAM def test_move_laterally_when_possession_is_home_team_and_in_field_center_and_move_left(self): f = Field(self.team_a, self.team_b) f.possession = Possession.HOME_TEAM f.move_laterally(LateralDirection.LEFT) self.assertEqual(f.position.x, FIELD_CENTER_X)", "f.get_field_zone() self.assertEqual(zone, FieldZone.BACKS) def test_get_field_zone_when_ball_is_thrown_in(self): f = Field(self.team_a, self.team_b) f.ball_status = BallStatus.THROW_IN f.set_position(Position(6,", "FIELD_CENTER_Y) # Move laterally - HOME_TEAM def test_move_laterally_when_possession_is_home_team_and_in_field_center_and_move_left(self): f = Field(self.team_a, self.team_b) f.possession", "f.set_position(Position(3, 6)) self.assertEqual(f.position.y, FIELD_MAX_Y) def test_centre_ball(self): f = Field(self.team_a, self.team_b) f.centre_ball() self.assertEqual(f.position, Position(FIELD_CENTER_X,", "FIELD_CENTER_X) self.assertEqual(f.position.y, FIELD_MIN_Y) # Move laterally - AWAY_TEAM def test_move_laterally_when_possession_is_away_team_and_in_field_center_and_move_left(self): f = Field(self.team_a,", "FIELD_CENTER_Y) def test_move_forward_when_possession_is_away_team_and_at_forward_limit(self): f = Field(self.team_a, self.team_b) f.possession = Possession.AWAY_TEAM f.set_position(Position(FIELD_MAX_X, FIELD_CENTER_Y)) f.move_forward()", "3)) zone = f.get_field_zone() self.assertEqual(zone, FieldZone.MID_FIELD) f.set_position(Position(6, 4)) zone = f.get_field_zone() self.assertEqual(zone, FieldZone.MID_FIELD)", "f.set_position(Position(FIELD_MAX_X, FIELD_CENTER_Y)) f.move_backward() self.assertEqual(f.position.x, FIELD_MAX_X) self.assertEqual(f.position.y, FIELD_CENTER_Y) def test_move_backward_when_possession_is_home_team_and_in_forward_field(self): f = Field(self.team_a, self.team_b)", "def test_move_backward_when_possession_is_away_team_and_in_forward_field(self): f = Field(self.team_a, self.team_b) f.possession = Possession.AWAY_TEAM 
f.set_position(Position(FIELD_MAX_X, FIELD_CENTER_Y)) f.move_backward() self.assertEqual(f.position.x,", "self.team_b) f.set_position(Position(-1, 3)) self.assertEqual(f.position.x, FIELD_MIN_X) def test_set_position_x_greater_than_maximum(self): f = Field(self.team_a, self.team_b) f.set_position(Position(10, 3))", "f.centre_ball() self.assertEqual(f.position, Position(FIELD_CENTER_X, FIELD_CENTER_Y)) self.assertEqual(f.ball_status, BallStatus.BOUNCE) self.assertEqual(f.possession, Possession.IN_CONTENTION) def test_get_field_zone_at_bounce(self): f = Field(self.team_a,", "FieldZone.FORWARDS) f.set_position(Position(3, 2)) zone = f.get_field_zone() self.assertEqual(zone, FieldZone.FORWARDS) f.set_position(Position(4, 3)) zone = f.get_field_zone()", "2)) zone = f.get_field_zone() self.assertEqual(zone, FieldZone.FORWARDS) f.set_position(Position(4, 3)) zone = f.get_field_zone() self.assertEqual(zone, FieldZone.MID_FIELD)", "zone = f.get_field_zone() self.assertEqual(zone, FieldZone.RUCK) def test_get_field_zone_when_moving(self): f = Field(self.team_a, self.team_b) f.ball_status =", "backwards def test_move_backward_when_possession_is_home_team_and_in_field_center(self): f = Field(self.team_a, self.team_b) f.possession = Possession.HOME_TEAM f.move_backward() self.assertEqual(f.position.x, FIELD_CENTER_X", "self.assertEqual(f.position.y, FIELD_MIN_Y) # Move laterally - AWAY_TEAM def test_move_laterally_when_possession_is_away_team_and_in_field_center_and_move_left(self): f = Field(self.team_a, self.team_b)", "def test_set_position_x_less_than_minimum(self): f = Field(self.team_a, self.team_b) f.set_position(Position(-1, 3)) self.assertEqual(f.position.x, FIELD_MIN_X) def test_set_position_x_greater_than_maximum(self): f", "def test_get_field_zone_when_moving(self): f = Field(self.team_a, self.team_b) f.ball_status = BallStatus.MOVING f.set_position(Position(1, 1)) zone =", "FieldZone.BACKS) def test_get_field_zone_when_ball_is_thrown_in(self): f = Field(self.team_a, self.team_b) f.ball_status = BallStatus.THROW_IN f.set_position(Position(6, 1)) zone", "self.assertEqual(f.position.x, FIELD_CENTER_X) self.assertEqual(f.position.y, FIELD_CENTER_Y - 1) def test_move_laterally_when_possession_is_away_team_and_in_field_center_and_move_right(self): f = Field(self.team_a, self.team_b) f.possession", "self.team_b) f.set_position(Position(3, 0)) self.assertEqual(f.position.y, FIELD_MIN_Y) def test_set_position_x_greater_than_maximum(self): f = Field(self.team_a, self.team_b) f.set_position(Position(3, 6))", "f = Field(self.team_a, self.team_b) f.centre_ball() self.assertEqual(f.position, Position(FIELD_CENTER_X, FIELD_CENTER_Y)) self.assertEqual(f.ball_status, BallStatus.BOUNCE) self.assertEqual(f.possession, Possession.IN_CONTENTION) def", "test_move_laterally_when_possession_is_away_team_and_at_right_side_and_move_right(self): f = Field(self.team_a, self.team_b) f.possession = Possession.AWAY_TEAM f.set_position(Position(FIELD_CENTER_X, FIELD_MAX_Y)) f.move_laterally(LateralDirection.RIGHT) self.assertEqual(f.position.x, FIELD_CENTER_X)", "Field(self.team_a, self.team_b) f.possession = Possession.HOME_TEAM f.set_position(Position(FIELD_CENTER_X, FIELD_MAX_Y)) f.move_laterally(LateralDirection.LEFT) self.assertEqual(f.position.x, FIELD_CENTER_X) self.assertEqual(f.position.y, FIELD_MAX_Y) def", "= f.get_field_zone() self.assertEqual(zone, FieldZone.RUCK) def test_get_field_zone_when_moving(self): f = Field(self.team_a, self.team_b) f.ball_status = BallStatus.MOVING", "self.team_b) f.possession = 
Possession.AWAY_TEAM f.set_position(Position(FIELD_CENTER_X, FIELD_MIN_Y)) f.move_laterally(LateralDirection.LEFT) self.assertEqual(f.position.x, FIELD_CENTER_X) self.assertEqual(f.position.y, FIELD_MIN_Y) def test_move_laterally_when_possession_is_away_team_and_at_right_side_and_move_right(self):", "FIELD_MIN_X) self.assertEqual(f.position.y, FIELD_CENTER_Y) def test_move_forward_when_possession_is_home_team_and_in_back_field(self): f = Field(self.team_a, self.team_b) f.possession = Possession.HOME_TEAM f.set_position(Position(FIELD_MAX_X,", "= Field(self.team_a, self.team_b) f.centre_ball() self.assertEqual(f.position, Position(FIELD_CENTER_X, FIELD_CENTER_Y)) self.assertEqual(f.ball_status, BallStatus.BOUNCE) self.assertEqual(f.possession, Possession.IN_CONTENTION) def test_get_field_zone_at_bounce(self):", "= Field(self.team_a, self.team_b) f.possession = Possession.HOME_TEAM f.set_position(Position(FIELD_MIN_X, FIELD_CENTER_Y)) f.move_backward() self.assertEqual(f.position.x, FIELD_MIN_X + 1)", "f = Field(self.team_a, self.team_b) f.possession = Possession.HOME_TEAM f.move_laterally(LateralDirection.LEFT) self.assertEqual(f.position.x, FIELD_CENTER_X) self.assertEqual(f.position.y, FIELD_CENTER_Y +", "self.team_b) f.set_position(Position(3, 6)) self.assertEqual(f.position.y, FIELD_MAX_Y) def test_centre_ball(self): f = Field(self.team_a, self.team_b) f.centre_ball() self.assertEqual(f.position,", "f = Field(self.team_a, self.team_b) f.possession = Possession.AWAY_TEAM f.set_position(Position(FIELD_MIN_X, FIELD_CENTER_Y)) f.move_backward() self.assertEqual(f.position.x, FIELD_MIN_X) self.assertEqual(f.position.y,", "= Possession.AWAY_TEAM f.set_position(Position(FIELD_MIN_X, FIELD_CENTER_Y)) f.move_forward() self.assertEqual(f.position.x, FIELD_MIN_X + 1) self.assertEqual(f.position.y, FIELD_CENTER_Y) ## Move", "= Field(self.team_a, self.team_b) f.possession = Possession.AWAY_TEAM f.set_position(Position(FIELD_MAX_X, FIELD_CENTER_Y)) f.move_forward() self.assertEqual(f.position.x, FIELD_MAX_X) self.assertEqual(f.position.y, FIELD_CENTER_Y)", "f = Field(self.team_a, self.team_b) f.possession = Possession.AWAY_TEAM f.set_position(Position(FIELD_CENTER_X, FIELD_MIN_Y)) f.move_laterally(LateralDirection.LEFT) self.assertEqual(f.position.x, FIELD_CENTER_X) self.assertEqual(f.position.y,", "f.possession = Possession.HOME_TEAM f.move_laterally(LateralDirection.RIGHT) self.assertEqual(f.position.x, FIELD_CENTER_X) self.assertEqual(f.position.y, FIELD_CENTER_Y - 1) def test_move_laterally_when_possession_is_home_team_and_at_left_side_and_move_left(self): f", "FIELD_MIN_Y)) f.move_laterally(LateralDirection.LEFT) self.assertEqual(f.position.x, FIELD_CENTER_X) self.assertEqual(f.position.y, FIELD_MIN_Y) def test_move_laterally_when_possession_is_away_team_and_at_right_side_and_move_right(self): f = Field(self.team_a, self.team_b) f.possession", "f.set_position(Position(FIELD_MAX_X, FIELD_CENTER_Y)) f.move_forward() self.assertEqual(f.position.x, FIELD_MAX_X - 1) self.assertEqual(f.position.y, FIELD_CENTER_Y) def test_move_forward_when_possession_is_away_team_and_in_field_center(self): f =", "f.move_backward() self.assertEqual(f.position.x, FIELD_MAX_X - 1) self.assertEqual(f.position.y, FIELD_CENTER_Y) # Move laterally - HOME_TEAM def", "Field(self.team_a, self.team_b) self.assertEqual(f.position, Position(FIELD_CENTER_X, FIELD_CENTER_Y)) self.assertEqual(f.ball_status, BallStatus.BOUNCE) self.assertEqual(f.possession, Possession.IN_CONTENTION) self.assertEqual(f.teams[0], self.team_a) 
self.assertEqual(f.teams[1], self.team_b)", "= Possession.HOME_TEAM f.move_laterally(LateralDirection.RIGHT) self.assertEqual(f.position.x, FIELD_CENTER_X) self.assertEqual(f.position.y, FIELD_CENTER_Y - 1) def test_move_laterally_when_possession_is_home_team_and_at_left_side_and_move_left(self): f =", "self.assertEqual(f.position.y, FIELD_CENTER_Y) def test_move_backward_when_possession_is_away_team_and_at_back_limit(self): f = Field(self.team_a, self.team_b) f.possession = Possession.AWAY_TEAM f.set_position(Position(FIELD_MIN_X, FIELD_CENTER_Y))", "- 1) self.assertEqual(f.position.y, FIELD_CENTER_Y) def test_move_backward_when_possession_is_away_team_and_at_back_limit(self): f = Field(self.team_a, self.team_b) f.possession = Possession.AWAY_TEAM", "self.team_b) f.possession = Possession.HOME_TEAM f.move_backward() self.assertEqual(f.position.x, FIELD_CENTER_X + 1) self.assertEqual(f.position.y, FIELD_CENTER_Y) def test_move_backward_when_possession_is_home_team_and_at_back_limit(self):", "0.5, 0.5), # ruck Skills(0.5, 0.5, 0.5), ) self.team_b = Team(\"BBB\", Skills(0.5, 0.5,", "f.move_forward() self.assertEqual(f.position.x, FIELD_MIN_X) self.assertEqual(f.position.y, FIELD_CENTER_Y) def test_move_forward_when_possession_is_home_team_and_in_back_field(self): f = Field(self.team_a, self.team_b) f.possession =", "= Field(self.team_a, self.team_b) f.possession = Possession.AWAY_TEAM f.set_position(Position(FIELD_MAX_X, FIELD_CENTER_Y)) f.move_backward() self.assertEqual(f.position.x, FIELD_MAX_X - 1)", "BallStatus.THROW_IN f.set_position(Position(6, 1)) zone = f.get_field_zone() self.assertEqual(zone, FieldZone.RUCK) def test_field_status_returns_possession_and_ball_status(self): f = Field(self.team_a,", "zone = f.get_field_zone() self.assertEqual(zone, FieldZone.MID_FIELD) f.set_position(Position(7, 5)) zone = f.get_field_zone() self.assertEqual(zone, FieldZone.BACKS) f.set_position(Position(9,", "Field(self.team_a, self.team_b) f.set_position(Position(3, 0)) self.assertEqual(f.position.y, FIELD_MIN_Y) def test_set_position_x_greater_than_maximum(self): f = Field(self.team_a, self.team_b) f.set_position(Position(3,", "zone = f.get_field_zone() self.assertEqual(zone, FieldZone.FORWARDS) f.set_position(Position(3, 2)) zone = f.get_field_zone() self.assertEqual(zone, FieldZone.FORWARDS) f.set_position(Position(4,", "= Possession.AWAY_TEAM f.move_forward() self.assertEqual(f.position.x, FIELD_CENTER_X + 1) self.assertEqual(f.position.y, FIELD_CENTER_Y) def test_move_forward_when_possession_is_away_team_and_at_forward_limit(self): f =", "FIELD_CENTER_Y - 1) def test_move_laterally_when_possession_is_away_team_and_in_field_center_and_move_right(self): f = Field(self.team_a, self.team_b) f.possession = Possession.AWAY_TEAM f.move_laterally(LateralDirection.RIGHT)", "= Field(self.team_a, self.team_b) f.ball_status = BallStatus.THROW_IN f.set_position(Position(6, 1)) zone = f.get_field_zone() self.assertEqual(zone, FieldZone.RUCK)", "self.assertEqual(fs.ball_status, BallStatus.BOUNCE) # Move forwards def test_move_forward_when_possession_is_home_team_and_in_field_center(self): f = Field(self.team_a, self.team_b) f.possession =", "self.team_b) f.possession = Possession.HOME_TEAM f.set_position(Position(FIELD_MIN_X, FIELD_CENTER_Y)) f.move_backward() self.assertEqual(f.position.x, FIELD_MIN_X + 1) self.assertEqual(f.position.y, FIELD_CENTER_Y)", "Field(self.team_a, self.team_b) f.possession = Possession.HOME_TEAM f.move_laterally(LateralDirection.LEFT) self.assertEqual(f.position.x, FIELD_CENTER_X) 
self.assertEqual(f.position.y, FIELD_CENTER_Y + 1) def", "= Possession.HOME_TEAM f.set_position(Position(FIELD_MAX_X, FIELD_CENTER_Y)) f.move_forward() self.assertEqual(f.position.x, FIELD_MAX_X - 1) self.assertEqual(f.position.y, FIELD_CENTER_Y) def test_move_forward_when_possession_is_away_team_and_in_field_center(self):", "FIELD_CENTER_X - 1) self.assertEqual(f.position.y, FIELD_CENTER_Y) def test_move_forward_when_possession_is_home_team_and_at_forward_limit(self): f = Field(self.team_a, self.team_b) f.possession =", "FIELD_MAX_X) self.assertEqual(f.position.y, FIELD_CENTER_Y) def test_move_forward_when_possession_is_away_team_and_in_back_field(self): f = Field(self.team_a, self.team_b) f.possession = Possession.AWAY_TEAM f.set_position(Position(FIELD_MIN_X,", "= Field(self.team_a, self.team_b) f.possession = Possession.AWAY_TEAM f.move_laterally(LateralDirection.LEFT) self.assertEqual(f.position.x, FIELD_CENTER_X) self.assertEqual(f.position.y, FIELD_CENTER_Y - 1)", "+ 1) self.assertEqual(f.position.y, FIELD_CENTER_Y) def test_move_backward_when_possession_is_home_team_and_at_back_limit(self): f = Field(self.team_a, self.team_b) f.possession = Possession.HOME_TEAM", "f.set_position(Position(FIELD_CENTER_X, FIELD_MIN_Y)) f.move_laterally(LateralDirection.LEFT) self.assertEqual(f.position.x, FIELD_CENTER_X) self.assertEqual(f.position.y, FIELD_MIN_Y) def test_move_laterally_when_possession_is_away_team_and_at_right_side_and_move_right(self): f = Field(self.team_a, self.team_b)", "f = Field(self.team_a, self.team_b) f.possession = Possession.HOME_TEAM f.set_position(Position(FIELD_CENTER_X, FIELD_MAX_Y)) f.move_laterally(LateralDirection.LEFT) self.assertEqual(f.position.x, FIELD_CENTER_X) self.assertEqual(f.position.y,", "= Possession.HOME_TEAM f.set_position(Position(FIELD_MAX_X, FIELD_CENTER_Y)) f.move_backward() self.assertEqual(f.position.x, FIELD_MAX_X) self.assertEqual(f.position.y, FIELD_CENTER_Y) def test_move_backward_when_possession_is_home_team_and_in_forward_field(self): f =", "def test_move_backward_when_possession_is_home_team_and_in_field_center(self): f = Field(self.team_a, self.team_b) f.possession = Possession.HOME_TEAM f.move_backward() self.assertEqual(f.position.x, FIELD_CENTER_X +", "Field(self.team_a, self.team_b) f.possession = Possession.AWAY_TEAM f.move_laterally(LateralDirection.RIGHT) self.assertEqual(f.position.x, FIELD_CENTER_X) self.assertEqual(f.position.y, FIELD_CENTER_Y + 1) def", "- AWAY_TEAM def test_move_laterally_when_possession_is_away_team_and_in_field_center_and_move_left(self): f = Field(self.team_a, self.team_b) f.possession = Possession.AWAY_TEAM f.move_laterally(LateralDirection.LEFT) self.assertEqual(f.position.x,", "Possession.AWAY_TEAM f.set_position(Position(FIELD_CENTER_X, FIELD_MIN_Y)) f.move_laterally(LateralDirection.LEFT) self.assertEqual(f.position.x, FIELD_CENTER_X) self.assertEqual(f.position.y, FIELD_MIN_Y) def test_move_laterally_when_possession_is_away_team_and_at_right_side_and_move_right(self): f = Field(self.team_a,", "f.get_field_zone() self.assertEqual(zone, FieldZone.MID_FIELD) f.set_position(Position(7, 5)) zone = f.get_field_zone() self.assertEqual(zone, FieldZone.BACKS) f.set_position(Position(9, 4)) zone", "self.team_b) fs = f.field_status self.assertEqual(fs.possession, Possession.IN_CONTENTION) self.assertEqual(fs.ball_status, BallStatus.BOUNCE) # Move forwards def test_move_forward_when_possession_is_home_team_and_in_field_center(self):", "self.assertEqual(f.position.y, FIELD_MAX_Y) def test_centre_ball(self): f = 
Field(self.team_a, self.team_b) f.centre_ball() self.assertEqual(f.position, Position(FIELD_CENTER_X, FIELD_CENTER_Y)) self.assertEqual(f.ball_status,", "FIELD_MAX_Y) def test_centre_ball(self): f = Field(self.team_a, self.team_b) f.centre_ball() self.assertEqual(f.position, Position(FIELD_CENTER_X, FIELD_CENTER_Y)) self.assertEqual(f.ball_status, BallStatus.BOUNCE)", "f = Field(self.team_a, self.team_b) f.possession = Possession.HOME_TEAM f.move_forward() self.assertEqual(f.position.x, FIELD_CENTER_X - 1) self.assertEqual(f.position.y,", "f.possession = Possession.AWAY_TEAM f.set_position(Position(FIELD_MIN_X, FIELD_CENTER_Y)) f.move_backward() self.assertEqual(f.position.x, FIELD_MIN_X) self.assertEqual(f.position.y, FIELD_CENTER_Y) def test_move_backward_when_possession_is_away_team_and_in_forward_field(self): f", "self.assertEqual(f.position.y, FIELD_CENTER_Y + 1) def test_move_laterally_when_possession_is_away_team_and_at_left_side_and_move_left(self): f = Field(self.team_a, self.team_b) f.possession = Possession.AWAY_TEAM", "Field(self.team_a, self.team_b) f.possession = Possession.HOME_TEAM f.move_backward() self.assertEqual(f.position.x, FIELD_CENTER_X + 1) self.assertEqual(f.position.y, FIELD_CENTER_Y) def", "# forwards Skills(0.5, 0.5, 0.5), # mid_field Skills(0.5, 0.5, 0.5), # backs Skills(0.5,", "self.team_b) f.possession = Possession.HOME_TEAM f.set_position(Position(FIELD_MIN_X, FIELD_CENTER_Y)) f.move_forward() self.assertEqual(f.position.x, FIELD_MIN_X) self.assertEqual(f.position.y, FIELD_CENTER_Y) def test_move_forward_when_possession_is_home_team_and_in_back_field(self):", "f.set_position(Position(6, 1)) zone = f.get_field_zone() self.assertEqual(zone, FieldZone.RUCK) def test_field_status_returns_possession_and_ball_status(self): f = Field(self.team_a, self.team_b)", "FIELD_CENTER_Y) def test_move_forward_when_possession_is_away_team_and_in_field_center(self): f = Field(self.team_a, self.team_b) f.possession = Possession.AWAY_TEAM f.move_forward() self.assertEqual(f.position.x, FIELD_CENTER_X", "BallStatus.BOUNCE) self.assertEqual(f.possession, Possession.IN_CONTENTION) def test_get_field_zone_at_bounce(self): f = Field(self.team_a, self.team_b) zone = f.get_field_zone() self.assertEqual(zone,", "FieldZone.BACKS) f.set_position(Position(9, 4)) zone = f.get_field_zone() self.assertEqual(zone, FieldZone.BACKS) def test_get_field_zone_when_ball_is_thrown_in(self): f = Field(self.team_a,", "def test_move_laterally_when_possession_is_home_team_and_at_right_side_and_move_right(self): f = Field(self.team_a, self.team_b) f.possession = Possession.HOME_TEAM f.set_position(Position(FIELD_CENTER_X, FIELD_MIN_Y)) f.move_laterally(LateralDirection.RIGHT) self.assertEqual(f.position.x,", "f = Field(self.team_a, self.team_b) f.set_position(Position(10, 3)) self.assertEqual(f.position.x, FIELD_MAX_X) def test_set_position_x_less_than_minimum(self): f = Field(self.team_a,", "test_move_laterally_when_possession_is_away_team_and_in_field_center_and_move_left(self): f = Field(self.team_a, self.team_b) f.possession = Possession.AWAY_TEAM f.move_laterally(LateralDirection.LEFT) self.assertEqual(f.position.x, FIELD_CENTER_X) self.assertEqual(f.position.y, FIELD_CENTER_Y", "f.set_position(Position(3, 0)) self.assertEqual(f.position.y, FIELD_MIN_Y) def test_set_position_x_greater_than_maximum(self): f = Field(self.team_a, self.team_b) f.set_position(Position(3, 6)) self.assertEqual(f.position.y,", "4) self.assertEqual(f.position.y, 3) def test_set_position_x_less_than_minimum(self): f = 
Field(self.team_a, self.team_b) f.set_position(Position(-1, 3)) self.assertEqual(f.position.x, FIELD_MIN_X)", "f.move_forward() self.assertEqual(f.position.x, FIELD_MAX_X - 1) self.assertEqual(f.position.y, FIELD_CENTER_Y) def test_move_forward_when_possession_is_away_team_and_in_field_center(self): f = Field(self.team_a, self.team_b)", "Field(self.team_a, self.team_b) f.possession = Possession.AWAY_TEAM f.move_forward() self.assertEqual(f.position.x, FIELD_CENTER_X + 1) self.assertEqual(f.position.y, FIELD_CENTER_Y) def", "* from data import Team, Skills from status import BallStatus, FieldZone, LateralDirection, Possession", "test_move_laterally_when_possession_is_away_team_and_in_field_center_and_move_right(self): f = Field(self.team_a, self.team_b) f.possession = Possession.AWAY_TEAM f.move_laterally(LateralDirection.RIGHT) self.assertEqual(f.position.x, FIELD_CENTER_X) self.assertEqual(f.position.y, FIELD_CENTER_Y", "def test_move_laterally_when_possession_is_away_team_and_in_field_center_and_move_left(self): f = Field(self.team_a, self.team_b) f.possession = Possession.AWAY_TEAM f.move_laterally(LateralDirection.LEFT) self.assertEqual(f.position.x, FIELD_CENTER_X) self.assertEqual(f.position.y,", "= Possession.AWAY_TEAM f.set_position(Position(FIELD_MIN_X, FIELD_CENTER_Y)) f.move_backward() self.assertEqual(f.position.x, FIELD_MIN_X) self.assertEqual(f.position.y, FIELD_CENTER_Y) def test_move_backward_when_possession_is_away_team_and_in_forward_field(self): f =", "Skills(0.5, 0.5, 0.5), # ruck Skills(0.5, 0.5, 0.5), ) self.team_b = Team(\"BBB\", Skills(0.5,", "laterally - AWAY_TEAM def test_move_laterally_when_possession_is_away_team_and_in_field_center_and_move_left(self): f = Field(self.team_a, self.team_b) f.possession = Possession.AWAY_TEAM f.move_laterally(LateralDirection.LEFT)", "Team, Skills from status import BallStatus, FieldZone, LateralDirection, Possession class TestField(unittest.TestCase): def setUp(self):", "Possession.AWAY_TEAM f.set_position(Position(FIELD_CENTER_X, FIELD_MAX_Y)) f.move_laterally(LateralDirection.RIGHT) self.assertEqual(f.position.x, FIELD_CENTER_X) self.assertEqual(f.position.y, FIELD_MAX_Y) if __name__ == \"__main__\": unittest.main()", "def test_move_forward_when_possession_is_home_team_and_in_back_field(self): f = Field(self.team_a, self.team_b) f.possession = Possession.HOME_TEAM f.set_position(Position(FIELD_MAX_X, FIELD_CENTER_Y)) f.move_forward() self.assertEqual(f.position.x,", "self.team_b) f.possession = Possession.HOME_TEAM f.set_position(Position(FIELD_MAX_X, FIELD_CENTER_Y)) f.move_backward() self.assertEqual(f.position.x, FIELD_MAX_X) self.assertEqual(f.position.y, FIELD_CENTER_Y) def test_move_backward_when_possession_is_home_team_and_in_forward_field(self):", "setUp(self): self.team_a = Team( # name \"AAA\", # forwards Skills(0.5, 0.5, 0.5), #", "= Field(self.team_a, self.team_b) f.possession = Possession.HOME_TEAM f.set_position(Position(FIELD_CENTER_X, FIELD_MIN_Y)) f.move_laterally(LateralDirection.RIGHT) self.assertEqual(f.position.x, FIELD_CENTER_X) self.assertEqual(f.position.y, FIELD_MIN_Y)", "test_move_backward_when_possession_is_home_team_and_at_back_limit(self): f = Field(self.team_a, self.team_b) f.possession = Possession.HOME_TEAM f.set_position(Position(FIELD_MAX_X, FIELD_CENTER_Y)) f.move_backward() self.assertEqual(f.position.x, FIELD_MAX_X)", "FIELD_CENTER_Y)) f.move_forward() self.assertEqual(f.position.x, FIELD_MAX_X) self.assertEqual(f.position.y, FIELD_CENTER_Y) def 
test_move_forward_when_possession_is_away_team_and_in_back_field(self): f = Field(self.team_a, self.team_b) f.possession", "self.assertEqual(f.position.x, FIELD_MIN_X + 1) self.assertEqual(f.position.y, FIELD_CENTER_Y) def test_move_backward_when_possession_is_away_team_and_in_field_center(self): f = Field(self.team_a, self.team_b) f.possession", "def test_move_laterally_when_possession_is_away_team_and_in_field_center_and_move_right(self): f = Field(self.team_a, self.team_b) f.possession = Possession.AWAY_TEAM f.move_laterally(LateralDirection.RIGHT) self.assertEqual(f.position.x, FIELD_CENTER_X) self.assertEqual(f.position.y,", "FIELD_CENTER_X) self.assertEqual(f.position.y, FIELD_CENTER_Y - 1) def test_move_laterally_when_possession_is_home_team_and_at_left_side_and_move_left(self): f = Field(self.team_a, self.team_b) f.possession =", "self.assertEqual(zone, FieldZone.FORWARDS) f.set_position(Position(3, 2)) zone = f.get_field_zone() self.assertEqual(zone, FieldZone.FORWARDS) f.set_position(Position(4, 3)) zone =", "- 1) self.assertEqual(f.position.y, FIELD_CENTER_Y) def test_move_forward_when_possession_is_away_team_and_in_field_center(self): f = Field(self.team_a, self.team_b) f.possession = Possession.AWAY_TEAM", "f.possession = Possession.AWAY_TEAM f.move_laterally(LateralDirection.LEFT) self.assertEqual(f.position.x, FIELD_CENTER_X) self.assertEqual(f.position.y, FIELD_CENTER_Y - 1) def test_move_laterally_when_possession_is_away_team_and_in_field_center_and_move_right(self): f", "= Field(self.team_a, self.team_b) f.possession = Possession.AWAY_TEAM f.set_position(Position(FIELD_CENTER_X, FIELD_MIN_Y)) f.move_laterally(LateralDirection.LEFT) self.assertEqual(f.position.x, FIELD_CENTER_X) self.assertEqual(f.position.y, FIELD_MIN_Y)", "test_centre_ball(self): f = Field(self.team_a, self.team_b) f.centre_ball() self.assertEqual(f.position, Position(FIELD_CENTER_X, FIELD_CENTER_Y)) self.assertEqual(f.ball_status, BallStatus.BOUNCE) self.assertEqual(f.possession, Possession.IN_CONTENTION)", "f.set_position(Position(4, 3)) zone = f.get_field_zone() self.assertEqual(zone, FieldZone.MID_FIELD) f.set_position(Position(6, 4)) zone = f.get_field_zone() self.assertEqual(zone,", "Field(self.team_a, self.team_b) f.centre_ball() self.assertEqual(f.position, Position(FIELD_CENTER_X, FIELD_CENTER_Y)) self.assertEqual(f.ball_status, BallStatus.BOUNCE) self.assertEqual(f.possession, Possession.IN_CONTENTION) def test_get_field_zone_at_bounce(self): f", "FIELD_CENTER_Y)) f.move_backward() self.assertEqual(f.position.x, FIELD_MIN_X) self.assertEqual(f.position.y, FIELD_CENTER_Y) def test_move_backward_when_possession_is_away_team_and_in_forward_field(self): f = Field(self.team_a, self.team_b) f.possession", "Possession.HOME_TEAM f.set_position(Position(FIELD_MAX_X, FIELD_CENTER_Y)) f.move_forward() self.assertEqual(f.position.x, FIELD_MAX_X - 1) self.assertEqual(f.position.y, FIELD_CENTER_Y) def test_move_forward_when_possession_is_away_team_and_in_field_center(self): f", "def test_move_forward_when_possession_is_home_team_and_in_field_center(self): f = Field(self.team_a, self.team_b) f.possession = Possession.HOME_TEAM f.move_forward() self.assertEqual(f.position.x, FIELD_CENTER_X -", "zone = f.get_field_zone() self.assertEqual(zone, FieldZone.BACKS) def test_get_field_zone_when_ball_is_thrown_in(self): f = Field(self.team_a, self.team_b) f.ball_status =", "FIELD_MAX_X) self.assertEqual(f.position.y, FIELD_CENTER_Y) def test_move_backward_when_possession_is_home_team_and_in_forward_field(self): f = 
Field(self.team_a, self.team_b) f.possession = Possession.HOME_TEAM f.set_position(Position(FIELD_MIN_X,", "def test_move_backward_when_possession_is_home_team_and_in_forward_field(self): f = Field(self.team_a, self.team_b) f.possession = Possession.HOME_TEAM f.set_position(Position(FIELD_MIN_X, FIELD_CENTER_Y)) f.move_backward() self.assertEqual(f.position.x,", "test_move_backward_when_possession_is_away_team_and_in_forward_field(self): f = Field(self.team_a, self.team_b) f.possession = Possession.AWAY_TEAM f.set_position(Position(FIELD_MAX_X, FIELD_CENTER_Y)) f.move_backward() self.assertEqual(f.position.x, FIELD_MAX_X", "+ 1) def test_move_laterally_when_possession_is_away_team_and_at_left_side_and_move_left(self): f = Field(self.team_a, self.team_b) f.possession = Possession.AWAY_TEAM f.set_position(Position(FIELD_CENTER_X, FIELD_MIN_Y))", "self.assertEqual(f.position.y, FIELD_CENTER_Y) def test_move_backward_when_possession_is_away_team_and_in_field_center(self): f = Field(self.team_a, self.team_b) f.possession = Possession.AWAY_TEAM f.move_backward() self.assertEqual(f.position.x,", "1) def test_move_laterally_when_possession_is_away_team_and_at_left_side_and_move_left(self): f = Field(self.team_a, self.team_b) f.possession = Possession.AWAY_TEAM f.set_position(Position(FIELD_CENTER_X, FIELD_MIN_Y)) f.move_laterally(LateralDirection.LEFT)", "Field(self.team_a, self.team_b) f.ball_status = BallStatus.THROW_IN f.set_position(Position(6, 1)) zone = f.get_field_zone() self.assertEqual(zone, FieldZone.RUCK) def", "FIELD_CENTER_Y) def test_move_forward_when_possession_is_away_team_and_in_back_field(self): f = Field(self.team_a, self.team_b) f.possession = Possession.AWAY_TEAM f.set_position(Position(FIELD_MIN_X, FIELD_CENTER_Y)) f.move_forward()", "f.set_position(Position(FIELD_MAX_X, FIELD_CENTER_Y)) f.move_forward() self.assertEqual(f.position.x, FIELD_MAX_X) self.assertEqual(f.position.y, FIELD_CENTER_Y) def test_move_forward_when_possession_is_away_team_and_in_back_field(self): f = Field(self.team_a, self.team_b)", "def test_set_position_x_less_than_minimum(self): f = Field(self.team_a, self.team_b) f.set_position(Position(3, 0)) self.assertEqual(f.position.y, FIELD_MIN_Y) def test_set_position_x_greater_than_maximum(self): f", "self.team_b = Team(\"BBB\", Skills(0.5, 0.5, 0.5), Skills(0.5, 0.5, 0.5), Skills(0.5, 0.5, 0.5), Skills(0.5,", "FIELD_CENTER_X + 1) self.assertEqual(f.position.y, FIELD_CENTER_Y) def test_move_forward_when_possession_is_away_team_and_at_forward_limit(self): f = Field(self.team_a, self.team_b) f.possession =", "3) def test_set_position_x_less_than_minimum(self): f = Field(self.team_a, self.team_b) f.set_position(Position(-1, 3)) self.assertEqual(f.position.x, FIELD_MIN_X) def test_set_position_x_greater_than_maximum(self):", "def test_move_laterally_when_possession_is_home_team_and_in_field_center_and_move_right(self): f = Field(self.team_a, self.team_b) f.possession = Possession.HOME_TEAM f.move_laterally(LateralDirection.RIGHT) self.assertEqual(f.position.x, FIELD_CENTER_X) self.assertEqual(f.position.y,", "Possession.HOME_TEAM f.set_position(Position(FIELD_CENTER_X, FIELD_MIN_Y)) f.move_laterally(LateralDirection.RIGHT) self.assertEqual(f.position.x, FIELD_CENTER_X) self.assertEqual(f.position.y, FIELD_MIN_Y) # Move laterally - AWAY_TEAM", "backs Skills(0.5, 0.5, 0.5), # ruck Skills(0.5, 0.5, 0.5), ) self.team_b = Team(\"BBB\",", "= Possession.AWAY_TEAM f.set_position(Position(FIELD_MAX_X, FIELD_CENTER_Y)) f.move_forward() self.assertEqual(f.position.x, 
FIELD_MAX_X) self.assertEqual(f.position.y, FIELD_CENTER_Y) def test_move_forward_when_possession_is_away_team_and_in_back_field(self): f =", "0.5, 0.5), ) self.team_b = Team(\"BBB\", Skills(0.5, 0.5, 0.5), Skills(0.5, 0.5, 0.5), Skills(0.5,", "= Field(self.team_a, self.team_b) f.possession = Possession.HOME_TEAM f.set_position(Position(FIELD_MAX_X, FIELD_CENTER_Y)) f.move_backward() self.assertEqual(f.position.x, FIELD_MAX_X) self.assertEqual(f.position.y, FIELD_CENTER_Y)", "f.possession = Possession.AWAY_TEAM f.move_laterally(LateralDirection.RIGHT) self.assertEqual(f.position.x, FIELD_CENTER_X) self.assertEqual(f.position.y, FIELD_CENTER_Y + 1) def test_move_laterally_when_possession_is_away_team_and_at_left_side_and_move_left(self): f", "self.assertEqual(f.position.x, FIELD_MAX_X - 1) self.assertEqual(f.position.y, FIELD_CENTER_Y) def test_move_forward_when_possession_is_away_team_and_in_field_center(self): f = Field(self.team_a, self.team_b) f.possession", "= f.get_field_zone() self.assertEqual(zone, FieldZone.BACKS) def test_get_field_zone_when_ball_is_thrown_in(self): f = Field(self.team_a, self.team_b) f.ball_status = BallStatus.THROW_IN", "Skills(0.5, 0.5, 0.5), ) def test_init(self): f = Field(self.team_a, self.team_b) self.assertEqual(f.position, Position(FIELD_CENTER_X, FIELD_CENTER_Y))", "+ 1) self.assertEqual(f.position.y, FIELD_CENTER_Y) ## Move backwards def test_move_backward_when_possession_is_home_team_and_in_field_center(self): f = Field(self.team_a, self.team_b)", "FIELD_MAX_X - 1) self.assertEqual(f.position.y, FIELD_CENTER_Y) # Move laterally - HOME_TEAM def test_move_laterally_when_possession_is_home_team_and_in_field_center_and_move_left(self): f", "f.possession = Possession.HOME_TEAM f.set_position(Position(FIELD_MIN_X, FIELD_CENTER_Y)) f.move_backward() self.assertEqual(f.position.x, FIELD_MIN_X + 1) self.assertEqual(f.position.y, FIELD_CENTER_Y) def", "= BallStatus.THROW_IN f.set_position(Position(6, 1)) zone = f.get_field_zone() self.assertEqual(zone, FieldZone.RUCK) def test_field_status_returns_possession_and_ball_status(self): f =", "self.assertEqual(f.position.x, FIELD_MAX_X) self.assertEqual(f.position.y, FIELD_CENTER_Y) def test_move_backward_when_possession_is_home_team_and_in_forward_field(self): f = Field(self.team_a, self.team_b) f.possession = Possession.HOME_TEAM", "self.team_b) f.possession = Possession.AWAY_TEAM f.set_position(Position(FIELD_CENTER_X, FIELD_MAX_Y)) f.move_laterally(LateralDirection.RIGHT) self.assertEqual(f.position.x, FIELD_CENTER_X) self.assertEqual(f.position.y, FIELD_MAX_Y) if __name__", "f.possession = Possession.AWAY_TEAM f.set_position(Position(FIELD_MIN_X, FIELD_CENTER_Y)) f.move_forward() self.assertEqual(f.position.x, FIELD_MIN_X + 1) self.assertEqual(f.position.y, FIELD_CENTER_Y) ##", "Position(FIELD_CENTER_X, FIELD_CENTER_Y)) self.assertEqual(f.ball_status, BallStatus.BOUNCE) self.assertEqual(f.possession, Possession.IN_CONTENTION) def test_get_field_zone_at_bounce(self): f = Field(self.team_a, self.team_b) zone", "= Field(self.team_a, self.team_b) zone = f.get_field_zone() self.assertEqual(zone, FieldZone.RUCK) def test_get_field_zone_when_moving(self): f = Field(self.team_a,", "f.possession = Possession.HOME_TEAM f.set_position(Position(FIELD_CENTER_X, FIELD_MAX_Y)) f.move_laterally(LateralDirection.LEFT) self.assertEqual(f.position.x, FIELD_CENTER_X) self.assertEqual(f.position.y, FIELD_MAX_Y) def test_move_laterally_when_possession_is_home_team_and_at_right_side_and_move_right(self): f", "Possession.HOME_TEAM 
import unittest

from field import *
from data import Team, Skills
from status import BallStatus, FieldZone, LateralDirection, Possession


class TestField(unittest.TestCase):

    def setUp(self):
        self.team_a = Team(
            "AAA",
            Skills(0.5, 0.5, 0.5),
            Skills(0.5, 0.5, 0.5),
            Skills(0.5, 0.5, 0.5),
            # ruck
            Skills(0.5, 0.5, 0.5),
        )
        self.team_b = Team(
            "BBB",
            Skills(0.5, 0.5, 0.5),
            Skills(0.5, 0.5, 0.5),
            Skills(0.5, 0.5, 0.5),
            Skills(0.5, 0.5, 0.5),
        )

    def test_initial_state(self):
        f = Field(self.team_a, self.team_b)
        self.assertEqual(f.position, Position(FIELD_CENTER_X, FIELD_CENTER_Y))
        self.assertEqual(f.ball_status, BallStatus.BOUNCE)
        self.assertEqual(f.possession, Possession.IN_CONTENTION)
        self.assertEqual(f.teams[0], self.team_a)
        self.assertEqual(f.teams[1], self.team_b)

    def test_set_position(self):
        f = Field(self.team_a, self.team_b)
        f.set_position(Position(4, 3))
        self.assertEqual(f.position.x, 4)
        self.assertEqual(f.position.y, 3)

    def test_set_position_x_less_than_minimum(self):
        f = Field(self.team_a, self.team_b)
        f.set_position(Position(-1, 3))
        self.assertEqual(f.position.x, FIELD_MIN_X)

    def test_set_position_x_greater_than_maximum(self):
        f = Field(self.team_a, self.team_b)
        f.set_position(Position(10, 3))
        self.assertEqual(f.position.x, FIELD_MAX_X)

    def test_set_position_y_less_than_minimum(self):
        f = Field(self.team_a, self.team_b)
        f.set_position(Position(3, 0))
        self.assertEqual(f.position.y, FIELD_MIN_Y)

    def test_set_position_y_greater_than_maximum(self):
        f = Field(self.team_a, self.team_b)
        f.set_position(Position(3, 6))
        self.assertEqual(f.position.y, FIELD_MAX_Y)

    def test_centre_ball(self):
        f = Field(self.team_a, self.team_b)
        f.centre_ball()
        self.assertEqual(f.position, Position(FIELD_CENTER_X, FIELD_CENTER_Y))
        self.assertEqual(f.ball_status, BallStatus.BOUNCE)
        self.assertEqual(f.possession, Possession.IN_CONTENTION)

    def test_get_field_zone_at_bounce(self):
        f = Field(self.team_a, self.team_b)
        zone = f.get_field_zone()
        self.assertEqual(zone, FieldZone.RUCK)

    def test_get_field_zone_when_moving(self):
        f = Field(self.team_a, self.team_b)
        f.ball_status = BallStatus.MOVING
        f.set_position(Position(1, 1))
        zone = f.get_field_zone()
        self.assertEqual(zone, FieldZone.FORWARDS)
        f.set_position(Position(3, 2))
        zone = f.get_field_zone()
        self.assertEqual(zone, FieldZone.FORWARDS)
        f.set_position(Position(4, 3))
        zone = f.get_field_zone()
        self.assertEqual(zone, FieldZone.MID_FIELD)
        f.set_position(Position(6, 4))
        zone = f.get_field_zone()
        self.assertEqual(zone, FieldZone.MID_FIELD)
        f.set_position(Position(7, 5))
        zone = f.get_field_zone()
        self.assertEqual(zone, FieldZone.BACKS)
        f.set_position(Position(9, 4))
        zone = f.get_field_zone()
        self.assertEqual(zone, FieldZone.BACKS)

    def test_get_field_zone_when_ball_is_thrown_in(self):
        f = Field(self.team_a, self.team_b)
        f.ball_status = BallStatus.THROW_IN
        f.set_position(Position(6, 1))
        zone = f.get_field_zone()
        self.assertEqual(zone, FieldZone.RUCK)

    def test_field_status_returns_possession_and_ball_status(self):
        f = Field(self.team_a, self.team_b)
        fs = f.field_status
        self.assertEqual(fs.possession, Possession.IN_CONTENTION)
        self.assertEqual(fs.ball_status, BallStatus.BOUNCE)

    # Move forwards

    def test_move_forward_when_possession_is_home_team_and_in_field_center(self):
        f = Field(self.team_a, self.team_b)
        f.possession = Possession.HOME_TEAM
        f.move_forward()
        self.assertEqual(f.position.x, FIELD_CENTER_X - 1)
        self.assertEqual(f.position.y, FIELD_CENTER_Y)

    def test_move_forward_when_possession_is_home_team_and_at_forward_limit(self):
        f = Field(self.team_a, self.team_b)
        f.possession = Possession.HOME_TEAM
        f.set_position(Position(FIELD_MIN_X, FIELD_CENTER_Y))
        f.move_forward()
        self.assertEqual(f.position.x, FIELD_MIN_X)
        self.assertEqual(f.position.y, FIELD_CENTER_Y)

    def test_move_forward_when_possession_is_home_team_and_in_back_field(self):
        f = Field(self.team_a, self.team_b)
        f.possession = Possession.HOME_TEAM
        f.set_position(Position(FIELD_MAX_X, FIELD_CENTER_Y))
        f.move_forward()
        self.assertEqual(f.position.x, FIELD_MAX_X - 1)
        self.assertEqual(f.position.y, FIELD_CENTER_Y)

    def test_move_forward_when_possession_is_away_team_and_in_field_center(self):
        f = Field(self.team_a, self.team_b)
        f.possession = Possession.AWAY_TEAM
        f.move_forward()
        self.assertEqual(f.position.x, FIELD_CENTER_X + 1)
        self.assertEqual(f.position.y, FIELD_CENTER_Y)

    def test_move_forward_when_possession_is_away_team_and_at_forward_limit(self):
        f = Field(self.team_a, self.team_b)
        f.possession = Possession.AWAY_TEAM
        f.set_position(Position(FIELD_MAX_X, FIELD_CENTER_Y))
        f.move_forward()
        self.assertEqual(f.position.x, FIELD_MAX_X)
        self.assertEqual(f.position.y, FIELD_CENTER_Y)

    def test_move_forward_when_possession_is_away_team_and_in_back_field(self):
        f = Field(self.team_a, self.team_b)
        f.possession = Possession.AWAY_TEAM
        f.set_position(Position(FIELD_MIN_X, FIELD_CENTER_Y))
        f.move_forward()
        self.assertEqual(f.position.x, FIELD_MIN_X + 1)
        self.assertEqual(f.position.y, FIELD_CENTER_Y)

    # Move backwards

    def test_move_backward_when_possession_is_home_team_and_in_field_center(self):
        f = Field(self.team_a, self.team_b)
        f.possession = Possession.HOME_TEAM
        f.move_backward()
        self.assertEqual(f.position.x, FIELD_CENTER_X + 1)
        self.assertEqual(f.position.y, FIELD_CENTER_Y)

    def test_move_backward_when_possession_is_home_team_and_at_back_limit(self):
        f = Field(self.team_a, self.team_b)
        f.possession = Possession.HOME_TEAM
        f.set_position(Position(FIELD_MAX_X, FIELD_CENTER_Y))
        f.move_backward()
        self.assertEqual(f.position.x, FIELD_MAX_X)
        self.assertEqual(f.position.y, FIELD_CENTER_Y)

    def test_move_backward_when_possession_is_home_team_and_in_forward_field(self):
        f = Field(self.team_a, self.team_b)
        f.possession = Possession.HOME_TEAM
        f.set_position(Position(FIELD_MIN_X, FIELD_CENTER_Y))
        f.move_backward()
        self.assertEqual(f.position.x, FIELD_MIN_X + 1)
        self.assertEqual(f.position.y, FIELD_CENTER_Y)

    def test_move_backward_when_possession_is_away_team_and_in_field_center(self):
        f = Field(self.team_a, self.team_b)
        f.possession = Possession.AWAY_TEAM
        f.move_backward()
        self.assertEqual(f.position.x, FIELD_CENTER_X - 1)
        self.assertEqual(f.position.y, FIELD_CENTER_Y)

    def test_move_backward_when_possession_is_away_team_and_at_back_limit(self):
        f = Field(self.team_a, self.team_b)
        f.possession = Possession.AWAY_TEAM
        f.set_position(Position(FIELD_MIN_X, FIELD_CENTER_Y))
        f.move_backward()
        self.assertEqual(f.position.x, FIELD_MIN_X)
        self.assertEqual(f.position.y, FIELD_CENTER_Y)

    def test_move_backward_when_possession_is_away_team_and_in_forward_field(self):
        f = Field(self.team_a, self.team_b)
        f.possession = Possession.AWAY_TEAM
        f.set_position(Position(FIELD_MAX_X, FIELD_CENTER_Y))
        f.move_backward()
        self.assertEqual(f.position.x, FIELD_MAX_X - 1)
        self.assertEqual(f.position.y, FIELD_CENTER_Y)

    # Move laterally - HOME_TEAM

    def test_move_laterally_when_possession_is_home_team_and_in_field_center_and_move_left(self):
        f = Field(self.team_a, self.team_b)
        f.possession = Possession.HOME_TEAM
        f.move_laterally(LateralDirection.LEFT)
        self.assertEqual(f.position.x, FIELD_CENTER_X)
        self.assertEqual(f.position.y, FIELD_CENTER_Y + 1)

    def test_move_laterally_when_possession_is_home_team_and_in_field_center_and_move_right(self):
        f = Field(self.team_a, self.team_b)
        f.possession = Possession.HOME_TEAM
        f.move_laterally(LateralDirection.RIGHT)
        self.assertEqual(f.position.x, FIELD_CENTER_X)
        self.assertEqual(f.position.y, FIELD_CENTER_Y - 1)

    def test_move_laterally_when_possession_is_home_team_and_at_left_side_and_move_left(self):
        f = Field(self.team_a, self.team_b)
        f.possession = Possession.HOME_TEAM
        f.set_position(Position(FIELD_CENTER_X, FIELD_MAX_Y))
        f.move_laterally(LateralDirection.LEFT)
        self.assertEqual(f.position.x, FIELD_CENTER_X)
        self.assertEqual(f.position.y, FIELD_MAX_Y)

    def test_move_laterally_when_possession_is_home_team_and_at_right_side_and_move_right(self):
        f = Field(self.team_a, self.team_b)
        f.possession = Possession.HOME_TEAM
        f.set_position(Position(FIELD_CENTER_X, FIELD_MIN_Y))
        f.move_laterally(LateralDirection.RIGHT)
        self.assertEqual(f.position.x, FIELD_CENTER_X)
        self.assertEqual(f.position.y, FIELD_MIN_Y)

    # Move laterally - AWAY_TEAM

    def test_move_laterally_when_possession_is_away_team_and_in_field_center_and_move_left(self):
        f = Field(self.team_a, self.team_b)
        f.possession = Possession.AWAY_TEAM
        f.move_laterally(LateralDirection.LEFT)
        self.assertEqual(f.position.x, FIELD_CENTER_X)
        self.assertEqual(f.position.y, FIELD_CENTER_Y - 1)

    def test_move_laterally_when_possession_is_away_team_and_in_field_center_and_move_right(self):
        f = Field(self.team_a, self.team_b)
        f.possession = Possession.AWAY_TEAM
        f.move_laterally(LateralDirection.RIGHT)
        self.assertEqual(f.position.x, FIELD_CENTER_X)
        self.assertEqual(f.position.y, FIELD_CENTER_Y + 1)

    def test_move_laterally_when_possession_is_away_team_and_at_left_side_and_move_left(self):
        f = Field(self.team_a, self.team_b)
        f.possession = Possession.AWAY_TEAM
        f.set_position(Position(FIELD_CENTER_X, FIELD_MIN_Y))
        f.move_laterally(LateralDirection.LEFT)
        self.assertEqual(f.position.x, FIELD_CENTER_X)
        self.assertEqual(f.position.y, FIELD_MIN_Y)

    def test_move_laterally_when_possession_is_away_team_and_at_right_side_and_move_right(self):
        f = Field(self.team_a, self.team_b)
        f.possession = Possession.AWAY_TEAM
        f.set_position(Position(FIELD_CENTER_X, FIELD_MAX_Y))
        f.move_laterally(LateralDirection.RIGHT)
        self.assertEqual(f.position.x, FIELD_CENTER_X)
        self.assertEqual(f.position.y, FIELD_MAX_Y)


if __name__ == "__main__":
    unittest.main()
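# The tests above pin down the Field's coordinate conventions: x runs from
# FIELD_MIN_X to FIELD_MAX_X, the home team attacks toward smaller x and the
# away team toward larger x, lateral "left"/"right" flips with possession,
# and every move is clamped to the field limits. The sketch below is an
# illustration only, not the project's field.py: the class and constant
# names prefixed with "Sketch"/"F_" are hypothetical, and the numeric limits
# are assumptions chosen to match the coordinates the tests use
# (x in 1..9, y in 1..5, centre at (5, 3)).
from dataclasses import dataclass
from enum import Enum


@dataclass
class SketchPosition:
    x: int
    y: int


class SketchPossession(Enum):
    HOME_TEAM = 1
    AWAY_TEAM = 2
    IN_CONTENTION = 3


class SketchLateralDirection(Enum):
    LEFT = 1
    RIGHT = 2


F_MIN_X, F_MAX_X, F_MIN_Y, F_MAX_Y = 1, 9, 1, 5  # assumed field limits


class SketchField:
    def __init__(self):
        self.position = SketchPosition(5, 3)  # assumed centre
        self.possession = SketchPossession.IN_CONTENTION

    def set_position(self, pos):
        # Clamp any requested position to the field limits.
        self.position = SketchPosition(
            min(max(pos.x, F_MIN_X), F_MAX_X),
            min(max(pos.y, F_MIN_Y), F_MAX_Y),
        )

    def _step_x(self, delta):
        self.set_position(SketchPosition(self.position.x + delta, self.position.y))

    def move_forward(self):
        # Home attacks toward smaller x, away toward larger x.
        self._step_x(-1 if self.possession is SketchPossession.HOME_TEAM else +1)

    def move_backward(self):
        self._step_x(+1 if self.possession is SketchPossession.HOME_TEAM else -1)

    def move_laterally(self, direction):
        # "Left" and "right" are relative to the attacking direction,
        # so the y delta flips with possession.
        delta = +1 if direction is SketchLateralDirection.LEFT else -1
        if self.possession is SketchPossession.AWAY_TEAM:
            delta = -delta
        self.set_position(SketchPosition(self.position.x, self.position.y + delta))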
# coding: utf-8
import pprint

import six
from enum import Enum


class SubscriptionVersion:

    swagger_types = {
        'activated_on': 'datetime',
        'billing_currency': 'str',
        'component_configurations': 'list[SubscriptionComponentConfiguration]',
        'created_on': 'datetime',
        'expected_last_period_end': 'datetime',
        'failed_on': 'datetime',
        'id': 'int',
        'language': 'str',
        'linked_space_id': 'int',
        'planned_purge_date': 'datetime',
        'planned_termination_date': 'datetime',
        'product_version': 'SubscriptionProductVersion',
        'selected_components': 'list[SubscriptionProductComponent]',
        'state': 'SubscriptionVersionState',
        'subscription': 'Subscription',
        'terminated_on': 'datetime',
        'terminating_on': 'datetime',
        'termination_issued_on': 'datetime',
        'version': 'int',
    }

    attribute_map = {
        'activated_on': 'activatedOn',
        'billing_currency': 'billingCurrency',
        'component_configurations': 'componentConfigurations',
        'created_on': 'createdOn',
        'expected_last_period_end': 'expectedLastPeriodEnd',
        'failed_on': 'failedOn',
        'id': 'id',
        'language': 'language',
        'linked_space_id': 'linkedSpaceId',
        'planned_purge_date': 'plannedPurgeDate',
        'planned_termination_date': 'plannedTerminationDate',
        'product_version': 'productVersion',
        'selected_components': 'selectedComponents',
        'state': 'state',
        'subscription': 'subscription',
        'terminated_on': 'terminatedOn',
        'terminating_on': 'terminatingOn',
        'termination_issued_on': 'terminationIssuedOn',
        'version': 'version',
    }

    _activated_on = None
    _billing_currency = None
    _component_configurations = None
    _created_on = None
    _expected_last_period_end = None
    _failed_on = None
    _id = None
    _language = None
    _linked_space_id = None
    _planned_purge_date = None
    _planned_termination_date = None
    _product_version = None
    _selected_components = None
    _state = None
    _subscription = None
    _terminated_on = None
    _terminating_on = None
    _termination_issued_on = None
    _version = None

    def __init__(self, **kwargs):
        self.discriminator = None

        self.activated_on = kwargs.get('activated_on', None)
        self.billing_currency = kwargs.get('billing_currency', None)
        self.component_configurations = kwargs.get('component_configurations', None)
        self.created_on = kwargs.get('created_on', None)
        self.expected_last_period_end = kwargs.get('expected_last_period_end', None)
        self.failed_on = kwargs.get('failed_on', None)
        self.id = kwargs.get('id', None)
        self.language = kwargs.get('language', None)
        self.linked_space_id = kwargs.get('linked_space_id', None)
        self.planned_purge_date = kwargs.get('planned_purge_date', None)
        self.planned_termination_date = kwargs.get('planned_termination_date', None)
        self.product_version = kwargs.get('product_version', None)
        self.selected_components = kwargs.get('selected_components', None)
        self.state = kwargs.get('state', None)
        self.subscription = kwargs.get('subscription', None)
        self.terminated_on = kwargs.get('terminated_on', None)
        self.terminating_on = kwargs.get('terminating_on', None)
        self.termination_issued_on = kwargs.get('termination_issued_on', None)
        self.version = kwargs.get('version', None)

    @property
    def activated_on(self):
        """Gets the activated_on of this SubscriptionVersion.

        :return: The activated_on of this SubscriptionVersion.
        :rtype: datetime
        """
        return self._activated_on

    @activated_on.setter
    def activated_on(self, activated_on):
        """Sets the activated_on of this SubscriptionVersion.

        :param activated_on: The activated_on of this SubscriptionVersion.
        :type: datetime
        """
        self._activated_on = activated_on

    @property
    def billing_currency(self):
        """Gets the billing_currency of this SubscriptionVersion.

        The subscriber is charged in the billing currency. The billing currency has to be one of the enabled currencies on the subscription product.

        :return: The billing_currency of this SubscriptionVersion.
        :rtype: str
        """
        return self._billing_currency

    @billing_currency.setter
    def billing_currency(self, billing_currency):
        """Sets the billing_currency of this SubscriptionVersion.

        The subscriber is charged in the billing currency. The billing currency has to be one of the enabled currencies on the subscription product.

        :param billing_currency: The billing_currency of this SubscriptionVersion.
        :type: str
        """
        self._billing_currency = billing_currency

    @property
    def component_configurations(self):
        """Gets the component_configurations of this SubscriptionVersion.

        :return: The component_configurations of this SubscriptionVersion.
        :rtype: list[SubscriptionComponentConfiguration]
        """
        return self._component_configurations

    @component_configurations.setter
    def component_configurations(self, component_configurations):
        """Sets the component_configurations of this SubscriptionVersion.

        :param component_configurations: The component_configurations of this SubscriptionVersion.
        :type: list[SubscriptionComponentConfiguration]
        """
        self._component_configurations = component_configurations

    @property
    def created_on(self):
        """Gets the created_on of this SubscriptionVersion.

        :return: The created_on of this SubscriptionVersion.
        :rtype: datetime
        """
        return self._created_on

    @created_on.setter
    def created_on(self, created_on):
        """Sets the created_on of this SubscriptionVersion.

        :param created_on: The created_on of this SubscriptionVersion.
        :type: datetime
        """
        self._created_on = created_on

    @property
    def expected_last_period_end(self):
        """Gets the expected_last_period_end of this SubscriptionVersion.

        The expected last period end is the date on which the projected end date of the last period is. This is only a projection and as such the actual date may be different.

        :return: The expected_last_period_end of this SubscriptionVersion.
        :rtype: datetime
        """
        return self._expected_last_period_end

    @expected_last_period_end.setter
    def expected_last_period_end(self, expected_last_period_end):
        """Sets the expected_last_period_end of this SubscriptionVersion.

        The expected last period end is the date on which the projected end date of the last period is. This is only a projection and as such the actual date may be different.

        :param expected_last_period_end: The expected_last_period_end of this SubscriptionVersion.
        :type: datetime
        """
        self._expected_last_period_end = expected_last_period_end

    @property
    def failed_on(self):
        """Gets the failed_on of this SubscriptionVersion.

        :return: The failed_on of this SubscriptionVersion.
        :rtype: datetime
        """
        return self._failed_on

    @failed_on.setter
    def failed_on(self, failed_on):
        """Sets the failed_on of this SubscriptionVersion.

        :param failed_on: The failed_on of this SubscriptionVersion.
        :type: datetime
        """
        self._failed_on = failed_on

    @property
    def id(self):
        """Gets the id of this SubscriptionVersion.

        The ID is the primary key of the entity. The ID identifies the entity uniquely.

        :return: The id of this SubscriptionVersion.
        :rtype: int
        """
        return self._id

    @id.setter
    def id(self, id):
        """Sets the id of this SubscriptionVersion.

        The ID is the primary key of the entity. The ID identifies the entity uniquely.

        :param id: The id of this SubscriptionVersion.
        :type: int
        """
        self._id = id

    @property
    def language(self):
        """Gets the language of this SubscriptionVersion.

        :return: The language of this SubscriptionVersion.
        :rtype: str
        """
        return self._language

    @language.setter
    def language(self, language):
        """Sets the language of this SubscriptionVersion.

        :param language: The language of this SubscriptionVersion.
        :type: str
        """
        self._language = language

    @property
    def linked_space_id(self):
        """Gets the linked_space_id of this SubscriptionVersion.

        The linked space id holds the ID of the space to which the entity belongs to.

        :return: The linked_space_id of this SubscriptionVersion.
        :rtype: int
        """
        return self._linked_space_id

    @linked_space_id.setter
    def linked_space_id(self, linked_space_id):
        """Sets the linked_space_id of this SubscriptionVersion.

        The linked space id holds the ID of the space to which the entity belongs to.

        :param linked_space_id: The linked_space_id of this SubscriptionVersion.
        :type: int
        """
        self._linked_space_id = linked_space_id

    @property
    def planned_purge_date(self):
        """Gets the planned_purge_date of this SubscriptionVersion.

        The planned purge date indicates when the entity is permanently removed. When the date is null the entity is not planned to be removed.

        :return: The planned_purge_date of this SubscriptionVersion.
        :rtype: datetime
        """
        return self._planned_purge_date

    @planned_purge_date.setter
    def planned_purge_date(self, planned_purge_date):
        """Sets the planned_purge_date of this SubscriptionVersion.

        The planned purge date indicates when the entity is permanently removed. When the date is null the entity is not planned to be removed.

        :param planned_purge_date: The planned_purge_date of this SubscriptionVersion.
        :type: datetime
        """
        self._planned_purge_date = planned_purge_date

    @property
    def planned_termination_date(self):
        """Gets the planned_termination_date of this SubscriptionVersion.

        :return: The planned_termination_date of this SubscriptionVersion.
        :rtype: datetime
        """
        return self._planned_termination_date

    @planned_termination_date.setter
    def planned_termination_date(self, planned_termination_date):
        """Sets the planned_termination_date of this SubscriptionVersion.

        :param planned_termination_date: The planned_termination_date of this SubscriptionVersion.
        :type: datetime
        """
        self._planned_termination_date = planned_termination_date

    @property
    def product_version(self):
        """Gets the product_version of this SubscriptionVersion.

        :return: The product_version of this SubscriptionVersion.
        :rtype: SubscriptionProductVersion
        """
        return self._product_version

    @product_version.setter
    def product_version(self, product_version):
        """Sets the product_version of this SubscriptionVersion.

        :param product_version: The product_version of this SubscriptionVersion.
        :type: SubscriptionProductVersion
        """
        self._product_version = product_version

    @property
    def selected_components(self):
        """Gets the selected_components of this SubscriptionVersion.

        :return: The selected_components of this SubscriptionVersion.
        :rtype: list[SubscriptionProductComponent]
        """
        return self._selected_components

    @selected_components.setter
    def selected_components(self, selected_components):
        """Sets the selected_components of this SubscriptionVersion.

        :param selected_components: The selected_components of this SubscriptionVersion.
        :type: list[SubscriptionProductComponent]
        """
        self._selected_components = selected_components

    @property
    def state(self):
        """Gets the state of this SubscriptionVersion.

        :return: The state of this SubscriptionVersion.
        :rtype: SubscriptionVersionState
        """
        return self._state

    @state.setter
    def state(self, state):
        """Sets the state of this SubscriptionVersion.

        :param state: The state of this SubscriptionVersion.
        :type: SubscriptionVersionState
        """
        self._state = state

    @property
    def subscription(self):
        """Gets the subscription of this SubscriptionVersion.

        :return: The subscription of this SubscriptionVersion.
        :rtype: Subscription
        """
        return self._subscription

    @subscription.setter
    def subscription(self, subscription):
        """Sets the subscription of this SubscriptionVersion.

        :param subscription: The subscription of this SubscriptionVersion.
        :type: Subscription
        """
        self._subscription = subscription

    @property
    def terminated_on(self):
        """Gets the terminated_on of this SubscriptionVersion.

        :return: The terminated_on of this SubscriptionVersion.
        :rtype: datetime
        """
        return self._terminated_on

    @terminated_on.setter
    def terminated_on(self, terminated_on):
        """Sets the terminated_on of this SubscriptionVersion.

        :param terminated_on: The terminated_on of this SubscriptionVersion.
        :type: datetime
        """
        self._terminated_on = terminated_on

    @property
    def terminating_on(self):
        """Gets the terminating_on of this SubscriptionVersion.

        :return: The terminating_on of this SubscriptionVersion.
        :rtype: datetime
        """
        return self._terminating_on

    @terminating_on.setter
    def terminating_on(self, terminating_on):
        """Sets the terminating_on of this SubscriptionVersion.

        :param terminating_on: The terminating_on of this SubscriptionVersion.
        :type: datetime
        """
        self._terminating_on = terminating_on

    @property
    def termination_issued_on(self):
        """Gets the termination_issued_on of this SubscriptionVersion.

        :return: The termination_issued_on of this SubscriptionVersion.
        :rtype: datetime
        """
        return self._termination_issued_on

    @termination_issued_on.setter
    def termination_issued_on(self, termination_issued_on):
        """Sets the termination_issued_on of this SubscriptionVersion.

        :param termination_issued_on: The termination_issued_on of this SubscriptionVersion.
        :type: datetime
        """
        self._termination_issued_on = termination_issued_on

    @property
    def version(self):
        """Gets the version of this SubscriptionVersion.

        The version number indicates the version of the entity. The version is incremented whenever the entity is changed.

        :return: The version of this SubscriptionVersion.
        :rtype: int
        """
        return self._version

    @version.setter
    def version(self, version):
        """Sets the version of this SubscriptionVersion.

        The version number indicates the version of the entity. The version is incremented whenever the entity is changed.

        :param version: The version of this SubscriptionVersion.
        :type: int
        """
        self._version = version

    def to_dict(self):
        result = {}

        for attr, _ in six.iteritems(self.swagger_types):
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = list(map(
                    lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
                    value
                ))
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = dict(map(
                    lambda item: (item[0], item[1].to_dict())
                    if hasattr(item[1], "to_dict") else item,
                    value.items()
                ))
            elif isinstance(value, Enum):
                result[attr] = value.value
            else:
                result[attr] = value
        if issubclass(SubscriptionVersion, dict):
            for key, value in self.items():
                result[key] = value

        return result

    def to_str(self):
        return pprint.pformat(self.to_dict())

    def __repr__(self):
        return self.to_str()

    def __eq__(self, other):
        if not isinstance(other, SubscriptionVersion):
            return False

        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not self == other
:rtype: Subscription \"\"\" return self._subscription @subscription.setter def subscription(self, subscription): \"\"\"Sets the subscription", "subscription of this SubscriptionVersion. :return: The subscription of this SubscriptionVersion. :rtype: Subscription \"\"\"", "the termination_issued_on of this SubscriptionVersion. :param termination_issued_on: The termination_issued_on of this SubscriptionVersion. :type:", "_id = None _language = None _linked_space_id = None _planned_purge_date = None _planned_termination_date", "'version', } _activated_on = None _billing_currency = None _component_configurations = None _created_on =", "\"\"\" self._id = id @property def language(self): \"\"\"Gets the language of this SubscriptionVersion.", "None) self.linked_space_id = kwargs.get('linked_space_id', None) self.planned_purge_date = kwargs.get('planned_purge_date', None) self.planned_termination_date = kwargs.get('planned_termination_date', None)", "kwargs.get('planned_purge_date', None) self.planned_termination_date = kwargs.get('planned_termination_date', None) self.product_version = kwargs.get('product_version', None) self.selected_components = kwargs.get('selected_components',", "'billing_currency': 'str', 'component_configurations': 'list[SubscriptionComponentConfiguration]', 'created_on': 'datetime', 'expected_last_period_end': 'datetime', 'failed_on': 'datetime', 'id': 'int', 'language':", "of this SubscriptionVersion. :rtype: datetime \"\"\" return self._planned_purge_date @planned_purge_date.setter def planned_purge_date(self, planned_purge_date): \"\"\"Sets", "version: The version of this SubscriptionVersion. :type: int \"\"\" self._version = version def", "\"\"\"Gets the language of this SubscriptionVersion. :return: The language of this SubscriptionVersion. :rtype:", "of this SubscriptionVersion. :rtype: int \"\"\" return self._linked_space_id @linked_space_id.setter def linked_space_id(self, linked_space_id): \"\"\"Sets", "= None _terminating_on = None _termination_issued_on = None _version = None def __init__(self,", "'selectedComponents','state': 'state','subscription': 'subscription','terminated_on': 'terminatedOn','terminating_on': 'terminatingOn','termination_issued_on': 'terminationIssuedOn','version': 'version', } _activated_on = None _billing_currency =", "value.value else: result[attr] = value if issubclass(SubscriptionVersion, dict): for key, value in self.items():", "= None _termination_issued_on = None _version = None def __init__(self, **kwargs): self.discriminator =", "= None _selected_components = None _state = None _subscription = None _terminated_on =", "= kwargs.get('planned_termination_date', None) self.product_version = kwargs.get('product_version', None) self.selected_components = kwargs.get('selected_components', None) self.state =", "this SubscriptionVersion. :type: datetime \"\"\" self._created_on = created_on @property def expected_last_period_end(self): \"\"\"Gets the", ":return: The id of this SubscriptionVersion. :rtype: int \"\"\" return self._id @id.setter def", "planned to be removed. :param planned_purge_date: The planned_purge_date of this SubscriptionVersion. :type: datetime", "planned_purge_date(self, planned_purge_date): \"\"\"Sets the planned_purge_date of this SubscriptionVersion. The planned purge date indicates", "return self._version @version.setter def version(self, version): \"\"\"Sets the version of this SubscriptionVersion. The", "SubscriptionVersion. 
:rtype: datetime \"\"\" return self._termination_issued_on @termination_issued_on.setter def termination_issued_on(self, termination_issued_on): \"\"\"Sets the termination_issued_on", "None _product_version = None _selected_components = None _state = None _subscription = None", "period end is the date on which the projected end date of the", "expected_last_period_end of this SubscriptionVersion. :type: datetime \"\"\" self._expected_last_period_end = expected_last_period_end @property def failed_on(self):", "'datetime', 'product_version': 'SubscriptionProductVersion', 'selected_components': 'list[SubscriptionProductComponent]', 'state': 'SubscriptionVersionState', 'subscription': 'Subscription', 'terminated_on': 'datetime', 'terminating_on': 'datetime',", "language of this SubscriptionVersion. :rtype: str \"\"\" return self._language @language.setter def language(self, language):", "= None _planned_termination_date = None _product_version = None _selected_components = None _state =", "def subscription(self, subscription): \"\"\"Sets the subscription of this SubscriptionVersion. :param subscription: The subscription", "self._expected_last_period_end = expected_last_period_end @property def failed_on(self): \"\"\"Gets the failed_on of this SubscriptionVersion. :return:", "= kwargs.get('linked_space_id', None) self.planned_purge_date = kwargs.get('planned_purge_date', None) self.planned_termination_date = kwargs.get('planned_termination_date', None) self.product_version =", "incremented whenever the entity is changed. :return: The version of this SubscriptionVersion. :rtype:", "if isinstance(value, list): result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, \"to_dict\") else", "SubscriptionVersion. The ID is the primary key of the entity. The ID identifies", "SubscriptionVersion. :type: int \"\"\" self._linked_space_id = linked_space_id @property def planned_purge_date(self): \"\"\"Gets the planned_purge_date", "The selected_components of this SubscriptionVersion. :rtype: list[SubscriptionProductComponent] \"\"\" return self._selected_components @selected_components.setter def selected_components(self,", "= None _created_on = None _expected_last_period_end = None _failed_on = None _id =", "= None _billing_currency = None _component_configurations = None _created_on = None _expected_last_period_end =", "datetime \"\"\" self._expected_last_period_end = expected_last_period_end @property def failed_on(self): \"\"\"Gets the failed_on of this", "return self._terminated_on @terminated_on.setter def terminated_on(self, terminated_on): \"\"\"Sets the terminated_on of this SubscriptionVersion. :param", "'plannedPurgeDate','planned_termination_date': 'plannedTerminationDate','product_version': 'productVersion','selected_components': 'selectedComponents','state': 'state','subscription': 'subscription','terminated_on': 'terminatedOn','terminating_on': 'terminatingOn','termination_issued_on': 'terminationIssuedOn','version': 'version', } _activated_on =", "only a projection and as such the actual date may be different. :param", "of this SubscriptionVersion. :type: datetime \"\"\" self._termination_issued_on = termination_issued_on @property def version(self): \"\"\"Gets", "not planned to be removed. :param planned_purge_date: The planned_purge_date of this SubscriptionVersion. 
:type:", "if not isinstance(other, SubscriptionVersion): return False return self.__dict__ == other.__dict__ def __ne__(self, other):", "None) self.expected_last_period_end = kwargs.get('expected_last_period_end', None) self.failed_on = kwargs.get('failed_on', None) self.id = kwargs.get('id', None)", "self._planned_termination_date = planned_termination_date @property def product_version(self): \"\"\"Gets the product_version of this SubscriptionVersion. :return:", "The version is incremented whenever the entity is changed. :param version: The version", "of this SubscriptionVersion. :return: The terminating_on of this SubscriptionVersion. :rtype: datetime \"\"\" return", "the date on which the projected end date of the last period is.", "@property def linked_space_id(self): \"\"\"Gets the linked_space_id of this SubscriptionVersion. The linked space id", "def __init__(self, **kwargs): self.discriminator = None self.activated_on = kwargs.get('activated_on', None) self.billing_currency = kwargs.get('billing_currency',", "SubscriptionVersion. :type: Subscription \"\"\" self._subscription = subscription @property def terminated_on(self): \"\"\"Gets the terminated_on", "The ID identifies the entity uniquely. :return: The id of this SubscriptionVersion. :rtype:", "x: x.to_dict() if hasattr(x, \"to_dict\") else x, value )) elif hasattr(value, \"to_dict\"): result[attr]", "is only a projection and as such the actual date may be different.", "SubscriptionVersion. :rtype: int \"\"\" return self._version @version.setter def version(self, version): \"\"\"Sets the version", "'id': 'int', 'language': 'str', 'linked_space_id': 'int', 'planned_purge_date': 'datetime', 'planned_termination_date': 'datetime', 'product_version': 'SubscriptionProductVersion', 'selected_components':", "\"\"\" self._language = language @property def linked_space_id(self): \"\"\"Gets the linked_space_id of this SubscriptionVersion.", "to. :return: The linked_space_id of this SubscriptionVersion. :rtype: int \"\"\" return self._linked_space_id @linked_space_id.setter", "of this SubscriptionVersion. :param selected_components: The selected_components of this SubscriptionVersion. :type: list[SubscriptionProductComponent] \"\"\"", "return self._activated_on @activated_on.setter def activated_on(self, activated_on): \"\"\"Sets the activated_on of this SubscriptionVersion. :param", "The terminated_on of this SubscriptionVersion. :rtype: datetime \"\"\" return self._terminated_on @terminated_on.setter def terminated_on(self,", "of this SubscriptionVersion. :type: list[SubscriptionProductComponent] \"\"\" self._selected_components = selected_components @property def state(self): \"\"\"Gets", "uniquely. :return: The id of this SubscriptionVersion. :rtype: int \"\"\" return self._id @id.setter", "termination_issued_on: The termination_issued_on of this SubscriptionVersion. :type: datetime \"\"\" self._termination_issued_on = termination_issued_on @property", "of this SubscriptionVersion. :param terminated_on: The terminated_on of this SubscriptionVersion. :type: datetime \"\"\"", "list[SubscriptionProductComponent] \"\"\" return self._selected_components @selected_components.setter def selected_components(self, selected_components): \"\"\"Sets the selected_components of this", "None) @property def activated_on(self): \"\"\"Gets the activated_on of this SubscriptionVersion. 
:return: The activated_on", "= getattr(self, attr) if isinstance(value, list): result[attr] = list(map( lambda x: x.to_dict() if", "linked_space_id(self): \"\"\"Gets the linked_space_id of this SubscriptionVersion. The linked space id holds the", ":param termination_issued_on: The termination_issued_on of this SubscriptionVersion. :type: datetime \"\"\" self._termination_issued_on = termination_issued_on", "= kwargs.get('selected_components', None) self.state = kwargs.get('state', None) self.subscription = kwargs.get('subscription', None) self.terminated_on =", "def component_configurations(self): \"\"\"Gets the component_configurations of this SubscriptionVersion. :return: The component_configurations of this", "def to_dict(self): result = {} for attr, _ in six.iteritems(self.swagger_types): value = getattr(self,", "version def to_dict(self): result = {} for attr, _ in six.iteritems(self.swagger_types): value =", "expected last period end is the date on which the projected end date", "'datetime', 'id': 'int', 'language': 'str', 'linked_space_id': 'int', 'planned_purge_date': 'datetime', 'planned_termination_date': 'datetime', 'product_version': 'SubscriptionProductVersion',", "indicates when the entity is permanently removed. When the date is null the", "None) self.planned_termination_date = kwargs.get('planned_termination_date', None) self.product_version = kwargs.get('product_version', None) self.selected_components = kwargs.get('selected_components', None)", "self.state = kwargs.get('state', None) self.subscription = kwargs.get('subscription', None) self.terminated_on = kwargs.get('terminated_on', None) self.terminating_on", ":return: The planned_termination_date of this SubscriptionVersion. :rtype: datetime \"\"\" return self._planned_termination_date @planned_termination_date.setter def", "'datetime', 'planned_termination_date': 'datetime', 'product_version': 'SubscriptionProductVersion', 'selected_components': 'list[SubscriptionProductComponent]', 'state': 'SubscriptionVersionState', 'subscription': 'Subscription', 'terminated_on': 'datetime',", "'componentConfigurations','created_on': 'createdOn','expected_last_period_end': 'expectedLastPeriodEnd','failed_on': 'failedOn','id': 'id','language': 'language','linked_space_id': 'linkedSpaceId','planned_purge_date': 'plannedPurgeDate','planned_termination_date': 'plannedTerminationDate','product_version': 'productVersion','selected_components': 'selectedComponents','state': 'state','subscription': 'subscription','terminated_on':", "'str', 'linked_space_id': 'int', 'planned_purge_date': 'datetime', 'planned_termination_date': 'datetime', 'product_version': 'SubscriptionProductVersion', 'selected_components': 'list[SubscriptionProductComponent]', 'state': 'SubscriptionVersionState',", "language(self, language): \"\"\"Sets the language of this SubscriptionVersion. :param language: The language of", "'subscription','terminated_on': 'terminatedOn','terminating_on': 'terminatingOn','termination_issued_on': 'terminationIssuedOn','version': 'version', } _activated_on = None _billing_currency = None _component_configurations", "version is incremented whenever the entity is changed. :param version: The version of", "SubscriptionVersion. :type: list[SubscriptionProductComponent] \"\"\" self._selected_components = selected_components @property def state(self): \"\"\"Gets the state", "\"\"\"Sets the termination_issued_on of this SubscriptionVersion. 
:param termination_issued_on: The termination_issued_on of this SubscriptionVersion.", "str \"\"\" self._language = language @property def linked_space_id(self): \"\"\"Gets the linked_space_id of this", "is charged in the billing currency. The billing currency has to be one", "the date is null the entity is not planned to be removed. :param", "to be one of the enabled currencies on the subscription product. :param billing_currency:", "created_on(self, created_on): \"\"\"Sets the created_on of this SubscriptionVersion. :param created_on: The created_on of", "'datetime', 'billing_currency': 'str', 'component_configurations': 'list[SubscriptionComponentConfiguration]', 'created_on': 'datetime', 'expected_last_period_end': 'datetime', 'failed_on': 'datetime', 'id': 'int',", "This is only a projection and as such the actual date may be", "@property def selected_components(self): \"\"\"Gets the selected_components of this SubscriptionVersion. :return: The selected_components of", "of the enabled currencies on the subscription product. :return: The billing_currency of this", "'int', } attribute_map = { 'activated_on': 'activatedOn','billing_currency': 'billingCurrency','component_configurations': 'componentConfigurations','created_on': 'createdOn','expected_last_period_end': 'expectedLastPeriodEnd','failed_on': 'failedOn','id': 'id','language':", "= { 'activated_on': 'datetime', 'billing_currency': 'str', 'component_configurations': 'list[SubscriptionComponentConfiguration]', 'created_on': 'datetime', 'expected_last_period_end': 'datetime', 'failed_on':", "= value return result def to_str(self): return pprint.pformat(self.to_dict()) def __repr__(self): return self.to_str() def", "SubscriptionVersionState \"\"\" self._state = state @property def subscription(self): \"\"\"Gets the subscription of this", "to be one of the enabled currencies on the subscription product. :return: The", "created_on of this SubscriptionVersion. :type: datetime \"\"\" self._created_on = created_on @property def expected_last_period_end(self):", "None _expected_last_period_end = None _failed_on = None _id = None _language = None", "product. :return: The billing_currency of this SubscriptionVersion. :rtype: str \"\"\" return self._billing_currency @billing_currency.setter", "is the date on which the projected end date of the last period", "else x, value )) elif hasattr(value, \"to_dict\"): result[attr] = value.to_dict() elif isinstance(value, dict):", "\"\"\"Sets the linked_space_id of this SubscriptionVersion. The linked space id holds the ID", "id holds the ID of the space to which the entity belongs to.", "of this SubscriptionVersion. :type: datetime \"\"\" self._expected_last_period_end = expected_last_period_end @property def failed_on(self): \"\"\"Gets", "self._component_configurations = component_configurations @property def created_on(self): \"\"\"Gets the created_on of this SubscriptionVersion. :return:", "pprint import six from enum import Enum class SubscriptionVersion: swagger_types = { 'activated_on':", "of this SubscriptionVersion. The version number indicates the version of the entity. The", "terminated_on of this SubscriptionVersion. :param terminated_on: The terminated_on of this SubscriptionVersion. :type: datetime", "of this SubscriptionVersion. 
:rtype: list[SubscriptionComponentConfiguration] \"\"\" return self._component_configurations @component_configurations.setter def component_configurations(self, component_configurations): \"\"\"Sets", "@subscription.setter def subscription(self, subscription): \"\"\"Sets the subscription of this SubscriptionVersion. :param subscription: The", "\"\"\"Sets the state of this SubscriptionVersion. :param state: The state of this SubscriptionVersion.", "\"\"\" return self._language @language.setter def language(self, language): \"\"\"Sets the language of this SubscriptionVersion.", "__repr__(self): return self.to_str() def __eq__(self, other): if not isinstance(other, SubscriptionVersion): return False return", "this SubscriptionVersion. :param created_on: The created_on of this SubscriptionVersion. :type: datetime \"\"\" self._created_on", "termination_issued_on of this SubscriptionVersion. :type: datetime \"\"\" self._termination_issued_on = termination_issued_on @property def version(self):", "this SubscriptionVersion. :param state: The state of this SubscriptionVersion. :type: SubscriptionVersionState \"\"\" self._state", ":param subscription: The subscription of this SubscriptionVersion. :type: Subscription \"\"\" self._subscription = subscription", "datetime \"\"\" return self._planned_purge_date @planned_purge_date.setter def planned_purge_date(self, planned_purge_date): \"\"\"Sets the planned_purge_date of this", "SubscriptionVersion. :type: datetime \"\"\" self._activated_on = activated_on @property def billing_currency(self): \"\"\"Gets the billing_currency", "of this SubscriptionVersion. :param state: The state of this SubscriptionVersion. :type: SubscriptionVersionState \"\"\"", "'activated_on': 'datetime', 'billing_currency': 'str', 'component_configurations': 'list[SubscriptionComponentConfiguration]', 'created_on': 'datetime', 'expected_last_period_end': 'datetime', 'failed_on': 'datetime', 'id':", ":return: The planned_purge_date of this SubscriptionVersion. :rtype: datetime \"\"\" return self._planned_purge_date @planned_purge_date.setter def", "to_str(self): return pprint.pformat(self.to_dict()) def __repr__(self): return self.to_str() def __eq__(self, other): if not isinstance(other,", "None) self.version = kwargs.get('version', None) @property def activated_on(self): \"\"\"Gets the activated_on of this", "self._activated_on = activated_on @property def billing_currency(self): \"\"\"Gets the billing_currency of this SubscriptionVersion. The", "id of this SubscriptionVersion. :type: int \"\"\" self._id = id @property def language(self):", "this SubscriptionVersion. :type: datetime \"\"\" self._failed_on = failed_on @property def id(self): \"\"\"Gets the", "def __repr__(self): return self.to_str() def __eq__(self, other): if not isinstance(other, SubscriptionVersion): return False", "def created_on(self, created_on): \"\"\"Sets the created_on of this SubscriptionVersion. :param created_on: The created_on", ":param selected_components: The selected_components of this SubscriptionVersion. :type: list[SubscriptionProductComponent] \"\"\" self._selected_components = selected_components", "item, value.items() )) elif isinstance(value, Enum): result[attr] = value.value else: result[attr] = value", "\"\"\" return self._component_configurations @component_configurations.setter def component_configurations(self, component_configurations): \"\"\"Sets the component_configurations of this SubscriptionVersion.", "date of the last period is. 
This is only a projection and as", "of this SubscriptionVersion. :param termination_issued_on: The termination_issued_on of this SubscriptionVersion. :type: datetime \"\"\"", "None _linked_space_id = None _planned_purge_date = None _planned_termination_date = None _product_version = None", "datetime \"\"\" self._failed_on = failed_on @property def id(self): \"\"\"Gets the id of this", "kwargs.get('billing_currency', None) self.component_configurations = kwargs.get('component_configurations', None) self.created_on = kwargs.get('created_on', None) self.expected_last_period_end = kwargs.get('expected_last_period_end',", "None _component_configurations = None _created_on = None _expected_last_period_end = None _failed_on = None", ":return: The product_version of this SubscriptionVersion. :rtype: SubscriptionProductVersion \"\"\" return self._product_version @product_version.setter def", "of this SubscriptionVersion. :param component_configurations: The component_configurations of this SubscriptionVersion. :type: list[SubscriptionComponentConfiguration] \"\"\"", "datetime \"\"\" self._activated_on = activated_on @property def billing_currency(self): \"\"\"Gets the billing_currency of this", "attr, _ in six.iteritems(self.swagger_types): value = getattr(self, attr) if isinstance(value, list): result[attr] =", "The termination_issued_on of this SubscriptionVersion. :rtype: datetime \"\"\" return self._termination_issued_on @termination_issued_on.setter def termination_issued_on(self,", "this SubscriptionVersion. :type: int \"\"\" self._id = id @property def language(self): \"\"\"Gets the", "be removed. :param planned_purge_date: The planned_purge_date of this SubscriptionVersion. :type: datetime \"\"\" self._planned_purge_date", "different. :param expected_last_period_end: The expected_last_period_end of this SubscriptionVersion. :type: datetime \"\"\" self._expected_last_period_end =", "is not planned to be removed. :param planned_purge_date: The planned_purge_date of this SubscriptionVersion.", "self.id = kwargs.get('id', None) self.language = kwargs.get('language', None) self.linked_space_id = kwargs.get('linked_space_id', None) self.planned_purge_date", "return self._planned_purge_date @planned_purge_date.setter def planned_purge_date(self, planned_purge_date): \"\"\"Sets the planned_purge_date of this SubscriptionVersion. The", "this SubscriptionVersion. :rtype: datetime \"\"\" return self._planned_purge_date @planned_purge_date.setter def planned_purge_date(self, planned_purge_date): \"\"\"Sets the", ":type: list[SubscriptionProductComponent] \"\"\" self._selected_components = selected_components @property def state(self): \"\"\"Gets the state of", "'datetime', 'terminating_on': 'datetime', 'termination_issued_on': 'datetime', 'version': 'int', } attribute_map = { 'activated_on': 'activatedOn','billing_currency':", "is. This is only a projection and as such the actual date may", "created_on of this SubscriptionVersion. :rtype: datetime \"\"\" return self._created_on @created_on.setter def created_on(self, created_on):", "= kwargs.get('planned_purge_date', None) self.planned_termination_date = kwargs.get('planned_termination_date', None) self.product_version = kwargs.get('product_version', None) self.selected_components =", "return self._selected_components @selected_components.setter def selected_components(self, selected_components): \"\"\"Sets the selected_components of this SubscriptionVersion. 
:param", "selected_components(self): \"\"\"Gets the selected_components of this SubscriptionVersion. :return: The selected_components of this SubscriptionVersion.", "\"\"\" self._activated_on = activated_on @property def billing_currency(self): \"\"\"Gets the billing_currency of this SubscriptionVersion.", "linked_space_id(self, linked_space_id): \"\"\"Sets the linked_space_id of this SubscriptionVersion. The linked space id holds", "terminating_on of this SubscriptionVersion. :rtype: datetime \"\"\" return self._terminating_on @terminating_on.setter def terminating_on(self, terminating_on):", "self._termination_issued_on @termination_issued_on.setter def termination_issued_on(self, termination_issued_on): \"\"\"Sets the termination_issued_on of this SubscriptionVersion. :param termination_issued_on:", ":rtype: datetime \"\"\" return self._planned_purge_date @planned_purge_date.setter def planned_purge_date(self, planned_purge_date): \"\"\"Sets the planned_purge_date of", "\"\"\" self._subscription = subscription @property def terminated_on(self): \"\"\"Gets the terminated_on of this SubscriptionVersion.", "of this SubscriptionVersion. :rtype: datetime \"\"\" return self._expected_last_period_end @expected_last_period_end.setter def expected_last_period_end(self, expected_last_period_end): \"\"\"Sets", "of this SubscriptionVersion. :rtype: Subscription \"\"\" return self._subscription @subscription.setter def subscription(self, subscription): \"\"\"Sets", "to which the entity belongs to. :param linked_space_id: The linked_space_id of this SubscriptionVersion.", "self._planned_purge_date = planned_purge_date @property def planned_termination_date(self): \"\"\"Gets the planned_termination_date of this SubscriptionVersion. :return:", "self._subscription @subscription.setter def subscription(self, subscription): \"\"\"Sets the subscription of this SubscriptionVersion. :param subscription:", "SubscriptionVersion. :return: The language of this SubscriptionVersion. :rtype: str \"\"\" return self._language @language.setter", "datetime \"\"\" return self._created_on @created_on.setter def created_on(self, created_on): \"\"\"Sets the created_on of this", "the space to which the entity belongs to. :return: The linked_space_id of this", "state of this SubscriptionVersion. :rtype: SubscriptionVersionState \"\"\" return self._state @state.setter def state(self, state):", ":param id: The id of this SubscriptionVersion. :type: int \"\"\" self._id = id", "be different. :return: The expected_last_period_end of this SubscriptionVersion. :rtype: datetime \"\"\" return self._expected_last_period_end", "this SubscriptionVersion. :return: The terminating_on of this SubscriptionVersion. :rtype: datetime \"\"\" return self._terminating_on", "null the entity is not planned to be removed. :return: The planned_purge_date of", "isinstance(value, list): result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, \"to_dict\") else x,", "state(self, state): \"\"\"Sets the state of this SubscriptionVersion. :param state: The state of", "self._terminated_on = terminated_on @property def terminating_on(self): \"\"\"Gets the terminating_on of this SubscriptionVersion. :return:", "@property def expected_last_period_end(self): \"\"\"Gets the expected_last_period_end of this SubscriptionVersion. The expected last period", "component_configurations of this SubscriptionVersion. :param component_configurations: The component_configurations of this SubscriptionVersion. 
:type: list[SubscriptionComponentConfiguration]", "has to be one of the enabled currencies on the subscription product. :param", "\"\"\" self._expected_last_period_end = expected_last_period_end @property def failed_on(self): \"\"\"Gets the failed_on of this SubscriptionVersion.", "'termination_issued_on': 'datetime', 'version': 'int', } attribute_map = { 'activated_on': 'activatedOn','billing_currency': 'billingCurrency','component_configurations': 'componentConfigurations','created_on': 'createdOn','expected_last_period_end':", "end is the date on which the projected end date of the last", "entity belongs to. :param linked_space_id: The linked_space_id of this SubscriptionVersion. :type: int \"\"\"", "datetime \"\"\" self._planned_termination_date = planned_termination_date @property def product_version(self): \"\"\"Gets the product_version of this", "self._terminated_on @terminated_on.setter def terminated_on(self, terminated_on): \"\"\"Sets the terminated_on of this SubscriptionVersion. :param terminated_on:", "the terminating_on of this SubscriptionVersion. :param terminating_on: The terminating_on of this SubscriptionVersion. :type:", "'billingCurrency','component_configurations': 'componentConfigurations','created_on': 'createdOn','expected_last_period_end': 'expectedLastPeriodEnd','failed_on': 'failedOn','id': 'id','language': 'language','linked_space_id': 'linkedSpaceId','planned_purge_date': 'plannedPurgeDate','planned_termination_date': 'plannedTerminationDate','product_version': 'productVersion','selected_components': 'selectedComponents','state': 'state','subscription':", ":type: str \"\"\" self._language = language @property def linked_space_id(self): \"\"\"Gets the linked_space_id of", "def planned_termination_date(self, planned_termination_date): \"\"\"Sets the planned_termination_date of this SubscriptionVersion. :param planned_termination_date: The planned_termination_date", "entity uniquely. :param id: The id of this SubscriptionVersion. :type: int \"\"\" self._id", "\"\"\"Sets the planned_termination_date of this SubscriptionVersion. :param planned_termination_date: The planned_termination_date of this SubscriptionVersion.", "\"\"\" self._component_configurations = component_configurations @property def created_on(self): \"\"\"Gets the created_on of this SubscriptionVersion.", "= None def __init__(self, **kwargs): self.discriminator = None self.activated_on = kwargs.get('activated_on', None) self.billing_currency", "hasattr(item[1], \"to_dict\") else item, value.items() )) elif isinstance(value, Enum): result[attr] = value.value else:", "@property def id(self): \"\"\"Gets the id of this SubscriptionVersion. The ID is the", ":param language: The language of this SubscriptionVersion. :type: str \"\"\" self._language = language", "this SubscriptionVersion. :rtype: datetime \"\"\" return self._failed_on @failed_on.setter def failed_on(self, failed_on): \"\"\"Sets the", "= kwargs.get('termination_issued_on', None) self.version = kwargs.get('version', None) @property def activated_on(self): \"\"\"Gets the activated_on", "this SubscriptionVersion. :return: The planned_termination_date of this SubscriptionVersion. 
:rtype: datetime \"\"\" return self._planned_termination_date", "kwargs.get('state', None) self.subscription = kwargs.get('subscription', None) self.terminated_on = kwargs.get('terminated_on', None) self.terminating_on = kwargs.get('terminating_on',", "swagger_types = { 'activated_on': 'datetime', 'billing_currency': 'str', 'component_configurations': 'list[SubscriptionComponentConfiguration]', 'created_on': 'datetime', 'expected_last_period_end': 'datetime',", "int \"\"\" return self._version @version.setter def version(self, version): \"\"\"Sets the version of this", "= version def to_dict(self): result = {} for attr, _ in six.iteritems(self.swagger_types): value", "self.subscription = kwargs.get('subscription', None) self.terminated_on = kwargs.get('terminated_on', None) self.terminating_on = kwargs.get('terminating_on', None) self.termination_issued_on", "of the space to which the entity belongs to. :return: The linked_space_id of", "failed_on of this SubscriptionVersion. :rtype: datetime \"\"\" return self._failed_on @failed_on.setter def failed_on(self, failed_on):", "= value if issubclass(SubscriptionVersion, dict): for key, value in self.items(): result[key] = value", "def __eq__(self, other): if not isinstance(other, SubscriptionVersion): return False return self.__dict__ == other.__dict__", "of this SubscriptionVersion. The linked space id holds the ID of the space", "The activated_on of this SubscriptionVersion. :type: datetime \"\"\" self._activated_on = activated_on @property def", "self._failed_on = failed_on @property def id(self): \"\"\"Gets the id of this SubscriptionVersion. The", "holds the ID of the space to which the entity belongs to. :return:", "the termination_issued_on of this SubscriptionVersion. :return: The termination_issued_on of this SubscriptionVersion. :rtype: datetime", "'expected_last_period_end': 'datetime', 'failed_on': 'datetime', 'id': 'int', 'language': 'str', 'linked_space_id': 'int', 'planned_purge_date': 'datetime', 'planned_termination_date':", "isinstance(value, Enum): result[attr] = value.value else: result[attr] = value if issubclass(SubscriptionVersion, dict): for", "of the enabled currencies on the subscription product. :param billing_currency: The billing_currency of", "of this SubscriptionVersion. :return: The failed_on of this SubscriptionVersion. :rtype: datetime \"\"\" return", ":param version: The version of this SubscriptionVersion. :type: int \"\"\" self._version = version", "planned purge date indicates when the entity is permanently removed. When the date", "the billing_currency of this SubscriptionVersion. The subscriber is charged in the billing currency.", "SubscriptionVersion. :param planned_termination_date: The planned_termination_date of this SubscriptionVersion. :type: datetime \"\"\" self._planned_termination_date =", "in self.items(): result[key] = value return result def to_str(self): return pprint.pformat(self.to_dict()) def __repr__(self):", "of the last period is. This is only a projection and as such", "one of the enabled currencies on the subscription product. :param billing_currency: The billing_currency", ":return: The selected_components of this SubscriptionVersion. :rtype: list[SubscriptionProductComponent] \"\"\" return self._selected_components @selected_components.setter def", "SubscriptionVersion. :type: datetime \"\"\" self._failed_on = failed_on @property def id(self): \"\"\"Gets the id", "of this SubscriptionVersion. :param language: The language of this SubscriptionVersion. 
:type: str \"\"\"", "\"\"\" return self._subscription @subscription.setter def subscription(self, subscription): \"\"\"Sets the subscription of this SubscriptionVersion.", ":rtype: datetime \"\"\" return self._terminating_on @terminating_on.setter def terminating_on(self, terminating_on): \"\"\"Sets the terminating_on of", "self.termination_issued_on = kwargs.get('termination_issued_on', None) self.version = kwargs.get('version', None) @property def activated_on(self): \"\"\"Gets the", "@property def billing_currency(self): \"\"\"Gets the billing_currency of this SubscriptionVersion. The subscriber is charged", "SubscriptionVersion. :param language: The language of this SubscriptionVersion. :type: str \"\"\" self._language =", "expected_last_period_end of this SubscriptionVersion. :rtype: datetime \"\"\" return self._expected_last_period_end @expected_last_period_end.setter def expected_last_period_end(self, expected_last_period_end):", "def version(self): \"\"\"Gets the version of this SubscriptionVersion. The version number indicates the", "def terminated_on(self, terminated_on): \"\"\"Sets the terminated_on of this SubscriptionVersion. :param terminated_on: The terminated_on", "may be different. :return: The expected_last_period_end of this SubscriptionVersion. :rtype: datetime \"\"\" return", "version): \"\"\"Sets the version of this SubscriptionVersion. The version number indicates the version", "to which the entity belongs to. :return: The linked_space_id of this SubscriptionVersion. :rtype:", "self._planned_termination_date @planned_termination_date.setter def planned_termination_date(self, planned_termination_date): \"\"\"Sets the planned_termination_date of this SubscriptionVersion. :param planned_termination_date:", "kwargs.get('expected_last_period_end', None) self.failed_on = kwargs.get('failed_on', None) self.id = kwargs.get('id', None) self.language = kwargs.get('language',", "@property def terminated_on(self): \"\"\"Gets the terminated_on of this SubscriptionVersion. :return: The terminated_on of", "of this SubscriptionVersion. :type: int \"\"\" self._linked_space_id = linked_space_id @property def planned_purge_date(self): \"\"\"Gets", "this SubscriptionVersion. :rtype: list[SubscriptionProductComponent] \"\"\" return self._selected_components @selected_components.setter def selected_components(self, selected_components): \"\"\"Sets the", "self._state @state.setter def state(self, state): \"\"\"Sets the state of this SubscriptionVersion. :param state:", "self.planned_purge_date = kwargs.get('planned_purge_date', None) self.planned_termination_date = kwargs.get('planned_termination_date', None) self.product_version = kwargs.get('product_version', None) self.selected_components", "SubscriptionVersion. The linked space id holds the ID of the space to which", "subscription of this SubscriptionVersion. :param subscription: The subscription of this SubscriptionVersion. :type: Subscription", "terminating_on @property def termination_issued_on(self): \"\"\"Gets the termination_issued_on of this SubscriptionVersion. :return: The termination_issued_on", "this SubscriptionVersion. :return: The language of this SubscriptionVersion. :rtype: str \"\"\" return self._language", "component_configurations @property def created_on(self): \"\"\"Gets the created_on of this SubscriptionVersion. :return: The created_on", "The planned_termination_date of this SubscriptionVersion. 
:type: datetime \"\"\" self._planned_termination_date = planned_termination_date @property def", "the entity is changed. :return: The version of this SubscriptionVersion. :rtype: int \"\"\"", "\"\"\" return self._activated_on @activated_on.setter def activated_on(self, activated_on): \"\"\"Sets the activated_on of this SubscriptionVersion.", "SubscriptionVersion. :param terminating_on: The terminating_on of this SubscriptionVersion. :type: datetime \"\"\" self._terminating_on =", "the primary key of the entity. The ID identifies the entity uniquely. :return:", "'SubscriptionProductVersion', 'selected_components': 'list[SubscriptionProductComponent]', 'state': 'SubscriptionVersionState', 'subscription': 'Subscription', 'terminated_on': 'datetime', 'terminating_on': 'datetime', 'termination_issued_on': 'datetime',", "self.billing_currency = kwargs.get('billing_currency', None) self.component_configurations = kwargs.get('component_configurations', None) self.created_on = kwargs.get('created_on', None) self.expected_last_period_end", "\"\"\"Gets the created_on of this SubscriptionVersion. :return: The created_on of this SubscriptionVersion. :rtype:", "return self._failed_on @failed_on.setter def failed_on(self, failed_on): \"\"\"Sets the failed_on of this SubscriptionVersion. :param", "state of this SubscriptionVersion. :type: SubscriptionVersionState \"\"\" self._state = state @property def subscription(self):", "from enum import Enum class SubscriptionVersion: swagger_types = { 'activated_on': 'datetime', 'billing_currency': 'str',", "as such the actual date may be different. :return: The expected_last_period_end of this", "terminating_on(self): \"\"\"Gets the terminating_on of this SubscriptionVersion. :return: The terminating_on of this SubscriptionVersion.", "self._product_version = product_version @property def selected_components(self): \"\"\"Gets the selected_components of this SubscriptionVersion. :return:", "this SubscriptionVersion. :rtype: datetime \"\"\" return self._activated_on @activated_on.setter def activated_on(self, activated_on): \"\"\"Sets the", "of this SubscriptionVersion. :rtype: int \"\"\" return self._version @version.setter def version(self, version): \"\"\"Sets", "planned_purge_date): \"\"\"Sets the planned_purge_date of this SubscriptionVersion. The planned purge date indicates when", ":rtype: SubscriptionVersionState \"\"\" return self._state @state.setter def state(self, state): \"\"\"Sets the state of", "the last period is. This is only a projection and as such the", "kwargs.get('subscription', None) self.terminated_on = kwargs.get('terminated_on', None) self.terminating_on = kwargs.get('terminating_on', None) self.termination_issued_on = kwargs.get('termination_issued_on',", "this SubscriptionVersion. :type: str \"\"\" self._billing_currency = billing_currency @property def component_configurations(self): \"\"\"Gets the", "\"\"\"Gets the product_version of this SubscriptionVersion. :return: The product_version of this SubscriptionVersion. :rtype:", "of this SubscriptionVersion. :param subscription: The subscription of this SubscriptionVersion. :type: Subscription \"\"\"", "= kwargs.get('failed_on', None) self.id = kwargs.get('id', None) self.language = kwargs.get('language', None) self.linked_space_id =", "of this SubscriptionVersion. :rtype: SubscriptionVersionState \"\"\" return self._state @state.setter def state(self, state): \"\"\"Sets", "expected_last_period_end @property def failed_on(self): \"\"\"Gets the failed_on of this SubscriptionVersion. 
:return: The failed_on", "entity belongs to. :return: The linked_space_id of this SubscriptionVersion. :rtype: int \"\"\" return", ":return: The terminated_on of this SubscriptionVersion. :rtype: datetime \"\"\" return self._terminated_on @terminated_on.setter def", "SubscriptionVersion. :rtype: int \"\"\" return self._id @id.setter def id(self, id): \"\"\"Sets the id", "planned_termination_date of this SubscriptionVersion. :type: datetime \"\"\" self._planned_termination_date = planned_termination_date @property def product_version(self):", ":param expected_last_period_end: The expected_last_period_end of this SubscriptionVersion. :type: datetime \"\"\" self._expected_last_period_end = expected_last_period_end", "primary key of the entity. The ID identifies the entity uniquely. :param id:", "The terminating_on of this SubscriptionVersion. :type: datetime \"\"\" self._terminating_on = terminating_on @property def", "this SubscriptionVersion. :return: The product_version of this SubscriptionVersion. :rtype: SubscriptionProductVersion \"\"\" return self._product_version", "subscription of this SubscriptionVersion. :rtype: Subscription \"\"\" return self._subscription @subscription.setter def subscription(self, subscription):", "The component_configurations of this SubscriptionVersion. :rtype: list[SubscriptionComponentConfiguration] \"\"\" return self._component_configurations @component_configurations.setter def component_configurations(self,", "the entity uniquely. :param id: The id of this SubscriptionVersion. :type: int \"\"\"", "None _version = None def __init__(self, **kwargs): self.discriminator = None self.activated_on = kwargs.get('activated_on',", "different. :return: The expected_last_period_end of this SubscriptionVersion. :rtype: datetime \"\"\" return self._expected_last_period_end @expected_last_period_end.setter", "_terminated_on = None _terminating_on = None _termination_issued_on = None _version = None def", "currency has to be one of the enabled currencies on the subscription product.", "_expected_last_period_end = None _failed_on = None _id = None _language = None _linked_space_id", "def id(self, id): \"\"\"Sets the id of this SubscriptionVersion. The ID is the", "currencies on the subscription product. :return: The billing_currency of this SubscriptionVersion. :rtype: str", "def planned_termination_date(self): \"\"\"Gets the planned_termination_date of this SubscriptionVersion. :return: The planned_termination_date of this", "ID of the space to which the entity belongs to. :return: The linked_space_id", "on the subscription product. :param billing_currency: The billing_currency of this SubscriptionVersion. :type: str", "the linked_space_id of this SubscriptionVersion. The linked space id holds the ID of", "datetime \"\"\" return self._expected_last_period_end @expected_last_period_end.setter def expected_last_period_end(self, expected_last_period_end): \"\"\"Sets the expected_last_period_end of this", "the subscription of this SubscriptionVersion. :param subscription: The subscription of this SubscriptionVersion. 
:type:", "None _planned_purge_date = None _planned_termination_date = None _product_version = None _selected_components = None", "_linked_space_id = None _planned_purge_date = None _planned_termination_date = None _product_version = None _selected_components", ":rtype: str \"\"\" return self._billing_currency @billing_currency.setter def billing_currency(self, billing_currency): \"\"\"Sets the billing_currency of", "_termination_issued_on = None _version = None def __init__(self, **kwargs): self.discriminator = None self.activated_on", "the version of this SubscriptionVersion. The version number indicates the version of the", "return self._subscription @subscription.setter def subscription(self, subscription): \"\"\"Sets the subscription of this SubscriptionVersion. :param", "\"\"\"Sets the terminated_on of this SubscriptionVersion. :param terminated_on: The terminated_on of this SubscriptionVersion.", "SubscriptionVersion. :rtype: datetime \"\"\" return self._created_on @created_on.setter def created_on(self, created_on): \"\"\"Sets the created_on", "import pprint import six from enum import Enum class SubscriptionVersion: swagger_types = {", "value.items() )) elif isinstance(value, Enum): result[attr] = value.value else: result[attr] = value if", "Subscription \"\"\" self._subscription = subscription @property def terminated_on(self): \"\"\"Gets the terminated_on of this", "The expected_last_period_end of this SubscriptionVersion. :type: datetime \"\"\" self._expected_last_period_end = expected_last_period_end @property def", "of this SubscriptionVersion. :type: int \"\"\" self._id = id @property def language(self): \"\"\"Gets", "failed_on @property def id(self): \"\"\"Gets the id of this SubscriptionVersion. The ID is", "this SubscriptionVersion. :return: The termination_issued_on of this SubscriptionVersion. :rtype: datetime \"\"\" return self._termination_issued_on", "changed. :return: The version of this SubscriptionVersion. :rtype: int \"\"\" return self._version @version.setter", ":param planned_purge_date: The planned_purge_date of this SubscriptionVersion. :type: datetime \"\"\" self._planned_purge_date = planned_purge_date", "\"to_dict\"): result[attr] = value.to_dict() elif isinstance(value, dict): result[attr] = dict(map( lambda item: (item[0],", "The subscription of this SubscriptionVersion. :rtype: Subscription \"\"\" return self._subscription @subscription.setter def subscription(self,", ":return: The terminating_on of this SubscriptionVersion. :rtype: datetime \"\"\" return self._terminating_on @terminating_on.setter def", "The linked_space_id of this SubscriptionVersion. :rtype: int \"\"\" return self._linked_space_id @linked_space_id.setter def linked_space_id(self,", "the subscription of this SubscriptionVersion. :return: The subscription of this SubscriptionVersion. :rtype: Subscription", "be removed. :return: The planned_purge_date of this SubscriptionVersion. :rtype: datetime \"\"\" return self._planned_purge_date", "None _terminated_on = None _terminating_on = None _termination_issued_on = None _version = None", ":rtype: datetime \"\"\" return self._terminated_on @terminated_on.setter def terminated_on(self, terminated_on): \"\"\"Sets the terminated_on of", "= value.to_dict() elif isinstance(value, dict): result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if", "The id of this SubscriptionVersion. :rtype: int \"\"\" return self._id @id.setter def id(self,", "billing_currency of this SubscriptionVersion. 
:rtype: str \"\"\" return self._billing_currency @billing_currency.setter def billing_currency(self, billing_currency):", "end date of the last period is. This is only a projection and", "terminating_on of this SubscriptionVersion. :type: datetime \"\"\" self._terminating_on = terminating_on @property def termination_issued_on(self):", "activated_on(self): \"\"\"Gets the activated_on of this SubscriptionVersion. :return: The activated_on of this SubscriptionVersion.", "_ in six.iteritems(self.swagger_types): value = getattr(self, attr) if isinstance(value, list): result[attr] = list(map(", "of this SubscriptionVersion. :rtype: datetime \"\"\" return self._failed_on @failed_on.setter def failed_on(self, failed_on): \"\"\"Sets", "'terminated_on': 'datetime', 'terminating_on': 'datetime', 'termination_issued_on': 'datetime', 'version': 'int', } attribute_map = { 'activated_on':", "of this SubscriptionVersion. :type: datetime \"\"\" self._failed_on = failed_on @property def id(self): \"\"\"Gets", "this SubscriptionVersion. :rtype: datetime \"\"\" return self._planned_termination_date @planned_termination_date.setter def planned_termination_date(self, planned_termination_date): \"\"\"Sets the", "'language': 'str', 'linked_space_id': 'int', 'planned_purge_date': 'datetime', 'planned_termination_date': 'datetime', 'product_version': 'SubscriptionProductVersion', 'selected_components': 'list[SubscriptionProductComponent]', 'state':", "termination_issued_on of this SubscriptionVersion. :rtype: datetime \"\"\" return self._termination_issued_on @termination_issued_on.setter def termination_issued_on(self, termination_issued_on):", ":param component_configurations: The component_configurations of this SubscriptionVersion. :type: list[SubscriptionComponentConfiguration] \"\"\" self._component_configurations = component_configurations", "date is null the entity is not planned to be removed. :return: The", "of this SubscriptionVersion. :param planned_termination_date: The planned_termination_date of this SubscriptionVersion. :type: datetime \"\"\"", "return self._created_on @created_on.setter def created_on(self, created_on): \"\"\"Sets the created_on of this SubscriptionVersion. :param", "return self._id @id.setter def id(self, id): \"\"\"Sets the id of this SubscriptionVersion. The", "= dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], \"to_dict\") else item, value.items() ))", "self._selected_components @selected_components.setter def selected_components(self, selected_components): \"\"\"Sets the selected_components of this SubscriptionVersion. :param selected_components:", "the selected_components of this SubscriptionVersion. :param selected_components: The selected_components of this SubscriptionVersion. :type:", "in the billing currency. The billing currency has to be one of the", "def terminating_on(self): \"\"\"Gets the terminating_on of this SubscriptionVersion. :return: The terminating_on of this", "this SubscriptionVersion. The version number indicates the version of the entity. The version", "value in self.items(): result[key] = value return result def to_str(self): return pprint.pformat(self.to_dict()) def", "SubscriptionVersion. :rtype: datetime \"\"\" return self._terminating_on @terminating_on.setter def terminating_on(self, terminating_on): \"\"\"Sets the terminating_on", "SubscriptionVersion. :param activated_on: The activated_on of this SubscriptionVersion. 
:type: datetime \"\"\" self._activated_on =", "= selected_components @property def state(self): \"\"\"Gets the state of this SubscriptionVersion. :return: The", "SubscriptionVersion. :return: The state of this SubscriptionVersion. :rtype: SubscriptionVersionState \"\"\" return self._state @state.setter", "entity uniquely. :return: The id of this SubscriptionVersion. :rtype: int \"\"\" return self._id", "product_version of this SubscriptionVersion. :rtype: SubscriptionProductVersion \"\"\" return self._product_version @product_version.setter def product_version(self, product_version):", "selected_components @property def state(self): \"\"\"Gets the state of this SubscriptionVersion. :return: The state", "'linkedSpaceId','planned_purge_date': 'plannedPurgeDate','planned_termination_date': 'plannedTerminationDate','product_version': 'productVersion','selected_components': 'selectedComponents','state': 'state','subscription': 'subscription','terminated_on': 'terminatedOn','terminating_on': 'terminatingOn','termination_issued_on': 'terminationIssuedOn','version': 'version', } _activated_on", "entity is permanently removed. When the date is null the entity is not", "\"\"\"Gets the terminated_on of this SubscriptionVersion. :return: The terminated_on of this SubscriptionVersion. :rtype:", "billing_currency: The billing_currency of this SubscriptionVersion. :type: str \"\"\" self._billing_currency = billing_currency @property", "\"\"\" self._linked_space_id = linked_space_id @property def planned_purge_date(self): \"\"\"Gets the planned_purge_date of this SubscriptionVersion.", "removed. When the date is null the entity is not planned to be", ":param state: The state of this SubscriptionVersion. :type: SubscriptionVersionState \"\"\" self._state = state", "def product_version(self, product_version): \"\"\"Sets the product_version of this SubscriptionVersion. :param product_version: The product_version", "'linked_space_id': 'int', 'planned_purge_date': 'datetime', 'planned_termination_date': 'datetime', 'product_version': 'SubscriptionProductVersion', 'selected_components': 'list[SubscriptionProductComponent]', 'state': 'SubscriptionVersionState', 'subscription':", ":rtype: datetime \"\"\" return self._activated_on @activated_on.setter def activated_on(self, activated_on): \"\"\"Sets the activated_on of", "return self._state @state.setter def state(self, state): \"\"\"Sets the state of this SubscriptionVersion. :param", "kwargs.get('planned_termination_date', None) self.product_version = kwargs.get('product_version', None) self.selected_components = kwargs.get('selected_components', None) self.state = kwargs.get('state',", "def activated_on(self, activated_on): \"\"\"Sets the activated_on of this SubscriptionVersion. :param activated_on: The activated_on", "primary key of the entity. The ID identifies the entity uniquely. :return: The", "x, value )) elif hasattr(value, \"to_dict\"): result[attr] = value.to_dict() elif isinstance(value, dict): result[attr]", "the entity is not planned to be removed. :return: The planned_purge_date of this", "kwargs.get('version', None) @property def activated_on(self): \"\"\"Gets the activated_on of this SubscriptionVersion. :return: The", "The planned_purge_date of this SubscriptionVersion. :rtype: datetime \"\"\" return self._planned_purge_date @planned_purge_date.setter def planned_purge_date(self,", "planned_termination_date): \"\"\"Sets the planned_termination_date of this SubscriptionVersion. 
:param planned_termination_date: The planned_termination_date of this", "The ID is the primary key of the entity. The ID identifies the", "billing_currency of this SubscriptionVersion. :type: str \"\"\" self._billing_currency = billing_currency @property def component_configurations(self):", "'list[SubscriptionProductComponent]', 'state': 'SubscriptionVersionState', 'subscription': 'Subscription', 'terminated_on': 'datetime', 'terminating_on': 'datetime', 'termination_issued_on': 'datetime', 'version': 'int',", "activated_on of this SubscriptionVersion. :param activated_on: The activated_on of this SubscriptionVersion. :type: datetime", "'product_version': 'SubscriptionProductVersion', 'selected_components': 'list[SubscriptionProductComponent]', 'state': 'SubscriptionVersionState', 'subscription': 'Subscription', 'terminated_on': 'datetime', 'terminating_on': 'datetime', 'termination_issued_on':", "selected_components(self, selected_components): \"\"\"Sets the selected_components of this SubscriptionVersion. :param selected_components: The selected_components of", "return self._language @language.setter def language(self, language): \"\"\"Sets the language of this SubscriptionVersion. :param", "of this SubscriptionVersion. :param product_version: The product_version of this SubscriptionVersion. :type: SubscriptionProductVersion \"\"\"", "component_configurations: The component_configurations of this SubscriptionVersion. :type: list[SubscriptionComponentConfiguration] \"\"\" self._component_configurations = component_configurations @property", "self._linked_space_id = linked_space_id @property def planned_purge_date(self): \"\"\"Gets the planned_purge_date of this SubscriptionVersion. The", "\"\"\" return self._termination_issued_on @termination_issued_on.setter def termination_issued_on(self, termination_issued_on): \"\"\"Sets the termination_issued_on of this SubscriptionVersion.", ":return: The termination_issued_on of this SubscriptionVersion. :rtype: datetime \"\"\" return self._termination_issued_on @termination_issued_on.setter def", "SubscriptionVersion. :param component_configurations: The component_configurations of this SubscriptionVersion. :type: list[SubscriptionComponentConfiguration] \"\"\" self._component_configurations =", "None) self.component_configurations = kwargs.get('component_configurations', None) self.created_on = kwargs.get('created_on', None) self.expected_last_period_end = kwargs.get('expected_last_period_end', None)", ":rtype: datetime \"\"\" return self._created_on @created_on.setter def created_on(self, created_on): \"\"\"Sets the created_on of", "'version': 'int', } attribute_map = { 'activated_on': 'activatedOn','billing_currency': 'billingCurrency','component_configurations': 'componentConfigurations','created_on': 'createdOn','expected_last_period_end': 'expectedLastPeriodEnd','failed_on': 'failedOn','id':", ":param created_on: The created_on of this SubscriptionVersion. :type: datetime \"\"\" self._created_on = created_on", "\"\"\" return self._selected_components @selected_components.setter def selected_components(self, selected_components): \"\"\"Sets the selected_components of this SubscriptionVersion.", "@activated_on.setter def activated_on(self, activated_on): \"\"\"Sets the activated_on of this SubscriptionVersion. :param activated_on: The", "billing currency. 
The billing currency has to be one of the enabled currencies", ":type: datetime \"\"\" self._planned_termination_date = planned_termination_date @property def product_version(self): \"\"\"Gets the product_version of", "self._activated_on @activated_on.setter def activated_on(self, activated_on): \"\"\"Sets the activated_on of this SubscriptionVersion. :param activated_on:", "this SubscriptionVersion. The expected last period end is the date on which the", "return self._terminating_on @terminating_on.setter def terminating_on(self, terminating_on): \"\"\"Sets the terminating_on of this SubscriptionVersion. :param", "return self._expected_last_period_end @expected_last_period_end.setter def expected_last_period_end(self, expected_last_period_end): \"\"\"Sets the expected_last_period_end of this SubscriptionVersion. The", "def selected_components(self, selected_components): \"\"\"Sets the selected_components of this SubscriptionVersion. :param selected_components: The selected_components", "def planned_purge_date(self): \"\"\"Gets the planned_purge_date of this SubscriptionVersion. The planned purge date indicates", "@termination_issued_on.setter def termination_issued_on(self, termination_issued_on): \"\"\"Sets the termination_issued_on of this SubscriptionVersion. :param termination_issued_on: The", "this SubscriptionVersion. :type: SubscriptionVersionState \"\"\" self._state = state @property def subscription(self): \"\"\"Gets the", "@property def state(self): \"\"\"Gets the state of this SubscriptionVersion. :return: The state of", "version number indicates the version of the entity. The version is incremented whenever", "None _planned_termination_date = None _product_version = None _selected_components = None _state = None", "subscription): \"\"\"Sets the subscription of this SubscriptionVersion. :param subscription: The subscription of this", "SubscriptionVersion. :param termination_issued_on: The termination_issued_on of this SubscriptionVersion. :type: datetime \"\"\" self._termination_issued_on =", "The linked_space_id of this SubscriptionVersion. :type: int \"\"\" self._linked_space_id = linked_space_id @property def", "result[attr] = value.to_dict() elif isinstance(value, dict): result[attr] = dict(map( lambda item: (item[0], item[1].to_dict())", "= kwargs.get('expected_last_period_end', None) self.failed_on = kwargs.get('failed_on', None) self.id = kwargs.get('id', None) self.language =", "the language of this SubscriptionVersion. :param language: The language of this SubscriptionVersion. :type:", "SubscriptionVersion. :rtype: int \"\"\" return self._linked_space_id @linked_space_id.setter def linked_space_id(self, linked_space_id): \"\"\"Sets the linked_space_id", "this SubscriptionVersion. :type: datetime \"\"\" self._terminated_on = terminated_on @property def terminating_on(self): \"\"\"Gets the", "of this SubscriptionVersion. :type: str \"\"\" self._language = language @property def linked_space_id(self): \"\"\"Gets", "the ID of the space to which the entity belongs to. :return: The", "entity. The version is incremented whenever the entity is changed. :param version: The", "a projection and as such the actual date may be different. :param expected_last_period_end:", "_language = None _linked_space_id = None _planned_purge_date = None _planned_termination_date = None _product_version", "the selected_components of this SubscriptionVersion. :return: The selected_components of this SubscriptionVersion. 
:rtype: list[SubscriptionProductComponent]", "self._selected_components = selected_components @property def state(self): \"\"\"Gets the state of this SubscriptionVersion. :return:", "return self._billing_currency @billing_currency.setter def billing_currency(self, billing_currency): \"\"\"Sets the billing_currency of this SubscriptionVersion. The", "SubscriptionVersion. The version number indicates the version of the entity. The version is", "the actual date may be different. :return: The expected_last_period_end of this SubscriptionVersion. :rtype:", "SubscriptionVersion. :return: The failed_on of this SubscriptionVersion. :rtype: datetime \"\"\" return self._failed_on @failed_on.setter", "int \"\"\" return self._linked_space_id @linked_space_id.setter def linked_space_id(self, linked_space_id): \"\"\"Sets the linked_space_id of this", "if issubclass(SubscriptionVersion, dict): for key, value in self.items(): result[key] = value return result", "termination_issued_on(self, termination_issued_on): \"\"\"Sets the termination_issued_on of this SubscriptionVersion. :param termination_issued_on: The termination_issued_on of", "= { 'activated_on': 'activatedOn','billing_currency': 'billingCurrency','component_configurations': 'componentConfigurations','created_on': 'createdOn','expected_last_period_end': 'expectedLastPeriodEnd','failed_on': 'failedOn','id': 'id','language': 'language','linked_space_id': 'linkedSpaceId','planned_purge_date': 'plannedPurgeDate','planned_termination_date':", "__eq__(self, other): if not isinstance(other, SubscriptionVersion): return False return self.__dict__ == other.__dict__ def", "of this SubscriptionVersion. :rtype: int \"\"\" return self._id @id.setter def id(self, id): \"\"\"Sets", "date on which the projected end date of the last period is. This", "self.activated_on = kwargs.get('activated_on', None) self.billing_currency = kwargs.get('billing_currency', None) self.component_configurations = kwargs.get('component_configurations', None) self.created_on", "def product_version(self): \"\"\"Gets the product_version of this SubscriptionVersion. :return: The product_version of this", "termination_issued_on(self): \"\"\"Gets the termination_issued_on of this SubscriptionVersion. :return: The termination_issued_on of this SubscriptionVersion.", "component_configurations(self): \"\"\"Gets the component_configurations of this SubscriptionVersion. :return: The component_configurations of this SubscriptionVersion.", "belongs to. :param linked_space_id: The linked_space_id of this SubscriptionVersion. :type: int \"\"\" self._linked_space_id", "self._state = state @property def subscription(self): \"\"\"Gets the subscription of this SubscriptionVersion. :return:", "'datetime', 'termination_issued_on': 'datetime', 'version': 'int', } attribute_map = { 'activated_on': 'activatedOn','billing_currency': 'billingCurrency','component_configurations': 'componentConfigurations','created_on':", "None) self.state = kwargs.get('state', None) self.subscription = kwargs.get('subscription', None) self.terminated_on = kwargs.get('terminated_on', None)", "\"\"\"Gets the billing_currency of this SubscriptionVersion. The subscriber is charged in the billing", "The created_on of this SubscriptionVersion. 
:rtype: datetime \"\"\" return self._created_on @created_on.setter def created_on(self,", "= None _product_version = None _selected_components = None _state = None _subscription =", "_planned_purge_date = None _planned_termination_date = None _product_version = None _selected_components = None _state", "this SubscriptionVersion. :type: list[SubscriptionComponentConfiguration] \"\"\" self._component_configurations = component_configurations @property def created_on(self): \"\"\"Gets the", "not isinstance(other, SubscriptionVersion): return False return self.__dict__ == other.__dict__ def __ne__(self, other): return", "currencies on the subscription product. :param billing_currency: The billing_currency of this SubscriptionVersion. :type:", "of this SubscriptionVersion. :return: The component_configurations of this SubscriptionVersion. :rtype: list[SubscriptionComponentConfiguration] \"\"\" return", "result[key] = value return result def to_str(self): return pprint.pformat(self.to_dict()) def __repr__(self): return self.to_str()", "\"\"\"Gets the planned_termination_date of this SubscriptionVersion. :return: The planned_termination_date of this SubscriptionVersion. :rtype:", "terminating_on: The terminating_on of this SubscriptionVersion. :type: datetime \"\"\" self._terminating_on = terminating_on @property", "The component_configurations of this SubscriptionVersion. :type: list[SubscriptionComponentConfiguration] \"\"\" self._component_configurations = component_configurations @property def", "def billing_currency(self): \"\"\"Gets the billing_currency of this SubscriptionVersion. The subscriber is charged in", "self.planned_termination_date = kwargs.get('planned_termination_date', None) self.product_version = kwargs.get('product_version', None) self.selected_components = kwargs.get('selected_components', None) self.state", "this SubscriptionVersion. :rtype: datetime \"\"\" return self._terminating_on @terminating_on.setter def terminating_on(self, terminating_on): \"\"\"Sets the", "def termination_issued_on(self): \"\"\"Gets the termination_issued_on of this SubscriptionVersion. :return: The termination_issued_on of this", "subscription(self, subscription): \"\"\"Sets the subscription of this SubscriptionVersion. :param subscription: The subscription of", "selected_components of this SubscriptionVersion. :return: The selected_components of this SubscriptionVersion. :rtype: list[SubscriptionProductComponent] \"\"\"", "terminated_on: The terminated_on of this SubscriptionVersion. :type: datetime \"\"\" self._terminated_on = terminated_on @property", "kwargs.get('selected_components', None) self.state = kwargs.get('state', None) self.subscription = kwargs.get('subscription', None) self.terminated_on = kwargs.get('terminated_on',", ":return: The failed_on of this SubscriptionVersion. :rtype: datetime \"\"\" return self._failed_on @failed_on.setter def", "linked_space_id @property def planned_purge_date(self): \"\"\"Gets the planned_purge_date of this SubscriptionVersion. The planned purge", "@property def subscription(self): \"\"\"Gets the subscription of this SubscriptionVersion. :return: The subscription of", "the terminated_on of this SubscriptionVersion. :return: The terminated_on of this SubscriptionVersion. :rtype: datetime", "SubscriptionVersion. 
:type: datetime \"\"\" self._planned_termination_date = planned_termination_date @property def product_version(self): \"\"\"Gets the product_version", "int \"\"\" self._version = version def to_dict(self): result = {} for attr, _", "class SubscriptionVersion: swagger_types = { 'activated_on': 'datetime', 'billing_currency': 'str', 'component_configurations': 'list[SubscriptionComponentConfiguration]', 'created_on': 'datetime',", "\"\"\"Gets the terminating_on of this SubscriptionVersion. :return: The terminating_on of this SubscriptionVersion. :rtype:", "this SubscriptionVersion. :rtype: datetime \"\"\" return self._expected_last_period_end @expected_last_period_end.setter def expected_last_period_end(self, expected_last_period_end): \"\"\"Sets the", "failed_on of this SubscriptionVersion. :type: datetime \"\"\" self._failed_on = failed_on @property def id(self):", "= None _expected_last_period_end = None _failed_on = None _id = None _language =", "'activated_on': 'activatedOn','billing_currency': 'billingCurrency','component_configurations': 'componentConfigurations','created_on': 'createdOn','expected_last_period_end': 'expectedLastPeriodEnd','failed_on': 'failedOn','id': 'id','language': 'language','linked_space_id': 'linkedSpaceId','planned_purge_date': 'plannedPurgeDate','planned_termination_date': 'plannedTerminationDate','product_version': 'productVersion','selected_components':", "None _state = None _subscription = None _terminated_on = None _terminating_on = None", "_billing_currency = None _component_configurations = None _created_on = None _expected_last_period_end = None _failed_on", "_subscription = None _terminated_on = None _terminating_on = None _termination_issued_on = None _version", "ID is the primary key of the entity. The ID identifies the entity", "language @property def linked_space_id(self): \"\"\"Gets the linked_space_id of this SubscriptionVersion. The linked space", "The failed_on of this SubscriptionVersion. :rtype: datetime \"\"\" return self._failed_on @failed_on.setter def failed_on(self,", "be one of the enabled currencies on the subscription product. :return: The billing_currency", "\"\"\" self._version = version def to_dict(self): result = {} for attr, _ in", "None) self.product_version = kwargs.get('product_version', None) self.selected_components = kwargs.get('selected_components', None) self.state = kwargs.get('state', None)", "other): if not isinstance(other, SubscriptionVersion): return False return self.__dict__ == other.__dict__ def __ne__(self,", "this SubscriptionVersion. :rtype: datetime \"\"\" return self._termination_issued_on @termination_issued_on.setter def termination_issued_on(self, termination_issued_on): \"\"\"Sets the", "SubscriptionVersion. :type: datetime \"\"\" self._terminating_on = terminating_on @property def termination_issued_on(self): \"\"\"Gets the termination_issued_on", "\"\"\"Sets the failed_on of this SubscriptionVersion. :param failed_on: The failed_on of this SubscriptionVersion.", "The planned_purge_date of this SubscriptionVersion. :type: datetime \"\"\" self._planned_purge_date = planned_purge_date @property def", "utf-8 import pprint import six from enum import Enum class SubscriptionVersion: swagger_types =", "When the date is null the entity is not planned to be removed.", "= created_on @property def expected_last_period_end(self): \"\"\"Gets the expected_last_period_end of this SubscriptionVersion. 
The expected", "self.linked_space_id = kwargs.get('linked_space_id', None) self.planned_purge_date = kwargs.get('planned_purge_date', None) self.planned_termination_date = kwargs.get('planned_termination_date', None) self.product_version", "None) self.id = kwargs.get('id', None) self.language = kwargs.get('language', None) self.linked_space_id = kwargs.get('linked_space_id', None)", "expected_last_period_end of this SubscriptionVersion. The expected last period end is the date on", "of this SubscriptionVersion. :rtype: datetime \"\"\" return self._created_on @created_on.setter def created_on(self, created_on): \"\"\"Sets", "SubscriptionVersion. The planned purge date indicates when the entity is permanently removed. When", "the entity. The version is incremented whenever the entity is changed. :param version:", "\"\"\"Sets the expected_last_period_end of this SubscriptionVersion. The expected last period end is the", "\"\"\" self._terminating_on = terminating_on @property def termination_issued_on(self): \"\"\"Gets the termination_issued_on of this SubscriptionVersion.", "SubscriptionVersion. :type: datetime \"\"\" self._terminated_on = terminated_on @property def terminating_on(self): \"\"\"Gets the terminating_on", "the activated_on of this SubscriptionVersion. :return: The activated_on of this SubscriptionVersion. :rtype: datetime", "this SubscriptionVersion. :type: datetime \"\"\" self._expected_last_period_end = expected_last_period_end @property def failed_on(self): \"\"\"Gets the", "The version is incremented whenever the entity is changed. :return: The version of", "identifies the entity uniquely. :param id: The id of this SubscriptionVersion. :type: int", "version(self, version): \"\"\"Sets the version of this SubscriptionVersion. The version number indicates the", "@expected_last_period_end.setter def expected_last_period_end(self, expected_last_period_end): \"\"\"Sets the expected_last_period_end of this SubscriptionVersion. The expected last", "of this SubscriptionVersion. :type: str \"\"\" self._billing_currency = billing_currency @property def component_configurations(self): \"\"\"Gets", "} _activated_on = None _billing_currency = None _component_configurations = None _created_on = None", "'language','linked_space_id': 'linkedSpaceId','planned_purge_date': 'plannedPurgeDate','planned_termination_date': 'plannedTerminationDate','product_version': 'productVersion','selected_components': 'selectedComponents','state': 'state','subscription': 'subscription','terminated_on': 'terminatedOn','terminating_on': 'terminatingOn','termination_issued_on': 'terminationIssuedOn','version': 'version', }", "the version of the entity. The version is incremented whenever the entity is", "this SubscriptionVersion. :return: The subscription of this SubscriptionVersion. :rtype: Subscription \"\"\" return self._subscription", "of this SubscriptionVersion. :return: The terminated_on of this SubscriptionVersion. :rtype: datetime \"\"\" return", "this SubscriptionVersion. :param activated_on: The activated_on of this SubscriptionVersion. :type: datetime \"\"\" self._activated_on", "to be removed. :return: The planned_purge_date of this SubscriptionVersion. :rtype: datetime \"\"\" return", "'planned_termination_date': 'datetime', 'product_version': 'SubscriptionProductVersion', 'selected_components': 'list[SubscriptionProductComponent]', 'state': 'SubscriptionVersionState', 'subscription': 'Subscription', 'terminated_on': 'datetime', 'terminating_on':", "product. 
:param billing_currency: The billing_currency of this SubscriptionVersion. :type: str \"\"\" self._billing_currency =", "datetime \"\"\" self._terminated_on = terminated_on @property def terminating_on(self): \"\"\"Gets the terminating_on of this", "= planned_termination_date @property def product_version(self): \"\"\"Gets the product_version of this SubscriptionVersion. :return: The", "this SubscriptionVersion. :return: The state of this SubscriptionVersion. :rtype: SubscriptionVersionState \"\"\" return self._state", "'plannedTerminationDate','product_version': 'productVersion','selected_components': 'selectedComponents','state': 'state','subscription': 'subscription','terminated_on': 'terminatedOn','terminating_on': 'terminatingOn','termination_issued_on': 'terminationIssuedOn','version': 'version', } _activated_on = None", "this SubscriptionVersion. :param product_version: The product_version of this SubscriptionVersion. :type: SubscriptionProductVersion \"\"\" self._product_version", "version of the entity. The version is incremented whenever the entity is changed.", "identifies the entity uniquely. :return: The id of this SubscriptionVersion. :rtype: int \"\"\"", "def billing_currency(self, billing_currency): \"\"\"Sets the billing_currency of this SubscriptionVersion. The subscriber is charged", "of this SubscriptionVersion. :type: SubscriptionProductVersion \"\"\" self._product_version = product_version @property def selected_components(self): \"\"\"Gets", "The version number indicates the version of the entity. The version is incremented", "ID identifies the entity uniquely. :return: The id of this SubscriptionVersion. :rtype: int", "SubscriptionVersion. :rtype: list[SubscriptionProductComponent] \"\"\" return self._selected_components @selected_components.setter def selected_components(self, selected_components): \"\"\"Sets the selected_components", "of this SubscriptionVersion. :type: datetime \"\"\" self._terminated_on = terminated_on @property def terminating_on(self): \"\"\"Gets", "the actual date may be different. :param expected_last_period_end: The expected_last_period_end of this SubscriptionVersion.", "None) self.failed_on = kwargs.get('failed_on', None) self.id = kwargs.get('id', None) self.language = kwargs.get('language', None)", "enum import Enum class SubscriptionVersion: swagger_types = { 'activated_on': 'datetime', 'billing_currency': 'str', 'component_configurations':", "\"\"\"Sets the language of this SubscriptionVersion. :param language: The language of this SubscriptionVersion.", "selected_components of this SubscriptionVersion. :param selected_components: The selected_components of this SubscriptionVersion. :type: list[SubscriptionProductComponent]", "to_dict(self): result = {} for attr, _ in six.iteritems(self.swagger_types): value = getattr(self, attr)", "None def __init__(self, **kwargs): self.discriminator = None self.activated_on = kwargs.get('activated_on', None) self.billing_currency =", ":param terminated_on: The terminated_on of this SubscriptionVersion. :type: datetime \"\"\" self._terminated_on = terminated_on", "The subscriber is charged in the billing currency. The billing currency has to", "The terminating_on of this SubscriptionVersion. :rtype: datetime \"\"\" return self._terminating_on @terminating_on.setter def terminating_on(self,", "SubscriptionVersion. :param terminated_on: The terminated_on of this SubscriptionVersion. :type: datetime \"\"\" self._terminated_on =", "date may be different. 
:return: The expected_last_period_end of this SubscriptionVersion. :rtype: datetime \"\"\"", "currency. The billing currency has to be one of the enabled currencies on", "def state(self, state): \"\"\"Sets the state of this SubscriptionVersion. :param state: The state", "of this SubscriptionVersion. :return: The state of this SubscriptionVersion. :rtype: SubscriptionVersionState \"\"\" return", "kwargs.get('created_on', None) self.expected_last_period_end = kwargs.get('expected_last_period_end', None) self.failed_on = kwargs.get('failed_on', None) self.id = kwargs.get('id',", "None _id = None _language = None _linked_space_id = None _planned_purge_date = None", "projection and as such the actual date may be different. :param expected_last_period_end: The", "'activatedOn','billing_currency': 'billingCurrency','component_configurations': 'componentConfigurations','created_on': 'createdOn','expected_last_period_end': 'expectedLastPeriodEnd','failed_on': 'failedOn','id': 'id','language': 'language','linked_space_id': 'linkedSpaceId','planned_purge_date': 'plannedPurgeDate','planned_termination_date': 'plannedTerminationDate','product_version': 'productVersion','selected_components': 'selectedComponents','state':", "the enabled currencies on the subscription product. :return: The billing_currency of this SubscriptionVersion.", "terminating_on): \"\"\"Sets the terminating_on of this SubscriptionVersion. :param terminating_on: The terminating_on of this", "@terminated_on.setter def terminated_on(self, terminated_on): \"\"\"Sets the terminated_on of this SubscriptionVersion. :param terminated_on: The", ":type: SubscriptionProductVersion \"\"\" self._product_version = product_version @property def selected_components(self): \"\"\"Gets the selected_components of", "The planned purge date indicates when the entity is permanently removed. When the", "\"\"\" return self._state @state.setter def state(self, state): \"\"\"Sets the state of this SubscriptionVersion.", "self._language = language @property def linked_space_id(self): \"\"\"Gets the linked_space_id of this SubscriptionVersion. The", "this SubscriptionVersion. :param planned_termination_date: The planned_termination_date of this SubscriptionVersion. :type: datetime \"\"\" self._planned_termination_date", "The terminated_on of this SubscriptionVersion. :type: datetime \"\"\" self._terminated_on = terminated_on @property def", "projected end date of the last period is. This is only a projection", "SubscriptionVersion. :type: int \"\"\" self._id = id @property def language(self): \"\"\"Gets the language", "the planned_purge_date of this SubscriptionVersion. The planned purge date indicates when the entity", "'terminating_on': 'datetime', 'termination_issued_on': 'datetime', 'version': 'int', } attribute_map = { 'activated_on': 'activatedOn','billing_currency': 'billingCurrency','component_configurations':", "subscription @property def terminated_on(self): \"\"\"Gets the terminated_on of this SubscriptionVersion. :return: The terminated_on", "else item, value.items() )) elif isinstance(value, Enum): result[attr] = value.value else: result[attr] =", "subscription(self): \"\"\"Gets the subscription of this SubscriptionVersion. 
:return: The subscription of this SubscriptionVersion.", "\"\"\" return self._version @version.setter def version(self, version): \"\"\"Sets the version of this SubscriptionVersion.", "\"\"\" self._created_on = created_on @property def expected_last_period_end(self): \"\"\"Gets the expected_last_period_end of this SubscriptionVersion.", "whenever the entity is changed. :param version: The version of this SubscriptionVersion. :type:", "\"\"\" return self._planned_termination_date @planned_termination_date.setter def planned_termination_date(self, planned_termination_date): \"\"\"Sets the planned_termination_date of this SubscriptionVersion.", "= None _component_configurations = None _created_on = None _expected_last_period_end = None _failed_on =", "def subscription(self): \"\"\"Gets the subscription of this SubscriptionVersion. :return: The subscription of this", "product_version): \"\"\"Sets the product_version of this SubscriptionVersion. :param product_version: The product_version of this", "the failed_on of this SubscriptionVersion. :return: The failed_on of this SubscriptionVersion. :rtype: datetime", "kwargs.get('failed_on', None) self.id = kwargs.get('id', None) self.language = kwargs.get('language', None) self.linked_space_id = kwargs.get('linked_space_id',", "of this SubscriptionVersion. :rtype: datetime \"\"\" return self._planned_termination_date @planned_termination_date.setter def planned_termination_date(self, planned_termination_date): \"\"\"Sets", "date is null the entity is not planned to be removed. :param planned_purge_date:", "planned_termination_date(self, planned_termination_date): \"\"\"Sets the planned_termination_date of this SubscriptionVersion. :param planned_termination_date: The planned_termination_date of", "@planned_purge_date.setter def planned_purge_date(self, planned_purge_date): \"\"\"Sets the planned_purge_date of this SubscriptionVersion. The planned purge", "key of the entity. The ID identifies the entity uniquely. :param id: The", "SubscriptionVersion. :type: int \"\"\" self._version = version def to_dict(self): result = {} for", "def component_configurations(self, component_configurations): \"\"\"Sets the component_configurations of this SubscriptionVersion. :param component_configurations: The component_configurations", "this SubscriptionVersion. :return: The created_on of this SubscriptionVersion. :rtype: datetime \"\"\" return self._created_on", "created_on(self): \"\"\"Gets the created_on of this SubscriptionVersion. :return: The created_on of this SubscriptionVersion.", "int \"\"\" return self._id @id.setter def id(self, id): \"\"\"Sets the id of this", "this SubscriptionVersion. The ID is the primary key of the entity. The ID", "the entity is not planned to be removed. :param planned_purge_date: The planned_purge_date of", "dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], \"to_dict\") else item, value.items() )) elif", "language(self): \"\"\"Gets the language of this SubscriptionVersion. :return: The language of this SubscriptionVersion.", "planned_purge_date of this SubscriptionVersion. :type: datetime \"\"\" self._planned_purge_date = planned_purge_date @property def planned_termination_date(self):", "is incremented whenever the entity is changed. :return: The version of this SubscriptionVersion.", "def failed_on(self, failed_on): \"\"\"Sets the failed_on of this SubscriptionVersion. 
:param failed_on: The failed_on", "@selected_components.setter def selected_components(self, selected_components): \"\"\"Sets the selected_components of this SubscriptionVersion. :param selected_components: The", "= {} for attr, _ in six.iteritems(self.swagger_types): value = getattr(self, attr) if isinstance(value,", "self._version @version.setter def version(self, version): \"\"\"Sets the version of this SubscriptionVersion. The version", "self.component_configurations = kwargs.get('component_configurations', None) self.created_on = kwargs.get('created_on', None) self.expected_last_period_end = kwargs.get('expected_last_period_end', None) self.failed_on", "projection and as such the actual date may be different. :return: The expected_last_period_end", "\"\"\" self._termination_issued_on = termination_issued_on @property def version(self): \"\"\"Gets the version of this SubscriptionVersion.", "not planned to be removed. :return: The planned_purge_date of this SubscriptionVersion. :rtype: datetime", "language of this SubscriptionVersion. :return: The language of this SubscriptionVersion. :rtype: str \"\"\"", "planned_purge_date of this SubscriptionVersion. :rtype: datetime \"\"\" return self._planned_purge_date @planned_purge_date.setter def planned_purge_date(self, planned_purge_date):", "'component_configurations': 'list[SubscriptionComponentConfiguration]', 'created_on': 'datetime', 'expected_last_period_end': 'datetime', 'failed_on': 'datetime', 'id': 'int', 'language': 'str', 'linked_space_id':", "datetime \"\"\" return self._termination_issued_on @termination_issued_on.setter def termination_issued_on(self, termination_issued_on): \"\"\"Sets the termination_issued_on of this", "created_on of this SubscriptionVersion. :return: The created_on of this SubscriptionVersion. :rtype: datetime \"\"\"", "subscription: The subscription of this SubscriptionVersion. :type: Subscription \"\"\" self._subscription = subscription @property", "the space to which the entity belongs to. :param linked_space_id: The linked_space_id of", "planned_termination_date @property def product_version(self): \"\"\"Gets the product_version of this SubscriptionVersion. :return: The product_version", "SubscriptionVersion): return False return self.__dict__ == other.__dict__ def __ne__(self, other): return not self", "of this SubscriptionVersion. :return: The activated_on of this SubscriptionVersion. :rtype: datetime \"\"\" return", "@language.setter def language(self, language): \"\"\"Sets the language of this SubscriptionVersion. 
:param language: The", "SubscriptionProductVersion \"\"\" self._product_version = product_version @property def selected_components(self): \"\"\"Gets the selected_components of this", "'state','subscription': 'subscription','terminated_on': 'terminatedOn','terminating_on': 'terminatingOn','termination_issued_on': 'terminationIssuedOn','version': 'version', } _activated_on = None _billing_currency = None", "key, value in self.items(): result[key] = value return result def to_str(self): return pprint.pformat(self.to_dict())", "'datetime', 'version': 'int', } attribute_map = { 'activated_on': 'activatedOn','billing_currency': 'billingCurrency','component_configurations': 'componentConfigurations','created_on': 'createdOn','expected_last_period_end': 'expectedLastPeriodEnd','failed_on':", ":rtype: datetime \"\"\" return self._planned_termination_date @planned_termination_date.setter def planned_termination_date(self, planned_termination_date): \"\"\"Sets the planned_termination_date of", "= planned_purge_date @property def planned_termination_date(self): \"\"\"Gets the planned_termination_date of this SubscriptionVersion. :return: The", "The billing currency has to be one of the enabled currencies on the", "list): result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, \"to_dict\") else x, value", "'planned_purge_date': 'datetime', 'planned_termination_date': 'datetime', 'product_version': 'SubscriptionProductVersion', 'selected_components': 'list[SubscriptionProductComponent]', 'state': 'SubscriptionVersionState', 'subscription': 'Subscription', 'terminated_on':", "the planned_termination_date of this SubscriptionVersion. :return: The planned_termination_date of this SubscriptionVersion. :rtype: datetime", "= kwargs.get('state', None) self.subscription = kwargs.get('subscription', None) self.terminated_on = kwargs.get('terminated_on', None) self.terminating_on =", "the terminated_on of this SubscriptionVersion. :param terminated_on: The terminated_on of this SubscriptionVersion. :type:", "@property def activated_on(self): \"\"\"Gets the activated_on of this SubscriptionVersion. :return: The activated_on of", "component_configurations): \"\"\"Sets the component_configurations of this SubscriptionVersion. :param component_configurations: The component_configurations of this", "of the entity. The ID identifies the entity uniquely. :param id: The id", "= activated_on @property def billing_currency(self): \"\"\"Gets the billing_currency of this SubscriptionVersion. The subscriber", "_state = None _subscription = None _terminated_on = None _terminating_on = None _termination_issued_on", "is changed. :return: The version of this SubscriptionVersion. :rtype: int \"\"\" return self._version", "this SubscriptionVersion. :rtype: int \"\"\" return self._version @version.setter def version(self, version): \"\"\"Sets the", "def to_str(self): return pprint.pformat(self.to_dict()) def __repr__(self): return self.to_str() def __eq__(self, other): if not", "whenever the entity is changed. :return: The version of this SubscriptionVersion. :rtype: int", "activated_on: The activated_on of this SubscriptionVersion. :type: datetime \"\"\" self._activated_on = activated_on @property", "\"\"\"Gets the id of this SubscriptionVersion. 
The ID is the primary key of", "'failedOn','id': 'id','language': 'language','linked_space_id': 'linkedSpaceId','planned_purge_date': 'plannedPurgeDate','planned_termination_date': 'plannedTerminationDate','product_version': 'productVersion','selected_components': 'selectedComponents','state': 'state','subscription': 'subscription','terminated_on': 'terminatedOn','terminating_on': 'terminatingOn','termination_issued_on': 'terminationIssuedOn','version':", ":type: datetime \"\"\" self._terminating_on = terminating_on @property def termination_issued_on(self): \"\"\"Gets the termination_issued_on of", "The planned_termination_date of this SubscriptionVersion. :rtype: datetime \"\"\" return self._planned_termination_date @planned_termination_date.setter def planned_termination_date(self,", "component_configurations of this SubscriptionVersion. :return: The component_configurations of this SubscriptionVersion. :rtype: list[SubscriptionComponentConfiguration] \"\"\"", "created_on): \"\"\"Sets the created_on of this SubscriptionVersion. :param created_on: The created_on of this", "the failed_on of this SubscriptionVersion. :param failed_on: The failed_on of this SubscriptionVersion. :type:", "entity is not planned to be removed. :param planned_purge_date: The planned_purge_date of this", "terminated_on of this SubscriptionVersion. :return: The terminated_on of this SubscriptionVersion. :rtype: datetime \"\"\"", "The activated_on of this SubscriptionVersion. :rtype: datetime \"\"\" return self._activated_on @activated_on.setter def activated_on(self,", "language of this SubscriptionVersion. :type: str \"\"\" self._language = language @property def linked_space_id(self):", "self._terminating_on = terminating_on @property def termination_issued_on(self): \"\"\"Gets the termination_issued_on of this SubscriptionVersion. :return:", "self._expected_last_period_end @expected_last_period_end.setter def expected_last_period_end(self, expected_last_period_end): \"\"\"Sets the expected_last_period_end of this SubscriptionVersion. The expected", "= state @property def subscription(self): \"\"\"Gets the subscription of this SubscriptionVersion. :return: The", "lambda x: x.to_dict() if hasattr(x, \"to_dict\") else x, value )) elif hasattr(value, \"to_dict\"):", "\"\"\" self._terminated_on = terminated_on @property def terminating_on(self): \"\"\"Gets the terminating_on of this SubscriptionVersion.", "language): \"\"\"Sets the language of this SubscriptionVersion. :param language: The language of this", "the entity is changed. :param version: The version of this SubscriptionVersion. :type: int", "\"\"\"Gets the linked_space_id of this SubscriptionVersion. The linked space id holds the ID", "actual date may be different. :param expected_last_period_end: The expected_last_period_end of this SubscriptionVersion. :type:", "The created_on of this SubscriptionVersion. :type: datetime \"\"\" self._created_on = created_on @property def", "@property def planned_purge_date(self): \"\"\"Gets the planned_purge_date of this SubscriptionVersion. The planned purge date", "\"\"\" self._billing_currency = billing_currency @property def component_configurations(self): \"\"\"Gets the component_configurations of this SubscriptionVersion.", "self._planned_purge_date @planned_purge_date.setter def planned_purge_date(self, planned_purge_date): \"\"\"Sets the planned_purge_date of this SubscriptionVersion. The planned", "terminating_on of this SubscriptionVersion. :param terminating_on: The terminating_on of this SubscriptionVersion. 
:type: datetime", "state of this SubscriptionVersion. :param state: The state of this SubscriptionVersion. :type: SubscriptionVersionState", "linked space id holds the ID of the space to which the entity", "_selected_components = None _state = None _subscription = None _terminated_on = None _terminating_on", "The failed_on of this SubscriptionVersion. :type: datetime \"\"\" self._failed_on = failed_on @property def", "of this SubscriptionVersion. The subscriber is charged in the billing currency. The billing", ":return: The created_on of this SubscriptionVersion. :rtype: datetime \"\"\" return self._created_on @created_on.setter def", "\"\"\"Sets the planned_purge_date of this SubscriptionVersion. The planned purge date indicates when the", "= None _language = None _linked_space_id = None _planned_purge_date = None _planned_termination_date =", "linked_space_id of this SubscriptionVersion. The linked space id holds the ID of the", "the created_on of this SubscriptionVersion. :return: The created_on of this SubscriptionVersion. :rtype: datetime", "= kwargs.get('billing_currency', None) self.component_configurations = kwargs.get('component_configurations', None) self.created_on = kwargs.get('created_on', None) self.expected_last_period_end =", "this SubscriptionVersion. The planned purge date indicates when the entity is permanently removed.", "termination_issued_on @property def version(self): \"\"\"Gets the version of this SubscriptionVersion. The version number", "of this SubscriptionVersion. :return: The planned_termination_date of this SubscriptionVersion. :rtype: datetime \"\"\" return", "SubscriptionVersion. :return: The product_version of this SubscriptionVersion. :rtype: SubscriptionProductVersion \"\"\" return self._product_version @product_version.setter", "elif hasattr(value, \"to_dict\"): result[attr] = value.to_dict() elif isinstance(value, dict): result[attr] = dict(map( lambda", "issubclass(SubscriptionVersion, dict): for key, value in self.items(): result[key] = value return result def", "item: (item[0], item[1].to_dict()) if hasattr(item[1], \"to_dict\") else item, value.items() )) elif isinstance(value, Enum):", "datetime \"\"\" self._termination_issued_on = termination_issued_on @property def version(self): \"\"\"Gets the version of this", "result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, \"to_dict\") else x, value ))", "result[attr] = value.value else: result[attr] = value if issubclass(SubscriptionVersion, dict): for key, value", "terminating_on of this SubscriptionVersion. :return: The terminating_on of this SubscriptionVersion. :rtype: datetime \"\"\"", "self._product_version @product_version.setter def product_version(self, product_version): \"\"\"Sets the product_version of this SubscriptionVersion. :param product_version:", "space to which the entity belongs to. :param linked_space_id: The linked_space_id of this", "None _terminating_on = None _termination_issued_on = None _version = None def __init__(self, **kwargs):", "= None _version = None def __init__(self, **kwargs): self.discriminator = None self.activated_on =", "None) self.planned_purge_date = kwargs.get('planned_purge_date', None) self.planned_termination_date = kwargs.get('planned_termination_date', None) self.product_version = kwargs.get('product_version', None)", "\"\"\" return self._product_version @product_version.setter def product_version(self, product_version): \"\"\"Sets the product_version of this SubscriptionVersion.", "of the entity. 
The version is incremented whenever the entity is changed. :return:", "null the entity is not planned to be removed. :param planned_purge_date: The planned_purge_date", "this SubscriptionVersion. :return: The failed_on of this SubscriptionVersion. :rtype: datetime \"\"\" return self._failed_on", "\"\"\" self._product_version = product_version @property def selected_components(self): \"\"\"Gets the selected_components of this SubscriptionVersion.", "version of this SubscriptionVersion. The version number indicates the version of the entity.", "return self._linked_space_id @linked_space_id.setter def linked_space_id(self, linked_space_id): \"\"\"Sets the linked_space_id of this SubscriptionVersion. The", ":rtype: int \"\"\" return self._linked_space_id @linked_space_id.setter def linked_space_id(self, linked_space_id): \"\"\"Sets the linked_space_id of", "the id of this SubscriptionVersion. The ID is the primary key of the", "which the entity belongs to. :return: The linked_space_id of this SubscriptionVersion. :rtype: int", "terminated_on of this SubscriptionVersion. :rtype: datetime \"\"\" return self._terminated_on @terminated_on.setter def terminated_on(self, terminated_on):", "is null the entity is not planned to be removed. :param planned_purge_date: The", "if hasattr(item[1], \"to_dict\") else item, value.items() )) elif isinstance(value, Enum): result[attr] = value.value", "billing_currency @property def component_configurations(self): \"\"\"Gets the component_configurations of this SubscriptionVersion. :return: The component_configurations", "self._id = id @property def language(self): \"\"\"Gets the language of this SubscriptionVersion. :return:", "this SubscriptionVersion. :rtype: SubscriptionProductVersion \"\"\" return self._product_version @product_version.setter def product_version(self, product_version): \"\"\"Sets the", ":type: SubscriptionVersionState \"\"\" self._state = state @property def subscription(self): \"\"\"Gets the subscription of", "SubscriptionVersion. :type: SubscriptionProductVersion \"\"\" self._product_version = product_version @property def selected_components(self): \"\"\"Gets the selected_components", "self._linked_space_id @linked_space_id.setter def linked_space_id(self, linked_space_id): \"\"\"Sets the linked_space_id of this SubscriptionVersion. The linked", "def linked_space_id(self): \"\"\"Gets the linked_space_id of this SubscriptionVersion. The linked space id holds", "activated_on): \"\"\"Sets the activated_on of this SubscriptionVersion. :param activated_on: The activated_on of this", "= kwargs.get('created_on', None) self.expected_last_period_end = kwargs.get('expected_last_period_end', None) self.failed_on = kwargs.get('failed_on', None) self.id =", "None) self.created_on = kwargs.get('created_on', None) self.expected_last_period_end = kwargs.get('expected_last_period_end', None) self.failed_on = kwargs.get('failed_on', None)", "(item[0], item[1].to_dict()) if hasattr(item[1], \"to_dict\") else item, value.items() )) elif isinstance(value, Enum): result[attr]", "date may be different. :param expected_last_period_end: The expected_last_period_end of this SubscriptionVersion. 
:type: datetime", "\"\"\" return self._planned_purge_date @planned_purge_date.setter def planned_purge_date(self, planned_purge_date): \"\"\"Sets the planned_purge_date of this SubscriptionVersion.", "list[SubscriptionComponentConfiguration] \"\"\" self._component_configurations = component_configurations @property def created_on(self): \"\"\"Gets the created_on of this", "is changed. :param version: The version of this SubscriptionVersion. :type: int \"\"\" self._version", "The state of this SubscriptionVersion. :rtype: SubscriptionVersionState \"\"\" return self._state @state.setter def state(self,", "subscriber is charged in the billing currency. The billing currency has to be", "selected_components of this SubscriptionVersion. :rtype: list[SubscriptionProductComponent] \"\"\" return self._selected_components @selected_components.setter def selected_components(self, selected_components):", "self._version = version def to_dict(self): result = {} for attr, _ in six.iteritems(self.swagger_types):", "expected_last_period_end: The expected_last_period_end of this SubscriptionVersion. :type: datetime \"\"\" self._expected_last_period_end = expected_last_period_end @property", "component_configurations of this SubscriptionVersion. :rtype: list[SubscriptionComponentConfiguration] \"\"\" return self._component_configurations @component_configurations.setter def component_configurations(self, component_configurations):", "None self.activated_on = kwargs.get('activated_on', None) self.billing_currency = kwargs.get('billing_currency', None) self.component_configurations = kwargs.get('component_configurations', None)", "None) self.subscription = kwargs.get('subscription', None) self.terminated_on = kwargs.get('terminated_on', None) self.terminating_on = kwargs.get('terminating_on', None)", "failed_on): \"\"\"Sets the failed_on of this SubscriptionVersion. :param failed_on: The failed_on of this", "SubscriptionVersion. :rtype: SubscriptionVersionState \"\"\" return self._state @state.setter def state(self, state): \"\"\"Sets the state", "def state(self): \"\"\"Gets the state of this SubscriptionVersion. :return: The state of this", "= expected_last_period_end @property def failed_on(self): \"\"\"Gets the failed_on of this SubscriptionVersion. :return: The", "linked_space_id): \"\"\"Sets the linked_space_id of this SubscriptionVersion. The linked space id holds the", ":return: The activated_on of this SubscriptionVersion. :rtype: datetime \"\"\" return self._activated_on @activated_on.setter def", "selected_components of this SubscriptionVersion. :type: list[SubscriptionProductComponent] \"\"\" self._selected_components = selected_components @property def state(self):", "None) self.language = kwargs.get('language', None) self.linked_space_id = kwargs.get('linked_space_id', None) self.planned_purge_date = kwargs.get('planned_purge_date', None)", "self.failed_on = kwargs.get('failed_on', None) self.id = kwargs.get('id', None) self.language = kwargs.get('language', None) self.linked_space_id", "= None _subscription = None _terminated_on = None _terminating_on = None _termination_issued_on =", "The linked space id holds the ID of the space to which the", "SubscriptionVersion. :rtype: datetime \"\"\" return self._expected_last_period_end @expected_last_period_end.setter def expected_last_period_end(self, expected_last_period_end): \"\"\"Sets the expected_last_period_end", ":return: The expected_last_period_end of this SubscriptionVersion. 
:rtype: datetime \"\"\" return self._expected_last_period_end @expected_last_period_end.setter def", "result[attr] = value if issubclass(SubscriptionVersion, dict): for key, value in self.items(): result[key] =", "ID identifies the entity uniquely. :param id: The id of this SubscriptionVersion. :type:", "this SubscriptionVersion. :return: The terminated_on of this SubscriptionVersion. :rtype: datetime \"\"\" return self._terminated_on", "billing_currency of this SubscriptionVersion. The subscriber is charged in the billing currency. The", "indicates the version of the entity. The version is incremented whenever the entity", "None _created_on = None _expected_last_period_end = None _failed_on = None _id = None", "def expected_last_period_end(self): \"\"\"Gets the expected_last_period_end of this SubscriptionVersion. The expected last period end", "the ID of the space to which the entity belongs to. :param linked_space_id:", "return pprint.pformat(self.to_dict()) def __repr__(self): return self.to_str() def __eq__(self, other): if not isinstance(other, SubscriptionVersion):", "of this SubscriptionVersion. :type: Subscription \"\"\" self._subscription = subscription @property def terminated_on(self): \"\"\"Gets", "product_version(self, product_version): \"\"\"Sets the product_version of this SubscriptionVersion. :param product_version: The product_version of", "as such the actual date may be different. :param expected_last_period_end: The expected_last_period_end of", "SubscriptionVersion: swagger_types = { 'activated_on': 'datetime', 'billing_currency': 'str', 'component_configurations': 'list[SubscriptionComponentConfiguration]', 'created_on': 'datetime', 'expected_last_period_end':", "@property def planned_termination_date(self): \"\"\"Gets the planned_termination_date of this SubscriptionVersion. :return: The planned_termination_date of", ":type: datetime \"\"\" self._terminated_on = terminated_on @property def terminating_on(self): \"\"\"Gets the terminating_on of", "component_configurations(self, component_configurations): \"\"\"Sets the component_configurations of this SubscriptionVersion. :param component_configurations: The component_configurations of", "SubscriptionVersion. :rtype: datetime \"\"\" return self._activated_on @activated_on.setter def activated_on(self, activated_on): \"\"\"Sets the activated_on", "SubscriptionVersion. :rtype: datetime \"\"\" return self._planned_termination_date @planned_termination_date.setter def planned_termination_date(self, planned_termination_date): \"\"\"Sets the planned_termination_date", "of this SubscriptionVersion. :param activated_on: The activated_on of this SubscriptionVersion. :type: datetime \"\"\"", "datetime \"\"\" return self._terminated_on @terminated_on.setter def terminated_on(self, terminated_on): \"\"\"Sets the terminated_on of this", "'failed_on': 'datetime', 'id': 'int', 'language': 'str', 'linked_space_id': 'int', 'planned_purge_date': 'datetime', 'planned_termination_date': 'datetime', 'product_version':", "= None _terminated_on = None _terminating_on = None _termination_issued_on = None _version =", "product_version: The product_version of this SubscriptionVersion. 
:type: SubscriptionProductVersion \"\"\" self._product_version = product_version @property", ")) elif hasattr(value, \"to_dict\"): result[attr] = value.to_dict() elif isinstance(value, dict): result[attr] = dict(map(", "value if issubclass(SubscriptionVersion, dict): for key, value in self.items(): result[key] = value return", "six.iteritems(self.swagger_types): value = getattr(self, attr) if isinstance(value, list): result[attr] = list(map( lambda x:", "charged in the billing currency. The billing currency has to be one of", "def selected_components(self): \"\"\"Gets the selected_components of this SubscriptionVersion. :return: The selected_components of this", "\"\"\"Sets the subscription of this SubscriptionVersion. :param subscription: The subscription of this SubscriptionVersion.", ":rtype: Subscription \"\"\" return self._subscription @subscription.setter def subscription(self, subscription): \"\"\"Sets the subscription of", "which the entity belongs to. :param linked_space_id: The linked_space_id of this SubscriptionVersion. :type:", "uniquely. :param id: The id of this SubscriptionVersion. :type: int \"\"\" self._id =", "= kwargs.get('version', None) @property def activated_on(self): \"\"\"Gets the activated_on of this SubscriptionVersion. :return:", "\"\"\"Sets the version of this SubscriptionVersion. The version number indicates the version of", "return result def to_str(self): return pprint.pformat(self.to_dict()) def __repr__(self): return self.to_str() def __eq__(self, other):", "SubscriptionVersion. :type: datetime \"\"\" self._created_on = created_on @property def expected_last_period_end(self): \"\"\"Gets the expected_last_period_end", ":rtype: int \"\"\" return self._id @id.setter def id(self, id): \"\"\"Sets the id of", "SubscriptionVersion. :rtype: str \"\"\" return self._language @language.setter def language(self, language): \"\"\"Sets the language", "'list[SubscriptionComponentConfiguration]', 'created_on': 'datetime', 'expected_last_period_end': 'datetime', 'failed_on': 'datetime', 'id': 'int', 'language': 'str', 'linked_space_id': 'int',", "of this SubscriptionVersion. :return: The selected_components of this SubscriptionVersion. :rtype: list[SubscriptionProductComponent] \"\"\" return", "this SubscriptionVersion. :rtype: SubscriptionVersionState \"\"\" return self._state @state.setter def state(self, state): \"\"\"Sets the", "last period end is the date on which the projected end date of", "of this SubscriptionVersion. :type: int \"\"\" self._version = version def to_dict(self): result =", "pprint.pformat(self.to_dict()) def __repr__(self): return self.to_str() def __eq__(self, other): if not isinstance(other, SubscriptionVersion): return", "the date is null the entity is not planned to be removed. :return:", "this SubscriptionVersion. :rtype: int \"\"\" return self._id @id.setter def id(self, id): \"\"\"Sets the", "this SubscriptionVersion. :return: The component_configurations of this SubscriptionVersion. :rtype: list[SubscriptionComponentConfiguration] \"\"\" return self._component_configurations", "this SubscriptionVersion. :type: int \"\"\" self._linked_space_id = linked_space_id @property def planned_purge_date(self): \"\"\"Gets the", "'SubscriptionVersionState', 'subscription': 'Subscription', 'terminated_on': 'datetime', 'terminating_on': 'datetime', 'termination_issued_on': 'datetime', 'version': 'int', } attribute_map", ":param billing_currency: The billing_currency of this SubscriptionVersion. 
:type: str \"\"\" self._billing_currency = billing_currency", "the entity belongs to. :return: The linked_space_id of this SubscriptionVersion. :rtype: int \"\"\"", "of this SubscriptionVersion. :rtype: datetime \"\"\" return self._terminated_on @terminated_on.setter def terminated_on(self, terminated_on): \"\"\"Sets", "'created_on': 'datetime', 'expected_last_period_end': 'datetime', 'failed_on': 'datetime', 'id': 'int', 'language': 'str', 'linked_space_id': 'int', 'planned_purge_date':", "the primary key of the entity. The ID identifies the entity uniquely. :param", "{ 'activated_on': 'datetime', 'billing_currency': 'str', 'component_configurations': 'list[SubscriptionComponentConfiguration]', 'created_on': 'datetime', 'expected_last_period_end': 'datetime', 'failed_on': 'datetime',", "SubscriptionVersion. :return: The terminating_on of this SubscriptionVersion. :rtype: datetime \"\"\" return self._terminating_on @terminating_on.setter", "termination_issued_on of this SubscriptionVersion. :param termination_issued_on: The termination_issued_on of this SubscriptionVersion. :type: datetime", "ID of the space to which the entity belongs to. :param linked_space_id: The", "The id of this SubscriptionVersion. :type: int \"\"\" self._id = id @property def", "the subscription product. :return: The billing_currency of this SubscriptionVersion. :rtype: str \"\"\" return", "item[1].to_dict()) if hasattr(item[1], \"to_dict\") else item, value.items() )) elif isinstance(value, Enum): result[attr] =", "this SubscriptionVersion. :param terminating_on: The terminating_on of this SubscriptionVersion. :type: datetime \"\"\" self._terminating_on", "The ID identifies the entity uniquely. :param id: The id of this SubscriptionVersion.", "_product_version = None _selected_components = None _state = None _subscription = None _terminated_on", "self._termination_issued_on = termination_issued_on @property def version(self): \"\"\"Gets the version of this SubscriptionVersion. The", "isinstance(value, dict): result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], \"to_dict\") else", "The billing_currency of this SubscriptionVersion. :rtype: str \"\"\" return self._billing_currency @billing_currency.setter def billing_currency(self,", "the projected end date of the last period is. This is only a", "@created_on.setter def created_on(self, created_on): \"\"\"Sets the created_on of this SubscriptionVersion. :param created_on: The", "} attribute_map = { 'activated_on': 'activatedOn','billing_currency': 'billingCurrency','component_configurations': 'componentConfigurations','created_on': 'createdOn','expected_last_period_end': 'expectedLastPeriodEnd','failed_on': 'failedOn','id': 'id','language': 'language','linked_space_id':", "in six.iteritems(self.swagger_types): value = getattr(self, attr) if isinstance(value, list): result[attr] = list(map( lambda", ":return: The version of this SubscriptionVersion. :rtype: int \"\"\" return self._version @version.setter def", "planned to be removed. :return: The planned_purge_date of this SubscriptionVersion. :rtype: datetime \"\"\"", "return self.to_str() def __eq__(self, other): if not isinstance(other, SubscriptionVersion): return False return self.__dict__", "permanently removed. When the date is null the entity is not planned to", "self._subscription = subscription @property def terminated_on(self): \"\"\"Gets the terminated_on of this SubscriptionVersion. :return:", "of this SubscriptionVersion. The ID is the primary key of the entity. 
The", "this SubscriptionVersion. :type: datetime \"\"\" self._terminating_on = terminating_on @property def termination_issued_on(self): \"\"\"Gets the", "\"to_dict\") else x, value )) elif hasattr(value, \"to_dict\"): result[attr] = value.to_dict() elif isinstance(value,", "int \"\"\" self._linked_space_id = linked_space_id @property def planned_purge_date(self): \"\"\"Gets the planned_purge_date of this", "\"\"\" return self._id @id.setter def id(self, id): \"\"\"Sets the id of this SubscriptionVersion.", "None _subscription = None _terminated_on = None _terminating_on = None _termination_issued_on = None", "None _language = None _linked_space_id = None _planned_purge_date = None _planned_termination_date = None", "\"\"\"Gets the expected_last_period_end of this SubscriptionVersion. The expected last period end is the", "str \"\"\" return self._billing_currency @billing_currency.setter def billing_currency(self, billing_currency): \"\"\"Sets the billing_currency of this", "= None _state = None _subscription = None _terminated_on = None _terminating_on =", "for attr, _ in six.iteritems(self.swagger_types): value = getattr(self, attr) if isinstance(value, list): result[attr]", "@id.setter def id(self, id): \"\"\"Sets the id of this SubscriptionVersion. The ID is", "space to which the entity belongs to. :return: The linked_space_id of this SubscriptionVersion.", ":type: datetime \"\"\" self._created_on = created_on @property def expected_last_period_end(self): \"\"\"Gets the expected_last_period_end of", "kwargs.get('activated_on', None) self.billing_currency = kwargs.get('billing_currency', None) self.component_configurations = kwargs.get('component_configurations', None) self.created_on = kwargs.get('created_on',", "product_version of this SubscriptionVersion. :param product_version: The product_version of this SubscriptionVersion. :type: SubscriptionProductVersion", "this SubscriptionVersion. :return: The activated_on of this SubscriptionVersion. :rtype: datetime \"\"\" return self._activated_on", "SubscriptionVersion. :return: The selected_components of this SubscriptionVersion. :rtype: list[SubscriptionProductComponent] \"\"\" return self._selected_components @selected_components.setter", "id of this SubscriptionVersion. The ID is the primary key of the entity.", "of the entity. The version is incremented whenever the entity is changed. :param", "value )) elif hasattr(value, \"to_dict\"): result[attr] = value.to_dict() elif isinstance(value, dict): result[attr] =", "selected_components): \"\"\"Sets the selected_components of this SubscriptionVersion. :param selected_components: The selected_components of this", "only a projection and as such the actual date may be different. :return:", "kwargs.get('language', None) self.linked_space_id = kwargs.get('linked_space_id', None) self.planned_purge_date = kwargs.get('planned_purge_date', None) self.planned_termination_date = kwargs.get('planned_termination_date',", "to. :param linked_space_id: The linked_space_id of this SubscriptionVersion. :type: int \"\"\" self._linked_space_id =", "= kwargs.get('subscription', None) self.terminated_on = kwargs.get('terminated_on', None) self.terminating_on = kwargs.get('terminating_on', None) self.termination_issued_on =", "None _billing_currency = None _component_configurations = None _created_on = None _expected_last_period_end = None", "of this SubscriptionVersion. :return: The language of this SubscriptionVersion. :rtype: str \"\"\" return", "of the space to which the entity belongs to. 
:param linked_space_id: The linked_space_id", ":return: The subscription of this SubscriptionVersion. :rtype: Subscription \"\"\" return self._subscription @subscription.setter def", "str \"\"\" return self._language @language.setter def language(self, language): \"\"\"Sets the language of this", ":rtype: datetime \"\"\" return self._failed_on @failed_on.setter def failed_on(self, failed_on): \"\"\"Sets the failed_on of", "None) self.selected_components = kwargs.get('selected_components', None) self.state = kwargs.get('state', None) self.subscription = kwargs.get('subscription', None)", "SubscriptionVersion. :rtype: list[SubscriptionComponentConfiguration] \"\"\" return self._component_configurations @component_configurations.setter def component_configurations(self, component_configurations): \"\"\"Sets the component_configurations", "version of this SubscriptionVersion. :rtype: int \"\"\" return self._version @version.setter def version(self, version):", "last period is. This is only a projection and as such the actual", "\"\"\" self._planned_purge_date = planned_purge_date @property def planned_termination_date(self): \"\"\"Gets the planned_termination_date of this SubscriptionVersion.", "datetime \"\"\" self._terminating_on = terminating_on @property def termination_issued_on(self): \"\"\"Gets the termination_issued_on of this", "self._component_configurations @component_configurations.setter def component_configurations(self, component_configurations): \"\"\"Sets the component_configurations of this SubscriptionVersion. :param component_configurations:", "when the entity is permanently removed. When the date is null the entity", "kwargs.get('id', None) self.language = kwargs.get('language', None) self.linked_space_id = kwargs.get('linked_space_id', None) self.planned_purge_date = kwargs.get('planned_purge_date',", "= kwargs.get('component_configurations', None) self.created_on = kwargs.get('created_on', None) self.expected_last_period_end = kwargs.get('expected_last_period_end', None) self.failed_on =", "this SubscriptionVersion. :rtype: int \"\"\" return self._linked_space_id @linked_space_id.setter def linked_space_id(self, linked_space_id): \"\"\"Sets the", "this SubscriptionVersion. :rtype: datetime \"\"\" return self._terminated_on @terminated_on.setter def terminated_on(self, terminated_on): \"\"\"Sets the", "of this SubscriptionVersion. :param terminating_on: The terminating_on of this SubscriptionVersion. :type: datetime \"\"\"", "value.to_dict() elif isinstance(value, dict): result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1],", "_activated_on = None _billing_currency = None _component_configurations = None _created_on = None _expected_last_period_end", "\"\"\"Gets the activated_on of this SubscriptionVersion. :return: The activated_on of this SubscriptionVersion. :rtype:", "holds the ID of the space to which the entity belongs to. :param", "such the actual date may be different. :return: The expected_last_period_end of this SubscriptionVersion.", "\"\"\"Gets the subscription of this SubscriptionVersion. :return: The subscription of this SubscriptionVersion. :rtype:", "terminated_on): \"\"\"Sets the terminated_on of this SubscriptionVersion. 
:param terminated_on: The terminated_on of this", "self.terminated_on = kwargs.get('terminated_on', None) self.terminating_on = kwargs.get('terminating_on', None) self.termination_issued_on = kwargs.get('termination_issued_on', None) self.version", "None) self.termination_issued_on = kwargs.get('termination_issued_on', None) self.version = kwargs.get('version', None) @property def activated_on(self): \"\"\"Gets", "terminated_on @property def terminating_on(self): \"\"\"Gets the terminating_on of this SubscriptionVersion. :return: The terminating_on", "entity. The ID identifies the entity uniquely. :return: The id of this SubscriptionVersion.", "id(self, id): \"\"\"Sets the id of this SubscriptionVersion. The ID is the primary", "\"\"\" self._failed_on = failed_on @property def id(self): \"\"\"Gets the id of this SubscriptionVersion.", "planned_purge_date @property def planned_termination_date(self): \"\"\"Gets the planned_termination_date of this SubscriptionVersion. :return: The planned_termination_date", "\"\"\" self._state = state @property def subscription(self): \"\"\"Gets the subscription of this SubscriptionVersion.", "this SubscriptionVersion. :param termination_issued_on: The termination_issued_on of this SubscriptionVersion. :type: datetime \"\"\" self._termination_issued_on", "activated_on of this SubscriptionVersion. :return: The activated_on of this SubscriptionVersion. :rtype: datetime \"\"\"", "@product_version.setter def product_version(self, product_version): \"\"\"Sets the product_version of this SubscriptionVersion. :param product_version: The", "= billing_currency @property def component_configurations(self): \"\"\"Gets the component_configurations of this SubscriptionVersion. :return: The", "return False return self.__dict__ == other.__dict__ def __ne__(self, other): return not self ==", "terminated_on of this SubscriptionVersion. :type: datetime \"\"\" self._terminated_on = terminated_on @property def terminating_on(self):", "= product_version @property def selected_components(self): \"\"\"Gets the selected_components of this SubscriptionVersion. :return: The", "SubscriptionVersion. :rtype: datetime \"\"\" return self._terminated_on @terminated_on.setter def terminated_on(self, terminated_on): \"\"\"Sets the terminated_on", "SubscriptionVersion. :rtype: datetime \"\"\" return self._failed_on @failed_on.setter def failed_on(self, failed_on): \"\"\"Sets the failed_on", "be one of the enabled currencies on the subscription product. :param billing_currency: The", "the entity. The ID identifies the entity uniquely. :param id: The id of", "of this SubscriptionVersion. :rtype: str \"\"\" return self._language @language.setter def language(self, language): \"\"\"Sets", ":type: datetime \"\"\" self._termination_issued_on = termination_issued_on @property def version(self): \"\"\"Gets the version of", "of the entity. The ID identifies the entity uniquely. :return: The id of", "self.selected_components = kwargs.get('selected_components', None) self.state = kwargs.get('state', None) self.subscription = kwargs.get('subscription', None) self.terminated_on", ":return: The state of this SubscriptionVersion. :rtype: SubscriptionVersionState \"\"\" return self._state @state.setter def", "this SubscriptionVersion. :rtype: list[SubscriptionComponentConfiguration] \"\"\" return self._component_configurations @component_configurations.setter def component_configurations(self, component_configurations): \"\"\"Sets the", "planned_termination_date of this SubscriptionVersion. 
:return: The planned_termination_date of this SubscriptionVersion. :rtype: datetime \"\"\"", "self._billing_currency @billing_currency.setter def billing_currency(self, billing_currency): \"\"\"Sets the billing_currency of this SubscriptionVersion. The subscriber", "value = getattr(self, attr) if isinstance(value, list): result[attr] = list(map( lambda x: x.to_dict()", "\"\"\"Sets the component_configurations of this SubscriptionVersion. :param component_configurations: The component_configurations of this SubscriptionVersion.", "self.to_str() def __eq__(self, other): if not isinstance(other, SubscriptionVersion): return False return self.__dict__ ==", "subscription product. :return: The billing_currency of this SubscriptionVersion. :rtype: str \"\"\" return self._billing_currency", "'expectedLastPeriodEnd','failed_on': 'failedOn','id': 'id','language': 'language','linked_space_id': 'linkedSpaceId','planned_purge_date': 'plannedPurgeDate','planned_termination_date': 'plannedTerminationDate','product_version': 'productVersion','selected_components': 'selectedComponents','state': 'state','subscription': 'subscription','terminated_on': 'terminatedOn','terminating_on': 'terminatingOn','termination_issued_on':", "= list(map( lambda x: x.to_dict() if hasattr(x, \"to_dict\") else x, value )) elif", "termination_issued_on): \"\"\"Sets the termination_issued_on of this SubscriptionVersion. :param termination_issued_on: The termination_issued_on of this", "'terminatingOn','termination_issued_on': 'terminationIssuedOn','version': 'version', } _activated_on = None _billing_currency = None _component_configurations = None", "removed. :return: The planned_purge_date of this SubscriptionVersion. :rtype: datetime \"\"\" return self._planned_purge_date @planned_purge_date.setter", "SubscriptionVersion. The expected last period end is the date on which the projected", "for key, value in self.items(): result[key] = value return result def to_str(self): return", "= kwargs.get('id', None) self.language = kwargs.get('language', None) self.linked_space_id = kwargs.get('linked_space_id', None) self.planned_purge_date =", "= component_configurations @property def created_on(self): \"\"\"Gets the created_on of this SubscriptionVersion. :return: The", "this SubscriptionVersion. :type: datetime \"\"\" self._termination_issued_on = termination_issued_on @property def version(self): \"\"\"Gets the", "self._id @id.setter def id(self, id): \"\"\"Sets the id of this SubscriptionVersion. The ID", "billing_currency(self, billing_currency): \"\"\"Sets the billing_currency of this SubscriptionVersion. The subscriber is charged in", "product_version @property def selected_components(self): \"\"\"Gets the selected_components of this SubscriptionVersion. :return: The selected_components", "The billing_currency of this SubscriptionVersion. :type: str \"\"\" self._billing_currency = billing_currency @property def", ":type: int \"\"\" self._linked_space_id = linked_space_id @property def planned_purge_date(self): \"\"\"Gets the planned_purge_date of", "if hasattr(x, \"to_dict\") else x, value )) elif hasattr(value, \"to_dict\"): result[attr] = value.to_dict()", "of this SubscriptionVersion. 
:rtype: datetime \"\"\" return self._activated_on @activated_on.setter def activated_on(self, activated_on): \"\"\"Sets", "= value.value else: result[attr] = value if issubclass(SubscriptionVersion, dict): for key, value in", "False return self.__dict__ == other.__dict__ def __ne__(self, other): return not self == other", "this SubscriptionVersion. :param failed_on: The failed_on of this SubscriptionVersion. :type: datetime \"\"\" self._failed_on", "'selected_components': 'list[SubscriptionProductComponent]', 'state': 'SubscriptionVersionState', 'subscription': 'Subscription', 'terminated_on': 'datetime', 'terminating_on': 'datetime', 'termination_issued_on': 'datetime', 'version':", "SubscriptionVersion. :type: datetime \"\"\" self._expected_last_period_end = expected_last_period_end @property def failed_on(self): \"\"\"Gets the failed_on", "'createdOn','expected_last_period_end': 'expectedLastPeriodEnd','failed_on': 'failedOn','id': 'id','language': 'language','linked_space_id': 'linkedSpaceId','planned_purge_date': 'plannedPurgeDate','planned_termination_date': 'plannedTerminationDate','product_version': 'productVersion','selected_components': 'selectedComponents','state': 'state','subscription': 'subscription','terminated_on': 'terminatedOn','terminating_on':" ]
[ "in range(valor_inicial, valor_final + 1): digitos_str = str(numero) check = True # essa", "número é par, adiciona ele à lista saida.append(numero) return saida inicio = 0", "saida = [] # usando (valor final + 1) pois a função range", "é par check = False break # após o primeiro dígito ímpar, sai", "é par, adiciona ele à lista saida.append(numero) return saida inicio = 0 fim", "# usando (valor final + 1) pois a função range não inclui o", "int(digito) % 2 != 0: # usando operador modulo pra determinar se o", "int = 0, valor_final: int = 1000): saida = [] # usando (valor", "são todos pares (a e b inclusos)\"\"\" def digitos_pares(valor_inicial: int = 0, valor_final:", "break # após o primeiro dígito ímpar, sai do for loop if check:", "ímpar, sai do for loop if check: # se o número é par,", "!= 0: # usando operador modulo pra determinar se o número é par", "os digitos forem positivos for digito in digitos_str: if int(digito) % 2 !=", "inteiros cujos dígitos são todos pares (a e b inclusos)\"\"\" def digitos_pares(valor_inicial: int", "digitos_pares(valor_inicial: int = 0, valor_final: int = 1000): saida = [] # usando", "[] # usando (valor final + 1) pois a função range não inclui", "valor final por definição for numero in range(valor_inicial, valor_final + 1): digitos_str =", "essa variável se manterá como True se todos os digitos forem positivos for", "True # essa variável se manterá como True se todos os digitos forem", "inclui o valor final por definição for numero in range(valor_inicial, valor_final + 1):", "for numero in range(valor_inicial, valor_final + 1): digitos_str = str(numero) check = True", "+ 1): digitos_str = str(numero) check = True # essa variável se manterá", "int = 1000): saida = [] # usando (valor final + 1) pois", "sai do for loop if check: # se o número é par, adiciona", "se manterá como True se todos os digitos forem positivos for digito in", "não inclui o valor final por definição for numero in range(valor_inicial, valor_final +", "# essa variável se manterá como True se todos os digitos forem positivos", "= True # essa variável se manterá como True se todos os digitos", "0, valor_final: int = 1000): saida = [] # usando (valor final +", "digitos_str: if int(digito) % 2 != 0: # usando operador modulo pra determinar", "Escreva uma função que receba dois números inteiros (a e b), e retorne", "o valor final por definição for numero in range(valor_inicial, valor_final + 1): digitos_str", "variável se manterá como True se todos os digitos forem positivos for digito", "0: # usando operador modulo pra determinar se o número é par check", "positivos for digito in digitos_str: if int(digito) % 2 != 0: # usando", "for digito in digitos_str: if int(digito) % 2 != 0: # usando operador", "# após o primeiro dígito ímpar, sai do for loop if check: #", "cujos dígitos são todos pares (a e b inclusos)\"\"\" def digitos_pares(valor_inicial: int =", "todos pares (a e b inclusos)\"\"\" def digitos_pares(valor_inicial: int = 0, valor_final: int", "o número é par, adiciona ele à lista saida.append(numero) return saida inicio =", "+ 1) pois a função range não inclui o valor final por definição", "após o primeiro dígito ímpar, sai do for loop if check: # se", "(valor final + 1) pois a função range não inclui o valor final", "1000): saida = [] # usando (valor final + 1) pois a função", "primeiro dígito ímpar, sai do for loop if check: # se o número", "final por definição for numero in range(valor_inicial, valor_final + 1): digitos_str = str(numero)", "pra determinar se o número é par check = 
False break # após", "o número é par check = False break # após o primeiro dígito", "True se todos os digitos forem positivos for digito in digitos_str: if int(digito)", "% 2 != 0: # usando operador modulo pra determinar se o número", "= False break # após o primeiro dígito ímpar, sai do for loop", "b), e retorne uma lista contendo todos os números inteiros cujos dígitos são", "# se o número é par, adiciona ele à lista saida.append(numero) return saida", "(a e b), e retorne uma lista contendo todos os números inteiros cujos", "usando (valor final + 1) pois a função range não inclui o valor", "que receba dois números inteiros (a e b), e retorne uma lista contendo", "lista contendo todos os números inteiros cujos dígitos são todos pares (a e", "e b inclusos)\"\"\" def digitos_pares(valor_inicial: int = 0, valor_final: int = 1000): saida", "8: Escreva uma função que receba dois números inteiros (a e b), e", "números inteiros cujos dígitos são todos pares (a e b inclusos)\"\"\" def digitos_pares(valor_inicial:", "uma lista contendo todos os números inteiros cujos dígitos são todos pares (a", "digito in digitos_str: if int(digito) % 2 != 0: # usando operador modulo", "def digitos_pares(valor_inicial: int = 0, valor_final: int = 1000): saida = [] #", "como True se todos os digitos forem positivos for digito in digitos_str: if", "inclusos)\"\"\" def digitos_pares(valor_inicial: int = 0, valor_final: int = 1000): saida = []", "dígito ímpar, sai do for loop if check: # se o número é", "forem positivos for digito in digitos_str: if int(digito) % 2 != 0: #", "números inteiros (a e b), e retorne uma lista contendo todos os números", "todos os números inteiros cujos dígitos são todos pares (a e b inclusos)\"\"\"", "função range não inclui o valor final por definição for numero in range(valor_inicial,", "se o número é par check = False break # após o primeiro", "valor_final + 1): digitos_str = str(numero) check = True # essa variável se", "digitos forem positivos for digito in digitos_str: if int(digito) % 2 != 0:", "inteiros (a e b), e retorne uma lista contendo todos os números inteiros", "2 != 0: # usando operador modulo pra determinar se o número é", "False break # após o primeiro dígito ímpar, sai do for loop if", "= 0, valor_final: int = 1000): saida = [] # usando (valor final", "número é par check = False break # após o primeiro dígito ímpar,", "todos os digitos forem positivos for digito in digitos_str: if int(digito) % 2", "# usando operador modulo pra determinar se o número é par check =", "numero in range(valor_inicial, valor_final + 1): digitos_str = str(numero) check = True #", "receba dois números inteiros (a e b), e retorne uma lista contendo todos", "o primeiro dígito ímpar, sai do for loop if check: # se o", "for loop if check: # se o número é par, adiciona ele à", "(a e b inclusos)\"\"\" def digitos_pares(valor_inicial: int = 0, valor_final: int = 1000):", "str(numero) check = True # essa variável se manterá como True se todos", "par check = False break # após o primeiro dígito ímpar, sai do", "adiciona ele à lista saida.append(numero) return saida inicio = 0 fim = 30", "função que receba dois números inteiros (a e b), e retorne uma lista", "= [] # usando (valor final + 1) pois a função range não", "pares (a e b inclusos)\"\"\" def digitos_pares(valor_inicial: int = 0, valor_final: int =", "definição for numero in range(valor_inicial, valor_final + 1): digitos_str = str(numero) check =", "valor_final: int = 1000): saida = [] # usando (valor final + 1)", "manterá como True se todos os 
digitos forem positivos for digito in digitos_str:", "check: # se o número é par, adiciona ele à lista saida.append(numero) return", "se todos os digitos forem positivos for digito in digitos_str: if int(digito) %", "os números inteiros cujos dígitos são todos pares (a e b inclusos)\"\"\" def", "if check: # se o número é par, adiciona ele à lista saida.append(numero)", "in digitos_str: if int(digito) % 2 != 0: # usando operador modulo pra", "final + 1) pois a função range não inclui o valor final por", "por definição for numero in range(valor_inicial, valor_final + 1): digitos_str = str(numero) check", "dois números inteiros (a e b), e retorne uma lista contendo todos os", "se o número é par, adiciona ele à lista saida.append(numero) return saida inicio", "1) pois a função range não inclui o valor final por definição for", "range(valor_inicial, valor_final + 1): digitos_str = str(numero) check = True # essa variável", "par, adiciona ele à lista saida.append(numero) return saida inicio = 0 fim =", "range não inclui o valor final por definição for numero in range(valor_inicial, valor_final", "do for loop if check: # se o número é par, adiciona ele", "ele à lista saida.append(numero) return saida inicio = 0 fim = 30 print(digitos_pares(inicio,", "operador modulo pra determinar se o número é par check = False break", "if int(digito) % 2 != 0: # usando operador modulo pra determinar se", "modulo pra determinar se o número é par check = False break #", "usando operador modulo pra determinar se o número é par check = False", "loop if check: # se o número é par, adiciona ele à lista", "determinar se o número é par check = False break # após o", "digitos_str = str(numero) check = True # essa variável se manterá como True", "check = False break # após o primeiro dígito ímpar, sai do for", "a função range não inclui o valor final por definição for numero in", "e b), e retorne uma lista contendo todos os números inteiros cujos dígitos", "= 1000): saida = [] # usando (valor final + 1) pois a", "check = True # essa variável se manterá como True se todos os", "1): digitos_str = str(numero) check = True # essa variável se manterá como", "dígitos são todos pares (a e b inclusos)\"\"\" def digitos_pares(valor_inicial: int = 0,", "\"\"\"Exercício 8: Escreva uma função que receba dois números inteiros (a e b),", "contendo todos os números inteiros cujos dígitos são todos pares (a e b", "b inclusos)\"\"\" def digitos_pares(valor_inicial: int = 0, valor_final: int = 1000): saida =", "= str(numero) check = True # essa variável se manterá como True se", "à lista saida.append(numero) return saida inicio = 0 fim = 30 print(digitos_pares(inicio, fim))", "uma função que receba dois números inteiros (a e b), e retorne uma", "pois a função range não inclui o valor final por definição for numero", "e retorne uma lista contendo todos os números inteiros cujos dígitos são todos", "retorne uma lista contendo todos os números inteiros cujos dígitos são todos pares" ]
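# The same digit test can be written as a comprehension with all(); a compact
# equivalent sketch (digitos_pares_compact is a hypothetical name, not from the
# original file):
def digitos_pares_compact(a: int = 0, b: int = 1000):
    return [n for n in range(a, b + 1) if all(int(d) % 2 == 0 for d in str(n))]


print(digitos_pares_compact(0, 30))  # [0, 2, 4, 6, 8, 20, 22, 24, 26, 28]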
[ "def get_key(row): key_parts = [] for col in row.keys(): if col == 'zone':", "CSV instead of letting pandas do it, due to composite header # we", "key_parts.append(str(row[col])) start = time.time() infilename = '/tmp/test.csv' df = pandas.read_csv(infilename) df['key'] = df.apply(lambda", "Return a pipe-delimited combination of value from every column up through zone def", "time import pandas # Return a pipe-delimited combination of value from every column", "csv_data = pivot.to_csv(None, index=True, header=False) outfile.write(header + '\\n' + csv_data) print('Elapsed: {0:.2f}'.format(time.time() -", "from the output of zonal stats CLI \"\"\" import time import pandas #", "through zone def get_key(row): key_parts = [] for col in row.keys(): if col", "['zone'] + pivot.columns.levels[1].tolist()) csv_data = pivot.to_csv(None, index=True, header=False) outfile.write(header + '\\n' + csv_data)", "table from the output of zonal stats CLI \"\"\" import time import pandas", "infilename = '/tmp/test.csv' df = pandas.read_csv(infilename) df['key'] = df.apply(lambda x: get_key(x), axis=1) sub_df", "in row.keys(): if col == 'zone': return '|'.join(key_parts) key_parts.append(str(row[col])) start = time.time() infilename", "of value from every column up through zone def get_key(row): key_parts = []", "creating a pivot table from the output of zonal stats CLI \"\"\" import", "instead of letting pandas do it, due to composite header # we don't", "\"\"\" import time import pandas # Return a pipe-delimited combination of value from", "to manually create the CSV instead of letting pandas do it, due to", "x: get_key(x), axis=1) sub_df = df[['key', 'zone', 'mean']] pivot = sub_df.pivot('zone', columns='key') #", "letting pandas do it, due to composite header # we don't want with", "columns='key') # Need to manually create the CSV instead of letting pandas do", "col == 'zone': return '|'.join(key_parts) key_parts.append(str(row[col])) start = time.time() infilename = '/tmp/test.csv' df", "key_parts = [] for col in row.keys(): if col == 'zone': return '|'.join(key_parts)", "== 'zone': return '|'.join(key_parts) key_parts.append(str(row[col])) start = time.time() infilename = '/tmp/test.csv' df =", "sub_df.pivot('zone', columns='key') # Need to manually create the CSV instead of letting pandas", "col in row.keys(): if col == 'zone': return '|'.join(key_parts) key_parts.append(str(row[col])) start = time.time()", "combination of value from every column up through zone def get_key(row): key_parts =", "# we don't want with open('/tmp/pivot.csv', 'w') as outfile: header = ','.join( ['zone']", "zone def get_key(row): key_parts = [] for col in row.keys(): if col ==", "'|'.join(key_parts) key_parts.append(str(row[col])) start = time.time() infilename = '/tmp/test.csv' df = pandas.read_csv(infilename) df['key'] =", "manually create the CSV instead of letting pandas do it, due to composite", "= ','.join( ['zone'] + pivot.columns.levels[1].tolist()) csv_data = pivot.to_csv(None, index=True, header=False) outfile.write(header + '\\n'", "pivot = sub_df.pivot('zone', columns='key') # Need to manually create the CSV instead of", "to composite header # we don't want with open('/tmp/pivot.csv', 'w') as outfile: header", "pandas do it, due to composite header # we don't want with open('/tmp/pivot.csv',", "start = time.time() infilename = '/tmp/test.csv' df = pandas.read_csv(infilename) df['key'] = df.apply(lambda x:", "a pipe-delimited combination of value from every column up through zone def get_key(row):", "of zonal 
stats CLI \"\"\" import time import pandas # Return a pipe-delimited", "axis=1) sub_df = df[['key', 'zone', 'mean']] pivot = sub_df.pivot('zone', columns='key') # Need to", "column up through zone def get_key(row): key_parts = [] for col in row.keys():", "row.keys(): if col == 'zone': return '|'.join(key_parts) key_parts.append(str(row[col])) start = time.time() infilename =", "create the CSV instead of letting pandas do it, due to composite header", "'mean']] pivot = sub_df.pivot('zone', columns='key') # Need to manually create the CSV instead", "get_key(row): key_parts = [] for col in row.keys(): if col == 'zone': return", "zonal stats CLI \"\"\" import time import pandas # Return a pipe-delimited combination", "if col == 'zone': return '|'.join(key_parts) key_parts.append(str(row[col])) start = time.time() infilename = '/tmp/test.csv'", "of letting pandas do it, due to composite header # we don't want", "+ pivot.columns.levels[1].tolist()) csv_data = pivot.to_csv(None, index=True, header=False) outfile.write(header + '\\n' + csv_data) print('Elapsed:", "time.time() infilename = '/tmp/test.csv' df = pandas.read_csv(infilename) df['key'] = df.apply(lambda x: get_key(x), axis=1)", "= '/tmp/test.csv' df = pandas.read_csv(infilename) df['key'] = df.apply(lambda x: get_key(x), axis=1) sub_df =", "with open('/tmp/pivot.csv', 'w') as outfile: header = ','.join( ['zone'] + pivot.columns.levels[1].tolist()) csv_data =", "'w') as outfile: header = ','.join( ['zone'] + pivot.columns.levels[1].tolist()) csv_data = pivot.to_csv(None, index=True,", "= sub_df.pivot('zone', columns='key') # Need to manually create the CSV instead of letting", "do it, due to composite header # we don't want with open('/tmp/pivot.csv', 'w')", "= df.apply(lambda x: get_key(x), axis=1) sub_df = df[['key', 'zone', 'mean']] pivot = sub_df.pivot('zone',", "it, due to composite header # we don't want with open('/tmp/pivot.csv', 'w') as", "\"\"\" Example to demonstrate creating a pivot table from the output of zonal", "composite header # we don't want with open('/tmp/pivot.csv', 'w') as outfile: header =", "'/tmp/test.csv' df = pandas.read_csv(infilename) df['key'] = df.apply(lambda x: get_key(x), axis=1) sub_df = df[['key',", "a pivot table from the output of zonal stats CLI \"\"\" import time", "df['key'] = df.apply(lambda x: get_key(x), axis=1) sub_df = df[['key', 'zone', 'mean']] pivot =", "for col in row.keys(): if col == 'zone': return '|'.join(key_parts) key_parts.append(str(row[col])) start =", "'zone': return '|'.join(key_parts) key_parts.append(str(row[col])) start = time.time() infilename = '/tmp/test.csv' df = pandas.read_csv(infilename)", "= time.time() infilename = '/tmp/test.csv' df = pandas.read_csv(infilename) df['key'] = df.apply(lambda x: get_key(x),", "df.apply(lambda x: get_key(x), axis=1) sub_df = df[['key', 'zone', 'mean']] pivot = sub_df.pivot('zone', columns='key')", "header # we don't want with open('/tmp/pivot.csv', 'w') as outfile: header = ','.join(", "stats CLI \"\"\" import time import pandas # Return a pipe-delimited combination of", "from every column up through zone def get_key(row): key_parts = [] for col", "pandas.read_csv(infilename) df['key'] = df.apply(lambda x: get_key(x), axis=1) sub_df = df[['key', 'zone', 'mean']] pivot", "pivot.columns.levels[1].tolist()) csv_data = pivot.to_csv(None, index=True, header=False) outfile.write(header + '\\n' + csv_data) print('Elapsed: {0:.2f}'.format(time.time()", "the output of zonal stats CLI \"\"\" import time import pandas # Return", "= 
pandas.read_csv(infilename) df['key'] = df.apply(lambda x: get_key(x), axis=1) sub_df = df[['key', 'zone', 'mean']]", "CLI \"\"\" import time import pandas # Return a pipe-delimited combination of value", "pivot table from the output of zonal stats CLI \"\"\" import time import", "[] for col in row.keys(): if col == 'zone': return '|'.join(key_parts) key_parts.append(str(row[col])) start", "up through zone def get_key(row): key_parts = [] for col in row.keys(): if", "to demonstrate creating a pivot table from the output of zonal stats CLI", "demonstrate creating a pivot table from the output of zonal stats CLI \"\"\"", "# Return a pipe-delimited combination of value from every column up through zone", "get_key(x), axis=1) sub_df = df[['key', 'zone', 'mean']] pivot = sub_df.pivot('zone', columns='key') # Need", "return '|'.join(key_parts) key_parts.append(str(row[col])) start = time.time() infilename = '/tmp/test.csv' df = pandas.read_csv(infilename) df['key']", "every column up through zone def get_key(row): key_parts = [] for col in", "','.join( ['zone'] + pivot.columns.levels[1].tolist()) csv_data = pivot.to_csv(None, index=True, header=False) outfile.write(header + '\\n' +", "due to composite header # we don't want with open('/tmp/pivot.csv', 'w') as outfile:", "'zone', 'mean']] pivot = sub_df.pivot('zone', columns='key') # Need to manually create the CSV", "pandas # Return a pipe-delimited combination of value from every column up through", "as outfile: header = ','.join( ['zone'] + pivot.columns.levels[1].tolist()) csv_data = pivot.to_csv(None, index=True, header=False)", "Example to demonstrate creating a pivot table from the output of zonal stats", "df[['key', 'zone', 'mean']] pivot = sub_df.pivot('zone', columns='key') # Need to manually create the", "output of zonal stats CLI \"\"\" import time import pandas # Return a", "sub_df = df[['key', 'zone', 'mean']] pivot = sub_df.pivot('zone', columns='key') # Need to manually", "pipe-delimited combination of value from every column up through zone def get_key(row): key_parts", "value from every column up through zone def get_key(row): key_parts = [] for", "want with open('/tmp/pivot.csv', 'w') as outfile: header = ','.join( ['zone'] + pivot.columns.levels[1].tolist()) csv_data", "Need to manually create the CSV instead of letting pandas do it, due", "open('/tmp/pivot.csv', 'w') as outfile: header = ','.join( ['zone'] + pivot.columns.levels[1].tolist()) csv_data = pivot.to_csv(None,", "the CSV instead of letting pandas do it, due to composite header #", "header = ','.join( ['zone'] + pivot.columns.levels[1].tolist()) csv_data = pivot.to_csv(None, index=True, header=False) outfile.write(header +", "import pandas # Return a pipe-delimited combination of value from every column up", "outfile: header = ','.join( ['zone'] + pivot.columns.levels[1].tolist()) csv_data = pivot.to_csv(None, index=True, header=False) outfile.write(header", "= [] for col in row.keys(): if col == 'zone': return '|'.join(key_parts) key_parts.append(str(row[col]))", "= df[['key', 'zone', 'mean']] pivot = sub_df.pivot('zone', columns='key') # Need to manually create", "don't want with open('/tmp/pivot.csv', 'w') as outfile: header = ','.join( ['zone'] + pivot.columns.levels[1].tolist())", "= pivot.to_csv(None, index=True, header=False) outfile.write(header + '\\n' + csv_data) print('Elapsed: {0:.2f}'.format(time.time() - start))", "import time import pandas # Return a pipe-delimited combination of value from every", "we don't want with open('/tmp/pivot.csv', 'w') as 
outfile: header = ','.join( ['zone'] +", "df = pandas.read_csv(infilename) df['key'] = df.apply(lambda x: get_key(x), axis=1) sub_df = df[['key', 'zone',", "# Need to manually create the CSV instead of letting pandas do it," ]
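# For context, a tiny frame the script above could pivot. The 'year' and
# 'month' columns are hypothetical stand-ins for whatever precedes 'zone' in
# the zonal stats output; only 'zone' and 'mean' are required by the code.
import pandas

df = pandas.DataFrame({
    'year':  [2020, 2020, 2020, 2020],
    'month': [1, 1, 2, 2],
    'zone':  ['A', 'B', 'A', 'B'],
    'mean':  [1.5, 2.5, 3.5, 4.5],
})
df['key'] = df['year'].astype(str) + '|' + df['month'].astype(str)
pivot = df[['key', 'zone', 'mean']].pivot(index='zone', columns='key')
print(pivot)
# columns come out as a MultiIndex: ('mean', '2020|1'), ('mean', '2020|2'),
# which is why the script above builds its own single-line CSV header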
[ "def test_sample_input(caplog): # caplog.set_level(logging.INFO) assert evaluate_expression(\"1 + 2 * 3 + 4 *", "+ 6) + 2 + 4 * 2\") == 13632 def test_sample_input_with_advanced_priorities(caplog): #", "2 + 4 * 2\", use_advanced_precedence=True) == 23340 def test_big_input(caplog): # caplog.set_level(logging.INFO) with", "= os.path.abspath(os.path.dirname(__file__)) sample_input = None def test_sample_input(caplog): # caplog.set_level(logging.INFO) assert evaluate_expression(\"1 + 2", "* 3) + (4 * (5 + 6))\", use_advanced_precedence=True) == 51 assert evaluate_expression(\"2", "3 + 9 + 3 * 4 * 3)\", use_advanced_precedence=True) == 1445 assert", "* 3 + 4 * 5 + 6\") == 71 assert evaluate_expression(\"1 +", "+ (4 * (5 + 6))\", use_advanced_precedence=True) == 51 assert evaluate_expression(\"2 * 3", "None def test_sample_input(caplog): # caplog.set_level(logging.INFO) assert evaluate_expression(\"1 + 2 * 3 + 4", "+ 6))\", use_advanced_precedence=True) == 51 assert evaluate_expression(\"2 * 3 + (4 * 5)\",", "+ 4 * 2\") == 13632 def test_sample_input_with_advanced_priorities(caplog): # caplog.set_level(logging.INFO) assert evaluate_expression(\"1 +", "3 + (4 * 5)\") == 26 assert evaluate_expression(\"5 + (8 * 3", "+ (8 + 6 * 4))\", use_advanced_precedence=True) == 669060 assert evaluate_expression(\"((2 + 4", "3 + 4 * 5 + 6\") == 71 assert evaluate_expression(\"1 + (2", "if x) == 4696493914530 assert sum(evaluate_expression(x, use_advanced_precedence=True) for x in content.split(\"\\n\") if x)", "5)\") == 26 assert evaluate_expression(\"5 + (8 * 3 + 9 + 3", "+ 9 * 3 + (8 + 6 * 4))\") == 12240 assert", "def test_sample_input_with_advanced_priorities(caplog): # caplog.set_level(logging.INFO) assert evaluate_expression(\"1 + 2 * 3 + 4 *", "(8 * 3 + 9 + 3 * 4 * 3)\") == 437", "== 51 assert evaluate_expression(\"2 * 3 + (4 * 5)\", use_advanced_precedence=True) == 46", "26 assert evaluate_expression(\"5 + (8 * 3 + 9 + 3 * 4", "logging import os.path from day18.code.main import evaluate_expression logger = logging.getLogger(__name__) local_path = os.path.abspath(os.path.dirname(__file__))", "* 3 * 3 + 9 * 3 + (8 + 6 *", "3 * 4 * 3)\", use_advanced_precedence=True) == 1445 assert evaluate_expression(\"5 * 9 *", "* 4))\", use_advanced_precedence=True) == 669060 assert evaluate_expression(\"((2 + 4 * 9) * (6", "for x in content.split(\"\\n\") if x) == 4696493914530 assert sum(evaluate_expression(x, use_advanced_precedence=True) for x", "12240 assert evaluate_expression(\"((2 + 4 * 9) * (6 + 9 * 8", "+ 6\", use_advanced_precedence=True) == 231 assert evaluate_expression(\"1 + (2 * 3) + (4", "== 4696493914530 assert sum(evaluate_expression(x, use_advanced_precedence=True) for x in content.split(\"\\n\") if x) == 362880372308125", "9 + 3 * 4 * 3)\") == 437 assert evaluate_expression(\"5 * 9", "5 + 6\", use_advanced_precedence=True) == 231 assert evaluate_expression(\"1 + (2 * 3) +", "local_path = os.path.abspath(os.path.dirname(__file__)) sample_input = None def test_sample_input(caplog): # caplog.set_level(logging.INFO) assert evaluate_expression(\"1 +", "3)\") == 437 assert evaluate_expression(\"5 * 9 * (7 * 3 * 3", "* 3 + (4 * 5)\", use_advanced_precedence=True) == 46 assert evaluate_expression(\"5 + (8", "+ 2 + 4 * 2\", use_advanced_precedence=True) == 23340 def test_big_input(caplog): # caplog.set_level(logging.INFO)", "test_sample_input_with_advanced_priorities(caplog): # caplog.set_level(logging.INFO) assert evaluate_expression(\"1 + 2 * 3 + 4 * 5", "== 71 assert evaluate_expression(\"1 + (2 * 3) + 
(4 * (5 +", "9 * 3 + (8 + 6 * 4))\") == 12240 assert evaluate_expression(\"((2", "* 3)\") == 437 assert evaluate_expression(\"5 * 9 * (7 * 3 *", "4 * 3)\", use_advanced_precedence=True) == 1445 assert evaluate_expression(\"5 * 9 * (7 *", "caplog.set_level(logging.INFO) with open(os.path.join(local_path, \"input\"), \"r\") as f: content = f.read() assert sum(evaluate_expression(x) for", "6\") == 71 assert evaluate_expression(\"1 + (2 * 3) + (4 * (5", "+ 9 * 3 + (8 + 6 * 4))\", use_advanced_precedence=True) == 669060", "231 assert evaluate_expression(\"1 + (2 * 3) + (4 * (5 + 6))\",", "use_advanced_precedence=True) == 51 assert evaluate_expression(\"2 * 3 + (4 * 5)\", use_advanced_precedence=True) ==", "2\", use_advanced_precedence=True) == 23340 def test_big_input(caplog): # caplog.set_level(logging.INFO) with open(os.path.join(local_path, \"input\"), \"r\") as", "+ 6))\") == 51 assert evaluate_expression(\"2 * 3 + (4 * 5)\") ==", "9 * 8 + 6) + 6) + 2 + 4 * 2\")", "use_advanced_precedence=True) == 46 assert evaluate_expression(\"5 + (8 * 3 + 9 + 3", "+ (8 * 3 + 9 + 3 * 4 * 3)\", use_advanced_precedence=True)", "3) + (4 * (5 + 6))\") == 51 assert evaluate_expression(\"2 * 3", "+ (2 * 3) + (4 * (5 + 6))\", use_advanced_precedence=True) == 51", "+ 4 * 9) * (6 + 9 * 8 + 6) +", "* 3 + (8 + 6 * 4))\", use_advanced_precedence=True) == 669060 assert evaluate_expression(\"((2", "+ 6) + 6) + 2 + 4 * 2\", use_advanced_precedence=True) == 23340", "(8 + 6 * 4))\") == 12240 assert evaluate_expression(\"((2 + 4 * 9)", "== 437 assert evaluate_expression(\"5 * 9 * (7 * 3 * 3 +", "2\") == 13632 def test_sample_input_with_advanced_priorities(caplog): # caplog.set_level(logging.INFO) assert evaluate_expression(\"1 + 2 * 3", "logger = logging.getLogger(__name__) local_path = os.path.abspath(os.path.dirname(__file__)) sample_input = None def test_sample_input(caplog): # caplog.set_level(logging.INFO)", "+ (4 * (5 + 6))\") == 51 assert evaluate_expression(\"2 * 3 +", "3 + 9 * 3 + (8 + 6 * 4))\") == 12240", "= f.read() assert sum(evaluate_expression(x) for x in content.split(\"\\n\") if x) == 4696493914530 assert", "+ 2 * 3 + 4 * 5 + 6\") == 71 assert", "+ 4 * 5 + 6\", use_advanced_precedence=True) == 231 assert evaluate_expression(\"1 + (2", "(4 * (5 + 6))\", use_advanced_precedence=True) == 51 assert evaluate_expression(\"2 * 3 +", "evaluate_expression logger = logging.getLogger(__name__) local_path = os.path.abspath(os.path.dirname(__file__)) sample_input = None def test_sample_input(caplog): #", "* 9) * (6 + 9 * 8 + 6) + 6) +", "5)\", use_advanced_precedence=True) == 46 assert evaluate_expression(\"5 + (8 * 3 + 9 +", "\"r\") as f: content = f.read() assert sum(evaluate_expression(x) for x in content.split(\"\\n\") if", "2 * 3 + 4 * 5 + 6\") == 71 assert evaluate_expression(\"1", "51 assert evaluate_expression(\"2 * 3 + (4 * 5)\") == 26 assert evaluate_expression(\"5", "<filename>day18/test/test_main.py import logging import os.path from day18.code.main import evaluate_expression logger = logging.getLogger(__name__) local_path", "== 51 assert evaluate_expression(\"2 * 3 + (4 * 5)\") == 26 assert", "13632 def test_sample_input_with_advanced_priorities(caplog): # caplog.set_level(logging.INFO) assert evaluate_expression(\"1 + 2 * 3 + 4", "sample_input = None def test_sample_input(caplog): # caplog.set_level(logging.INFO) assert evaluate_expression(\"1 + 2 * 3", "evaluate_expression(\"2 * 3 + (4 * 5)\", use_advanced_precedence=True) == 46 assert evaluate_expression(\"5 +", "(8 * 3 + 9 + 3 * 4 * 3)\", 
use_advanced_precedence=True) ==", "assert sum(evaluate_expression(x) for x in content.split(\"\\n\") if x) == 4696493914530 assert sum(evaluate_expression(x, use_advanced_precedence=True)", "with open(os.path.join(local_path, \"input\"), \"r\") as f: content = f.read() assert sum(evaluate_expression(x) for x", "3 + 4 * 5 + 6\", use_advanced_precedence=True) == 231 assert evaluate_expression(\"1 +", "= logging.getLogger(__name__) local_path = os.path.abspath(os.path.dirname(__file__)) sample_input = None def test_sample_input(caplog): # caplog.set_level(logging.INFO) assert", "9 * 3 + (8 + 6 * 4))\", use_advanced_precedence=True) == 669060 assert", "2 * 3 + 4 * 5 + 6\", use_advanced_precedence=True) == 231 assert", "use_advanced_precedence=True) == 669060 assert evaluate_expression(\"((2 + 4 * 9) * (6 + 9", "3 * 3 + 9 * 3 + (8 + 6 * 4))\",", "* (5 + 6))\") == 51 assert evaluate_expression(\"2 * 3 + (4 *", "4))\", use_advanced_precedence=True) == 669060 assert evaluate_expression(\"((2 + 4 * 9) * (6 +", "51 assert evaluate_expression(\"2 * 3 + (4 * 5)\", use_advanced_precedence=True) == 46 assert", "3 + 9 * 3 + (8 + 6 * 4))\", use_advanced_precedence=True) ==", "4 * 5 + 6\") == 71 assert evaluate_expression(\"1 + (2 * 3)", "6))\", use_advanced_precedence=True) == 51 assert evaluate_expression(\"2 * 3 + (4 * 5)\", use_advanced_precedence=True)", "3 + (4 * 5)\", use_advanced_precedence=True) == 46 assert evaluate_expression(\"5 + (8 *", "9) * (6 + 9 * 8 + 6) + 6) + 2", "* 3 + (8 + 6 * 4))\") == 12240 assert evaluate_expression(\"((2 +", "evaluate_expression(\"2 * 3 + (4 * 5)\") == 26 assert evaluate_expression(\"5 + (8", "* (7 * 3 * 3 + 9 * 3 + (8 +", "* (5 + 6))\", use_advanced_precedence=True) == 51 assert evaluate_expression(\"2 * 3 + (4", "assert evaluate_expression(\"1 + 2 * 3 + 4 * 5 + 6\", use_advanced_precedence=True)", "* 5 + 6\") == 71 assert evaluate_expression(\"1 + (2 * 3) +", "* 8 + 6) + 6) + 2 + 4 * 2\") ==", "3 + 9 + 3 * 4 * 3)\") == 437 assert evaluate_expression(\"5", "6) + 2 + 4 * 2\", use_advanced_precedence=True) == 23340 def test_big_input(caplog): #", "def test_big_input(caplog): # caplog.set_level(logging.INFO) with open(os.path.join(local_path, \"input\"), \"r\") as f: content = f.read()", "# caplog.set_level(logging.INFO) with open(os.path.join(local_path, \"input\"), \"r\") as f: content = f.read() assert sum(evaluate_expression(x)", "4 * 3)\") == 437 assert evaluate_expression(\"5 * 9 * (7 * 3", "os.path.abspath(os.path.dirname(__file__)) sample_input = None def test_sample_input(caplog): # caplog.set_level(logging.INFO) assert evaluate_expression(\"1 + 2 *", "f: content = f.read() assert sum(evaluate_expression(x) for x in content.split(\"\\n\") if x) ==", "3)\", use_advanced_precedence=True) == 1445 assert evaluate_expression(\"5 * 9 * (7 * 3 *", "3 * 4 * 3)\") == 437 assert evaluate_expression(\"5 * 9 * (7", "9 * (7 * 3 * 3 + 9 * 3 + (8", "import os.path from day18.code.main import evaluate_expression logger = logging.getLogger(__name__) local_path = os.path.abspath(os.path.dirname(__file__)) sample_input", "* 3 + 9 * 3 + (8 + 6 * 4))\", use_advanced_precedence=True)", "9 + 3 * 4 * 3)\", use_advanced_precedence=True) == 1445 assert evaluate_expression(\"5 *", "* (6 + 9 * 8 + 6) + 6) + 2 +", "evaluate_expression(\"5 + (8 * 3 + 9 + 3 * 4 * 3)\")", "* 3)\", use_advanced_precedence=True) == 1445 assert evaluate_expression(\"5 * 9 * (7 * 3", "in content.split(\"\\n\") if x) == 4696493914530 assert sum(evaluate_expression(x, use_advanced_precedence=True) for x in 
content.split(\"\\n\")", "== 26 assert evaluate_expression(\"5 + (8 * 3 + 9 + 3 *", "(4 * 5)\") == 26 assert evaluate_expression(\"5 + (8 * 3 + 9", "(4 * 5)\", use_advanced_precedence=True) == 46 assert evaluate_expression(\"5 + (8 * 3 +", "4 * 5 + 6\", use_advanced_precedence=True) == 231 assert evaluate_expression(\"1 + (2 *", "* 9 * (7 * 3 * 3 + 9 * 3 +", "+ 6 * 4))\", use_advanced_precedence=True) == 669060 assert evaluate_expression(\"((2 + 4 * 9)", "+ (8 * 3 + 9 + 3 * 4 * 3)\") ==", "+ 6) + 6) + 2 + 4 * 2\") == 13632 def", "sum(evaluate_expression(x) for x in content.split(\"\\n\") if x) == 4696493914530 assert sum(evaluate_expression(x, use_advanced_precedence=True) for", "23340 def test_big_input(caplog): # caplog.set_level(logging.INFO) with open(os.path.join(local_path, \"input\"), \"r\") as f: content =", "use_advanced_precedence=True) == 23340 def test_big_input(caplog): # caplog.set_level(logging.INFO) with open(os.path.join(local_path, \"input\"), \"r\") as f:", "use_advanced_precedence=True) == 1445 assert evaluate_expression(\"5 * 9 * (7 * 3 * 3", "3 + (8 + 6 * 4))\", use_advanced_precedence=True) == 669060 assert evaluate_expression(\"((2 +", "\"input\"), \"r\") as f: content = f.read() assert sum(evaluate_expression(x) for x in content.split(\"\\n\")", "assert evaluate_expression(\"2 * 3 + (4 * 5)\") == 26 assert evaluate_expression(\"5 +", "* 3 + 9 + 3 * 4 * 3)\", use_advanced_precedence=True) == 1445", "6\", use_advanced_precedence=True) == 231 assert evaluate_expression(\"1 + (2 * 3) + (4 *", "+ 9 + 3 * 4 * 3)\", use_advanced_precedence=True) == 1445 assert evaluate_expression(\"5", "assert evaluate_expression(\"1 + 2 * 3 + 4 * 5 + 6\") ==", "+ 3 * 4 * 3)\", use_advanced_precedence=True) == 1445 assert evaluate_expression(\"5 * 9", "content = f.read() assert sum(evaluate_expression(x) for x in content.split(\"\\n\") if x) == 4696493914530", "assert evaluate_expression(\"1 + (2 * 3) + (4 * (5 + 6))\") ==", "8 + 6) + 6) + 2 + 4 * 2\") == 13632", "3) + (4 * (5 + 6))\", use_advanced_precedence=True) == 51 assert evaluate_expression(\"2 *", "* 3 + 9 + 3 * 4 * 3)\") == 437 assert", "(5 + 6))\") == 51 assert evaluate_expression(\"2 * 3 + (4 * 5)\")", "assert evaluate_expression(\"5 + (8 * 3 + 9 + 3 * 4 *", "* 3 + (4 * 5)\") == 26 assert evaluate_expression(\"5 + (8 *", "* 5)\") == 26 assert evaluate_expression(\"5 + (8 * 3 + 9 +", "+ 6\") == 71 assert evaluate_expression(\"1 + (2 * 3) + (4 *", "6) + 6) + 2 + 4 * 2\") == 13632 def test_sample_input_with_advanced_priorities(caplog):", "== 1445 assert evaluate_expression(\"5 * 9 * (7 * 3 * 3 +", "= None def test_sample_input(caplog): # caplog.set_level(logging.INFO) assert evaluate_expression(\"1 + 2 * 3 +", "(4 * (5 + 6))\") == 51 assert evaluate_expression(\"2 * 3 + (4", "* 8 + 6) + 6) + 2 + 4 * 2\", use_advanced_precedence=True)", "+ (8 + 6 * 4))\") == 12240 assert evaluate_expression(\"((2 + 4 *", "6 * 4))\") == 12240 assert evaluate_expression(\"((2 + 4 * 9) * (6", "* 4))\") == 12240 assert evaluate_expression(\"((2 + 4 * 9) * (6 +", "evaluate_expression(\"5 + (8 * 3 + 9 + 3 * 4 * 3)\",", "caplog.set_level(logging.INFO) assert evaluate_expression(\"1 + 2 * 3 + 4 * 5 + 6\")", "9 * 8 + 6) + 6) + 2 + 4 * 2\",", "== 12240 assert evaluate_expression(\"((2 + 4 * 9) * (6 + 9 *", "# caplog.set_level(logging.INFO) assert evaluate_expression(\"1 + 2 * 3 + 4 * 5 +", "(5 + 6))\", use_advanced_precedence=True) == 51 assert evaluate_expression(\"2 * 3 + (4 *", "669060 assert evaluate_expression(\"((2 + 4 * 9) * (6 + 9 * 8", "as f: 
content = f.read() assert sum(evaluate_expression(x) for x in content.split(\"\\n\") if x)", "(7 * 3 * 3 + 9 * 3 + (8 + 6", "437 assert evaluate_expression(\"5 * 9 * (7 * 3 * 3 + 9", "== 23340 def test_big_input(caplog): # caplog.set_level(logging.INFO) with open(os.path.join(local_path, \"input\"), \"r\") as f: content", "caplog.set_level(logging.INFO) assert evaluate_expression(\"1 + 2 * 3 + 4 * 5 + 6\",", "f.read() assert sum(evaluate_expression(x) for x in content.split(\"\\n\") if x) == 4696493914530 assert sum(evaluate_expression(x,", "os.path from day18.code.main import evaluate_expression logger = logging.getLogger(__name__) local_path = os.path.abspath(os.path.dirname(__file__)) sample_input =", "6))\") == 51 assert evaluate_expression(\"2 * 3 + (4 * 5)\") == 26", "from day18.code.main import evaluate_expression logger = logging.getLogger(__name__) local_path = os.path.abspath(os.path.dirname(__file__)) sample_input = None", "== 46 assert evaluate_expression(\"5 + (8 * 3 + 9 + 3 *", "* 5)\", use_advanced_precedence=True) == 46 assert evaluate_expression(\"5 + (8 * 3 + 9", "== 231 assert evaluate_expression(\"1 + (2 * 3) + (4 * (5 +", "4 * 2\") == 13632 def test_sample_input_with_advanced_priorities(caplog): # caplog.set_level(logging.INFO) assert evaluate_expression(\"1 + 2", "assert evaluate_expression(\"5 * 9 * (7 * 3 * 3 + 9 *", "4 * 9) * (6 + 9 * 8 + 6) + 6)", "2 + 4 * 2\") == 13632 def test_sample_input_with_advanced_priorities(caplog): # caplog.set_level(logging.INFO) assert evaluate_expression(\"1", "evaluate_expression(\"1 + (2 * 3) + (4 * (5 + 6))\") == 51", "* 4 * 3)\", use_advanced_precedence=True) == 1445 assert evaluate_expression(\"5 * 9 * (7", "day18.code.main import evaluate_expression logger = logging.getLogger(__name__) local_path = os.path.abspath(os.path.dirname(__file__)) sample_input = None def", "import evaluate_expression logger = logging.getLogger(__name__) local_path = os.path.abspath(os.path.dirname(__file__)) sample_input = None def test_sample_input(caplog):", "(2 * 3) + (4 * (5 + 6))\", use_advanced_precedence=True) == 51 assert", "(2 * 3) + (4 * (5 + 6))\") == 51 assert evaluate_expression(\"2", "test_big_input(caplog): # caplog.set_level(logging.INFO) with open(os.path.join(local_path, \"input\"), \"r\") as f: content = f.read() assert", "+ 2 * 3 + 4 * 5 + 6\", use_advanced_precedence=True) == 231", "+ 9 + 3 * 4 * 3)\") == 437 assert evaluate_expression(\"5 *", "+ (4 * 5)\") == 26 assert evaluate_expression(\"5 + (8 * 3 +", "evaluate_expression(\"((2 + 4 * 9) * (6 + 9 * 8 + 6)", "import logging import os.path from day18.code.main import evaluate_expression logger = logging.getLogger(__name__) local_path =", "4 * 2\", use_advanced_precedence=True) == 23340 def test_big_input(caplog): # caplog.set_level(logging.INFO) with open(os.path.join(local_path, \"input\"),", "x in content.split(\"\\n\") if x) == 4696493914530 assert sum(evaluate_expression(x, use_advanced_precedence=True) for x in", "71 assert evaluate_expression(\"1 + (2 * 3) + (4 * (5 + 6))\")", "+ (2 * 3) + (4 * (5 + 6))\") == 51 assert", "== 669060 assert evaluate_expression(\"((2 + 4 * 9) * (6 + 9 *", "* 2\", use_advanced_precedence=True) == 23340 def test_big_input(caplog): # caplog.set_level(logging.INFO) with open(os.path.join(local_path, \"input\"), \"r\")", "evaluate_expression(\"1 + (2 * 3) + (4 * (5 + 6))\", use_advanced_precedence=True) ==", "assert evaluate_expression(\"1 + (2 * 3) + (4 * (5 + 6))\", use_advanced_precedence=True)", "+ 6) + 2 + 4 * 2\", 
use_advanced_precedence=True) == 23340 def test_big_input(caplog):", "* 3 + 9 * 3 + (8 + 6 * 4))\") ==", "+ 6 * 4))\") == 12240 assert evaluate_expression(\"((2 + 4 * 9) *", "6) + 2 + 4 * 2\") == 13632 def test_sample_input_with_advanced_priorities(caplog): # caplog.set_level(logging.INFO)", "* 5 + 6\", use_advanced_precedence=True) == 231 assert evaluate_expression(\"1 + (2 * 3)", "* 4 * 3)\") == 437 assert evaluate_expression(\"5 * 9 * (7 *", "(8 + 6 * 4))\", use_advanced_precedence=True) == 669060 assert evaluate_expression(\"((2 + 4 *", "assert evaluate_expression(\"2 * 3 + (4 * 5)\", use_advanced_precedence=True) == 46 assert evaluate_expression(\"5", "+ 4 * 5 + 6\") == 71 assert evaluate_expression(\"1 + (2 *", "6 * 4))\", use_advanced_precedence=True) == 669060 assert evaluate_expression(\"((2 + 4 * 9) *", "test_sample_input(caplog): # caplog.set_level(logging.INFO) assert evaluate_expression(\"1 + 2 * 3 + 4 * 5", "+ 9 * 8 + 6) + 6) + 2 + 4 *", "== 13632 def test_sample_input_with_advanced_priorities(caplog): # caplog.set_level(logging.INFO) assert evaluate_expression(\"1 + 2 * 3 +", "* 3 + 4 * 5 + 6\", use_advanced_precedence=True) == 231 assert evaluate_expression(\"1", "logging.getLogger(__name__) local_path = os.path.abspath(os.path.dirname(__file__)) sample_input = None def test_sample_input(caplog): # caplog.set_level(logging.INFO) assert evaluate_expression(\"1", "open(os.path.join(local_path, \"input\"), \"r\") as f: content = f.read() assert sum(evaluate_expression(x) for x in", "+ 3 * 4 * 3)\") == 437 assert evaluate_expression(\"5 * 9 *", "3 * 3 + 9 * 3 + (8 + 6 * 4))\")", "6) + 6) + 2 + 4 * 2\", use_advanced_precedence=True) == 23340 def", "x) == 4696493914530 assert sum(evaluate_expression(x, use_advanced_precedence=True) for x in content.split(\"\\n\") if x) ==", "use_advanced_precedence=True) == 231 assert evaluate_expression(\"1 + (2 * 3) + (4 * (5", "+ 4 * 2\", use_advanced_precedence=True) == 23340 def test_big_input(caplog): # caplog.set_level(logging.INFO) with open(os.path.join(local_path,", "evaluate_expression(\"1 + 2 * 3 + 4 * 5 + 6\", use_advanced_precedence=True) ==", "1445 assert evaluate_expression(\"5 * 9 * (7 * 3 * 3 + 9", "3 + (8 + 6 * 4))\") == 12240 assert evaluate_expression(\"((2 + 4", "4))\") == 12240 assert evaluate_expression(\"((2 + 4 * 9) * (6 + 9", "* 3) + (4 * (5 + 6))\") == 51 assert evaluate_expression(\"2 *", "8 + 6) + 6) + 2 + 4 * 2\", use_advanced_precedence=True) ==", "content.split(\"\\n\") if x) == 4696493914530 assert sum(evaluate_expression(x, use_advanced_precedence=True) for x in content.split(\"\\n\") if", "evaluate_expression(\"5 * 9 * (7 * 3 * 3 + 9 * 3", "46 assert evaluate_expression(\"5 + (8 * 3 + 9 + 3 * 4", "+ 2 + 4 * 2\") == 13632 def test_sample_input_with_advanced_priorities(caplog): # caplog.set_level(logging.INFO) assert", "assert evaluate_expression(\"((2 + 4 * 9) * (6 + 9 * 8 +", "evaluate_expression(\"1 + 2 * 3 + 4 * 5 + 6\") == 71", "* 2\") == 13632 def test_sample_input_with_advanced_priorities(caplog): # caplog.set_level(logging.INFO) assert evaluate_expression(\"1 + 2 *", "5 + 6\") == 71 assert evaluate_expression(\"1 + (2 * 3) + (4", "(6 + 9 * 8 + 6) + 6) + 2 + 4", "+ (4 * 5)\", use_advanced_precedence=True) == 46 assert evaluate_expression(\"5 + (8 * 3" ]
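The row above shingles a pytest module for Advent of Code 2020, day 18 (expression evaluation under non-standard operator precedence). The overlaps are dense enough to line the file back up, so the following is a reconstruction rather than new material, hedged in two places: layout and blank lines are inferred, and the expected value for the advanced-precedence sum over the big input never appears in the visible shingles, so it is left out rather than invented.

```python
import logging
import os.path

from day18.code.main import evaluate_expression  # import path exactly as in the row

logger = logging.getLogger(__name__)
local_path = os.path.abspath(os.path.dirname(__file__))
sample_input = None


def test_sample_input(caplog):
    # caplog.set_level(logging.INFO)
    # Part-one rules: + and * share one precedence level, evaluated left to right.
    assert evaluate_expression("1 + 2 * 3 + 4 * 5 + 6") == 71
    assert evaluate_expression("1 + (2 * 3) + (4 * (5 + 6))") == 51
    assert evaluate_expression("2 * 3 + (4 * 5)") == 26
    assert evaluate_expression("5 + (8 * 3 + 9 + 3 * 4 * 3)") == 437
    assert evaluate_expression("5 * 9 * (7 * 3 * 3 + 9 * 3 + (8 + 6 * 4))") == 12240
    assert evaluate_expression("((2 + 4 * 9) * (6 + 9 * 8 + 6) + 6) + 2 + 4 * 2") == 13632


def test_sample_input_with_advanced_priorities(caplog):
    # caplog.set_level(logging.INFO)
    # Part-two rules: + binds tighter than *.
    assert evaluate_expression("1 + 2 * 3 + 4 * 5 + 6", use_advanced_precedence=True) == 231
    assert evaluate_expression("1 + (2 * 3) + (4 * (5 + 6))", use_advanced_precedence=True) == 51
    assert evaluate_expression("2 * 3 + (4 * 5)", use_advanced_precedence=True) == 46
    assert evaluate_expression("5 + (8 * 3 + 9 + 3 * 4 * 3)", use_advanced_precedence=True) == 1445
    assert evaluate_expression("5 * 9 * (7 * 3 * 3 + 9 * 3 + (8 + 6 * 4))",
                               use_advanced_precedence=True) == 669060
    assert evaluate_expression("((2 + 4 * 9) * (6 + 9 * 8 + 6) + 6) + 2 + 4 * 2",
                               use_advanced_precedence=True) == 23340


def test_big_input(caplog):
    # caplog.set_level(logging.INFO)
    with open(os.path.join(local_path, "input"), "r") as f:
        content = f.read()
    assert sum(evaluate_expression(x) for x in content.split("\n") if x) == 4696493914530
    # The shingles end at "==" for the advanced-precedence total, so the final
    # assertion's expected value is not reproducible here:
    # sum(evaluate_expression(x, use_advanced_precedence=True)
    #     for x in content.split("\n") if x) == <value truncated in this row>
```

The six sample expressions and their expected values match the published AoC 2020 day 18 examples for both rule sets, which is a useful sanity check on the reconstruction.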
[ "1 return True for i in range(numCourses): if not dfs(i): return [] return", "coursesCanBeTaken = [] for i in range(numCourses): if num_pre[i] == 0: coursesCanBeTaken.append(i) res", "[] while coursesCanBeTaken: course = coursesCanBeTaken.pop() res.append(course) for c in graph[course]: num_pre[c] -=", "= [0 for _ in range(numCourses)] for a, b in prerequisites: graph[a].append(b) result", "num_pre[j] -= 1 if num_pre[j] == 0: coursesCanBeTaken.append(j) return res if len(res) ==", "prerequisites): \"\"\" :type numCourses: int :type prerequisites: List[List[int]] :rtype: List[int] \"\"\" ## Practice:", "[] while coursesCanBeTaken: courseTaken = coursesCanBeTaken.pop(0) res.append(courseTaken) for j in graph[courseTaken]: num_pre[j] -=", "== 0: coursesCanBeTaken.append(j) return res if len(res) == numCourses else [] ## DFS", "numCourses: int :type prerequisites: List[List[int]] :rtype: List[int] \"\"\" ## Practice: graph = collections.defaultdict(list)", "= [[] for _ in range(numCourses)] num_pre = [0]*numCourses for after, pre in", "== -1: return False visit[i] = -1 for j in graph[i]: if not", "if num_pre[i] == 0: coursesCanBeTaken.append(i) res = [] while coursesCanBeTaken: course = coursesCanBeTaken.pop()", "if len(res) == numCourses else [] ## bfs graph = [[] for _", "for j in graph[i]: if not dfs(j): return False result.append(i) visit[i] = 1", "1 coursesCanBeTaken = [i for i in range(numCourses) if num_pre[i] == 0] res", "coursesCanBeTaken.pop(0) res.append(courseTaken) for j in graph[courseTaken]: num_pre[j] -= 1 if num_pre[j] == 0:", "for _ in range(numCourses)] for a, b in prerequisites: graph[a].append(b) result = []", "in graph[courseTaken]: num_pre[j] -= 1 if num_pre[j] == 0: coursesCanBeTaken.append(j) return res if", "in range(numCourses)] visit = [0 for _ in range(numCourses)] for a, b in", "\"\"\" ## Practice: graph = collections.defaultdict(list) num_pre = [0]*numCourses for after, prev in", "num_pre = [0]*numCourses for after, prev in prerequisites: graph[prev].append(after) num_pre[after] += 1 coursesCanBeTaken", "coursesCanBeTaken: course = coursesCanBeTaken.pop() res.append(course) for c in graph[course]: num_pre[c] -= 1 if", "dfs(i): if visit[i] == 1: return True if visit[i] == -1: return False", "for i in range(numCourses) if num_pre[i] == 0] res = [] while coursesCanBeTaken:", "[] ## bfs graph = [[] for _ in range(numCourses)] num_pre = [0]*numCourses", "List[List[int]] :rtype: List[int] \"\"\" ## Practice: graph = collections.defaultdict(list) num_pre = [0]*numCourses for", ":rtype: List[int] \"\"\" ## Practice: graph = collections.defaultdict(list) num_pre = [0]*numCourses for after,", "prerequisites: graph[pre].append(after) num_pre[after] += 1 coursesCanBeTaken = [i for i in range(numCourses) if", "List[int] \"\"\" ## Practice: graph = collections.defaultdict(list) num_pre = [0]*numCourses for after, prev", "= 1 return True for i in range(numCourses): if not dfs(i): return []", "False visit[i] = -1 for j in graph[i]: if not dfs(j): return False", "i in range(numCourses) if num_pre[i] == 0] res = [] while coursesCanBeTaken: courseTaken", "j in graph[courseTaken]: num_pre[j] -= 1 if num_pre[j] == 0: coursesCanBeTaken.append(j) return res", "= -1 for j in graph[i]: if not dfs(j): return False result.append(i) visit[i]", "in range(numCourses): if num_pre[i] == 0: coursesCanBeTaken.append(i) res = [] while coursesCanBeTaken: course", "[i for i in range(numCourses) if num_pre[i] == 0] res = [] while", "res if len(res) == numCourses else [] ## DFS graph = [[] for", "j 
in graph[i]: if not dfs(j): return False result.append(i) visit[i] = 1 return", "else [] ## DFS graph = [[] for _ in range(numCourses)] visit =", "visit[i] == 1: return True if visit[i] == -1: return False visit[i] =", "graph[course]: num_pre[c] -= 1 if num_pre[c] == 0: coursesCanBeTaken.append(c) return res if len(res)", "prerequisites: graph[a].append(b) result = [] def dfs(i): if visit[i] == 1: return True", "range(numCourses): if num_pre[i] == 0: coursesCanBeTaken.append(i) res = [] while coursesCanBeTaken: course =", "result.append(i) visit[i] = 1 return True for i in range(numCourses): if not dfs(i):", "if len(res) == numCourses else [] ## DFS graph = [[] for _", ":type numCourses: int :type prerequisites: List[List[int]] :rtype: List[int] \"\"\" ## Practice: graph =", "num_pre[j] == 0: coursesCanBeTaken.append(j) return res if len(res) == numCourses else [] ##", "_ in range(numCourses)] for a, b in prerequisites: graph[a].append(b) result = [] def", "findOrder(self, numCourses, prerequisites): \"\"\" :type numCourses: int :type prerequisites: List[List[int]] :rtype: List[int] \"\"\"", "def findOrder(self, numCourses, prerequisites): \"\"\" :type numCourses: int :type prerequisites: List[List[int]] :rtype: List[int]", "graph = [[] for _ in range(numCourses)] visit = [0 for _ in", "[[] for _ in range(numCourses)] visit = [0 for _ in range(numCourses)] for", "while coursesCanBeTaken: courseTaken = coursesCanBeTaken.pop(0) res.append(courseTaken) for j in graph[courseTaken]: num_pre[j] -= 1", "res.append(courseTaken) for j in graph[courseTaken]: num_pre[j] -= 1 if num_pre[j] == 0: coursesCanBeTaken.append(j)", "in graph[i]: if not dfs(j): return False result.append(i) visit[i] = 1 return True", "range(numCourses)] for a, b in prerequisites: graph[a].append(b) result = [] def dfs(i): if", "for i in range(numCourses): if num_pre[i] == 0: coursesCanBeTaken.append(i) res = [] while", "1 if num_pre[j] == 0: coursesCanBeTaken.append(j) return res if len(res) == numCourses else", "Schedule II\"\"\" class Solution(object): def findOrder(self, numCourses, prerequisites): \"\"\" :type numCourses: int :type", "numCourses else [] ## bfs graph = [[] for _ in range(numCourses)] num_pre", "= [0]*numCourses for after, pre in prerequisites: graph[pre].append(after) num_pre[after] += 1 coursesCanBeTaken =", "len(res) == numCourses else [] ## DFS graph = [[] for _ in", "visit[i] = -1 for j in graph[i]: if not dfs(j): return False result.append(i)", "return res if len(res) == numCourses else [] ## bfs graph = [[]", "Course Schedule II\"\"\" class Solution(object): def findOrder(self, numCourses, prerequisites): \"\"\" :type numCourses: int", "in prerequisites: graph[pre].append(after) num_pre[after] += 1 coursesCanBeTaken = [i for i in range(numCourses)", "= [] def dfs(i): if visit[i] == 1: return True if visit[i] ==", "False result.append(i) visit[i] = 1 return True for i in range(numCourses): if not", "if num_pre[j] == 0: coursesCanBeTaken.append(j) return res if len(res) == numCourses else []", "[0 for _ in range(numCourses)] for a, b in prerequisites: graph[a].append(b) result =", "for after, prev in prerequisites: graph[prev].append(after) num_pre[after] += 1 coursesCanBeTaken = [] for", "int :type prerequisites: List[List[int]] :rtype: List[int] \"\"\" ## Practice: graph = collections.defaultdict(list) num_pre", "\"\"\"210. 
Course Schedule II\"\"\" class Solution(object): def findOrder(self, numCourses, prerequisites): \"\"\" :type numCourses:", "graph[pre].append(after) num_pre[after] += 1 coursesCanBeTaken = [i for i in range(numCourses) if num_pre[i]", "in range(numCourses)] num_pre = [0]*numCourses for after, pre in prerequisites: graph[pre].append(after) num_pre[after] +=", "== 0] res = [] while coursesCanBeTaken: courseTaken = coursesCanBeTaken.pop(0) res.append(courseTaken) for j", "DFS graph = [[] for _ in range(numCourses)] visit = [0 for _", ":type prerequisites: List[List[int]] :rtype: List[int] \"\"\" ## Practice: graph = collections.defaultdict(list) num_pre =", "[] for i in range(numCourses): if num_pre[i] == 0: coursesCanBeTaken.append(i) res = []", "course = coursesCanBeTaken.pop() res.append(course) for c in graph[course]: num_pre[c] -= 1 if num_pre[c]", "return False result.append(i) visit[i] = 1 return True for i in range(numCourses): if", "[0]*numCourses for after, prev in prerequisites: graph[prev].append(after) num_pre[after] += 1 coursesCanBeTaken = []", "return False visit[i] = -1 for j in graph[i]: if not dfs(j): return", "0: coursesCanBeTaken.append(i) res = [] while coursesCanBeTaken: course = coursesCanBeTaken.pop() res.append(course) for c", "II\"\"\" class Solution(object): def findOrder(self, numCourses, prerequisites): \"\"\" :type numCourses: int :type prerequisites:", "= [] for i in range(numCourses): if num_pre[i] == 0: coursesCanBeTaken.append(i) res =", "_ in range(numCourses)] visit = [0 for _ in range(numCourses)] for a, b", "while coursesCanBeTaken: course = coursesCanBeTaken.pop() res.append(course) for c in graph[course]: num_pre[c] -= 1", "graph[prev].append(after) num_pre[after] += 1 coursesCanBeTaken = [] for i in range(numCourses): if num_pre[i]", "return True for i in range(numCourses): if not dfs(i): return [] return result", "0] res = [] while coursesCanBeTaken: courseTaken = coursesCanBeTaken.pop(0) res.append(courseTaken) for j in", "in range(numCourses)] for a, b in prerequisites: graph[a].append(b) result = [] def dfs(i):", "[0]*numCourses for after, pre in prerequisites: graph[pre].append(after) num_pre[after] += 1 coursesCanBeTaken = [i", "1 coursesCanBeTaken = [] for i in range(numCourses): if num_pre[i] == 0: coursesCanBeTaken.append(i)", "if num_pre[c] == 0: coursesCanBeTaken.append(c) return res if len(res) == numCourses else []", "num_pre[c] == 0: coursesCanBeTaken.append(c) return res if len(res) == numCourses else [] ##", "range(numCourses)] visit = [0 for _ in range(numCourses)] for a, b in prerequisites:", "visit[i] == -1: return False visit[i] = -1 for j in graph[i]: if", "def dfs(i): if visit[i] == 1: return True if visit[i] == -1: return", "for _ in range(numCourses)] num_pre = [0]*numCourses for after, pre in prerequisites: graph[pre].append(after)", "-1: return False visit[i] = -1 for j in graph[i]: if not dfs(j):", "0: coursesCanBeTaken.append(j) return res if len(res) == numCourses else [] ## DFS graph", "in prerequisites: graph[a].append(b) result = [] def dfs(i): if visit[i] == 1: return", "= coursesCanBeTaken.pop() res.append(course) for c in graph[course]: num_pre[c] -= 1 if num_pre[c] ==", "1: return True if visit[i] == -1: return False visit[i] = -1 for", "<filename>210.py \"\"\"210. 
Course Schedule II\"\"\" class Solution(object): def findOrder(self, numCourses, prerequisites): \"\"\" :type", "0: coursesCanBeTaken.append(c) return res if len(res) == numCourses else [] ## bfs graph", "class Solution(object): def findOrder(self, numCourses, prerequisites): \"\"\" :type numCourses: int :type prerequisites: List[List[int]]", "in graph[course]: num_pre[c] -= 1 if num_pre[c] == 0: coursesCanBeTaken.append(c) return res if", "res.append(course) for c in graph[course]: num_pre[c] -= 1 if num_pre[c] == 0: coursesCanBeTaken.append(c)", "res = [] while coursesCanBeTaken: courseTaken = coursesCanBeTaken.pop(0) res.append(courseTaken) for j in graph[courseTaken]:", "coursesCanBeTaken: courseTaken = coursesCanBeTaken.pop(0) res.append(courseTaken) for j in graph[courseTaken]: num_pre[j] -= 1 if", "_ in range(numCourses)] num_pre = [0]*numCourses for after, pre in prerequisites: graph[pre].append(after) num_pre[after]", "in prerequisites: graph[prev].append(after) num_pre[after] += 1 coursesCanBeTaken = [] for i in range(numCourses):", "if num_pre[i] == 0] res = [] while coursesCanBeTaken: courseTaken = coursesCanBeTaken.pop(0) res.append(courseTaken)", "== numCourses else [] ## bfs graph = [[] for _ in range(numCourses)]", "else [] ## bfs graph = [[] for _ in range(numCourses)] num_pre =", "+= 1 coursesCanBeTaken = [i for i in range(numCourses) if num_pre[i] == 0]", "Solution(object): def findOrder(self, numCourses, prerequisites): \"\"\" :type numCourses: int :type prerequisites: List[List[int]] :rtype:", "\"\"\" :type numCourses: int :type prerequisites: List[List[int]] :rtype: List[int] \"\"\" ## Practice: graph", "= collections.defaultdict(list) num_pre = [0]*numCourses for after, prev in prerequisites: graph[prev].append(after) num_pre[after] +=", "range(numCourses)] num_pre = [0]*numCourses for after, pre in prerequisites: graph[pre].append(after) num_pre[after] += 1", "prerequisites: List[List[int]] :rtype: List[int] \"\"\" ## Practice: graph = collections.defaultdict(list) num_pre = [0]*numCourses", "num_pre[c] -= 1 if num_pre[c] == 0: coursesCanBeTaken.append(c) return res if len(res) ==", "prev in prerequisites: graph[prev].append(after) num_pre[after] += 1 coursesCanBeTaken = [] for i in", "== 0: coursesCanBeTaken.append(i) res = [] while coursesCanBeTaken: course = coursesCanBeTaken.pop() res.append(course) for", "for after, pre in prerequisites: graph[pre].append(after) num_pre[after] += 1 coursesCanBeTaken = [i for", "1 if num_pre[c] == 0: coursesCanBeTaken.append(c) return res if len(res) == numCourses else", "= [i for i in range(numCourses) if num_pre[i] == 0] res = []", "num_pre[i] == 0: coursesCanBeTaken.append(i) res = [] while coursesCanBeTaken: course = coursesCanBeTaken.pop() res.append(course)", "dfs(j): return False result.append(i) visit[i] = 1 return True for i in range(numCourses):", "after, pre in prerequisites: graph[pre].append(after) num_pre[after] += 1 coursesCanBeTaken = [i for i", "courseTaken = coursesCanBeTaken.pop(0) res.append(courseTaken) for j in graph[courseTaken]: num_pre[j] -= 1 if num_pre[j]", "num_pre[after] += 1 coursesCanBeTaken = [] for i in range(numCourses): if num_pre[i] ==", "== 0: coursesCanBeTaken.append(c) return res if len(res) == numCourses else [] ## bfs", "= coursesCanBeTaken.pop(0) res.append(courseTaken) for j in graph[courseTaken]: num_pre[j] -= 1 if num_pre[j] ==", "numCourses else [] ## DFS graph = [[] for _ in range(numCourses)] visit", "coursesCanBeTaken = [i for i in range(numCourses) if num_pre[i] == 0] res =", 
"return res if len(res) == numCourses else [] ## DFS graph = [[]", "c in graph[course]: num_pre[c] -= 1 if num_pre[c] == 0: coursesCanBeTaken.append(c) return res", "coursesCanBeTaken.append(j) return res if len(res) == numCourses else [] ## DFS graph =", "collections.defaultdict(list) num_pre = [0]*numCourses for after, prev in prerequisites: graph[prev].append(after) num_pre[after] += 1", "graph = [[] for _ in range(numCourses)] num_pre = [0]*numCourses for after, pre", "pre in prerequisites: graph[pre].append(after) num_pre[after] += 1 coursesCanBeTaken = [i for i in", "graph[i]: if not dfs(j): return False result.append(i) visit[i] = 1 return True for", "prerequisites: graph[prev].append(after) num_pre[after] += 1 coursesCanBeTaken = [] for i in range(numCourses): if", "[[] for _ in range(numCourses)] num_pre = [0]*numCourses for after, pre in prerequisites:", "num_pre = [0]*numCourses for after, pre in prerequisites: graph[pre].append(after) num_pre[after] += 1 coursesCanBeTaken", "num_pre[after] += 1 coursesCanBeTaken = [i for i in range(numCourses) if num_pre[i] ==", "for _ in range(numCourses)] visit = [0 for _ in range(numCourses)] for a,", "= [] while coursesCanBeTaken: course = coursesCanBeTaken.pop() res.append(course) for c in graph[course]: num_pre[c]", "## DFS graph = [[] for _ in range(numCourses)] visit = [0 for", "graph[a].append(b) result = [] def dfs(i): if visit[i] == 1: return True if", "## bfs graph = [[] for _ in range(numCourses)] num_pre = [0]*numCourses for", "+= 1 coursesCanBeTaken = [] for i in range(numCourses): if num_pre[i] == 0:", "i in range(numCourses): if num_pre[i] == 0: coursesCanBeTaken.append(i) res = [] while coursesCanBeTaken:", "bfs graph = [[] for _ in range(numCourses)] num_pre = [0]*numCourses for after,", "for j in graph[courseTaken]: num_pre[j] -= 1 if num_pre[j] == 0: coursesCanBeTaken.append(j) return", "== numCourses else [] ## DFS graph = [[] for _ in range(numCourses)]", "if visit[i] == 1: return True if visit[i] == -1: return False visit[i]", "= [] while coursesCanBeTaken: courseTaken = coursesCanBeTaken.pop(0) res.append(courseTaken) for j in graph[courseTaken]: num_pre[j]", "for a, b in prerequisites: graph[a].append(b) result = [] def dfs(i): if visit[i]", "return True if visit[i] == -1: return False visit[i] = -1 for j", "graph = collections.defaultdict(list) num_pre = [0]*numCourses for after, prev in prerequisites: graph[prev].append(after) num_pre[after]", "in range(numCourses) if num_pre[i] == 0] res = [] while coursesCanBeTaken: courseTaken =", "[] def dfs(i): if visit[i] == 1: return True if visit[i] == -1:", "res if len(res) == numCourses else [] ## bfs graph = [[] for", "= [0]*numCourses for after, prev in prerequisites: graph[prev].append(after) num_pre[after] += 1 coursesCanBeTaken =", "if not dfs(j): return False result.append(i) visit[i] = 1 return True for i", "not dfs(j): return False result.append(i) visit[i] = 1 return True for i in", "result = [] def dfs(i): if visit[i] == 1: return True if visit[i]", "res = [] while coursesCanBeTaken: course = coursesCanBeTaken.pop() res.append(course) for c in graph[course]:", "-= 1 if num_pre[j] == 0: coursesCanBeTaken.append(j) return res if len(res) == numCourses", "== 1: return True if visit[i] == -1: return False visit[i] = -1", "True if visit[i] == -1: return False visit[i] = -1 for j in", "[] ## DFS graph = [[] for _ in range(numCourses)] visit = [0", "= [[] for _ in range(numCourses)] visit = [0 for _ in range(numCourses)]", "numCourses, prerequisites): \"\"\" :type 
numCourses: int :type prerequisites: List[List[int]] :rtype: List[int] \"\"\" ##", "a, b in prerequisites: graph[a].append(b) result = [] def dfs(i): if visit[i] ==", "visit = [0 for _ in range(numCourses)] for a, b in prerequisites: graph[a].append(b)", "num_pre[i] == 0] res = [] while coursesCanBeTaken: courseTaken = coursesCanBeTaken.pop(0) res.append(courseTaken) for", "b in prerequisites: graph[a].append(b) result = [] def dfs(i): if visit[i] == 1:", "graph[courseTaken]: num_pre[j] -= 1 if num_pre[j] == 0: coursesCanBeTaken.append(j) return res if len(res)", "-= 1 if num_pre[c] == 0: coursesCanBeTaken.append(c) return res if len(res) == numCourses", "coursesCanBeTaken.append(c) return res if len(res) == numCourses else [] ## bfs graph =", "coursesCanBeTaken.append(i) res = [] while coursesCanBeTaken: course = coursesCanBeTaken.pop() res.append(course) for c in", "## Practice: graph = collections.defaultdict(list) num_pre = [0]*numCourses for after, prev in prerequisites:", "for c in graph[course]: num_pre[c] -= 1 if num_pre[c] == 0: coursesCanBeTaken.append(c) return", "range(numCourses) if num_pre[i] == 0] res = [] while coursesCanBeTaken: courseTaken = coursesCanBeTaken.pop(0)", "coursesCanBeTaken.pop() res.append(course) for c in graph[course]: num_pre[c] -= 1 if num_pre[c] == 0:", "len(res) == numCourses else [] ## bfs graph = [[] for _ in", "-1 for j in graph[i]: if not dfs(j): return False result.append(i) visit[i] =", "visit[i] = 1 return True for i in range(numCourses): if not dfs(i): return", "if visit[i] == -1: return False visit[i] = -1 for j in graph[i]:", "after, prev in prerequisites: graph[prev].append(after) num_pre[after] += 1 coursesCanBeTaken = [] for i", "Practice: graph = collections.defaultdict(list) num_pre = [0]*numCourses for after, prev in prerequisites: graph[prev].append(after)" ]
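The row that just closed shingles a LeetCode 210 ("Course Schedule II") solution file that stacks three interchangeable variants inside one method: a Kahn's-algorithm version labeled "Practice", a queue-based "bfs" version, and a post-order "DFS" version, so everything after the first `return` is dead code. Below is a self-contained sketch of the first variant; `import collections` and the `__main__` driver are additions (no import shingle appears in the row, presumably because LeetCode's harness supplies it).

```python
import collections


class Solution(object):
    def findOrder(self, numCourses, prerequisites):
        """
        :type numCourses: int
        :type prerequisites: List[List[int]]
        :rtype: List[int]
        """
        # Kahn's algorithm: repeatedly take a course whose prerequisites are all met.
        graph = collections.defaultdict(list)  # prerequisite -> courses it unlocks
        num_pre = [0] * numCourses             # unmet-prerequisite count per course
        for after, prev in prerequisites:
            graph[prev].append(after)
            num_pre[after] += 1

        coursesCanBeTaken = [i for i in range(numCourses) if num_pre[i] == 0]
        res = []
        while coursesCanBeTaken:
            course = coursesCanBeTaken.pop()
            res.append(course)
            for c in graph[course]:
                num_pre[c] -= 1
                if num_pre[c] == 0:
                    coursesCanBeTaken.append(c)

        # A cycle leaves some course's count above zero, so res comes up short.
        return res if len(res) == numCourses else []


if __name__ == "__main__":
    # [1, 0] means course 0 must be taken before course 1.
    print(Solution().findOrder(4, [[1, 0], [2, 0], [3, 1], [3, 2]]))  # e.g. [0, 2, 1, 3]
```

The DFS variant in the row inverts the edges (`graph[a].append(b)` points a course at its prerequisites) and emits courses in post-order, which yields a valid schedule directly; its `visit` array uses -1 for "in progress" and 1 for "done" to detect cycles.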
[ "import os from typing import Dict from fastapi_sso.sso.base import OpenID, SSOBase class KakaoSSO(SSOBase):", "scope = [\"openid\"] version = \"v2\" async def get_discovery_document(self) -> Dict[str, str]: return", "f\"https://kauth.kakao.com/oauth/token\", \"userinfo_endpoint\": f\"https://kapi.kakao.com/{self.version}/user/me\", } @classmethod async def openid_from_response(cls, response: dict) -> OpenID: return", "[\"openid\"] version = \"v2\" async def get_discovery_document(self) -> Dict[str, str]: return { \"authorization_endpoint\":", "Dict from fastapi_sso.sso.base import OpenID, SSOBase class KakaoSSO(SSOBase): provider = \"kakao\" scope =", "= [\"openid\"] version = \"v2\" async def get_discovery_document(self) -> Dict[str, str]: return {", "SSOBase class KakaoSSO(SSOBase): provider = \"kakao\" scope = [\"openid\"] version = \"v2\" async", "async def get_discovery_document(self) -> Dict[str, str]: return { \"authorization_endpoint\": f\"https://kauth.kakao.com/oauth/authorize?client_id={self.client_secret}&response_type=code&redirect_uri={self.redirect_uri}\", \"token_endpoint\": f\"https://kauth.kakao.com/oauth/token\", \"userinfo_endpoint\":", "fastapi_sso.sso.base import OpenID, SSOBase class KakaoSSO(SSOBase): provider = \"kakao\" scope = [\"openid\"] version", "f\"https://kapi.kakao.com/{self.version}/user/me\", } @classmethod async def openid_from_response(cls, response: dict) -> OpenID: return OpenID(display_name=response[\"properties\"][\"nickname\"], provider=cls.provider)", "def get_discovery_document(self) -> Dict[str, str]: return { \"authorization_endpoint\": f\"https://kauth.kakao.com/oauth/authorize?client_id={self.client_secret}&response_type=code&redirect_uri={self.redirect_uri}\", \"token_endpoint\": f\"https://kauth.kakao.com/oauth/token\", \"userinfo_endpoint\": f\"https://kapi.kakao.com/{self.version}/user/me\",", "OpenID, SSOBase class KakaoSSO(SSOBase): provider = \"kakao\" scope = [\"openid\"] version = \"v2\"", "from typing import Dict from fastapi_sso.sso.base import OpenID, SSOBase class KakaoSSO(SSOBase): provider =", "\"kakao\" scope = [\"openid\"] version = \"v2\" async def get_discovery_document(self) -> Dict[str, str]:", "\"userinfo_endpoint\": f\"https://kapi.kakao.com/{self.version}/user/me\", } @classmethod async def openid_from_response(cls, response: dict) -> OpenID: return OpenID(display_name=response[\"properties\"][\"nickname\"],", "return { \"authorization_endpoint\": f\"https://kauth.kakao.com/oauth/authorize?client_id={self.client_secret}&response_type=code&redirect_uri={self.redirect_uri}\", \"token_endpoint\": f\"https://kauth.kakao.com/oauth/token\", \"userinfo_endpoint\": f\"https://kapi.kakao.com/{self.version}/user/me\", } @classmethod async def openid_from_response(cls,", "= \"kakao\" scope = [\"openid\"] version = \"v2\" async def get_discovery_document(self) -> Dict[str,", "import Dict from fastapi_sso.sso.base import OpenID, SSOBase class KakaoSSO(SSOBase): provider = \"kakao\" scope", "version = \"v2\" async def get_discovery_document(self) -> Dict[str, str]: return { \"authorization_endpoint\": f\"https://kauth.kakao.com/oauth/authorize?client_id={self.client_secret}&response_type=code&redirect_uri={self.redirect_uri}\",", "f\"https://kauth.kakao.com/oauth/authorize?client_id={self.client_secret}&response_type=code&redirect_uri={self.redirect_uri}\", \"token_endpoint\": f\"https://kauth.kakao.com/oauth/token\", \"userinfo_endpoint\": f\"https://kapi.kakao.com/{self.version}/user/me\", } @classmethod async def 
openid_from_response(cls, response: dict) ->", "get_discovery_document(self) -> Dict[str, str]: return { \"authorization_endpoint\": f\"https://kauth.kakao.com/oauth/authorize?client_id={self.client_secret}&response_type=code&redirect_uri={self.redirect_uri}\", \"token_endpoint\": f\"https://kauth.kakao.com/oauth/token\", \"userinfo_endpoint\": f\"https://kapi.kakao.com/{self.version}/user/me\", }", "-> Dict[str, str]: return { \"authorization_endpoint\": f\"https://kauth.kakao.com/oauth/authorize?client_id={self.client_secret}&response_type=code&redirect_uri={self.redirect_uri}\", \"token_endpoint\": f\"https://kauth.kakao.com/oauth/token\", \"userinfo_endpoint\": f\"https://kapi.kakao.com/{self.version}/user/me\", } @classmethod", "\"v2\" async def get_discovery_document(self) -> Dict[str, str]: return { \"authorization_endpoint\": f\"https://kauth.kakao.com/oauth/authorize?client_id={self.client_secret}&response_type=code&redirect_uri={self.redirect_uri}\", \"token_endpoint\": f\"https://kauth.kakao.com/oauth/token\",", "\"token_endpoint\": f\"https://kauth.kakao.com/oauth/token\", \"userinfo_endpoint\": f\"https://kapi.kakao.com/{self.version}/user/me\", } @classmethod async def openid_from_response(cls, response: dict) -> OpenID:", "provider = \"kakao\" scope = [\"openid\"] version = \"v2\" async def get_discovery_document(self) ->", "class KakaoSSO(SSOBase): provider = \"kakao\" scope = [\"openid\"] version = \"v2\" async def", "<reponame>thdwoqor/fastapi-sso import os from typing import Dict from fastapi_sso.sso.base import OpenID, SSOBase class", "= \"v2\" async def get_discovery_document(self) -> Dict[str, str]: return { \"authorization_endpoint\": f\"https://kauth.kakao.com/oauth/authorize?client_id={self.client_secret}&response_type=code&redirect_uri={self.redirect_uri}\", \"token_endpoint\":", "typing import Dict from fastapi_sso.sso.base import OpenID, SSOBase class KakaoSSO(SSOBase): provider = \"kakao\"", "{ \"authorization_endpoint\": f\"https://kauth.kakao.com/oauth/authorize?client_id={self.client_secret}&response_type=code&redirect_uri={self.redirect_uri}\", \"token_endpoint\": f\"https://kauth.kakao.com/oauth/token\", \"userinfo_endpoint\": f\"https://kapi.kakao.com/{self.version}/user/me\", } @classmethod async def openid_from_response(cls, response:", "Dict[str, str]: return { \"authorization_endpoint\": f\"https://kauth.kakao.com/oauth/authorize?client_id={self.client_secret}&response_type=code&redirect_uri={self.redirect_uri}\", \"token_endpoint\": f\"https://kauth.kakao.com/oauth/token\", \"userinfo_endpoint\": f\"https://kapi.kakao.com/{self.version}/user/me\", } @classmethod async", "os from typing import Dict from fastapi_sso.sso.base import OpenID, SSOBase class KakaoSSO(SSOBase): provider", "str]: return { \"authorization_endpoint\": f\"https://kauth.kakao.com/oauth/authorize?client_id={self.client_secret}&response_type=code&redirect_uri={self.redirect_uri}\", \"token_endpoint\": f\"https://kauth.kakao.com/oauth/token\", \"userinfo_endpoint\": f\"https://kapi.kakao.com/{self.version}/user/me\", } @classmethod async def", "\"authorization_endpoint\": f\"https://kauth.kakao.com/oauth/authorize?client_id={self.client_secret}&response_type=code&redirect_uri={self.redirect_uri}\", \"token_endpoint\": f\"https://kauth.kakao.com/oauth/token\", \"userinfo_endpoint\": f\"https://kapi.kakao.com/{self.version}/user/me\", } @classmethod async def openid_from_response(cls, response: dict)", "from fastapi_sso.sso.base import OpenID, SSOBase class KakaoSSO(SSOBase): provider 
= \"kakao\" scope = [\"openid\"]", "KakaoSSO(SSOBase): provider = \"kakao\" scope = [\"openid\"] version = \"v2\" async def get_discovery_document(self)", "import OpenID, SSOBase class KakaoSSO(SSOBase): provider = \"kakao\" scope = [\"openid\"] version =" ]
[ "tasks = [asyncio.create_task(job.close(timeout=timeout)) for job in self._jobs] closed = True for task in", "def is_running(self) -> bool: return not self._task.done() async def close(self, *, timeout: float", "-> bool: return all(job.is_running for job in self._jobs) async def close(self, *, timeout:", "Collection class Job(abc.ABC): __slots__ = () @property @abc.abstractmethod def is_running(self) -> bool: ...", "-> bool: if self._task.done(): return True self._task.cancel() await asyncio.wait({self._task}, timeout=timeout) return self._task.done() class", "@property def is_running(self) -> bool: return all(job.is_running for job in self._jobs) async def", "job in self._jobs) async def close(self, *, timeout: float = 0.5) -> bool:", "bool: tasks = [asyncio.create_task(job.close(timeout=timeout)) for job in self._jobs] closed = True for task", "from typing import Collection class Job(abc.ABC): __slots__ = () @property @abc.abstractmethod def is_running(self)", "def close(self, *, timeout: float = 0.5) -> bool: tasks = [asyncio.create_task(job.close(timeout=timeout)) for", "timeout: float = 0.5) -> bool: ... class SingleTaskJob(Job): __slots__ = (\"_task\",) def", "0.5) -> bool: ... class SingleTaskJob(Job): __slots__ = (\"_task\",) def __init__(self, task: asyncio.Task[None]):", "bool: ... class SingleTaskJob(Job): __slots__ = (\"_task\",) def __init__(self, task: asyncio.Task[None]): self._task =", "@property def is_running(self) -> bool: return not self._task.done() async def close(self, *, timeout:", "in self._jobs] closed = True for task in tasks: closed &= await task", "bool: return not self._task.done() async def close(self, *, timeout: float = 0.5) ->", "(\"_task\",) def __init__(self, task: asyncio.Task[None]): self._task = task @property def is_running(self) -> bool:", "typing import Collection class Job(abc.ABC): __slots__ = () @property @abc.abstractmethod def is_running(self) ->", "= (\"_task\",) def __init__(self, task: asyncio.Task[None]): self._task = task @property def is_running(self) ->", "def close(self, *, timeout: float = 0.5) -> bool: if self._task.done(): return True", "0.5) -> bool: tasks = [asyncio.create_task(job.close(timeout=timeout)) for job in self._jobs] closed = True", "-> bool: tasks = [asyncio.create_task(job.close(timeout=timeout)) for job in self._jobs] closed = True for", "True self._task.cancel() await asyncio.wait({self._task}, timeout=timeout) return self._task.done() class CombinedJob(Job): __slots__ = (\"_jobs\",) def", "self._task = task @property def is_running(self) -> bool: return not self._task.done() async def", "class Job(abc.ABC): __slots__ = () @property @abc.abstractmethod def is_running(self) -> bool: ... @abc.abstractmethod", "bool: if self._task.done(): return True self._task.cancel() await asyncio.wait({self._task}, timeout=timeout) return self._task.done() class CombinedJob(Job):", "async def close(self, *, timeout: float = 0.5) -> bool: if self._task.done(): return", "asyncio.Task[None]): self._task = task @property def is_running(self) -> bool: return not self._task.done() async", "return all(job.is_running for job in self._jobs) async def close(self, *, timeout: float =", "self._jobs = jobs @property def is_running(self) -> bool: return all(job.is_running for job in", "float = 0.5) -> bool: if self._task.done(): return True self._task.cancel() await asyncio.wait({self._task}, timeout=timeout)", "is_running(self) -> bool: ... 
@abc.abstractmethod async def close(self, *, timeout: float = 0.5)", "Collection[Job]): self._jobs = jobs @property def is_running(self) -> bool: return all(job.is_running for job", "= () @property @abc.abstractmethod def is_running(self) -> bool: ... @abc.abstractmethod async def close(self,", "= 0.5) -> bool: if self._task.done(): return True self._task.cancel() await asyncio.wait({self._task}, timeout=timeout) return", "return True self._task.cancel() await asyncio.wait({self._task}, timeout=timeout) return self._task.done() class CombinedJob(Job): __slots__ = (\"_jobs\",)", "@abc.abstractmethod def is_running(self) -> bool: ... @abc.abstractmethod async def close(self, *, timeout: float", "close(self, *, timeout: float = 0.5) -> bool: ... class SingleTaskJob(Job): __slots__ =", "def __init__(self, jobs: Collection[Job]): self._jobs = jobs @property def is_running(self) -> bool: return", "asyncio from typing import Collection class Job(abc.ABC): __slots__ = () @property @abc.abstractmethod def", "return self._task.done() class CombinedJob(Job): __slots__ = (\"_jobs\",) def __init__(self, jobs: Collection[Job]): self._jobs =", "self._task.done() async def close(self, *, timeout: float = 0.5) -> bool: if self._task.done():", "self._task.done(): return True self._task.cancel() await asyncio.wait({self._task}, timeout=timeout) return self._task.done() class CombinedJob(Job): __slots__ =", "float = 0.5) -> bool: tasks = [asyncio.create_task(job.close(timeout=timeout)) for job in self._jobs] closed", "class CombinedJob(Job): __slots__ = (\"_jobs\",) def __init__(self, jobs: Collection[Job]): self._jobs = jobs @property", "asyncio.wait({self._task}, timeout=timeout) return self._task.done() class CombinedJob(Job): __slots__ = (\"_jobs\",) def __init__(self, jobs: Collection[Job]):", "timeout: float = 0.5) -> bool: tasks = [asyncio.create_task(job.close(timeout=timeout)) for job in self._jobs]", "self._jobs) async def close(self, *, timeout: float = 0.5) -> bool: tasks =", "0.5) -> bool: if self._task.done(): return True self._task.cancel() await asyncio.wait({self._task}, timeout=timeout) return self._task.done()", "self._jobs] closed = True for task in tasks: closed &= await task return", "in self._jobs) async def close(self, *, timeout: float = 0.5) -> bool: tasks", "@abc.abstractmethod async def close(self, *, timeout: float = 0.5) -> bool: ... class", "__init__(self, jobs: Collection[Job]): self._jobs = jobs @property def is_running(self) -> bool: return all(job.is_running", "*, timeout: float = 0.5) -> bool: if self._task.done(): return True self._task.cancel() await", "not self._task.done() async def close(self, *, timeout: float = 0.5) -> bool: if", "= 0.5) -> bool: tasks = [asyncio.create_task(job.close(timeout=timeout)) for job in self._jobs] closed =", "closed = True for task in tasks: closed &= await task return closed", "class SingleTaskJob(Job): __slots__ = (\"_task\",) def __init__(self, task: asyncio.Task[None]): self._task = task @property", "import asyncio from typing import Collection class Job(abc.ABC): __slots__ = () @property @abc.abstractmethod", "*, timeout: float = 0.5) -> bool: tasks = [asyncio.create_task(job.close(timeout=timeout)) for job in", "timeout: float = 0.5) -> bool: if self._task.done(): return True self._task.cancel() await asyncio.wait({self._task},", "bool: ... 
@abc.abstractmethod async def close(self, *, timeout: float = 0.5) -> bool:", "jobs @property def is_running(self) -> bool: return all(job.is_running for job in self._jobs) async", "= 0.5) -> bool: ... class SingleTaskJob(Job): __slots__ = (\"_task\",) def __init__(self, task:", "= task @property def is_running(self) -> bool: return not self._task.done() async def close(self,", "await asyncio.wait({self._task}, timeout=timeout) return self._task.done() class CombinedJob(Job): __slots__ = (\"_jobs\",) def __init__(self, jobs:", "def __init__(self, task: asyncio.Task[None]): self._task = task @property def is_running(self) -> bool: return", "@property @abc.abstractmethod def is_running(self) -> bool: ... @abc.abstractmethod async def close(self, *, timeout:", "is_running(self) -> bool: return all(job.is_running for job in self._jobs) async def close(self, *,", "abc import asyncio from typing import Collection class Job(abc.ABC): __slots__ = () @property", "*, timeout: float = 0.5) -> bool: ... class SingleTaskJob(Job): __slots__ = (\"_task\",)", "for job in self._jobs] closed = True for task in tasks: closed &=", "... @abc.abstractmethod async def close(self, *, timeout: float = 0.5) -> bool: ...", "task @property def is_running(self) -> bool: return not self._task.done() async def close(self, *,", "[asyncio.create_task(job.close(timeout=timeout)) for job in self._jobs] closed = True for task in tasks: closed", "-> bool: ... class SingleTaskJob(Job): __slots__ = (\"_task\",) def __init__(self, task: asyncio.Task[None]): self._task", "jobs: Collection[Job]): self._jobs = jobs @property def is_running(self) -> bool: return all(job.is_running for", "def close(self, *, timeout: float = 0.5) -> bool: ... class SingleTaskJob(Job): __slots__", "is_running(self) -> bool: return not self._task.done() async def close(self, *, timeout: float =", "close(self, *, timeout: float = 0.5) -> bool: if self._task.done(): return True self._task.cancel()", "def is_running(self) -> bool: ... @abc.abstractmethod async def close(self, *, timeout: float =", "return not self._task.done() async def close(self, *, timeout: float = 0.5) -> bool:", "timeout=timeout) return self._task.done() class CombinedJob(Job): __slots__ = (\"_jobs\",) def __init__(self, jobs: Collection[Job]): self._jobs", "__slots__ = (\"_task\",) def __init__(self, task: asyncio.Task[None]): self._task = task @property def is_running(self)", "self._task.cancel() await asyncio.wait({self._task}, timeout=timeout) return self._task.done() class CombinedJob(Job): __slots__ = (\"_jobs\",) def __init__(self,", "... class SingleTaskJob(Job): __slots__ = (\"_task\",) def __init__(self, task: asyncio.Task[None]): self._task = task", "= (\"_jobs\",) def __init__(self, jobs: Collection[Job]): self._jobs = jobs @property def is_running(self) ->", "(\"_jobs\",) def __init__(self, jobs: Collection[Job]): self._jobs = jobs @property def is_running(self) -> bool:", "task: asyncio.Task[None]): self._task = task @property def is_running(self) -> bool: return not self._task.done()", "for job in self._jobs) async def close(self, *, timeout: float = 0.5) ->", "SingleTaskJob(Job): __slots__ = (\"_task\",) def __init__(self, task: asyncio.Task[None]): self._task = task @property def", "float = 0.5) -> bool: ... 
class SingleTaskJob(Job): __slots__ = (\"_task\",) def __init__(self,", "close(self, *, timeout: float = 0.5) -> bool: tasks = [asyncio.create_task(job.close(timeout=timeout)) for job", "self._task.done() class CombinedJob(Job): __slots__ = (\"_jobs\",) def __init__(self, jobs: Collection[Job]): self._jobs = jobs", "async def close(self, *, timeout: float = 0.5) -> bool: ... class SingleTaskJob(Job):", "__slots__ = () @property @abc.abstractmethod def is_running(self) -> bool: ... @abc.abstractmethod async def", "bool: return all(job.is_running for job in self._jobs) async def close(self, *, timeout: float", "-> bool: return not self._task.done() async def close(self, *, timeout: float = 0.5)", "job in self._jobs] closed = True for task in tasks: closed &= await", "Job(abc.ABC): __slots__ = () @property @abc.abstractmethod def is_running(self) -> bool: ... @abc.abstractmethod async", "import Collection class Job(abc.ABC): __slots__ = () @property @abc.abstractmethod def is_running(self) -> bool:", "= jobs @property def is_running(self) -> bool: return all(job.is_running for job in self._jobs)", "() @property @abc.abstractmethod def is_running(self) -> bool: ... @abc.abstractmethod async def close(self, *,", "def is_running(self) -> bool: return all(job.is_running for job in self._jobs) async def close(self,", "__slots__ = (\"_jobs\",) def __init__(self, jobs: Collection[Job]): self._jobs = jobs @property def is_running(self)", "if self._task.done(): return True self._task.cancel() await asyncio.wait({self._task}, timeout=timeout) return self._task.done() class CombinedJob(Job): __slots__", "all(job.is_running for job in self._jobs) async def close(self, *, timeout: float = 0.5)", "-> bool: ... @abc.abstractmethod async def close(self, *, timeout: float = 0.5) ->", "async def close(self, *, timeout: float = 0.5) -> bool: tasks = [asyncio.create_task(job.close(timeout=timeout))", "__init__(self, task: asyncio.Task[None]): self._task = task @property def is_running(self) -> bool: return not", "= [asyncio.create_task(job.close(timeout=timeout)) for job in self._jobs] closed = True for task in tasks:", "CombinedJob(Job): __slots__ = (\"_jobs\",) def __init__(self, jobs: Collection[Job]): self._jobs = jobs @property def", "import abc import asyncio from typing import Collection class Job(abc.ABC): __slots__ = ()" ]
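The list above recovers, essentially in full, a small job-supervision layer over `asyncio`: an abstract `Job` exposing `is_running` and an async `close(*, timeout)`, a `SingleTaskJob` wrapping one task, and a `CombinedJob` that fans `close()` out across a collection of jobs. Only the comments below are additions; the `asyncio.Task[None]` annotation is quoted here so the module also imports on Pythons older than 3.9, where `Task` is not subscriptable at runtime.

```python
import abc
import asyncio
from typing import Collection


class Job(abc.ABC):
    __slots__ = ()

    @property
    @abc.abstractmethod
    def is_running(self) -> bool:
        ...

    @abc.abstractmethod
    async def close(self, *, timeout: float = 0.5) -> bool:
        ...


class SingleTaskJob(Job):
    __slots__ = ("_task",)

    def __init__(self, task: "asyncio.Task[None]"):
        self._task = task

    @property
    def is_running(self) -> bool:
        return not self._task.done()

    async def close(self, *, timeout: float = 0.5) -> bool:
        if self._task.done():
            return True
        self._task.cancel()
        # Give the task a grace period to unwind, then report whether it finished.
        await asyncio.wait({self._task}, timeout=timeout)
        return self._task.done()


class CombinedJob(Job):
    __slots__ = ("_jobs",)

    def __init__(self, jobs: Collection[Job]):
        self._jobs = jobs

    @property
    def is_running(self) -> bool:
        return all(job.is_running for job in self._jobs)

    async def close(self, *, timeout: float = 0.5) -> bool:
        # Close all children concurrently; succeed only if every one closed.
        tasks = [asyncio.create_task(job.close(timeout=timeout)) for job in self._jobs]
        closed = True
        for task in tasks:
            closed &= await task
        return closed
```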
[ "# provide status of all jobs import ulmodb dbname = \"ulmodb.db\" db =", "provide status of all jobs import ulmodb dbname = \"ulmodb.db\" db = ulmodb.UlmoDB(dbname)" ]
[ "import numpy as np import nimfa V = np.random.rand(40, 100) nmf = nimfa.Nmf(V,", "np.random.rand(40, 100) nmf = nimfa.Nmf(V, seed=\"nndsvd\", rank=10, max_iter=12, update='euclidean', objective='fro') nmf_fit = nmf()", "as np import nimfa V = np.random.rand(40, 100) nmf = nimfa.Nmf(V, seed=\"nndsvd\", rank=10,", "import nimfa V = np.random.rand(40, 100) nmf = nimfa.Nmf(V, seed=\"nndsvd\", rank=10, max_iter=12, update='euclidean',", "np import nimfa V = np.random.rand(40, 100) nmf = nimfa.Nmf(V, seed=\"nndsvd\", rank=10, max_iter=12,", "nimfa V = np.random.rand(40, 100) nmf = nimfa.Nmf(V, seed=\"nndsvd\", rank=10, max_iter=12, update='euclidean', objective='fro')", "= np.random.rand(40, 100) nmf = nimfa.Nmf(V, seed=\"nndsvd\", rank=10, max_iter=12, update='euclidean', objective='fro') nmf_fit =", "V = np.random.rand(40, 100) nmf = nimfa.Nmf(V, seed=\"nndsvd\", rank=10, max_iter=12, update='euclidean', objective='fro') nmf_fit", "numpy as np import nimfa V = np.random.rand(40, 100) nmf = nimfa.Nmf(V, seed=\"nndsvd\"," ]
[ "# ############################################################################## from PyQt4 import QtCore import logging ############################################################################## class QParser(QtCore.QThread): def __init__(self,", "utf-8 -*- # ############################################################################## from PyQt4 import QtCore import logging ############################################################################## class QParser(QtCore.QThread):", "-*- # ############################################################################## from PyQt4 import QtCore import logging ############################################################################## class QParser(QtCore.QThread): def", "class QParser(QtCore.QThread): def __init__(self, f, parent=None): super(QParser, self).__init__(parent) self.f = f self.logger =", "__init__(self, f, parent=None): super(QParser, self).__init__(parent) self.f = f self.logger = logging.getLogger('QParser') def run(self):", "#!/usr/bin/env python # -*- coding: utf-8 -*- # ############################################################################## from PyQt4 import QtCore", "import QtCore import logging ############################################################################## class QParser(QtCore.QThread): def __init__(self, f, parent=None): super(QParser, self).__init__(parent)", "<gh_stars>0 #!/usr/bin/env python # -*- coding: utf-8 -*- # ############################################################################## from PyQt4 import", "-*- coding: utf-8 -*- # ############################################################################## from PyQt4 import QtCore import logging ##############################################################################", "logging ############################################################################## class QParser(QtCore.QThread): def __init__(self, f, parent=None): super(QParser, self).__init__(parent) self.f = f", "parent=None): super(QParser, self).__init__(parent) self.f = f self.logger = logging.getLogger('QParser') def run(self): self.logger.debug('started') self.f()", "############################################################################## from PyQt4 import QtCore import logging ############################################################################## class QParser(QtCore.QThread): def __init__(self, f,", "super(QParser, self).__init__(parent) self.f = f self.logger = logging.getLogger('QParser') def run(self): self.logger.debug('started') self.f() self.logger.debug('stoped')", "def __init__(self, f, parent=None): super(QParser, self).__init__(parent) self.f = f self.logger = logging.getLogger('QParser') def", "############################################################################## class QParser(QtCore.QThread): def __init__(self, f, parent=None): super(QParser, self).__init__(parent) self.f = f self.logger", "coding: utf-8 -*- # ############################################################################## from PyQt4 import QtCore import logging ############################################################################## class", "from PyQt4 import QtCore import logging ############################################################################## class QParser(QtCore.QThread): def __init__(self, f, parent=None):", "PyQt4 import QtCore import logging ############################################################################## class QParser(QtCore.QThread): def __init__(self, f, parent=None): super(QParser,", "python # -*- coding: 
utf-8 -*- # ############################################################################## from PyQt4 import QtCore import", "QtCore import logging ############################################################################## class QParser(QtCore.QThread): def __init__(self, f, parent=None): super(QParser, self).__init__(parent) self.f", "# -*- coding: utf-8 -*- # ############################################################################## from PyQt4 import QtCore import logging", "f, parent=None): super(QParser, self).__init__(parent) self.f = f self.logger = logging.getLogger('QParser') def run(self): self.logger.debug('started')", "import logging ############################################################################## class QParser(QtCore.QThread): def __init__(self, f, parent=None): super(QParser, self).__init__(parent) self.f =", "QParser(QtCore.QThread): def __init__(self, f, parent=None): super(QParser, self).__init__(parent) self.f = f self.logger = logging.getLogger('QParser')" ]
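The row ending here rebuilds into a compact PyQt4 worker: a `QThread` subclass that runs an arbitrary callable off the GUI thread and logs around it. The reconstruction keeps the row's shape, including the 'stoped' spelling in the final log call; the `<gh_stars>0` token at the head of the row is scrape metadata, not code, and is dropped.

```python
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
##############################################################################

from PyQt4 import QtCore
import logging

##############################################################################


class QParser(QtCore.QThread):
    # Runs the callable f on a background Qt thread, logging start and stop.

    def __init__(self, f, parent=None):
        super(QParser, self).__init__(parent)
        self.f = f
        self.logger = logging.getLogger('QParser')

    def run(self):
        self.logger.debug('started')
        self.f()
        self.logger.debug('stoped')  # [sic] spelled this way in the row
```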
[ "to decode message in order to show to user. if msg: print(msg.decode()) else:", "Receive messages sent by the server and display them to user ''' while", "= '127.0.0.1' SERVER_PORT = 12000 try: # Instantiate socket and start connection with", "SERVER_PORT = 12000 try: # Instantiate socket and start connection with server socket_instance", "with the server socket_instance.close() except Exception as e: print(f'Error connecting to server socket", "closed # so the connection will be closed and an error will be", "from chat and close connection while True: msg = input('> ') if msg", "and display them to user ''' while True: try: msg = connection.recv(1024) #", "handle it's input messages ''' SERVER_ADDRESS = '127.0.0.1' SERVER_PORT = 12000 try: #", "if msg: print(msg.decode()) else: connection.close() break except Exception as e: print(f'Error handling message", "order to handle messages sent by server threading.Thread(target=handle_messages, args=[socket_instance]).start() print('Connected to chat!') #", "message from server: {e}') connection.close() break def client() -> None: ''' Main process", "quit from chat and close connection while True: msg = input('> ') if", "<filename>04_chat/client.py<gh_stars>0 ''' Modified by <NAME> <EMAIL> ''' import socket, threading def handle_messages(connection: socket.socket):", "server and display them to user ''' while True: try: msg = connection.recv(1024)", "# Parse message to utf-8 socket_instance.send(msg.encode()) # Close connection with the server socket_instance.close()", "by server threading.Thread(target=handle_messages, args=[socket_instance]).start() print('Connected to chat!') # Read user's input until it", "by the server and display them to user ''' while True: try: msg", "display them to user ''' while True: try: msg = connection.recv(1024) # If", "{e}') connection.close() break def client() -> None: ''' Main process that start client", "args=[socket_instance]).start() print('Connected to chat!') # Read user's input until it quit from chat", "Create a thread in order to handle messages sent by server threading.Thread(target=handle_messages, args=[socket_instance]).start()", "error will be displayed. # If not, it will try to decode message", "Exception as e: print(f'Error connecting to server socket {e}') socket_instance.close() if __name__ ==", "while True: msg = input('> ') if msg == 'quit': break # Parse", "# If there is no message, there is a chance that connection has", "socket and start connection with server socket_instance = socket.socket() socket_instance.connect((SERVER_ADDRESS, SERVER_PORT)) # Create", "= 12000 try: # Instantiate socket and start connection with server socket_instance =", "messages sent by the server and display them to user ''' while True:", "server: {e}') connection.close() break def client() -> None: ''' Main process that start", "start client connection to the server and handle it's input messages ''' SERVER_ADDRESS", "messages ''' SERVER_ADDRESS = '127.0.0.1' SERVER_PORT = 12000 try: # Instantiate socket and", "socket_instance = socket.socket() socket_instance.connect((SERVER_ADDRESS, SERVER_PORT)) # Create a thread in order to handle", "from server: {e}') connection.close() break def client() -> None: ''' Main process that", "them to user ''' while True: try: msg = connection.recv(1024) # If there", "until it quit from chat and close connection while True: msg = input('>", "be displayed. 
# If not, it will try to decode message in order" ]

'''
Modified by <NAME>
<EMAIL>
'''
import socket, threading


def handle_messages(connection: socket.socket):
    '''
    Receive messages sent by the server and display them to the user.
    '''
    while True:
        try:
            msg = connection.recv(1024)

            # If there is no message, there is a chance that the connection
            # has closed, so the connection will be closed and an error will
            # be displayed. If not, decode the message and show it to the user.
            if msg:
                print(msg.decode())
            else:
                connection.close()
                break
        except Exception as e:
            print(f'Error handling message from server: {e}')
            connection.close()
            break


def client() -> None:
    '''
    Main process that starts the client connection to the server
    and handles the user's input messages.
    '''
    SERVER_ADDRESS = '127.0.0.1'
    SERVER_PORT = 12000

    try:
        # Instantiate socket and start connection with server
        socket_instance = socket.socket()
        socket_instance.connect((SERVER_ADDRESS, SERVER_PORT))

        # Create a thread in order to handle messages sent by the server
        threading.Thread(target=handle_messages, args=[socket_instance]).start()

        print('Connected to chat!')

        # Read the user's input until they quit the chat, then close the connection
        while True:
            msg = input('> ')

            if msg == 'quit':
                break

            # Encode the message as utf-8 before sending
            socket_instance.send(msg.encode())

        # Close connection with the server
        socket_instance.close()
    except Exception as e:
        print(f'Error connecting to server socket: {e}')
        socket_instance.close()


if __name__ == "__main__":
    client()
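# The matching server is not part of this file. Below is a minimal companion
# sketch, assuming a plain TCP chat server on the same host/port that
# broadcasts every received message to all other connected clients. All names
# here (`broadcast`, `handle_client`, `server`) are illustrative, not taken
# from the original project.
import socket, threading

clients = []
clients_lock = threading.Lock()


def broadcast(message: bytes, sender: socket.socket) -> None:
    # Forward the message to every connected client except the sender.
    with clients_lock:
        for conn in clients:
            if conn is not sender:
                conn.send(message)


def handle_client(conn: socket.socket) -> None:
    # Relay messages from one client until it disconnects.
    while True:
        try:
            msg = conn.recv(1024)
            if not msg:
                break
            broadcast(msg, conn)
        except Exception:
            break
    with clients_lock:
        if conn in clients:
            clients.remove(conn)
    conn.close()


def server() -> None:
    sock = socket.socket()
    sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
    sock.bind(('127.0.0.1', 12000))
    sock.listen()
    while True:
        conn, _addr = sock.accept()
        with clients_lock:
            clients.append(conn)
        threading.Thread(target=handle_client, args=[conn]).start()


if __name__ == '__main__':
    server()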
[ "'short_id'], help=\"Attribute\") parser.add_argument('-m', '--miniconda-version', default='latest', help='Add Miniconda version (or use \"latest\").', type=str) args", "import sys import argparse def _find_config_file(): config = 'etc/minicondas.json' while not os.path.isfile(config): config", "_av_ints = sorted([[int(i) for i in item.split('.')] for item in _all_versions]) _all_versions =", "+ '-' + _all_versions[-1] try: attr = data['minicondas'][py_version][miniconda_version][attribute] except: print('Could not find {}", "os.path.isfile(config): config = '../{}'.format(config) if len(config) > 70: raise Exception('Cannot locate config file", "version: \"{}\"'.format(attribute, py_version)) return attr if __name__ == '__main__': parser = argparse.ArgumentParser() parser.add_argument(\"py_version\",", "_all_versions = ['.'.join([str(item) for item in items]) for items in _av_ints] miniconda_version =", "for item in items]) for items in _av_ints] miniconda_version = m_start + '-'", "file \"etc/minicondas.json\".') return config def _get(py_version, miniconda_version, attribute): config = _find_config_file() with open(config)", "argparse def _find_config_file(): config = 'etc/minicondas.json' while not os.path.isfile(config): config = '../{}'.format(config) if", "data['minicondas'][py_version].keys()] m_start = 'm' + py_version.replace('py', '')[0] _av_ints = sorted([[int(i) for i in", "except: print('Could not find {} attribute for python version: \"{}\"'.format(attribute, py_version)) return attr", "open(config) as reader: data = json.load(reader) if miniconda_version == 'latest': _all_versions = [i.split('-')[1]", "_all_versions = [i.split('-')[1] for i in data['minicondas'][py_version].keys()] m_start = 'm' + py_version.replace('py', '')[0]", "[i.split('-')[1] for i in data['minicondas'][py_version].keys()] m_start = 'm' + py_version.replace('py', '')[0] _av_ints =", "sorted([[int(i) for i in item.split('.')] for item in _all_versions]) _all_versions = ['.'.join([str(item) for", "for python version: \"{}\"'.format(attribute, py_version)) return attr if __name__ == '__main__': parser =", "data['minicondas'][py_version][miniconda_version][attribute] except: print('Could not find {} attribute for python version: \"{}\"'.format(attribute, py_version)) return", "i in data['minicondas'][py_version].keys()] m_start = 'm' + py_version.replace('py', '')[0] _av_ints = sorted([[int(i) for", "'md5', 'short_id'], help=\"Attribute\") parser.add_argument('-m', '--miniconda-version', default='latest', help='Add Miniconda version (or use \"latest\").', type=str)", "argparse.ArgumentParser() parser.add_argument(\"py_version\", type=str, help=\"Python version\") parser.add_argument(\"attribute\", type=str, choices=['url', 'md5', 'short_id'], help=\"Attribute\") parser.add_argument('-m', '--miniconda-version',", "data = json.load(reader) if miniconda_version == 'latest': _all_versions = [i.split('-')[1] for i in", "'latest': _all_versions = [i.split('-')[1] for i in data['minicondas'][py_version].keys()] m_start = 'm' + py_version.replace('py',", "__name__ == '__main__': parser = argparse.ArgumentParser() parser.add_argument(\"py_version\", type=str, help=\"Python version\") parser.add_argument(\"attribute\", type=str, choices=['url',", "config def _get(py_version, miniconda_version, attribute): config = _find_config_file() with open(config) as reader: data", "i in item.split('.')] for item in _all_versions]) _all_versions = ['.'.join([str(item) for item in", "not find {} attribute for python version: 
\"{}\"'.format(attribute, py_version)) return attr if __name__", "os import json import sys import argparse def _find_config_file(): config = 'etc/minicondas.json' while", "print('Could not find {} attribute for python version: \"{}\"'.format(attribute, py_version)) return attr if", "while not os.path.isfile(config): config = '../{}'.format(config) if len(config) > 70: raise Exception('Cannot locate", "attribute): config = _find_config_file() with open(config) as reader: data = json.load(reader) if miniconda_version", "as reader: data = json.load(reader) if miniconda_version == 'latest': _all_versions = [i.split('-')[1] for", "type=str, choices=['url', 'md5', 'short_id'], help=\"Attribute\") parser.add_argument('-m', '--miniconda-version', default='latest', help='Add Miniconda version (or use", "help='Add Miniconda version (or use \"latest\").', type=str) args = parser.parse_args() print(_get(args.py_version, args.miniconda_version, args.attribute))", "+ _all_versions[-1] try: attr = data['minicondas'][py_version][miniconda_version][attribute] except: print('Could not find {} attribute for", "json import sys import argparse def _find_config_file(): config = 'etc/minicondas.json' while not os.path.isfile(config):", "in _av_ints] miniconda_version = m_start + '-' + _all_versions[-1] try: attr = data['minicondas'][py_version][miniconda_version][attribute]", "if miniconda_version == 'latest': _all_versions = [i.split('-')[1] for i in data['minicondas'][py_version].keys()] m_start =", "import json import sys import argparse def _find_config_file(): config = 'etc/minicondas.json' while not", "with open(config) as reader: data = json.load(reader) if miniconda_version == 'latest': _all_versions =", "miniconda_version == 'latest': _all_versions = [i.split('-')[1] for i in data['minicondas'][py_version].keys()] m_start = 'm'", "item.split('.')] for item in _all_versions]) _all_versions = ['.'.join([str(item) for item in items]) for", "return attr if __name__ == '__main__': parser = argparse.ArgumentParser() parser.add_argument(\"py_version\", type=str, help=\"Python version\")", "parser.add_argument('-m', '--miniconda-version', default='latest', help='Add Miniconda version (or use \"latest\").', type=str) args = parser.parse_args()", "def _find_config_file(): config = 'etc/minicondas.json' while not os.path.isfile(config): config = '../{}'.format(config) if len(config)", "try: attr = data['minicondas'][py_version][miniconda_version][attribute] except: print('Could not find {} attribute for python version:", "= 'm' + py_version.replace('py', '')[0] _av_ints = sorted([[int(i) for i in item.split('.')] for", "= data['minicondas'][py_version][miniconda_version][attribute] except: print('Could not find {} attribute for python version: \"{}\"'.format(attribute, py_version))", "miniconda_version, attribute): config = _find_config_file() with open(config) as reader: data = json.load(reader) if", "= 'etc/minicondas.json' while not os.path.isfile(config): config = '../{}'.format(config) if len(config) > 70: raise", "\"etc/minicondas.json\".') return config def _get(py_version, miniconda_version, attribute): config = _find_config_file() with open(config) as", "= [i.split('-')[1] for i in data['minicondas'][py_version].keys()] m_start = 'm' + py_version.replace('py', '')[0] _av_ints", "in item.split('.')] for item in _all_versions]) _all_versions = ['.'.join([str(item) for item in items])", "attribute for python version: \"{}\"'.format(attribute, py_version)) return attr if __name__ == '__main__': parser", "sys 
import argparse def _find_config_file(): config = 'etc/minicondas.json' while not os.path.isfile(config): config =", "py_version)) return attr if __name__ == '__main__': parser = argparse.ArgumentParser() parser.add_argument(\"py_version\", type=str, help=\"Python", "config = '../{}'.format(config) if len(config) > 70: raise Exception('Cannot locate config file \"etc/minicondas.json\".')", "config file \"etc/minicondas.json\".') return config def _get(py_version, miniconda_version, attribute): config = _find_config_file() with", "= json.load(reader) if miniconda_version == 'latest': _all_versions = [i.split('-')[1] for i in data['minicondas'][py_version].keys()]", "['.'.join([str(item) for item in items]) for items in _av_ints] miniconda_version = m_start +", "+ py_version.replace('py', '')[0] _av_ints = sorted([[int(i) for i in item.split('.')] for item in", "parser.add_argument(\"attribute\", type=str, choices=['url', 'md5', 'short_id'], help=\"Attribute\") parser.add_argument('-m', '--miniconda-version', default='latest', help='Add Miniconda version (or", "for item in _all_versions]) _all_versions = ['.'.join([str(item) for item in items]) for items", "in data['minicondas'][py_version].keys()] m_start = 'm' + py_version.replace('py', '')[0] _av_ints = sorted([[int(i) for i", "_all_versions]) _all_versions = ['.'.join([str(item) for item in items]) for items in _av_ints] miniconda_version", "return config def _get(py_version, miniconda_version, attribute): config = _find_config_file() with open(config) as reader:", "'m' + py_version.replace('py', '')[0] _av_ints = sorted([[int(i) for i in item.split('.')] for item", "'')[0] _av_ints = sorted([[int(i) for i in item.split('.')] for item in _all_versions]) _all_versions", "python import os import json import sys import argparse def _find_config_file(): config =", "config = _find_config_file() with open(config) as reader: data = json.load(reader) if miniconda_version ==", "items in _av_ints] miniconda_version = m_start + '-' + _all_versions[-1] try: attr =", "miniconda_version = m_start + '-' + _all_versions[-1] try: attr = data['minicondas'][py_version][miniconda_version][attribute] except: print('Could", "python version: \"{}\"'.format(attribute, py_version)) return attr if __name__ == '__main__': parser = argparse.ArgumentParser()", "import argparse def _find_config_file(): config = 'etc/minicondas.json' while not os.path.isfile(config): config = '../{}'.format(config)", "def _get(py_version, miniconda_version, attribute): config = _find_config_file() with open(config) as reader: data =", "m_start + '-' + _all_versions[-1] try: attr = data['minicondas'][py_version][miniconda_version][attribute] except: print('Could not find", "'../{}'.format(config) if len(config) > 70: raise Exception('Cannot locate config file \"etc/minicondas.json\".') return config", "find {} attribute for python version: \"{}\"'.format(attribute, py_version)) return attr if __name__ ==", "_all_versions[-1] try: attr = data['minicondas'][py_version][miniconda_version][attribute] except: print('Could not find {} attribute for python", "parser = argparse.ArgumentParser() parser.add_argument(\"py_version\", type=str, help=\"Python version\") parser.add_argument(\"attribute\", type=str, choices=['url', 'md5', 'short_id'], help=\"Attribute\")", "py_version.replace('py', '')[0] _av_ints = sorted([[int(i) for i in item.split('.')] for item in _all_versions])", "== 'latest': _all_versions = [i.split('-')[1] for i in data['minicondas'][py_version].keys()] m_start = 'm' +", 
"\"{}\"'.format(attribute, py_version)) return attr if __name__ == '__main__': parser = argparse.ArgumentParser() parser.add_argument(\"py_version\", type=str,", "not os.path.isfile(config): config = '../{}'.format(config) if len(config) > 70: raise Exception('Cannot locate config", "for i in data['minicondas'][py_version].keys()] m_start = 'm' + py_version.replace('py', '')[0] _av_ints = sorted([[int(i)", "= _find_config_file() with open(config) as reader: data = json.load(reader) if miniconda_version == 'latest':", "= '../{}'.format(config) if len(config) > 70: raise Exception('Cannot locate config file \"etc/minicondas.json\".') return", "attr if __name__ == '__main__': parser = argparse.ArgumentParser() parser.add_argument(\"py_version\", type=str, help=\"Python version\") parser.add_argument(\"attribute\",", "item in _all_versions]) _all_versions = ['.'.join([str(item) for item in items]) for items in", "= m_start + '-' + _all_versions[-1] try: attr = data['minicondas'][py_version][miniconda_version][attribute] except: print('Could not", "== '__main__': parser = argparse.ArgumentParser() parser.add_argument(\"py_version\", type=str, help=\"Python version\") parser.add_argument(\"attribute\", type=str, choices=['url', 'md5',", "json.load(reader) if miniconda_version == 'latest': _all_versions = [i.split('-')[1] for i in data['minicondas'][py_version].keys()] m_start", "if __name__ == '__main__': parser = argparse.ArgumentParser() parser.add_argument(\"py_version\", type=str, help=\"Python version\") parser.add_argument(\"attribute\", type=str,", "#!/usr/bin/env python import os import json import sys import argparse def _find_config_file(): config", "70: raise Exception('Cannot locate config file \"etc/minicondas.json\".') return config def _get(py_version, miniconda_version, attribute):", "= ['.'.join([str(item) for item in items]) for items in _av_ints] miniconda_version = m_start", "_find_config_file(): config = 'etc/minicondas.json' while not os.path.isfile(config): config = '../{}'.format(config) if len(config) >", "'__main__': parser = argparse.ArgumentParser() parser.add_argument(\"py_version\", type=str, help=\"Python version\") parser.add_argument(\"attribute\", type=str, choices=['url', 'md5', 'short_id'],", "import os import json import sys import argparse def _find_config_file(): config = 'etc/minicondas.json'", "version\") parser.add_argument(\"attribute\", type=str, choices=['url', 'md5', 'short_id'], help=\"Attribute\") parser.add_argument('-m', '--miniconda-version', default='latest', help='Add Miniconda version", "type=str, help=\"Python version\") parser.add_argument(\"attribute\", type=str, choices=['url', 'md5', 'short_id'], help=\"Attribute\") parser.add_argument('-m', '--miniconda-version', default='latest', help='Add", "config = 'etc/minicondas.json' while not os.path.isfile(config): config = '../{}'.format(config) if len(config) > 70:", "for i in item.split('.')] for item in _all_versions]) _all_versions = ['.'.join([str(item) for item", "in items]) for items in _av_ints] miniconda_version = m_start + '-' + _all_versions[-1]", "for items in _av_ints] miniconda_version = m_start + '-' + _all_versions[-1] try: attr", "help=\"Attribute\") parser.add_argument('-m', '--miniconda-version', default='latest', help='Add Miniconda version (or use \"latest\").', type=str) args =", "_get(py_version, miniconda_version, attribute): config = _find_config_file() with open(config) as reader: data = json.load(reader)", "default='latest', help='Add Miniconda version (or use 
\"latest\").', type=str) args = parser.parse_args() print(_get(args.py_version, args.miniconda_version,", "if len(config) > 70: raise Exception('Cannot locate config file \"etc/minicondas.json\".') return config def", "_av_ints] miniconda_version = m_start + '-' + _all_versions[-1] try: attr = data['minicondas'][py_version][miniconda_version][attribute] except:", "'--miniconda-version', default='latest', help='Add Miniconda version (or use \"latest\").', type=str) args = parser.parse_args() print(_get(args.py_version,", "locate config file \"etc/minicondas.json\".') return config def _get(py_version, miniconda_version, attribute): config = _find_config_file()", "reader: data = json.load(reader) if miniconda_version == 'latest': _all_versions = [i.split('-')[1] for i", "raise Exception('Cannot locate config file \"etc/minicondas.json\".') return config def _get(py_version, miniconda_version, attribute): config", "in _all_versions]) _all_versions = ['.'.join([str(item) for item in items]) for items in _av_ints]", "{} attribute for python version: \"{}\"'.format(attribute, py_version)) return attr if __name__ == '__main__':", "m_start = 'm' + py_version.replace('py', '')[0] _av_ints = sorted([[int(i) for i in item.split('.')]", "= sorted([[int(i) for i in item.split('.')] for item in _all_versions]) _all_versions = ['.'.join([str(item)", "_find_config_file() with open(config) as reader: data = json.load(reader) if miniconda_version == 'latest': _all_versions", "> 70: raise Exception('Cannot locate config file \"etc/minicondas.json\".') return config def _get(py_version, miniconda_version,", "= argparse.ArgumentParser() parser.add_argument(\"py_version\", type=str, help=\"Python version\") parser.add_argument(\"attribute\", type=str, choices=['url', 'md5', 'short_id'], help=\"Attribute\") parser.add_argument('-m',", "item in items]) for items in _av_ints] miniconda_version = m_start + '-' +", "'-' + _all_versions[-1] try: attr = data['minicondas'][py_version][miniconda_version][attribute] except: print('Could not find {} attribute", "attr = data['minicondas'][py_version][miniconda_version][attribute] except: print('Could not find {} attribute for python version: \"{}\"'.format(attribute,", "items]) for items in _av_ints] miniconda_version = m_start + '-' + _all_versions[-1] try:", "choices=['url', 'md5', 'short_id'], help=\"Attribute\") parser.add_argument('-m', '--miniconda-version', default='latest', help='Add Miniconda version (or use \"latest\").',", "'etc/minicondas.json' while not os.path.isfile(config): config = '../{}'.format(config) if len(config) > 70: raise Exception('Cannot", "len(config) > 70: raise Exception('Cannot locate config file \"etc/minicondas.json\".') return config def _get(py_version,", "help=\"Python version\") parser.add_argument(\"attribute\", type=str, choices=['url', 'md5', 'short_id'], help=\"Attribute\") parser.add_argument('-m', '--miniconda-version', default='latest', help='Add Miniconda", "Exception('Cannot locate config file \"etc/minicondas.json\".') return config def _get(py_version, miniconda_version, attribute): config =", "parser.add_argument(\"py_version\", type=str, help=\"Python version\") parser.add_argument(\"attribute\", type=str, choices=['url', 'md5', 'short_id'], help=\"Attribute\") parser.add_argument('-m', '--miniconda-version', default='latest'," ]
[ "from transformers import GPT2Tokenizer tokenizer = GPT2Tokenizer.from_pretrained('gpt2', bos_token='<|startoftext|>', eos_token='<|endoftext|>', pad_token='<|pad|>') tokenizer.pad_token = tokenizer.eos_token" ]
[ "- color (string; optional) - name (string; optional) - selectCommand (string; optional) -", "- activeNodes (list of dicts; optional): Active node selections (nodes not saved with", "activeClades=Component.UNDEFINED, activeLeaves=Component.UNDEFINED, saved=Component.UNDEFINED, **kwargs): self._prop_names = ['id', 'activeClades', 'activeLeaves', 'activeNodes', 'height', 'hover', 'path',", "Active node selections (nodes not saved with name). - height (string; default \"100%\"):", "ID used to identify this component in Dash callbacks. - activeClades (list of", "and values of type dict with keys: - color (string; optional) - name", "(dict; optional): Hovered node. - path (string; default \"\"): URL from where ETE's", "server is running. - saved (dict; optional): Saved selections. `saved` is a dict", "class EteComponent(Component): \"\"\"An EteComponent component. Keyword arguments: - id (string; required): The ID", "'url', 'treeid']: if k not in args: raise TypeError( 'Required argument `' +", "100%.\"\"\" @_explicitize_args def __init__(self, id=Component.REQUIRED, url=Component.REQUIRED, path=Component.UNDEFINED, treeid=Component.REQUIRED, width=Component.UNDEFINED, height=Component.UNDEFINED, hover=Component.UNDEFINED, activeNodes=Component.UNDEFINED, activeClades=Component.UNDEFINED,", "url=Component.REQUIRED, path=Component.UNDEFINED, treeid=Component.REQUIRED, width=Component.UNDEFINED, height=Component.UNDEFINED, hover=Component.UNDEFINED, activeNodes=Component.UNDEFINED, activeClades=Component.UNDEFINED, activeLeaves=Component.UNDEFINED, saved=Component.UNDEFINED, **kwargs): self._prop_names =", "if k != 'children'} for k in ['id', 'url', 'treeid']: if k not", "name). - activeNodes (list of dicts; optional): Active node selections (nodes not saved", "100%. - hover (dict; optional): Hovered node. - path (string; default \"\"): URL", "= ['id', 'activeClades', 'activeLeaves', 'activeNodes', 'height', 'hover', 'path', 'saved', 'treeid', 'url', 'width'] self.available_wildcard_properties", "to identify this component in Dash callbacks. - activeClades (list of dicts; optional):", "node selections (nodes not saved with name). - height (string; default \"100%\"): iframe", "used to identify this component in Dash callbacks. - activeClades (list of dicts;", "'activeLeaves', 'activeNodes', 'height', 'hover', 'path', 'saved', 'treeid', 'url', 'width'] self._type = 'EteComponent' self._namespace", "(number; required): Integer that defines a tree. - url (string; required): URL from", "selections. `saved` is a dict with strings as keys and values of type", "'treeid', 'url', 'width'] self.available_wildcard_properties = [] _explicit_args = kwargs.pop('_explicit_args') _locals = locals() _locals.update(kwargs)", "'activeClades', 'activeLeaves', 'activeNodes', 'height', 'hover', 'path', 'saved', 'treeid', 'url', 'width'] self.available_wildcard_properties = []", "activeNodes=Component.UNDEFINED, activeClades=Component.UNDEFINED, activeLeaves=Component.UNDEFINED, saved=Component.UNDEFINED, **kwargs): self._prop_names = ['id', 'activeClades', 'activeLeaves', 'activeNodes', 'height', 'hover',", "kwargs.pop('_explicit_args') _locals = locals() _locals.update(kwargs) # For wildcard attrs args = {k: _locals[k]", "'activeLeaves', 'activeNodes', 'height', 'hover', 'path', 'saved', 'treeid', 'url', 'width'] self.available_wildcard_properties = [] _explicit_args", "- path (string; default \"\"): URL from where ETE's server is running. -", "from where ETE's server is running. 
- saved (dict; optional): Saved selections. `saved`", "TypeError( 'Required argument `' + k + '` was not specified.') super(EteComponent, self).__init__(**args)", "optional): Active leaf selections (nodes not saved with name). - activeNodes (list of", "with name). - activeLeaves (list of dicts; optional): Active leaf selections (nodes not", "\"\"): URL from where ETE's server is running. - saved (dict; optional): Saved", "'saved', 'treeid', 'url', 'width'] self.available_wildcard_properties = [] _explicit_args = kwargs.pop('_explicit_args') _locals = locals()", "saved=Component.UNDEFINED, **kwargs): self._prop_names = ['id', 'activeClades', 'activeLeaves', 'activeNodes', 'height', 'hover', 'path', 'saved', 'treeid',", "type dict with keys: - color (string; optional) - name (string; optional) -", "with keys: - color (string; optional) - name (string; optional) - selectCommand (string;", "self.available_wildcard_properties = [] _explicit_args = kwargs.pop('_explicit_args') _locals = locals() _locals.update(kwargs) # For wildcard", "!= 'children'} for k in ['id', 'url', 'treeid']: if k not in args:", "wildcard attrs args = {k: _locals[k] for k in _explicit_args if k !=", "is running. - width (string; default \"100%\"): iframe width. Default: 100%.\"\"\" @_explicitize_args def", "k in ['id', 'url', 'treeid']: if k not in args: raise TypeError( 'Required", "with name). - height (string; default \"100%\"): iframe height. Default: 100%. - hover", "clade selections (nodes not saved with name). - activeLeaves (list of dicts; optional):", "path=Component.UNDEFINED, treeid=Component.REQUIRED, width=Component.UNDEFINED, height=Component.UNDEFINED, hover=Component.UNDEFINED, activeNodes=Component.UNDEFINED, activeClades=Component.UNDEFINED, activeLeaves=Component.UNDEFINED, saved=Component.UNDEFINED, **kwargs): self._prop_names = ['id',", "(string; optional) - name (string; optional) - selectCommand (string; optional) - treeid (number;", "activeLeaves (list of dicts; optional): Active leaf selections (nodes not saved with name).", "dash.development.base_component import Component, _explicitize_args class EteComponent(Component): \"\"\"An EteComponent component. Keyword arguments: - id", "default \"100%\"): iframe width. Default: 100%.\"\"\" @_explicitize_args def __init__(self, id=Component.REQUIRED, url=Component.REQUIRED, path=Component.UNDEFINED, treeid=Component.REQUIRED,", "'ete_component' self._valid_wildcard_attributes = [] self.available_properties = ['id', 'activeClades', 'activeLeaves', 'activeNodes', 'height', 'hover', 'path',", "_locals[k] for k in _explicit_args if k != 'children'} for k in ['id',", "treeid (number; required): Integer that defines a tree. - url (string; required): URL", "default \"100%\"): iframe height. Default: 100%. - hover (dict; optional): Hovered node. -", "component in Dash callbacks. - activeClades (list of dicts; optional): Active clade selections", "url (string; required): URL from where ETE's server is running. - width (string;", "= locals() _locals.update(kwargs) # For wildcard attrs args = {k: _locals[k] for k", "saved (dict; optional): Saved selections. `saved` is a dict with strings as keys", "where ETE's server is running. - width (string; default \"100%\"): iframe width. Default:", "with strings as keys and values of type dict with keys: - color", "Integer that defines a tree. - url (string; required): URL from where ETE's", "EteComponent(Component): \"\"\"An EteComponent component. 
Keyword arguments: - id (string; required): The ID used", "path (string; default \"\"): URL from where ETE's server is running. - saved", "= [] self.available_properties = ['id', 'activeClades', 'activeLeaves', 'activeNodes', 'height', 'hover', 'path', 'saved', 'treeid',", "required): The ID used to identify this component in Dash callbacks. - activeClades", "dict with keys: - color (string; optional) - name (string; optional) - selectCommand", "tree. - url (string; required): URL from where ETE's server is running. -", "['id', 'activeClades', 'activeLeaves', 'activeNodes', 'height', 'hover', 'path', 'saved', 'treeid', 'url', 'width'] self.available_wildcard_properties =", "height=Component.UNDEFINED, hover=Component.UNDEFINED, activeNodes=Component.UNDEFINED, activeClades=Component.UNDEFINED, activeLeaves=Component.UNDEFINED, saved=Component.UNDEFINED, **kwargs): self._prop_names = ['id', 'activeClades', 'activeLeaves', 'activeNodes',", "(string; default \"100%\"): iframe height. Default: 100%. - hover (dict; optional): Hovered node.", "'activeNodes', 'height', 'hover', 'path', 'saved', 'treeid', 'url', 'width'] self._type = 'EteComponent' self._namespace =", "width (string; default \"100%\"): iframe width. Default: 100%.\"\"\" @_explicitize_args def __init__(self, id=Component.REQUIRED, url=Component.REQUIRED,", "'url', 'width'] self._type = 'EteComponent' self._namespace = 'ete_component' self._valid_wildcard_attributes = [] self.available_properties =", "optional): Hovered node. - path (string; default \"\"): URL from where ETE's server", "__init__(self, id=Component.REQUIRED, url=Component.REQUIRED, path=Component.UNDEFINED, treeid=Component.REQUIRED, width=Component.UNDEFINED, height=Component.UNDEFINED, hover=Component.UNDEFINED, activeNodes=Component.UNDEFINED, activeClades=Component.UNDEFINED, activeLeaves=Component.UNDEFINED, saved=Component.UNDEFINED, **kwargs):", "'hover', 'path', 'saved', 'treeid', 'url', 'width'] self.available_wildcard_properties = [] _explicit_args = kwargs.pop('_explicit_args') _locals", "arguments: - id (string; required): The ID used to identify this component in", "not saved with name). - activeLeaves (list of dicts; optional): Active leaf selections", "'activeNodes', 'height', 'hover', 'path', 'saved', 'treeid', 'url', 'width'] self.available_wildcard_properties = [] _explicit_args =", "Active leaf selections (nodes not saved with name). - activeNodes (list of dicts;", "callbacks. - activeClades (list of dicts; optional): Active clade selections (nodes not saved", "selectCommand (string; optional) - treeid (number; required): Integer that defines a tree. -", "'height', 'hover', 'path', 'saved', 'treeid', 'url', 'width'] self.available_wildcard_properties = [] _explicit_args = kwargs.pop('_explicit_args')", "'path', 'saved', 'treeid', 'url', 'width'] self.available_wildcard_properties = [] _explicit_args = kwargs.pop('_explicit_args') _locals =", "@_explicitize_args def __init__(self, id=Component.REQUIRED, url=Component.REQUIRED, path=Component.UNDEFINED, treeid=Component.REQUIRED, width=Component.UNDEFINED, height=Component.UNDEFINED, hover=Component.UNDEFINED, activeNodes=Component.UNDEFINED, activeClades=Component.UNDEFINED, activeLeaves=Component.UNDEFINED,", "node. - path (string; default \"\"): URL from where ETE's server is running.", "optional) - treeid (number; required): Integer that defines a tree. - url (string;", "dicts; optional): Active leaf selections (nodes not saved with name). 
- activeNodes (list", "leaf selections (nodes not saved with name). - activeNodes (list of dicts; optional):", "optional) - selectCommand (string; optional) - treeid (number; required): Integer that defines a", "['id', 'url', 'treeid']: if k not in args: raise TypeError( 'Required argument `'", "- activeLeaves (list of dicts; optional): Active leaf selections (nodes not saved with", "color (string; optional) - name (string; optional) - selectCommand (string; optional) - treeid", "args: raise TypeError( 'Required argument `' + k + '` was not specified.')", "- saved (dict; optional): Saved selections. `saved` is a dict with strings as", "running. - width (string; default \"100%\"): iframe width. Default: 100%.\"\"\" @_explicitize_args def __init__(self,", "k != 'children'} for k in ['id', 'url', 'treeid']: if k not in", "Dash callbacks. - activeClades (list of dicts; optional): Active clade selections (nodes not", "selections (nodes not saved with name). - height (string; default \"100%\"): iframe height.", "'hover', 'path', 'saved', 'treeid', 'url', 'width'] self._type = 'EteComponent' self._namespace = 'ete_component' self._valid_wildcard_attributes", "# AUTO GENERATED FILE - DO NOT EDIT from dash.development.base_component import Component, _explicitize_args", "from dash.development.base_component import Component, _explicitize_args class EteComponent(Component): \"\"\"An EteComponent component. Keyword arguments: -", "'path', 'saved', 'treeid', 'url', 'width'] self._type = 'EteComponent' self._namespace = 'ete_component' self._valid_wildcard_attributes =", "self._type = 'EteComponent' self._namespace = 'ete_component' self._valid_wildcard_attributes = [] self.available_properties = ['id', 'activeClades',", "= 'ete_component' self._valid_wildcard_attributes = [] self.available_properties = ['id', 'activeClades', 'activeLeaves', 'activeNodes', 'height', 'hover',", "_explicit_args = kwargs.pop('_explicit_args') _locals = locals() _locals.update(kwargs) # For wildcard attrs args =", "\"100%\"): iframe height. Default: 100%. - hover (dict; optional): Hovered node. - path", "for k in ['id', 'url', 'treeid']: if k not in args: raise TypeError(", "from where ETE's server is running. - width (string; default \"100%\"): iframe width.", "(list of dicts; optional): Active clade selections (nodes not saved with name). -", "id=Component.REQUIRED, url=Component.REQUIRED, path=Component.UNDEFINED, treeid=Component.REQUIRED, width=Component.UNDEFINED, height=Component.UNDEFINED, hover=Component.UNDEFINED, activeNodes=Component.UNDEFINED, activeClades=Component.UNDEFINED, activeLeaves=Component.UNDEFINED, saved=Component.UNDEFINED, **kwargs): self._prop_names", "'height', 'hover', 'path', 'saved', 'treeid', 'url', 'width'] self._type = 'EteComponent' self._namespace = 'ete_component'", "attrs args = {k: _locals[k] for k in _explicit_args if k != 'children'}", "Default: 100%. - hover (dict; optional): Hovered node. - path (string; default \"\"):", "activeNodes (list of dicts; optional): Active node selections (nodes not saved with name).", "not saved with name). - activeNodes (list of dicts; optional): Active node selections", "optional) - name (string; optional) - selectCommand (string; optional) - treeid (number; required):", "(string; optional) - selectCommand (string; optional) - treeid (number; required): Integer that defines", "optional): Active clade selections (nodes not saved with name). - activeLeaves (list of", "- width (string; default \"100%\"): iframe width. 
Default: 100%.\"\"\" @_explicitize_args def __init__(self, id=Component.REQUIRED,", "- url (string; required): URL from where ETE's server is running. - width", "height. Default: 100%. - hover (dict; optional): Hovered node. - path (string; default", "Default: 100%.\"\"\" @_explicitize_args def __init__(self, id=Component.REQUIRED, url=Component.REQUIRED, path=Component.UNDEFINED, treeid=Component.REQUIRED, width=Component.UNDEFINED, height=Component.UNDEFINED, hover=Component.UNDEFINED, activeNodes=Component.UNDEFINED,", "locals() _locals.update(kwargs) # For wildcard attrs args = {k: _locals[k] for k in", "where ETE's server is running. - saved (dict; optional): Saved selections. `saved` is", "iframe width. Default: 100%.\"\"\" @_explicitize_args def __init__(self, id=Component.REQUIRED, url=Component.REQUIRED, path=Component.UNDEFINED, treeid=Component.REQUIRED, width=Component.UNDEFINED, height=Component.UNDEFINED,", "'EteComponent' self._namespace = 'ete_component' self._valid_wildcard_attributes = [] self.available_properties = ['id', 'activeClades', 'activeLeaves', 'activeNodes',", "Keyword arguments: - id (string; required): The ID used to identify this component", "in _explicit_args if k != 'children'} for k in ['id', 'url', 'treeid']: if", "(dict; optional): Saved selections. `saved` is a dict with strings as keys and", "(nodes not saved with name). - activeNodes (list of dicts; optional): Active node", "in Dash callbacks. - activeClades (list of dicts; optional): Active clade selections (nodes", "with name). - activeNodes (list of dicts; optional): Active node selections (nodes not", "in ['id', 'url', 'treeid']: if k not in args: raise TypeError( 'Required argument", "- name (string; optional) - selectCommand (string; optional) - treeid (number; required): Integer", "`saved` is a dict with strings as keys and values of type dict", "- id (string; required): The ID used to identify this component in Dash", "width. Default: 100%.\"\"\" @_explicitize_args def __init__(self, id=Component.REQUIRED, url=Component.REQUIRED, path=Component.UNDEFINED, treeid=Component.REQUIRED, width=Component.UNDEFINED, height=Component.UNDEFINED, hover=Component.UNDEFINED,", "NOT EDIT from dash.development.base_component import Component, _explicitize_args class EteComponent(Component): \"\"\"An EteComponent component. Keyword", "selections (nodes not saved with name). - activeLeaves (list of dicts; optional): Active", "**kwargs): self._prop_names = ['id', 'activeClades', 'activeLeaves', 'activeNodes', 'height', 'hover', 'path', 'saved', 'treeid', 'url',", "'width'] self._type = 'EteComponent' self._namespace = 'ete_component' self._valid_wildcard_attributes = [] self.available_properties = ['id',", "k not in args: raise TypeError( 'Required argument `' + k + '`", "as keys and values of type dict with keys: - color (string; optional)", "'treeid', 'url', 'width'] self._type = 'EteComponent' self._namespace = 'ete_component' self._valid_wildcard_attributes = [] self.available_properties", "required): URL from where ETE's server is running. - width (string; default \"100%\"):", "Hovered node. - path (string; default \"\"): URL from where ETE's server is", "dicts; optional): Active node selections (nodes not saved with name). - height (string;", "a tree. 
- url (string; required): URL from where ETE's server is running.", "DO NOT EDIT from dash.development.base_component import Component, _explicitize_args class EteComponent(Component): \"\"\"An EteComponent component.", "'width'] self.available_wildcard_properties = [] _explicit_args = kwargs.pop('_explicit_args') _locals = locals() _locals.update(kwargs) # For", "= 'EteComponent' self._namespace = 'ete_component' self._valid_wildcard_attributes = [] self.available_properties = ['id', 'activeClades', 'activeLeaves',", "'treeid']: if k not in args: raise TypeError( 'Required argument `' + k", "of dicts; optional): Active leaf selections (nodes not saved with name). - activeNodes", "(nodes not saved with name). - activeLeaves (list of dicts; optional): Active leaf", "AUTO GENERATED FILE - DO NOT EDIT from dash.development.base_component import Component, _explicitize_args class", "ETE's server is running. - width (string; default \"100%\"): iframe width. Default: 100%.\"\"\"", "self._valid_wildcard_attributes = [] self.available_properties = ['id', 'activeClades', 'activeLeaves', 'activeNodes', 'height', 'hover', 'path', 'saved',", "(string; required): URL from where ETE's server is running. - width (string; default", "self._prop_names = ['id', 'activeClades', 'activeLeaves', 'activeNodes', 'height', 'hover', 'path', 'saved', 'treeid', 'url', 'width']", "args = {k: _locals[k] for k in _explicit_args if k != 'children'} for", "defines a tree. - url (string; required): URL from where ETE's server is", "this component in Dash callbacks. - activeClades (list of dicts; optional): Active clade", "_locals.update(kwargs) # For wildcard attrs args = {k: _locals[k] for k in _explicit_args", "- activeClades (list of dicts; optional): Active clade selections (nodes not saved with", "For wildcard attrs args = {k: _locals[k] for k in _explicit_args if k", "of type dict with keys: - color (string; optional) - name (string; optional)", "not in args: raise TypeError( 'Required argument `' + k + '` was", "saved with name). - activeNodes (list of dicts; optional): Active node selections (nodes", "Active clade selections (nodes not saved with name). - activeLeaves (list of dicts;", "values of type dict with keys: - color (string; optional) - name (string;", "keys and values of type dict with keys: - color (string; optional) -", "self.available_properties = ['id', 'activeClades', 'activeLeaves', 'activeNodes', 'height', 'hover', 'path', 'saved', 'treeid', 'url', 'width']", "is a dict with strings as keys and values of type dict with", "(string; optional) - treeid (number; required): Integer that defines a tree. - url", "(list of dicts; optional): Active leaf selections (nodes not saved with name). -", "Component, _explicitize_args class EteComponent(Component): \"\"\"An EteComponent component. Keyword arguments: - id (string; required):", "optional): Active node selections (nodes not saved with name). - height (string; default", "not saved with name). - height (string; default \"100%\"): iframe height. 
Default: 100%.", "= ['id', 'activeClades', 'activeLeaves', 'activeNodes', 'height', 'hover', 'path', 'saved', 'treeid', 'url', 'width'] self._type", "[] self.available_properties = ['id', 'activeClades', 'activeLeaves', 'activeNodes', 'height', 'hover', 'path', 'saved', 'treeid', 'url',", "treeid=Component.REQUIRED, width=Component.UNDEFINED, height=Component.UNDEFINED, hover=Component.UNDEFINED, activeNodes=Component.UNDEFINED, activeClades=Component.UNDEFINED, activeLeaves=Component.UNDEFINED, saved=Component.UNDEFINED, **kwargs): self._prop_names = ['id', 'activeClades',", "- selectCommand (string; optional) - treeid (number; required): Integer that defines a tree.", "{k: _locals[k] for k in _explicit_args if k != 'children'} for k in", "(string; default \"100%\"): iframe width. Default: 100%.\"\"\" @_explicitize_args def __init__(self, id=Component.REQUIRED, url=Component.REQUIRED, path=Component.UNDEFINED,", "ETE's server is running. - saved (dict; optional): Saved selections. `saved` is a", "The ID used to identify this component in Dash callbacks. - activeClades (list", "for k in _explicit_args if k != 'children'} for k in ['id', 'url',", "dict with strings as keys and values of type dict with keys: -", "- DO NOT EDIT from dash.development.base_component import Component, _explicitize_args class EteComponent(Component): \"\"\"An EteComponent", "(list of dicts; optional): Active node selections (nodes not saved with name). -", "component. Keyword arguments: - id (string; required): The ID used to identify this", "EteComponent component. Keyword arguments: - id (string; required): The ID used to identify", "hover=Component.UNDEFINED, activeNodes=Component.UNDEFINED, activeClades=Component.UNDEFINED, activeLeaves=Component.UNDEFINED, saved=Component.UNDEFINED, **kwargs): self._prop_names = ['id', 'activeClades', 'activeLeaves', 'activeNodes', 'height',", "(string; required): The ID used to identify this component in Dash callbacks. -", "URL from where ETE's server is running. - width (string; default \"100%\"): iframe", "that defines a tree. - url (string; required): URL from where ETE's server", "activeLeaves=Component.UNDEFINED, saved=Component.UNDEFINED, **kwargs): self._prop_names = ['id', 'activeClades', 'activeLeaves', 'activeNodes', 'height', 'hover', 'path', 'saved',", "= kwargs.pop('_explicit_args') _locals = locals() _locals.update(kwargs) # For wildcard attrs args = {k:", "- height (string; default \"100%\"): iframe height. Default: 100%. - hover (dict; optional):", "GENERATED FILE - DO NOT EDIT from dash.development.base_component import Component, _explicitize_args class EteComponent(Component):", "dicts; optional): Active clade selections (nodes not saved with name). - activeLeaves (list", "required): Integer that defines a tree. - url (string; required): URL from where", "'saved', 'treeid', 'url', 'width'] self._type = 'EteComponent' self._namespace = 'ete_component' self._valid_wildcard_attributes = []", "k in _explicit_args if k != 'children'} for k in ['id', 'url', 'treeid']:", "if k not in args: raise TypeError( 'Required argument `' + k +", "self._namespace = 'ete_component' self._valid_wildcard_attributes = [] self.available_properties = ['id', 'activeClades', 'activeLeaves', 'activeNodes', 'height',", "running. - saved (dict; optional): Saved selections. `saved` is a dict with strings", "- treeid (number; required): Integer that defines a tree. - url (string; required):", "Saved selections. `saved` is a dict with strings as keys and values of", "server is running. 
- width (string; default \"100%\"): iframe width. Default: 100%.\"\"\" @_explicitize_args", "name). - height (string; default \"100%\"): iframe height. Default: 100%. - hover (dict;", "\"100%\"): iframe width. Default: 100%.\"\"\" @_explicitize_args def __init__(self, id=Component.REQUIRED, url=Component.REQUIRED, path=Component.UNDEFINED, treeid=Component.REQUIRED, width=Component.UNDEFINED,", "EDIT from dash.development.base_component import Component, _explicitize_args class EteComponent(Component): \"\"\"An EteComponent component. Keyword arguments:", "raise TypeError( 'Required argument `' + k + '` was not specified.') super(EteComponent,", "def __init__(self, id=Component.REQUIRED, url=Component.REQUIRED, path=Component.UNDEFINED, treeid=Component.REQUIRED, width=Component.UNDEFINED, height=Component.UNDEFINED, hover=Component.UNDEFINED, activeNodes=Component.UNDEFINED, activeClades=Component.UNDEFINED, activeLeaves=Component.UNDEFINED, saved=Component.UNDEFINED,", "selections (nodes not saved with name). - activeNodes (list of dicts; optional): Active", "keys: - color (string; optional) - name (string; optional) - selectCommand (string; optional)", "default \"\"): URL from where ETE's server is running. - saved (dict; optional):", "_locals = locals() _locals.update(kwargs) # For wildcard attrs args = {k: _locals[k] for", "a dict with strings as keys and values of type dict with keys:", "in args: raise TypeError( 'Required argument `' + k + '` was not", "'children'} for k in ['id', 'url', 'treeid']: if k not in args: raise", "= {k: _locals[k] for k in _explicit_args if k != 'children'} for k", "= [] _explicit_args = kwargs.pop('_explicit_args') _locals = locals() _locals.update(kwargs) # For wildcard attrs", "activeClades (list of dicts; optional): Active clade selections (nodes not saved with name).", "is running. - saved (dict; optional): Saved selections. `saved` is a dict with", "identify this component in Dash callbacks. - activeClades (list of dicts; optional): Active", "FILE - DO NOT EDIT from dash.development.base_component import Component, _explicitize_args class EteComponent(Component): \"\"\"An", "\"\"\"An EteComponent component. Keyword arguments: - id (string; required): The ID used to", "'activeClades', 'activeLeaves', 'activeNodes', 'height', 'hover', 'path', 'saved', 'treeid', 'url', 'width'] self._type = 'EteComponent'", "saved with name). - height (string; default \"100%\"): iframe height. Default: 100%. -", "name (string; optional) - selectCommand (string; optional) - treeid (number; required): Integer that", "iframe height. Default: 100%. - hover (dict; optional): Hovered node. - path (string;", "'url', 'width'] self.available_wildcard_properties = [] _explicit_args = kwargs.pop('_explicit_args') _locals = locals() _locals.update(kwargs) #", "width=Component.UNDEFINED, height=Component.UNDEFINED, hover=Component.UNDEFINED, activeNodes=Component.UNDEFINED, activeClades=Component.UNDEFINED, activeLeaves=Component.UNDEFINED, saved=Component.UNDEFINED, **kwargs): self._prop_names = ['id', 'activeClades', 'activeLeaves',", "saved with name). - activeLeaves (list of dicts; optional): Active leaf selections (nodes", "hover (dict; optional): Hovered node. - path (string; default \"\"): URL from where", "name). 
- activeLeaves (list of dicts; optional): Active leaf selections (nodes not saved", "_explicit_args if k != 'children'} for k in ['id', 'url', 'treeid']: if k", "strings as keys and values of type dict with keys: - color (string;", "of dicts; optional): Active clade selections (nodes not saved with name). - activeLeaves", "# For wildcard attrs args = {k: _locals[k] for k in _explicit_args if", "of dicts; optional): Active node selections (nodes not saved with name). - height", "[] _explicit_args = kwargs.pop('_explicit_args') _locals = locals() _locals.update(kwargs) # For wildcard attrs args", "id (string; required): The ID used to identify this component in Dash callbacks.", "- hover (dict; optional): Hovered node. - path (string; default \"\"): URL from", "_explicitize_args class EteComponent(Component): \"\"\"An EteComponent component. Keyword arguments: - id (string; required): The", "import Component, _explicitize_args class EteComponent(Component): \"\"\"An EteComponent component. Keyword arguments: - id (string;", "['id', 'activeClades', 'activeLeaves', 'activeNodes', 'height', 'hover', 'path', 'saved', 'treeid', 'url', 'width'] self._type =", "(string; default \"\"): URL from where ETE's server is running. - saved (dict;", "(nodes not saved with name). - height (string; default \"100%\"): iframe height. Default:", "URL from where ETE's server is running. - saved (dict; optional): Saved selections.", "optional): Saved selections. `saved` is a dict with strings as keys and values", "height (string; default \"100%\"): iframe height. Default: 100%. - hover (dict; optional): Hovered" ]
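# A usage sketch, not from the generated file: embedding the component in a
# Dash app. The package import `ete_component` is inferred from the
# `self._namespace` value above; the `url` and `treeid` values are
# placeholders for wherever ETE's server is actually running.
import dash
from dash import html
import ete_component

app = dash.Dash(__name__)
app.layout = html.Div([
    ete_component.EteComponent(
        id='tree-view',
        url='http://127.0.0.1:5000',  # URL from where ETE's server is running
        treeid=0,                     # integer that defines a tree
        height='600px',
    ),
])

if __name__ == '__main__':
    app.run_server(debug=True)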