YashsharmaPhD committed on
Commit
e2b5458
·
verified ·
1 Parent(s): 37f92e7

Update robot_sim.py

Browse files
Files changed (1) hide show
  1. robot_sim.py +33 -81
robot_sim.py CHANGED
@@ -5,18 +5,16 @@ import matplotlib.pyplot as plt
5
  import tempfile
6
  import time
7
 
8
- # Set up the simulation in DIRECT mode
9
  p.connect(p.DIRECT)
10
  p.setAdditionalSearchPath(pybullet_data.getDataPath())
11
  p.setGravity(0, 0, -9.8)
12
 
13
- # Load robot and environment
14
  plane_id = p.loadURDF("plane.urdf")
15
  robot_id = p.loadURDF("franka_panda/panda.urdf", basePosition=[0, 0, 0], useFixedBase=True)
16
  cube_id = p.loadURDF("cube_small.urdf", basePosition=[0.6, 0, 0.02])
17
  p.changeVisualShape(cube_id, -1, rgbaColor=[0, 0, 0, 1])
18
 
19
- # Identify joint indices
20
  def get_joint_indices(robot):
21
  arm, fingers = [], []
22
  for i in range(p.getNumJoints(robot)):
@@ -30,23 +28,18 @@ def get_joint_indices(robot):
30
  return arm, fingers
31
 
32
  arm_joints, finger_joints = get_joint_indices(robot_id)
33
- trajectory = [] # Stores joint trajectory for 3D plot
34
 
35
- # Render camera view
36
  def capture_view():
37
- width, height = 1280,1280
38
  view_matrix = p.computeViewMatrix(cameraEyePosition=[1.2, 0, 1],
39
  cameraTargetPosition=[0, 0, 0.4],
40
  cameraUpVector=[0, 0, 1])
41
- proj_matrix = p.computeProjectionMatrixFOV(fov=60,
42
- aspect=1.0,
43
- nearVal=0.1,
44
- farVal=3.1)
45
  _, _, px, _, _ = p.getCameraImage(width=width, height=height,
46
  viewMatrix=view_matrix,
47
  projectionMatrix=proj_matrix)
48
  rgb_array = np.reshape(px, (height, width, 4))[:, :, :3]
49
-
50
  fig, ax = plt.subplots()
51
  ax.imshow(rgb_array.astype(np.uint8))
52
  ax.axis("off")
@@ -55,24 +48,6 @@ def capture_view():
55
  plt.close()
56
  return tmp.name
57
 
58
- # Plot 3D trajectory
59
- def plot_trajectory():
60
- if not trajectory:
61
- return None
62
-
63
- traj = np.array(trajectory)
64
- fig = plt.figure()
65
- ax = fig.add_subplot(111, projection='3d')
66
- ax.plot(traj[:, 0], traj[:, 1], traj[:, 2], label="End Effector Path")
67
- ax.scatter(traj[-1, 0], traj[-1, 1], traj[-1, 2], color='red', label='Final Position')
68
- ax.set_title("3D Trajectory")
69
- ax.legend()
70
- tmp = tempfile.NamedTemporaryFile(delete=False, suffix=".png")
71
- plt.savefig(tmp.name)
72
- plt.close()
73
- return tmp.name
74
-
75
- # Move robot to given joint angles
76
  def move_robot(joint_angles, grip=0.04, steps=100):
77
  if len(joint_angles) != 7:
78
  return "❌ Invalid joint input."
@@ -89,73 +64,50 @@ def move_robot(joint_angles, grip=0.04, steps=100):
89
  trajectory.append(pos)
90
  time.sleep(0.005)
91
 
92
- # Execute pick and place
93
- #def perform_pick_and_place():
94
- # approach = [0.0, -0.5, 0.3, -1.5, 0.0, 1.4, 0.6]
95
- # pick = [0.0, -0.4, 0.3, -1.4, 0.0, 1.2, 0.6]
96
- # place = [0.5, -0.4, 0.2, -1.8, 0.0, 1.4, 0.6]
97
-
98
- # move_robot(approach)
99
- # move_robot(pick, grip=0.0) # Close gripper
100
- # move_robot([a + 0.1 for a in pick], grip=0.0)
101
- # move_robot(place, grip=0.0)
102
- # move_robot(place, grip=0.04) # Open gripper
103
-
104
  def perform_pick_and_place():
105
- approach = [0.0, -0.4, 0.0, -2.4, 0.0, 2.0, 0.7]
106
- pick = [0.0, -0.2, 0.0, -2.1, 0.0, 1.8, 0.7]
107
- lift = [0.0, -0.4, 0.0, -2.4, 0.0, 2.0, 0.7]
108
- place = [0.5, -0.4, 0.0, -2.4, 0.0, 2.0, 0.7]
109
 
110
- # Approach object
111
  move_robot(approach)
112
- move_robot(pick)
113
-
114
- # Close gripper
115
- move_robot(pick, grip=0.0)
116
- time.sleep(0.2)
117
-
118
- # Attach object to gripper using constraint
119
- cube_pos, cube_ori = p.getBasePositionAndOrientation(cube_id)
120
- gripper_index = arm_joints[-1]
121
- ee_link_state = p.getLinkState(robot_id, gripper_index)
122
- constraint_id = p.createConstraint(
123
- parentBodyUniqueId=robot_id,
124
- parentLinkIndex=gripper_index,
125
- childBodyUniqueId=cube_id,
126
- childLinkIndex=-1,
127
- jointType=p.JOINT_FIXED,
128
- jointAxis=[0, 0, 0],
129
- parentFramePosition=[0, 0, 0],
130
- childFramePosition=[0, 0, 0]
131
- )
132
-
133
- # Lift and move to place
134
- move_robot(lift, grip=0.0)
135
  move_robot(place, grip=0.0)
 
136
 
137
- # Open gripper and remove constraint
138
- move_robot(place, grip=0.04)
139
- p.removeConstraint(constraint_id)
140
- time.sleep(0.2)
141
-
 
 
 
 
 
 
 
 
 
142
 
143
- # Main chatbot interface
144
  def robot_chatbot(user_input: str):
145
  user_input = user_input.lower().strip()
146
- trajectory.clear()
147
-
148
- if "pick" in user_input and "place" in user_input:
149
  trajectory.clear()
150
  perform_pick_and_place()
151
- return "βœ… Pick and place executed.", capture_view(), plot_trajectory()
 
 
152
 
153
  try:
154
  values = [float(v.strip()) for v in user_input.split(",")]
155
  if len(values) == 7:
 
156
  move_robot(values)
157
- return "βœ… Moved to specified joint angles.", capture_view(), plot_trajectory()
 
 
158
  except Exception:
159
  pass
160
 
161
- return "πŸ€– Command not recognized. Try 'pick', 'place', or provide 7 joint angles.", capture_view(), plot_trajectory()
 
5
  import tempfile
6
  import time
7
 
8
+ # Setup simulation
9
  p.connect(p.DIRECT)
10
  p.setAdditionalSearchPath(pybullet_data.getDataPath())
11
  p.setGravity(0, 0, -9.8)
12
 
 
13
  plane_id = p.loadURDF("plane.urdf")
14
  robot_id = p.loadURDF("franka_panda/panda.urdf", basePosition=[0, 0, 0], useFixedBase=True)
15
  cube_id = p.loadURDF("cube_small.urdf", basePosition=[0.6, 0, 0.02])
16
  p.changeVisualShape(cube_id, -1, rgbaColor=[0, 0, 0, 1])
17
 
 
18
  def get_joint_indices(robot):
19
  arm, fingers = [], []
20
  for i in range(p.getNumJoints(robot)):
 
28
  return arm, fingers
29
 
30
  arm_joints, finger_joints = get_joint_indices(robot_id)
31
+ trajectory = []
32
 
 
33
  def capture_view():
34
+ width, height = 640, 640
35
  view_matrix = p.computeViewMatrix(cameraEyePosition=[1.2, 0, 1],
36
  cameraTargetPosition=[0, 0, 0.4],
37
  cameraUpVector=[0, 0, 1])
38
+ proj_matrix = p.computeProjectionMatrixFOV(fov=60, aspect=1.0, nearVal=0.1, farVal=3.1)
 
 
 
39
  _, _, px, _, _ = p.getCameraImage(width=width, height=height,
40
  viewMatrix=view_matrix,
41
  projectionMatrix=proj_matrix)
42
  rgb_array = np.reshape(px, (height, width, 4))[:, :, :3]
 
43
  fig, ax = plt.subplots()
44
  ax.imshow(rgb_array.astype(np.uint8))
45
  ax.axis("off")
 
48
  plt.close()
49
  return tmp.name
50
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
51
  def move_robot(joint_angles, grip=0.04, steps=100):
52
  if len(joint_angles) != 7:
53
  return "❌ Invalid joint input."
 
64
  trajectory.append(pos)
65
  time.sleep(0.005)
66
 
 
 
 
 
 
 
 
 
 
 
 
 
67
def perform_pick_and_place():
    """Run the scripted pick-and-place routine on the Panda arm.

    Drives the arm through hard-coded joint-space waypoints: approach the
    cube, close the gripper on it, lift slightly, carry it to the place
    pose, then open the gripper to release.
    """
    approach_pose = [0.0, -0.5, 0.3, -1.5, 0.0, 1.4, 0.6]
    pick_pose = [0.0, -0.4, 0.3, -1.4, 0.0, 1.2, 0.6]
    lift_pose = [angle + 0.1 for angle in pick_pose]
    place_pose = [0.5, -0.4, 0.2, -1.8, 0.0, 1.4, 0.6]

    # (joint pose, gripper opening) waypoints, executed in order.
    waypoints = [
        (approach_pose, 0.04),  # approach with the gripper open (default grip)
        (pick_pose, 0.0),       # close gripper on the cube
        (lift_pose, 0.0),       # lift while holding
        (place_pose, 0.0),      # carry to the place pose
        (place_pose, 0.04),     # open gripper to release
    ]
    for pose, grip in waypoints:
        move_robot(pose, grip=grip)
77
 
78
def plot_trajectory(points=None):
    """Render a 3D plot of the recorded end-effector path to a PNG file.

    Args:
        points: Optional sequence of (x, y, z) positions to plot. Defaults
            to the module-level ``trajectory`` list populated by
            ``move_robot`` (backward-compatible with the zero-arg call).

    Returns:
        Path to a temporary PNG file containing the plot, or ``None`` when
        there are no points to plot.
    """
    pts = trajectory if points is None else points
    if not pts:
        return None
    traj = np.array(pts)
    fig = plt.figure()
    ax = fig.add_subplot(111, projection='3d')
    ax.plot(traj[:, 0], traj[:, 1], traj[:, 2], label="End Effector Path")
    ax.scatter(traj[-1, 0], traj[-1, 1], traj[-1, 2], color='red', label='Final Position')
    ax.set_title("3D Trajectory")
    ax.legend()
    tmp = tempfile.NamedTemporaryFile(delete=False, suffix=".png")
    # Close the handle before savefig writes to tmp.name: on Windows an
    # open NamedTemporaryFile cannot be reopened by another writer.
    tmp.close()
    plt.savefig(tmp.name)
    # Close the specific figure we created rather than the implicit
    # "current" one.
    plt.close(fig)
    return tmp.name
92
 
 
93
def robot_chatbot(user_input: str):
    """Dispatch a text command to the simulated robot.

    Recognized commands:
      * any input containing "pick" -> run the scripted pick-and-place;
      * a comma-separated list of 7 numbers -> move to those joint angles.

    Args:
        user_input: Raw command text; lowercased and stripped before parsing.

    Returns:
        A ``(status_message, (sim_image_path, trajectory_image_path))``
        tuple; the trajectory image is ``None`` when no motion was recorded.
    """
    user_input = user_input.lower().strip()
    if "pick" in user_input:
        trajectory.clear()
        perform_pick_and_place()
        sim_img = capture_view()
        traj_img = plot_trajectory()
        return "βœ… Pick and place executed.", (sim_img, traj_img)

    # Keep the try body minimal: only float() can raise ValueError here.
    # The original wrapped the whole motion sequence in a broad
    # `except Exception: pass`, which silently swallowed simulation and
    # rendering errors as well as parse errors.
    try:
        values = [float(v.strip()) for v in user_input.split(",")]
    except ValueError:
        values = None
    if values is not None and len(values) == 7:
        trajectory.clear()
        move_robot(values)
        sim_img = capture_view()
        traj_img = plot_trajectory()
        return "βœ… Moved to specified joint angles.", (sim_img, traj_img)

    return "πŸ€– Command not recognized. Try 'pick', 'place', or provide 7 joint angles.", (capture_view(), None)