1mpreccable committed on
Commit
f426b7a
·
verified ·
1 Parent(s): bdbaced

Update interface_pages/yoga_position_from_stream.py

Browse files
interface_pages/yoga_position_from_stream.py CHANGED
@@ -6,6 +6,7 @@ from mediapipe.python.solutions import drawing_utils as mp_drawing
6
  from PoseClassification.pose_embedding import FullBodyPoseEmbedding
7
  from PoseClassification.pose_classifier import PoseClassifier
8
  from PoseClassification.utils import EMADictSmoothing
 
9
 
10
  # Initialize components
11
  mp_pose = mp.solutions.pose
@@ -61,9 +62,11 @@ def yoga_position_from_stream():
61
  last_update_time = 0
62
  recording = False
63
  recorded_frames = []
 
 
64
 
65
  def classify_pose(frame):
66
- nonlocal current_position, position_timer, last_update_time, recording, recorded_frames
67
  if frame is None:
68
  return (
69
  None,
@@ -112,6 +115,9 @@ def yoga_position_from_stream():
112
 
113
  if recording:
114
  recorded_frames.append(processed_frame)
 
 
 
115
 
116
  return (
117
  frame,
@@ -128,9 +134,11 @@ def yoga_position_from_stream():
128
  ]
129
 
130
  def start_recording():
131
- nonlocal recording, recorded_frames
132
  recording = True
133
  recorded_frames = []
 
 
134
  return "Recording started"
135
 
136
  def stop_recording():
@@ -139,46 +147,76 @@ def yoga_position_from_stream():
139
  return "Recording stopped"
140
 
141
  def save_video():
142
- nonlocal recorded_frames
143
  if not recorded_frames:
144
  return None, "No recorded frames available"
145
 
146
  output_path = "recorded_yoga_session.mp4"
147
  height, width, _ = recorded_frames[0].shape
 
 
 
 
 
148
  fourcc = cv2.VideoWriter_fourcc(*"mp4v")
149
- out = cv2.VideoWriter(output_path, fourcc, 30.0, (width, height))
150
 
151
  for frame in recorded_frames:
152
- out.write(frame)
 
 
153
  out.release()
154
 
155
- return output_path, "Video saved successfully"
156
 
157
  with gr.Column() as yoga_stream:
158
- gr.Markdown("# Yoga Position Classifier")
159
- gr.Markdown("Stream live yoga sessions and get real-time pose classification.")
160
-
161
- debug_toggle = gr.Checkbox(label="Debug Mode", value=False)
 
162
 
163
- with gr.Column(visible=True) as normal_view:
164
- video_feed = gr.Webcam(streaming=True, elem_classes="fullscreen")
165
- pose_output = gr.Textbox(label="Current Pose")
166
- timer_output = gr.Textbox(label="Pose Duration")
 
 
 
 
 
 
 
 
 
 
167
 
168
  with gr.Column(visible=False) as debug_view:
169
- classified_video = gr.Image(label="Classified Video Feed")
 
 
170
  with gr.Row():
171
- start_button = gr.Button("Start Recording")
172
- stop_button = gr.Button("Stop Recording")
173
- save_button = gr.Button("Save Recording")
174
- recording_status = gr.Textbox(label="Recording Status")
175
- recorded_video = gr.Video(label="Recorded Video")
176
- download_button = gr.Button("Download Recorded Video")
 
 
 
 
 
 
 
 
 
 
177
 
178
  debug_toggle.change(
179
  toggle_debug,
180
  inputs=[debug_toggle],
181
- outputs=[debug_view, normal_view, classified_video],
182
  )
183
 
184
  video_feed.stream(
@@ -197,6 +235,17 @@ def yoga_position_from_stream():
197
 
198
 
199
  if __name__ == "__main__":
200
- with gr.Blocks() as demo:
 
 
 
 
 
 
 
 
 
 
 
201
  yoga_position_from_stream()
202
  demo.launch()
 
6
  from PoseClassification.pose_embedding import FullBodyPoseEmbedding
7
  from PoseClassification.pose_classifier import PoseClassifier
8
  from PoseClassification.utils import EMADictSmoothing
9
+ import time
10
 
11
  # Initialize components
12
  mp_pose = mp.solutions.pose
 
62
  last_update_time = 0
63
  recording = False
64
  recorded_frames = []
65
+ start_time = 0
66
+ frame_count = 0
67
 
68
  def classify_pose(frame):
69
+ nonlocal current_position, position_timer, last_update_time, recording, recorded_frames, start_time, frame_count
70
  if frame is None:
71
  return (
72
  None,
 
115
 
116
  if recording:
117
  recorded_frames.append(processed_frame)
118
+ frame_count += 1
119
+ if frame_count == 1:
120
+ start_time = time.time()
121
 
122
  return (
123
  frame,
 
134
  ]
135
 
136
  def start_recording():
137
+ nonlocal recording, recorded_frames, start_time, frame_count
138
  recording = True
139
  recorded_frames = []
140
+ start_time = 0
141
+ frame_count = 0
142
  return "Recording started"
143
 
144
  def stop_recording():
 
147
  return "Recording stopped"
148
 
149
  def save_video():
150
+ nonlocal recorded_frames, start_time, frame_count
151
  if not recorded_frames:
152
  return None, "No recorded frames available"
153
 
154
  output_path = "recorded_yoga_session.mp4"
155
  height, width, _ = recorded_frames[0].shape
156
+
157
+ # Calculate the actual frame rate
158
+ elapsed_time = time.time() - start_time
159
+ fps = frame_count / elapsed_time if elapsed_time > 0 else 30.0
160
+
161
  fourcc = cv2.VideoWriter_fourcc(*"mp4v")
162
+ out = cv2.VideoWriter(output_path, fourcc, fps, (width, height))
163
 
164
  for frame in recorded_frames:
165
+ # Convert frame to BGR color space before writing
166
+ frame_bgr = cv2.cvtColor(frame, cv2.COLOR_RGB2BGR)
167
+ out.write(frame_bgr)
168
  out.release()
169
 
170
+ return output_path, f"Video saved successfully at {fps:.2f} FPS"
171
 
172
  with gr.Column() as yoga_stream:
173
+ gr.Markdown("# Yoga Position Classifier", elem_classes=["custom-title"])
174
+ gr.Markdown(
175
+ "Stream live yoga sessions and get real-time pose classification.",
176
+ elem_classes=["custom-subtitle"],
177
+ )
178
 
179
+ with gr.Row():
180
+ with gr.Column(scale=3):
181
+ video_feed = gr.Webcam(streaming=True, elem_classes=["custom-webcam"])
182
+
183
+ with gr.Column(scale=2):
184
+ pose_output = gr.Textbox(
185
+ label="Current Pose", elem_classes=["custom-textbox"]
186
+ )
187
+ timer_output = gr.Textbox(
188
+ label="Pose Duration", elem_classes=["custom-textbox"]
189
+ )
190
+ debug_toggle = gr.Checkbox(
191
+ label="Debug Mode", value=False, elem_classes=["custom-checkbox"]
192
+ )
193
 
194
  with gr.Column(visible=False) as debug_view:
195
+ classified_video = gr.Image(
196
+ label="Classified Video Feed", elem_classes=["custom-image"]
197
+ )
198
  with gr.Row():
199
+ start_button = gr.Button(
200
+ "Start Recording", elem_classes=["custom-button"]
201
+ )
202
+ stop_button = gr.Button(
203
+ "Stop Recording", elem_classes=["custom-button"]
204
+ )
205
+ save_button = gr.Button("Save Recording", elem_classes=["custom-button"])
206
+ recording_status = gr.Textbox(
207
+ label="Recording Status", elem_classes=["custom-textbox"]
208
+ )
209
+ recorded_video = gr.Video(
210
+ label="Recorded Video", elem_classes=["custom-video"]
211
+ )
212
+ download_button = gr.Button(
213
+ "Download Recorded Video", elem_classes=["custom-button"]
214
+ )
215
 
216
  debug_toggle.change(
217
  toggle_debug,
218
  inputs=[debug_toggle],
219
+ outputs=[debug_view, video_feed, classified_video],
220
  )
221
 
222
  video_feed.stream(
 
235
 
236
 
237
  if __name__ == "__main__":
238
+ with gr.Blocks(
239
+ css="""
240
+ .custom-title { font-size: 36px; font-weight: bold; margin-bottom: 10px; }
241
+ .custom-subtitle { font-size: 18px; margin-bottom: 20px; }
242
+ .custom-webcam { height: 480px; }
243
+ .custom-textbox input { font-size: 24px; }
244
+ .custom-checkbox label { font-size: 18px; }
245
+ .custom-button { font-size: 18px; }
246
+ .custom-image img { max-height: 400px; }
247
+ .custom-video video { max-height: 400px; }
248
+ """
249
+ ) as demo:
250
  yoga_position_from_stream()
251
  demo.launch()