RP-Azul committed on
Commit
3a4cfd6
·
verified ·
1 Parent(s): 5770718

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +23 -75
app.py CHANGED
@@ -2,116 +2,64 @@ import os
2
  import streamlit as st
3
  import mediapipe as mp
4
  import cv2
5
- import time
6
  from streamlit_webrtc import webrtc_streamer, VideoTransformerBase
7
  import av
8
- from queue import Queue
9
 
10
- # MediaPipe components for gesture recognition and hand landmarks
11
  mp_hands = mp.solutions.hands
12
  mp_drawing = mp.solutions.drawing_utils
13
- mp_drawing_styles = mp.solutions.drawing_styles
14
- BaseOptions = mp.tasks.BaseOptions
15
- GestureRecognizer = mp.tasks.vision.GestureRecognizer
16
- GestureRecognizerOptions = mp.tasks.vision.GestureRecognizerOptions
17
- GestureRecognizerResult = mp.tasks.vision.GestureRecognizerResult
18
- VisionRunningMode = mp.tasks.vision.RunningMode
19
 
20
  # Path to the gesture recognizer model
21
- model_path = 'model/gesture_recognizer.task'
22
  if not os.path.exists(model_path):
23
- raise FileNotFoundError(f"Model file not found at {model_path}")
 
24
 
25
- # Custom video processor class for handling webcam input
26
  class VideoProcessor(VideoTransformerBase):
27
  def __init__(self):
28
- self.gesture_queue = Queue() # Queue to store gesture results
29
-
30
- # Configure the Gesture Recognizer with a callback
31
- self.recognizer = GestureRecognizer.create_from_options(
32
- GestureRecognizerOptions(
33
- base_options=BaseOptions(model_asset_path=model_path),
34
- running_mode=VisionRunningMode.LIVE_STREAM,
35
- result_callback=self.handle_result, # Set the result callback
36
- )
37
- )
38
  self.hands = mp_hands.Hands(
39
  max_num_hands=2,
40
  model_complexity=0,
41
- min_detection_confidence=0.3,
42
- min_tracking_confidence=0.3
43
  )
44
 
45
- def handle_result(self, result: GestureRecognizerResult, output_image: mp.Image, timestamp_ms: int):
46
- # Process the result and add it to the queue
47
- gestures = []
48
- if result.gestures:
49
- for hand_gestures in result.gestures:
50
- for gesture in hand_gestures:
51
- gestures.append(f"{gesture.category_name} (Confidence: {gesture.score:.2f})")
52
- else:
53
- gestures.append("No gestures detected.")
54
-
55
- self.gesture_queue.put(gestures)
56
-
57
  def recv(self, frame):
58
  img = frame.to_ndarray(format="bgr24")
59
- img = cv2.flip(img, 1) # Flip horizontally
60
  img_rgb = cv2.cvtColor(img, cv2.COLOR_BGR2RGB)
61
 
62
- # Perform hand landmark detection
63
- hand_results = self.hands.process(img_rgb)
64
-
65
- # Perform gesture recognition asynchronously
66
- mp_image = mp.Image(image_format=mp.ImageFormat.SRGB, data=img_rgb)
67
- current_time_ms = int(time.time() * 1000)
68
- self.recognizer.recognize_async(mp_image, current_time_ms)
69
 
70
- # Draw hand landmarks on the frame
71
- if hand_results.multi_hand_landmarks:
72
- for hand_landmarks in hand_results.multi_hand_landmarks:
73
  mp_drawing.draw_landmarks(
74
- img,
75
- hand_landmarks,
76
- mp_hands.HAND_CONNECTIONS,
77
- mp_drawing_styles.get_default_hand_landmarks_style(),
78
- mp_drawing_styles.get_default_hand_connections_style()
79
- )
80
-
81
- # Optionally display detected gestures from the queue
82
- if not self.gesture_queue.empty():
83
- gestures = self.gesture_queue.get()
84
- for i, gesture in enumerate(gestures):
85
- cv2.putText(
86
- img,
87
- gesture,
88
- (10, 50 + i * 30),
89
- cv2.FONT_HERSHEY_SIMPLEX,
90
- 1,
91
- (0, 255, 0),
92
- 2,
93
  )
94
 
95
  return av.VideoFrame.from_ndarray(img, format="bgr24")
96
 
97
  # Streamlit UI
98
  st.title("Gesture & Hand Landmark Detection 🚀")
99
- st.markdown("### Recognize and save hand gestures in real time using MediaPipe and Streamlit WebRTC.")
100
-
101
- # Sidebar controls
102
- st.sidebar.title("Control Panel")
103
- st.sidebar.markdown("<hr>", unsafe_allow_html=True)
104
 
105
- # Run the Streamlit WebRTC app
106
- webrtc_streamer(key="gesture-detection", video_processor_factory=VideoProcessor)
 
 
 
 
107
 
108
  # Footer
109
- st.sidebar.markdown(
110
  """
111
  <style>
112
  .footer {text-align: center; font-size: 12px; color: grey; margin-top: 20px;}
113
  </style>
114
- <p class="footer">Made using Streamlit, MediaPipe & OpenCV</p>
115
  """,
116
  unsafe_allow_html=True,
117
  )
 
2
  import streamlit as st
3
  import mediapipe as mp
4
  import cv2
 
5
  from streamlit_webrtc import webrtc_streamer, VideoTransformerBase
6
  import av
 
7
 
8
# Module-level MediaPipe handles, shared by every VideoProcessor instance:
# the Hands solution (landmark detection) and the drawing utilities used to
# render landmarks onto frames.
mp_hands = mp.solutions.hands
mp_drawing = mp.solutions.drawing_utils
 
 
 
 
 
 
11
 
12
# Path to the gesture recognizer model.
# NOTE(review): after this commit the gesture-recognizer task model is no
# longer loaded anywhere in this file (only hand-landmark detection remains),
# so this hard stop appears obsolete and forces users to ship a file the app
# never reads — confirm whether the check should be removed or downgraded
# to a non-fatal warning.
model_path = "model/gesture_recognizer.task"
if not os.path.exists(model_path):
    # Surface the problem in the UI and halt script execution entirely.
    st.error(f"Model file not found at {model_path}. Please upload it to the correct directory.")
    st.stop()
17
 
 
18
class VideoProcessor(VideoTransformerBase):
    """Per-frame webcam processor for streamlit-webrtc.

    Mirrors each incoming frame and overlays MediaPipe hand landmarks
    before handing the frame back to the WebRTC pipeline.
    """

    def __init__(self):
        # One Hands instance per processor. model_complexity=0 selects the
        # lightest model, keeping per-frame latency low for live video.
        self.hands = mp_hands.Hands(
            max_num_hands=2,
            model_complexity=0,
            min_detection_confidence=0.5,
            min_tracking_confidence=0.5,
        )

    def recv(self, frame):
        """Return the frame mirrored, with any detected hand landmarks drawn on it."""
        # Decode to a BGR ndarray and mirror horizontally (selfie view).
        img = cv2.flip(frame.to_ndarray(format="bgr24"), 1)

        # MediaPipe expects RGB input; detection runs on the converted copy
        # while drawing happens on the BGR frame we return.
        detection = self.hands.process(cv2.cvtColor(img, cv2.COLOR_BGR2RGB))

        # multi_hand_landmarks is None when no hands are found; `or []`
        # turns that into an empty iteration instead of a separate branch.
        for hand_landmarks in detection.multi_hand_landmarks or []:
            mp_drawing.draw_landmarks(img, hand_landmarks, mp_hands.HAND_CONNECTIONS)

        return av.VideoFrame.from_ndarray(img, format="bgr24")
44
 
45
# Streamlit UI
st.title("Gesture & Hand Landmark Detection 🚀")
st.write("This app uses MediaPipe and Streamlit to detect hand landmarks in real-time.")

# WebRTC streamer for live video. A public STUN server is supplied so peers
# behind NAT can discover their reachable addresses and connect.
RTC_CONFIGURATION = {"iceServers": [{"urls": ["stun:stun.l.google.com:19302"]}]}
webrtc_streamer(
    key="gesture-detection",
    video_processor_factory=VideoProcessor,
    rtc_configuration=RTC_CONFIGURATION,
)
55
 
56
# Footer — small centered credit line rendered as raw HTML (hence
# unsafe_allow_html); the <style> block scopes the formatting to .footer.
_FOOTER_HTML = """
    <style>
    .footer {text-align: center; font-size: 12px; color: grey; margin-top: 20px;}
    </style>
    <p class="footer">Made with ❤️ using Streamlit & MediaPipe</p>
    """
st.markdown(_FOOTER_HTML, unsafe_allow_html=True)