simar007 commited on
Commit
dd1bac4
·
verified ·
1 Parent(s): e7e1800

Create app.py

Browse files
Files changed (1) hide show
  1. app.py +54 -0
app.py ADDED
@@ -0,0 +1,54 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
import cv2
import mediapipe as mp
import gradio as gr
import numpy as np  # NOTE(review): imported but not referenced below — confirm before removing

# Initialize MediaPipe Hands.
# Module-level state shared by every processed frame: a single detector
# configured to track at most one hand and to report a detection only when
# confidence is at least 0.7.
mp_hands = mp.solutions.hands
mp_drawing = mp.solutions.drawing_utils
hands = mp_hands.Hands(max_num_hands=1, min_detection_confidence=0.7)
10
+
11
+ # Simple gesture classifier
12
# Simple gesture classifier
def classify_gesture(landmarks):
    """Classify a hand pose from MediaPipe hand landmarks.

    Parameters
    ----------
    landmarks : sequence of landmark objects exposing a ``.y`` attribute
        MediaPipe's 21-point hand landmark list (index 4 = thumb tip,
        index 8 = index-finger tip). ``None`` or a short/empty sequence
        is tolerated.

    Returns
    -------
    str
        ``"A"`` when the thumb tip sits above the index-finger tip
        (interpreted as "thumb up"), otherwise ``"Unknown"``.
    """
    # Guard the indexing: a partial detection with fewer than 9 points
    # would previously raise IndexError instead of returning "Unknown".
    if landmarks and len(landmarks) > 8:
        thumb_tip = landmarks[4]
        index_tip = landmarks[8]
        # Image coordinates grow downward, so a smaller y is higher up.
        if thumb_tip.y < index_tip.y:
            return "A"  # Thumb above index finger -> thumb up
    return "Unknown"
19
+
20
def process_frame(frame):
    """Annotate one webcam frame with hand landmarks and the detected sign.

    Parameters
    ----------
    frame : numpy.ndarray
        An H x W x 3 image as delivered by Gradio's webcam Image
        component (RGB channel order — TODO confirm against the Gradio
        version in use).

    Returns
    -------
    numpy.ndarray
        The horizontally mirrored frame with landmarks, the gesture
        label, and a watermark drawn on it, in the input's channel order.
    """
    # Mirror horizontally so the live preview behaves like a mirror.
    frame = cv2.flip(frame, 1)
    h, w, _ = frame.shape

    # Gradio supplies RGB frames and MediaPipe expects RGB input, so the
    # frame is fed to the detector as-is. (The original converted with
    # COLOR_BGR2RGB, which handed channel-swapped data to MediaPipe and
    # hurt detection quality.)
    result = hands.process(frame)

    gesture = "No hand detected"

    if result.multi_hand_landmarks:
        for hand_landmarks in result.multi_hand_landmarks:
            mp_drawing.draw_landmarks(
                frame, hand_landmarks, mp_hands.HAND_CONNECTIONS
            )
            # max_num_hands=1, so this classifies the single detected hand.
            gesture = classify_gesture(hand_landmarks.landmark)

    cv2.putText(frame, f"Sign: {gesture}",
                (10, 40), cv2.FONT_HERSHEY_SIMPLEX,
                1, (0, 255, 0), 2)

    cv2.putText(frame, "Made by Simar",
                (10, h - 10), cv2.FONT_HERSHEY_SIMPLEX,
                0.8, (255, 0, 255), 2)

    return frame
44
+
45
# Gradio UI: stream webcam frames through process_frame and display the
# annotated result live.
demo = gr.Interface(
    fn=process_frame,
    # Gradio 4+ removed the `source=` keyword in favor of `sources=[...]`;
    # the old spelling raises TypeError on current releases.
    inputs=gr.Image(sources=["webcam"], streaming=True),
    outputs=gr.Image(),
    live=True,
    title="Sign Language Recognition",
    description="Demo sign recognition using MediaPipe + OpenCV",
)

demo.launch()