AamirMalik committed on
Commit
9edd20b
·
verified ·
1 Parent(s): 29faecf

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +22 -36
app.py CHANGED
@@ -1,22 +1,16 @@
1
  import streamlit as st
2
- import cv2
3
- import mediapipe as mp
4
  import numpy as np
5
  import tensorflow as tf
6
- import tempfile
7
  from transformers import AutoImageProcessor, AutoModelForImageClassification
8
 
9
  # Load gesture classification model from Hugging Face Hub (public model)
10
  processor = AutoImageProcessor.from_pretrained("google/vit-base-patch16-224-in21k")
11
  model = AutoModelForImageClassification.from_pretrained("google/vit-base-patch16-224-in21k")
12
 
13
- # Mediapipe initialization
14
- mp_hands = mp.solutions.hands
15
- hands = mp_hands.Hands()
16
- mp_draw = mp.solutions.drawing_utils
17
-
18
  # Function for gesture classification
19
  def classify_gesture(image):
 
20
  inputs = processor(images=image, return_tensors="pt")
21
  outputs = model(**inputs)
22
  prediction = outputs.logits.argmax(-1).item()
@@ -26,40 +20,32 @@ def classify_gesture(image):
26
def main():
    """Streamlit entry point: stream webcam frames, overlay Mediapipe hand
    landmarks, and classify each frame's gesture with the ViT model."""
    st.set_page_config(page_title="Sign Language Translator", layout="wide")
    st.title("🤟 Sign Language Translator")
    st.write("Translate sign language gestures into text and speech in real time.")

    # Sidebar
    st.sidebar.header("Settings")
    use_camera = st.sidebar.checkbox("Use Camera")

    # Display Video Feed
    if use_camera:
        st.write("### 📸 Live Camera Feed")
        frame_placeholder = st.empty()  # placeholder updated in-place each frame

        cap = cv2.VideoCapture(0)  # default system webcam
        while cap.isOpened():
            ret, frame = cap.read()
            if not ret:
                break

            # OpenCV captures BGR; Mediapipe and st.image expect RGB.
            frame = cv2.cvtColor(frame, cv2.COLOR_BGR2RGB)
            results = hands.process(frame)

            # Draw detected hand landmarks directly onto the frame.
            if results.multi_hand_landmarks:
                for hand_landmarks in results.multi_hand_landmarks:
                    mp_draw.draw_landmarks(frame, hand_landmarks, mp_hands.HAND_CONNECTIONS)

            # Gesture classification (runs the full ViT model on every frame)
            gesture = classify_gesture(frame)
            st.write(f"Gesture: {gesture}")

            frame_placeholder.image(frame, channels="RGB")

            # NOTE(review): cv2.waitKey has no effect without a cv2 window —
            # presumably a leftover from a desktop OpenCV script; confirm.
            if cv2.waitKey(1) & 0xFF == ord('q'):
                break

        cap.release()
 
 
 
 
 
 
 
63
 
64
  if __name__ == "__main__":
65
  main()
 
1
  import streamlit as st
 
 
2
  import numpy as np
3
  import tensorflow as tf
4
+ from PIL import Image
5
  from transformers import AutoImageProcessor, AutoModelForImageClassification
6
 
7
  # Load gesture classification model from Hugging Face Hub (public model)
8
  processor = AutoImageProcessor.from_pretrained("google/vit-base-patch16-224-in21k")
9
  model = AutoModelForImageClassification.from_pretrained("google/vit-base-patch16-224-in21k")
10
 
 
 
 
 
 
11
  # Function for gesture classification
12
  def classify_gesture(image):
13
+ image = image.convert("RGB")
14
  inputs = processor(images=image, return_tensors="pt")
15
  outputs = model(**inputs)
16
  prediction = outputs.logits.argmax(-1).item()
 
20
def main():
    """Streamlit entry point: classify a hand-gesture image supplied either
    by file upload or by the browser camera widget.

    Side effects only (renders the Streamlit UI); returns None.
    """
    st.set_page_config(page_title="Sign Language Translator", layout="wide")
    st.title("🤟 Sign Language Translator")
    st.write("Translate sign language gestures into text and speech.")

    # Sidebar
    st.sidebar.header("Settings")
    use_camera = st.sidebar.checkbox("Use Camera")

    # Upload image
    uploaded_image = st.file_uploader("Upload an image of a hand gesture", type=["png", "jpg", "jpeg"])

    # Camera input (only shown when enabled in the sidebar)
    if use_camera:
        st.write("### 📸 Capture Gesture Using Camera")
        camera_image = st.camera_input("Take a picture")

        if camera_image:
            _display_and_classify(camera_image, "Captured Image")

    # Display uploaded image (handled independently of the camera branch)
    if uploaded_image:
        _display_and_classify(uploaded_image, "Uploaded Image")


def _display_and_classify(file_obj, caption):
    """Render *file_obj* as an image with *caption* and print the predicted
    gesture label. Shared by the camera and upload branches (was duplicated).

    NOTE(review): `use_column_width` is deprecated in newer Streamlit in
    favor of `use_container_width`; kept to preserve current behavior.
    """
    image = Image.open(file_obj)
    st.image(image, caption=caption, use_column_width=True)
    gesture = classify_gesture(image)
    st.write(f"Gesture: {gesture}")
49
 
50
# Run the app when executed directly (e.g. `streamlit run app.py`).
if __name__ == "__main__":
    main()