# app.py — hand-sign video classifier (HuggingFace Space, commit 8bcb9e5)
import os
import tempfile

import cv2
import numpy as np
import streamlit as st
import tensorflow as tf
from PIL import Image
from tensorflow.keras.models import load_model
def detect_hand(frame, hand_cascade):
    """Detect hands in a BGR frame with the given Haar cascade.

    Returns whatever ``detectMultiScale`` yields: an array of
    ``(x, y, w, h)`` rectangles, or an empty tuple when nothing matches.
    """
    grayscale = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
    return hand_cascade.detectMultiScale(
        grayscale,
        scaleFactor=1.1,
        minNeighbors=5,
        minSize=(30, 30),
    )
# Load Haar Cascade for hand detection.
hand_cascade_path = 'path/to/your/hand_cascade.xml'  # Replace with your actual path
hand_cascade = cv2.CascadeClassifier(hand_cascade_path)

# Open the video file uploaded through the Streamlit widget.
f = st.file_uploader("Choose a Video")
if f is not None:
    # OpenCV needs a real file path, so spill the in-memory upload to disk.
    tfile = tempfile.NamedTemporaryFile(delete=False)
    try:
        tfile.write(f.read())
        tfile.close()  # flush/close before cv2 opens the same path

        cap = cv2.VideoCapture(tfile.name)
        fps = cap.get(cv2.CAP_PROP_FPS)
        st.write(fps)
        # Sample ~1 frame per second.  max(1, ...) guards against containers
        # that report FPS as 0, which would make `frame_count % interval`
        # below raise ZeroDivisionError.
        interval = max(1, int(round(fps)))
        frame_count = 0
        model = tf.keras.models.load_model('HandSignClassifier (1).h5')
        # Class labels — static ASL letters only ('j' and 'z' need motion).
        array = ['a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'k', 'l', 'm',
                 'n', 'o', 'p', 'q', 'r', 's', 't', 'u', 'v', 'w', 'x', 'y']
        out = ''
        while True:
            ret, frame = cap.read()
            if not ret:
                break
            # Check if it's time to capture a frame.
            if frame_count % interval == 0:
                hands = detect_hand(frame, hand_cascade)
                for (x, y, w, h) in hands:
                    # Draw rectangles around detected hands.
                    cv2.rectangle(frame, (x, y), (x + w, y + h), (255, 0, 0), 2)
                # Display the frame with detected hands.
                st.image(frame, 'input')
                # detectMultiScale returns a non-empty ndarray or an empty
                # tuple; `if hands:` on a multi-row ndarray raises
                # ValueError, so test the length explicitly.
                if len(hands) > 0:
                    # Classify the last detection — the same rectangle the
                    # original code picked up via for-loop variable leakage.
                    x, y, w, h = hands[-1]
                    hand_roi = frame[y:y + h, x:x + w]
                    # Preprocess for the model: grayscale, 28x28, NHWC batch.
                    hand_roi = cv2.cvtColor(hand_roi, cv2.COLOR_BGR2GRAY)
                    hand_roi = cv2.resize(hand_roi, (28, 28))
                    hand_roi = np.reshape(hand_roi, (1, 28, 28, 1))
                    # NOTE(review): pixels are fed as raw 0-255 values —
                    # confirm the model was trained without /255 scaling.
                    pred = model.predict(hand_roi)
                    st.write(pred)
                    pred = array[np.argmax(pred)]
                    # Append only when the letter changes, collapsing runs
                    # of identical per-frame predictions.
                    if not out or out[-1] != pred:
                        out = out + pred
            # Increment the frame counter.
            frame_count += 1
        cap.release()
        st.write(out)
    finally:
        # delete=False means we own cleanup of the temp file.
        try:
            os.unlink(tfile.name)
        except OSError:
            pass