Spaces:
Sleeping
Sleeping
import { useState, useEffect, useRef } from 'react';
import { HandLandmarker, FilesetResolver } from '@mediapipe/tasks-vision';
import { drawLandmarks, analyzeHandGesture } from '../utils/handUtils';
/**
 * Custom hook that runs MediaPipe hand-landmark detection on a video element,
 * draws the video + landmarks onto a canvas, and exposes the derived gesture
 * state (hand presence, open/closed "mouth" gesture, handedness, thumb position).
 *
 * @param {React.RefObject<HTMLVideoElement>} videoRef - live camera video element
 * @param {React.RefObject<HTMLCanvasElement>} canvasRef - overlay canvas to draw on
 * @param {boolean} isMobile - forwarded to drawLandmarks for sizing
 * @returns {{handDetected: boolean, isMouthOpen: boolean, isLeftHand: boolean,
 *            thumbPosition: {x: number, y: number}, isFirstLoad: boolean,
 *            isComponentMounted: React.MutableRefObject<boolean>}}
 */
const useHandDetection = (videoRef, canvasRef, isMobile) => {
  // MediaPipe HandLandmarker instance (null until the model finishes loading).
  const [handLandmarker, setHandLandmarker] = useState(null);
  const [handDetected, setHandDetected] = useState(false);
  const [isMouthOpen, setIsMouthOpen] = useState(false);
  const [isLeftHand, setIsLeftHand] = useState(true);
  const [thumbPosition, setThumbPosition] = useState({ x: 0, y: 0 });
  const [isFirstLoad, setIsFirstLoad] = useState(true);

  const requestRef = useRef(null);         // current requestAnimationFrame id
  const lastDetectionTimeRef = useRef(0);  // timestamp of the last inference run
  const isComponentMounted = useRef(true); // guards state updates after unmount

  // Initialize the HandLandmarker (runs once on mount).
  useEffect(() => {
    isComponentMounted.current = true;
    let landmarker = null;   // kept in effect scope so cleanup can close it
    let firstLoadTimer = null;

    const initializeHandLandmarker = async () => {
      try {
        const vision = await FilesetResolver.forVisionTasks(
          "https://cdn.jsdelivr.net/npm/@mediapipe/tasks-vision@latest/wasm"
        );
        if (!isComponentMounted.current) return;

        landmarker = await HandLandmarker.createFromOptions(vision, {
          baseOptions: {
            modelAssetPath: "https://storage.googleapis.com/mediapipe-models/hand_landmarker/hand_landmarker/float16/latest/hand_landmarker.task",
            delegate: "GPU"
          },
          runningMode: "VIDEO",
          numHands: 1,
          minHandDetectionConfidence: 0.5,
          minHandPresenceConfidence: 0.5,
          minTrackingConfidence: 0.5
        });

        // If we unmounted while the model was loading, release it immediately
        // instead of leaking the GPU/wasm resources (cleanup already ran with
        // landmarker still null, so it could not close it for us).
        if (!isComponentMounted.current) {
          landmarker.close();
          landmarker = null;
          return;
        }
        setHandLandmarker(landmarker);
        console.log("Hand landmarker initialized successfully");

        // Hide the "first load" UI a few seconds after the model is ready.
        firstLoadTimer = setTimeout(() => {
          if (isComponentMounted.current) {
            setIsFirstLoad(false);
          }
        }, 3000);
      } catch (error) {
        console.error("Error initializing hand landmarker:", error);
      }
    };

    initializeHandLandmarker();

    return () => {
      isComponentMounted.current = false;
      if (firstLoadTimer) {
        clearTimeout(firstLoadTimer); // fix: timer was never cleared on unmount
      }
      if (requestRef.current) {
        cancelAnimationFrame(requestRef.current);
        requestRef.current = null;
      }
      // Release the model if it finished loading (fix: original leaked it).
      if (landmarker) {
        landmarker.close();
      }
    };
  }, []);

  // Process video frames and detect hand gestures.
  useEffect(() => {
    if (!handLandmarker || !videoRef.current || !canvasRef.current) return;
    const video = videoRef.current;
    const canvas = canvasRef.current;
    const ctx = canvas.getContext('2d');

    const detectHands = (now) => {
      if (!isComponentMounted.current) return;
      // Wait until the video has data for the current frame.
      if (video.readyState < 2) {
        requestRef.current = requestAnimationFrame(detectHands);
        return;
      }
      // Throttle inference to ~10 fps for performance.
      if (now - lastDetectionTimeRef.current > 100) {
        lastDetectionTimeRef.current = now;
        try {
          const results = handLandmarker.detectForVideo(video, now);

          // Redraw the current video frame stretched to the full canvas
          // (original computed offsets/sizes but they were always 0 / full-size).
          ctx.clearRect(0, 0, canvas.width, canvas.height);
          ctx.drawImage(video, 0, 0, canvas.width, canvas.height);

          if (results.landmarks && results.landmarks.length > 0) {
            const landmarks = results.landmarks[0];
            setHandDetected(true);
            drawLandmarks(ctx, landmarks, canvas, isMobile);

            const { isOpen, isLeftHand: isLeft, thumbPosition: thumbPos } = analyzeHandGesture(landmarks);
            setIsLeftHand(isLeft);
            // Convert normalized [0..1] landmark coordinates to canvas pixels.
            setThumbPosition({
              x: thumbPos.x * canvas.width,
              y: thumbPos.y * canvas.height
            });
            // React bails out of same-value state updates, so no guard needed;
            // this lets us drop `isMouthOpen` from the effect dependencies.
            setIsMouthOpen(isOpen);
          } else {
            setHandDetected(false);
            setIsMouthOpen(false);
          }
        } catch (error) {
          // Keep the loop alive even if a single frame fails
          // (fix: an unguarded throw killed the loop permanently).
          console.error("Hand detection frame failed:", error);
        }
      }
      requestRef.current = requestAnimationFrame(detectHands);
    };

    requestRef.current = requestAnimationFrame(detectHands);
    return () => {
      if (requestRef.current) {
        cancelAnimationFrame(requestRef.current);
        requestRef.current = null;
      }
    };
    // Fix: `isMouthOpen` removed from deps — it previously tore down and
    // restarted the whole rAF loop on every mouth-state change.
  }, [handLandmarker, isMobile, videoRef, canvasRef]);

  return {
    handDetected,
    isMouthOpen,
    isLeftHand,
    thumbPosition,
    isFirstLoad,
    isComponentMounted
  };
};

export default useHandDetection;