Spaces:
Sleeping
Sleeping
import os

# Streamlit needs writable config/cache directories; point them at /tmp
# BEFORE streamlit is imported so the settings take effect.
for _var, _path in (
    ("STREAMLIT_CONFIG_DIR", "/tmp/.streamlit"),
    ("STREAMLIT_CACHE_DIR", "/tmp/.streamlit/cache"),
):
    os.environ[_var] = _path
os.makedirs("/tmp/.streamlit/cache", exist_ok=True)
| import altair as alt | |
| import numpy as np | |
| import pandas as pd | |
| import streamlit as st | |
| import cv2 | |
| import mediapipe as mp | |
| import time | |
| from mediapipe.python.solutions import hands | |
| from streamlit_webrtc import webrtc_streamer, VideoTransformerBase | |
# Page config must be the first Streamlit call in the script.
st.set_page_config(page_title="ποΈ Hand Tracking Demo", layout="wide")
# Define constants and helper functions
# Landmark-index pairs forming the hand skeleton, taken from MediaPipe Hands.
HAND_CONNECTIONS = hands.HAND_CONNECTIONS
def draw_hand_landmarks(image, hand_landmarks):
    """Draw hand landmarks and their skeleton connections onto *image* in place.

    Args:
        image: BGR image (numpy array); modified in place.
        hand_landmarks: sequence of landmarks with normalized ``.x``/``.y``
            coordinates in [0, 1].
    """
    height, width = image.shape[:2]

    # Convert every normalized landmark to pixel coordinates once, then reuse
    # the points for both the dots and the connection lines.
    points = [(int(lm.x * width), int(lm.y * height)) for lm in hand_landmarks]

    # Landmark dots (green).
    for point in points:
        cv2.circle(image, point, 5, (0, 255, 0), -1)

    # Skeleton segments (blue), using MediaPipe's standard connection pairs.
    for start_idx, end_idx in HAND_CONNECTIONS:
        cv2.line(image, points[start_idx], points[end_idx], (255, 0, 0), 2)
# Aliases into the MediaPipe Tasks API.
BaseOptions = mp.tasks.BaseOptions
HandLandmarker = mp.tasks.vision.HandLandmarker
HandLandmarkerOptions = mp.tasks.vision.HandLandmarkerOptions
VisionRunningMode = mp.tasks.vision.RunningMode
# Path to the bundled hand-landmarker model file.
# NOTE(review): hard-coded container path — confirm it matches the deployment image.
model_path = "/app/src/hand_landmarker.task"
options = HandLandmarkerOptions(
    base_options=BaseOptions(model_asset_path=model_path),
    # IMAGE mode: each video frame is detected independently as a still image.
    running_mode=VisionRunningMode.IMAGE,
    num_hands=2,
)
# Single shared detector instance, reused for every incoming frame.
landmarker = HandLandmarker.create_from_options(options)
# Set up Streamlit interface
st.title("ποΈ Hand Tracking Demo")
# Add a stop button
# NOTE(review): this button appears to be a leftover from the commented-out
# OpenCV capture loop at the bottom of the file; the webrtc component has its
# own start/stop control — confirm whether it can be removed.
stop_button = st.button("Stop")
# Placeholders updated while the stream runs.
frame_placeholder = st.empty()
finger_count_text = st.empty()
class VideoProcessor(VideoTransformerBase):
    """Per-frame WebRTC processor: draws hand landmarks and counts raised fingers.

    The running total is published on ``self.finger_count`` so the Streamlit
    script thread can poll and display it.
    """

    def __init__(self):
        # Latest total across all detected hands; read by the UI loop.
        self.finger_count = 0

    def transform(self, frame):
        """Annotate one frame and update ``self.finger_count``; returns BGR ndarray."""
        bgr = frame.to_ndarray(format="bgr24")
        rgb = cv2.cvtColor(bgr, cv2.COLOR_BGR2RGB)
        detection = landmarker.detect(
            mp.Image(image_format=mp.ImageFormat.SRGB, data=rgb)
        )

        # Reset, then accumulate per hand.
        self.finger_count = 0
        for landmarks in detection.hand_landmarks or []:
            draw_hand_landmarks(bgr, landmarks)
            # Thumb: tip (4) above IP joint (3). NOTE(review): a y-axis test is
            # a rough heuristic for the thumb — confirm it behaves as intended.
            raised = 1 if landmarks[4].y < landmarks[3].y else 0
            # Index/middle/ring/pinky: fingertip above the joint two indices back.
            raised += sum(
                1 for tip in (8, 12, 16, 20) if landmarks[tip].y < landmarks[tip - 2].y
            )
            self.finger_count += raised
        return bgr
# Start webcam capture via WebRTC (browser camera -> VideoProcessor.transform).
ctx = webrtc_streamer(
    key="hand-tracker",
    video_processor_factory=VideoProcessor,
    media_stream_constraints={"video": True, "audio": False},
    async_processing=True,
    rtc_configuration={
        # Public STUN server so peers behind NAT can establish the connection.
        "iceServers": [{"urls": ["stun:stun.l.google.com:19302"]}]
    },
)
# Display finger count while the stream is live.
if ctx.state.playing:
    st.markdown("### Live tracking active...")
    # Poll the processor's latest count ~10x per second.
    # FIX: the original looped on `ctx.video_processor` alone, which stays
    # truthy after the user stops the stream, so the loop never terminated and
    # kept showing a stale count. Also check `ctx.state.playing` so the loop
    # exits when the stream stops.
    while ctx.state.playing and ctx.video_processor:
        finger_count_text.markdown(
            f"### Fingers detected: {ctx.video_processor.finger_count}"
        )
        time.sleep(0.1)
else:
    st.info("Click 'Start' to begin camera tracking.")
# --- Legacy reference implementation (disabled) ---
# Earlier OpenCV/cv2.VideoCapture version of this app, kept below as
# commented-out code for reference; superseded by the streamlit-webrtc
# pipeline above.
# Initialize finger count
# current_finger_count = 0
| # # Initialize MediaPipe Hands | |
| # with hands.Hands( | |
| # max_num_hands=2, min_detection_confidence=0.5, min_tracking_confidence=0.5 | |
| # ) as hands_model: | |
| # while not stop_button: | |
| # ret, frame = cap.read() | |
| # if not ret: | |
| # st.error("Failed to capture video from webcam") | |
| # break | |
| # frame = cv2.flip(frame, 1) | |
| # frame_rgb = cv2.cvtColor(frame, cv2.COLOR_BGR2RGB) | |
| # mp_image = mp.Image(image_format=mp.ImageFormat.SRGB, data=frame_rgb) | |
| # result = landmarker.detect(mp_image) | |
| # # Reset finger count for each frame | |
| # current_finger_count = 0 | |
| # if result.hand_landmarks: | |
| # for hand_landmarks in result.hand_landmarks: | |
| # # Draw landmarks & connections | |
| # draw_hand_landmarks(frame, hand_landmarks) | |
| # # Calculate finger count for this hand | |
| # hand_finger_count = 0 | |
| # if hand_landmarks[4].y < hand_landmarks[3].y: # Thumb | |
| # hand_finger_count += 1 | |
| # for i in [8, 12, 16, 20]: # Index, middle, ring, pinky | |
| # if hand_landmarks[i].y < hand_landmarks[i - 2].y: | |
| # hand_finger_count += 1 | |
| # # Add this hand's fingers to the total count | |
| # current_finger_count += hand_finger_count | |
| # # Display finger count | |
| # finger_count_text.markdown(f"### Fingers detected: {current_finger_count}") | |
| # # Convert BGR to RGB for displaying in Streamlit | |
| # frame_rgb = cv2.cvtColor(frame, cv2.COLOR_BGR2RGB) | |
| # frame_placeholder.image(frame_rgb, channels="RGB") | |
| # # Add a small delay to simulate real-time processing | |
| # time.sleep(0.05) | |
| # # Rerun to check if stop button was pressed | |
| # if stop_button: | |
| # break | |
| # # Release resources | |
| # cap.release() | |
| # st.success("Camera released. Application stopped.") | |
| # """ | |
| # # Welcome to Streamlit! | |
| # Edit `/streamlit_app.py` to customize this app to your heart's desire :heart:. | |
| # If you have any questions, checkout our [documentation](https://docs.streamlit.io) and [community | |
| # forums](https://discuss.streamlit.io). | |
| # In the meantime, below is an example of what you can do with just a few lines of code: | |
| # """ | |
| # num_points = st.slider("Number of points in spiral", 1, 10000, 1100) | |
| # num_turns = st.slider("Number of turns in spiral", 1, 300, 31) | |
| # indices = np.linspace(0, 1, num_points) | |
| # theta = 2 * np.pi * num_turns * indices | |
| # radius = indices | |
| # x = radius * np.cos(theta) | |
| # y = radius * np.sin(theta) | |
| # df = pd.DataFrame({ | |
| # "x": x, | |
| # "y": y, | |
| # "idx": indices, | |
| # "rand": np.random.randn(num_points), | |
| # }) | |
| # st.altair_chart(alt.Chart(df, height=700, width=700) | |
| # .mark_point(filled=True) | |
| # .encode( | |
| # x=alt.X("x", axis=None), | |
| # y=alt.Y("y", axis=None), | |
| # color=alt.Color("idx", legend=None, scale=alt.Scale()), | |
| # size=alt.Size("rand", legend=None, scale=alt.Scale(range=[1, 150])), | |
| # )) |