# Author: Hussein El-Hadidy
# Latest realtime version (commit 53952ec)
import cv2
from ultralytics import YOLO
# Realtime pose estimation on an MJPEG HTTP stream: read frames, rotate them
# 90° clockwise, run YOLO pose inference, then display and record the
# annotated frames to "output_pose.mp4". Press 'q' in the preview window to stop.

# Load the YOLO pose-estimation model weights (adjust path as needed).
model = YOLO("yolo11n-pose.pt")

# MJPEG HTTP stream URL (presumably an IP-webcam app on the local network).
stream_url = "http://192.168.137.244:8080/video"

# Open video capture from the stream.
cap = cv2.VideoCapture(stream_url)
if not cap.isOpened():
    # raise SystemExit instead of exit(): exit() is provided by the `site`
    # module and is intended for interactive sessions only; SystemExit also
    # sends the message to stderr and sets a nonzero exit status.
    raise SystemExit("Error: Cannot open video stream")

# Query stream properties, falling back to defaults when the stream does not
# report them (MJPEG HTTP streams often return 0 for these).
width = int(cap.get(cv2.CAP_PROP_FRAME_WIDTH))
height = int(cap.get(cv2.CAP_PROP_FRAME_HEIGHT))
fps = cap.get(cv2.CAP_PROP_FPS)
if width == 0 or height == 0:
    width, height = 640, 480
if fps == 0:
    fps = 20  # Reasonable default FPS for streams that report none.
print(f"Stream opened: {width}x{height} at {fps} FPS")

# Frames are rotated 90° clockwise before writing, so the output video's
# (width, height) is the source's (height, width).
out = cv2.VideoWriter(
    "output_pose.mp4", cv2.VideoWriter_fourcc(*"mp4v"), fps, (height, width)
)

try:
    while True:
        ret, frame = cap.read()
        if not ret:
            print("Failed to grab frame")
            break

        # Rotate 90° clockwise (source is presumably mounted/held sideways —
        # confirm against the actual camera orientation).
        rotated_frame = cv2.rotate(frame, cv2.ROTATE_90_CLOCKWISE)

        # Run pose estimation on the rotated frame; conf/iou thresholds are
        # tuned for this stream.
        results = model(rotated_frame, verbose=False, conf=0.3, iou=0.1)

        # Draw pose (skeleton/keypoint) annotations onto a copy of the frame.
        annotated_frame = results[0].plot()

        # Record and display the annotated frame.
        out.write(annotated_frame)
        cv2.imshow("Pose Detection (rotated)", annotated_frame)

        # Press 'q' to quit.
        if cv2.waitKey(1) & 0xFF == ord("q"):
            break
finally:
    # Always release the capture and writer and close the window, even when
    # inference or I/O raises — otherwise the output file is never finalized
    # and the camera stream stays claimed.
    cap.release()
    out.release()
    cv2.destroyAllWindows()