Spaces:
Sleeping
Sleeping
| from ultralytics import YOLO | |
| import gradio as gr | |
| import numpy as np | |
| from PIL import Image | |
# -------------------------
# Load ONLY your trained model
# -------------------------
# The weights file sits next to this script. Loading happens once at
# import time, so every request reuses the same model instance.
model = YOLO("mules_vs_whitetails.pt")
# -------------------------
# Inference function
# -------------------------
def predict(image):
    """Run the deer-detection YOLO model on a single image.

    Args:
        image: Input as a ``PIL.Image.Image`` or an RGB numpy array (the
            Gradio ``Image`` component with ``type="numpy"`` delivers the
            latter). May be ``None`` when the user submits without
            uploading anything.

    Returns:
        An RGB numpy array with detection boxes drawn, or ``None`` when
        no image was provided.
    """
    # Gradio sends None on an empty submit; bail out instead of crashing
    # inside model.predict().
    if image is None:
        return None

    # Normalize PIL input to an RGB numpy array.
    if isinstance(image, Image.Image):
        image = np.array(image.convert("RGB"))

    # Run inference on the single image.
    results = model.predict(
        source=image,
        conf=0.25,
        device="cpu",  # change to "cuda" if GPU is available
        verbose=False,
    )

    # One input image -> take the first (and only) result.
    r = results[0]

    # r.plot() returns a BGR array (OpenCV convention). Reversing the last
    # axis yields RGB but as a negative-stride view; ascontiguousarray
    # materializes it into a plain C-contiguous array, which downstream
    # consumers (encoders, Gradio) handle more reliably.
    return np.ascontiguousarray(r.plot()[:, :, ::-1])
# -------------------------
# Gradio UI
# -------------------------
# Single image in, annotated image out, wired to the predict() handler.
_image_in = gr.Image(type="numpy", label="Upload Image")
_image_out = gr.Image(type="numpy", label="Detection Result")

app = gr.Interface(
    fn=predict,
    inputs=_image_in,
    outputs=_image_out,
    title="Deer Detection AI – Mule Deer vs Whitetail",
    description="Custom YOLO model: mules_vs_whitetails.pt",
)
# -------------------------
# Launch
# -------------------------
# Start the Gradio server only when this file is executed directly,
# not when it is imported as a module.
if __name__ == "__main__":
    app.launch()