Spaces:
Sleeping
Sleeping
| import gradio as gr | |
| import torch | |
| from transformers import DPTFeatureExtractor, DPTForDepthEstimation | |
| from PIL import Image | |
| import numpy as np | |
# Pretrained monocular depth-estimation model pulled from the Hugging Face hub.
# Both objects are module-level globals consumed by estimate_depth below.
# NOTE(review): DPTFeatureExtractor is deprecated in recent transformers
# releases in favor of DPTImageProcessor -- confirm against the pinned version.
model_name = "Intel/dpt-large"
feature_extractor = DPTFeatureExtractor.from_pretrained(model_name)
model = DPTForDepthEstimation.from_pretrained(model_name)
| def estimate_depth(image): | |
| if isinstance(image, np.ndarray): | |
| image = Image.fromarray(image) | |
| inputs = feature_extractor(images=image, return_tensors="pt") | |
| with torch.no_grad(): | |
| outputs = model(**inputs) | |
| depth = outputs.predicted_depth | |
| depth = depth.squeeze().cpu().numpy() | |
| depth_min, depth_max = depth.min(), depth.max() | |
| depth = (depth - depth_min) / (depth_max - depth_min) * 255 | |
| depth = depth.astype(np.uint8) | |
| depth_img = Image.fromarray(depth) | |
| return depth_img | |
# Gradio UI wiring: one image in, one depth-map image out, routed through
# estimate_depth above.
description = "Upload an image and get its depth estimation using Intel DPT-Large."
demo = gr.Interface(
    fn=estimate_depth,
    inputs=gr.Image(type="pil", label="Upload Image"),
    outputs=gr.Image(type="pil", label="Depth Estimation"),
    title="Depth Estimation with Intel DPT-Large",
    description=description
)
# share=True asks Gradio to open a temporary public tunnel URL in addition
# to the local server.
demo.launch(share=True)