|
|
import torch |
|
|
import gradio as gr |
|
|
|
|
|
from zoedepth.utils.misc import colorize, save_raw_16bit |
|
|
from zoedepth.utils.geometry import depth_to_points, create_triangles |
|
|
|
|
|
from PIL import Image |
|
|
import numpy as np |
|
|
|
|
|
# Page-level CSS injected into the Gradio app: cap preview images at 500px
# and letterbox them (object-fit: contain) so large uploads don't distort
# the layout.
css = """


img {


max-height: 500px;


object-fit: contain;


}


"""


# Load the pretrained ZoeD_N variant of ZoeDepth once at import time via
# torch.hub, and switch it to eval mode for inference.
# NOTE(review): this downloads the repo/weights on first run and leaves the
# model on CPU unless it is moved to a device elsewhere — confirm intended.
MODEL = torch.hub.load('isl-org/ZoeDepth', "ZoeD_N", pretrained=True, trust_repo=True).eval()
|
|
|
|
|
|
|
|
def save_raw_16bit(depth, fpath="raw.png"):
    """Convert a depth map to a 16-bit fixed-point array (depth * 256).

    NOTE: despite its name, this function does NOT write any file and the
    ``fpath`` argument is ignored; both are kept only for backward
    compatibility with the helper of the same name imported from
    ``zoedepth.utils.misc``, which this module-level def shadows.

    Args:
        depth: Depth map as a ``torch.Tensor`` or ``numpy.ndarray``.
            Tensors are squeezed and moved to host memory first.
        fpath: Unused; retained for interface compatibility.

    Returns:
        ``numpy.ndarray`` of dtype ``uint16`` with values scaled by 256 and
        clamped to [0, 65535].  (The original cast wrapped around silently
        for depth values >= 256 or < 0.)
    """
    if isinstance(depth, torch.Tensor):
        # Drop singleton dims and move to CPU before NumPy operations.
        depth = depth.squeeze().cpu().numpy()

    # Scale to fixed point (1/256 unit resolution) and clamp so out-of-range
    # values saturate instead of overflowing the uint16 cast below.
    depth = np.clip(depth * 256, 0, 65535)
    return depth.astype(np.uint16)
|
|
|
|
|
def process_image(image: Image.Image):
    """Run ZoeDepth on a PIL image and return the depth map as a PIL image.

    The input is forced to RGB, passed through the shared ``MODEL``, and the
    first channel of the colorized depth is scaled into 16-bit range via
    ``save_raw_16bit`` before being wrapped back into a PIL image.
    """
    rgb = image.convert("RGB")
    depth = MODEL.infer_pil(rgb)
    # First channel of the colorized depth, scaled by 256 to uint16.
    first_channel = colorize(depth)[:, :, 0]
    return Image.fromarray(save_raw_16bit(first_channel))
|
|
|
|
|
|
|
|
|
|
|
|
|
|
# Markdown shown at the top of the demo page.
title = "# ZoeDepth"
description = """Unofficial demo for **ZoeDepth: Zero-shot Transfer by Combining Relative and Metric Depth**."""

# Build the Gradio UI: a single tab holding an input/output image pair and a
# button wired to process_image (also exposed as API endpoint
# "generate_depth").
with gr.Blocks(css=css) as API:
    gr.Markdown(title)
    gr.Markdown(description)
    with gr.Tab("Depth Prediction"):
        with gr.Row():
            input_image = gr.Image(label="Input Image", type='pil', height=500)
            depth_output = gr.Image(label="Depth Map", type='pil', height=500)
        run_button = gr.Button(value="Generate")
        run_button.click(process_image, inputs=input_image, outputs=depth_output, api_name="generate_depth")

if __name__ == '__main__':
    # Start the web server only when run as a script, not on import.
    API.launch()