How to use the depth-anything/Depth-Anything-V2-Base checkpoint with the DepthAnythingV2 class:
# Install from https://github.com/DepthAnything/Depth-Anything-V2
# Load the model and infer depth from an image
import cv2
import torch
from huggingface_hub import hf_hub_download  # needed for the checkpoint download below
from depth_anything_v2.dpt import DepthAnythingV2

# Instantiate the ViT-Base variant; `features`/`out_channels` must match the checkpoint.
model = DepthAnythingV2(encoder="vitb", features=128, out_channels=[96, 192, 384, 768])

# Download the pretrained weights from the Hugging Face Hub (cached locally after first run).
filepath = hf_hub_download(
    repo_id="depth-anything/Depth-Anything-V2-Base",
    filename="depth_anything_v2_vitb.pth",
    repo_type="model",
)
state_dict = torch.load(filepath, map_location="cpu")
# load_state_dict returns an _IncompatibleKeys namedtuple, NOT the model,
# so it cannot be chained with .eval() — call eval() on the model itself.
model.load_state_dict(state_dict)
model.eval()  # switch to inference mode (disables dropout / fixes norm stats)

# OpenCV reads the image as a BGR numpy array, which infer_image expects.
raw_img = cv2.imread("your/image/path")
depth = model.infer_image(raw_img)  # HxW raw depth map in numpy