# please-work-thx / app.py
# author: danieaneta — "update reverse proxy" (commit 6399190)
import base64
import io
import numpy as np
import torch
import matplotlib.pyplot as plt
from fastapi import FastAPI, HTTPException
from fastapi.responses import JSONResponse
from pydantic import BaseModel
from transformers import pipeline
from PIL import Image
# Initialize FastAPI application instance.
app = FastAPI()
# Load the MiDaS depth-estimation model at import time.
# NOTE(review): this downloads/loads model weights when the module is
# imported, so server startup blocks until the model is ready.
pipe = pipeline("depth-estimation", model="Intel/dpt-hybrid-midas")
# Request schema for the /predict/ endpoint.
class ImageRequest(BaseModel):
    """Payload for /predict/: a single base64-encoded image."""

    image_base64: str  # base64-encoded image bytes; format is detected by PIL
@app.get("/")
def home():
    """Health-check endpoint confirming the service is up."""
    return {"message": "Depth Estimation API is running"}
@app.post("/predict/")
def predict(request: ImageRequest):
    """Run depth estimation on a base64-encoded image.

    Decodes the image, runs the MiDaS depth pipeline, renders the
    normalized depth map with matplotlib's "inferno" colormap, and
    returns the rendered PNG re-encoded as base64 JSON.

    Raises:
        HTTPException: status 400 with the underlying error message if
            decoding, inference, or rendering fails.
    """
    try:
        # Decode the incoming base64 payload into an RGB PIL image.
        image_data = base64.b64decode(request.image_base64)
        image = Image.open(io.BytesIO(image_data)).convert("RGB")

        # Perform depth estimation; the pipeline returns a dict whose
        # "depth" entry is an image-like depth map.
        result = pipe(image)
        depth_map = np.array(result["depth"], dtype=np.float32)

        # Normalize to [0, 1] for visualization. Guard against a
        # constant depth map, which would otherwise divide by zero and
        # render an all-NaN (blank) image.
        d_min, d_max = depth_map.min(), depth_map.max()
        if d_max > d_min:
            depth_map = (depth_map - d_min) / (d_max - d_min)
        else:
            depth_map = np.zeros_like(depth_map)

        # Render the depth map entirely in memory (no file is saved).
        fig, ax = plt.subplots()
        ax.imshow(depth_map, cmap="inferno")
        ax.axis("off")
        img_buf = io.BytesIO()
        fig.savefig(img_buf, format="png", bbox_inches='tight', pad_inches=0)
        plt.close(fig)  # Close the figure to free memory
        img_buf.seek(0)

        # Encode the rendered PNG as base64 for the JSON response.
        depth_map_base64 = base64.b64encode(img_buf.read()).decode("utf-8")
        return JSONResponse(content={"depth_map_base64": depth_map_base64})
    except Exception as e:
        # NOTE(review): every failure (bad input AND internal/model
        # errors) maps to 400 — internal failures arguably deserve 500;
        # confirm the intended API contract before tightening.
        raise HTTPException(status_code=400, detail=str(e))