# NOTE: reconstructed from a Hugging Face Spaces page capture — the
# "Spaces:" / "Build error" header and the markdown table pipes around
# each line were page artifacts, not part of the source.
# Force JAX onto CPU. This env var must be set BEFORE JAX initializes its
# backend, so it is placed ahead of the `jax` import (the original set it
# after importing jax, which only works because backend init is lazy —
# fragile if anything touches jax at import time). Adjust if GPU is needed.
import os

os.environ['JAX_PLATFORMS'] = 'cpu'

import base64
import io

import jax
import numpy as np
from fastapi import FastAPI, HTTPException
from PIL import Image
from pydantic import BaseModel

from octo.model.octo_model import OctoModel

# Load the model once at import time so every request reuses the same
# weights (assumes the checkpoint is cached locally or fetchable).
model = OctoModel.load_pretrained("hf://rail-berkeley/octo-small-1.5")

# FastAPI application object; endpoints are registered via decorators below.
app = FastAPI(title="Octo Model Inference API")
class InferenceRequest(BaseModel):
    """Request body for the inference endpoint.

    Fields:
        image_base64: Base64-encoded image; a leading ``data:image/...``
            URI prefix, if present, is stripped by the handler.
        task: Natural-language instruction for the policy.
    """

    # Base64-encoded image string (raw base64 or full data URI).
    image_base64: str
    # Task prompt used when the client omits one.
    task: str = "pick up the fork"
# Health check endpoint — the original defined this handler but never
# registered a route for it, so it was unreachable; the decorator fixes that.
@app.get("/health")
async def health_check():
    """Liveness probe: returns a static payload when the server is up."""
    return {"status": "healthy"}
# Inference endpoint — the original defined this handler but never
# registered a route for it, so it was unreachable; the decorator fixes that.
@app.post("/predict")
async def predict(request: InferenceRequest):
    """Run the Octo policy on one image plus a language instruction.

    Decodes the base64 image, normalizes it to 256x256 RGB, and samples
    an action chunk conditioned on the task text.

    Returns:
        {"actions": [...]}: un-normalized actions for the first (only)
        batch element, as nested Python lists.

    Raises:
        HTTPException: status 500 on any decoding or inference failure.
    """
    try:
        # Strip an optional data-URI prefix ("data:image/png;base64,...").
        img_base64 = request.image_base64
        if img_base64.startswith("data:image"):
            img_base64 = img_base64.split(",")[1]
        img_data = base64.b64decode(img_base64)
        # Force 3-channel RGB: without convert(), RGBA PNGs or grayscale
        # inputs yield an array that cannot match the expected
        # (1, 1, 256, 256, 3) observation shape.
        img = Image.open(io.BytesIO(img_data)).convert("RGB").resize((256, 256))
        img = np.array(img)
        # Add batch and time-horizon dimensions -> (1, 1, 256, 256, 3).
        img = img[np.newaxis, np.newaxis, ...]
        observation = {
            "image_primary": img,
            # Single real (non-padded) timestep in the horizon.
            "timestep_pad_mask": np.array([[True]]),
        }
        # Build the language-conditioned task and sample actions,
        # un-normalizing with the bridge_dataset action statistics.
        task_obj = model.create_tasks(texts=[request.task])
        actions = model.sample_actions(
            observation,
            task_obj,
            unnormalization_statistics=model.dataset_statistics["bridge_dataset"]["action"],
            # NOTE: fixed seed makes sampling deterministic across requests.
            rng=jax.random.PRNGKey(0),
        )
        # Drop the batch dimension; keep the action horizon.
        actions = actions[0]
        # JAX/NumPy arrays are not JSON-serializable; convert to lists.
        actions_list = actions.tolist()
        return {"actions": actions_list}
    except Exception as e:
        raise HTTPException(status_code=500, detail=f"Error processing request: {str(e)}")