TestAPI / app.py
Adedoyinjames's picture
Update app.py
5ea1723 verified
raw
history blame
1.06 kB
import os
from typing import Optional

from fastapi import FastAPI, HTTPException
from pydantic import BaseModel
# Runtime configuration, overridable via environment variables.
MODEL_ID = os.getenv("MODEL_ID", "gpt2")
CACHE_DIR = os.getenv("TRANSFORMERS_CACHE", "/app/.cache")
# ensure cache dir exists before transformers tries to download into it
os.makedirs(CACHE_DIR, exist_ok=True)
app = FastAPI(title="FastAPI Hugging Face Space")
# Populated by the startup hook below; stays None until the model is loaded.
generator = None
class GenerateRequest(BaseModel):
    """Request body for POST /generate."""
    # Text the model should continue.
    prompt: str
    # Maximum total length of the generated sequence; defaults to 64
    # in the /generate handler when omitted.
    max_length: Optional[int] = None
@app.on_event("startup")
async def load_model():
    """Load the text-generation pipeline once, when the app starts.

    NOTE(review): ``on_event`` is deprecated in recent FastAPI in favor of
    lifespan handlers — consider migrating when touching app construction.
    """
    global generator
    # Deferred import so transformers picks up the cache configured above.
    from transformers import pipeline as hf_pipeline

    generator = hf_pipeline("text-generation", model=MODEL_ID)
@app.get("/health")
async def health():
    """Liveness probe: report service status, model id, and cache path."""
    payload = {"status": "ok", "model": MODEL_ID, "cache": CACHE_DIR}
    return payload
@app.post("/generate")
async def generate(req: GenerateRequest):
    """Generate text from ``req.prompt`` using the startup-loaded pipeline.

    Returns ``{"generated_text": ...}`` with the first (and only) generated
    sequence. Responds 503 while the model is still loading.
    """
    if generator is None:
        # Fix: the original returned {"error": ...} with HTTP 200, which
        # clients could mistake for a successful generation. A 503 signals
        # "not ready yet" correctly and lets callers retry.
        raise HTTPException(status_code=503, detail="model not loaded yet")
    # Default generation length when the client does not supply one.
    max_len = req.max_length or 64
    out = generator(req.prompt, max_length=max_len, num_return_sequences=1)
    return {"generated_text": out[0]["generated_text"]}