# ibani-model / app.py
# (Hugging Face Space file-viewer header, kept as a comment so the file parses)
# Author: williampepple1
# Commit b6f6301: "Remove anti-hallucination implementation and restore standard translation"
from fastapi import FastAPI, HTTPException
from pydantic import BaseModel
from transformers import pipeline
import torch
import os
# Version: 2026-01-15 - Updated model
# ---------------------------------------------------------------------------
# Application and model setup (runs once at import time)
# ---------------------------------------------------------------------------
app = FastAPI(title="Ibani Translator API")

# Hugging Face Hub repository the translation pipeline is served from.
MODEL_ID = "williampepple1/ibani-translator"

print(f"Loading model {MODEL_ID}...")


def _build_translator():
    """Create the translation pipeline, or return None when loading fails.

    A failed load leaves the web app running; /translate checks for None
    and answers 503 until the process is restarted with a working model.
    """
    try:
        # device=-1 pins inference to CPU, which is all the HF Spaces free
        # tier provides; force_download=True re-fetches the weights so the
        # latest pushed model revision is always served.
        loaded = pipeline(
            "translation",
            model=MODEL_ID,
            device=-1,
            model_kwargs={"force_download": True},
        )
    except Exception as e:  # startup boundary: log and degrade gracefully
        print(f"Error loading model: {e}")
        return None
    print("Model loaded successfully!")
    return loaded


translator = _build_translator()
class TranslationRequest(BaseModel):
    """Request body schema for POST /translate."""

    # Source-language text to translate; no length limit is enforced here.
    text: str
@app.get("/")
def read_root():
    """Liveness probe: confirm the API is up and report the served model id."""
    payload = {"status": "healthy", "model": MODEL_ID}
    return payload
@app.post("/translate")
async def translate(request: TranslationRequest):
    """Translate ``request.text`` using the globally loaded pipeline.

    Responds with the translation alongside the original text. Raises 503
    when the model failed to load at startup, and 500 when inference (or
    unpacking its output) errors.
    """
    # Guard clause: the pipeline may be None if startup loading failed.
    if translator is None:
        raise HTTPException(status_code=503, detail="Model not loaded")

    try:
        outputs = translator(request.text)
        # Pipeline returns a list of dicts; the first entry holds the text.
        translated = outputs[0]['translation_text']
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e))

    return {
        "translated_text": translated,
        "original_text": request.text,
    }