# app.py — Kumaoni translator Space (author: dlucidone, commit 470baba, verified)
import json

import gradio as gr
from fastapi import FastAPI, Request
from gradio.routes import mount_gradio_app
from huggingface_hub import InferenceClient
# Shared Inference API client bound to the fine-tuned Kumaoni mBART LoRA model.
client = InferenceClient("dlucidone/kumaoni-mbart-lora")
def translate(text: str) -> str:
    """Translate *text* to Kumaoni via the hosted mBART LoRA model.

    Returns the model's generated text, a prompt message for blank
    input, or an ``"Error: ..."`` string if the inference call fails
    (the broad catch keeps the UI/API responsive on any failure).
    """
    if not text.strip():
        return "Please enter text."
    try:
        # InferenceClient.post returns the raw response body as bytes,
        # so decode the JSON payload before inspecting its shape.
        response = client.post(json={"inputs": text})
        if isinstance(response, (bytes, bytearray)):
            response = json.loads(response)
        # Text-generation pipelines usually return [{"generated_text": ...}].
        if isinstance(response, list) and response and "generated_text" in response[0]:
            return response[0]["generated_text"]
        # Some deployments return a bare dict instead of a one-element list.
        if isinstance(response, dict) and "generated_text" in response:
            return response["generated_text"]
        return str(response)
    except Exception as e:
        return f"Error: {e}"
# FastAPI application that hosts both the JSON API and the mounted Gradio UI.
api = FastAPI()
# Programmatic JSON endpoint mirroring the Gradio UI.
@api.post("/predict")
async def predict(request: Request):
    """Accept ``{"text": ...}`` JSON and respond with ``{"translation": ...}``."""
    payload = await request.json()
    source_text = payload.get("text", "")
    return {"translation": translate(source_text)}
# Gradio UI for interactive human testing of the same translate() function.
gr_app = gr.Interface(fn=translate, inputs="text", outputs="text", title="Kumaoni Translator")
# Serve the Gradio app at "/" on top of the FastAPI instance (API routes keep working).
app = mount_gradio_app(api, gr_app, path="/")
if __name__ == "__main__":
    # Dev/local entry point; 7860 is the default port for Hugging Face Spaces.
    import uvicorn
    uvicorn.run(app, host="0.0.0.0", port=7860)