Spaces:
Runtime error
Runtime error
import gradio as gr
import torch
from fastapi import FastAPI, HTTPException
from transformers import (
    AutoModel,
    AutoModelForSequenceClassification,
    AutoTokenizer,
)
# Initialize the FastAPI application.
app = FastAPI()

# Load the model and tokenizer from the Hugging Face Hub.
# NOTE: the bare AutoModel exposes only hidden states (its output has no
# `logits` attribute), so `predict()` below would raise AttributeError.
# A multilabel text classifier needs the sequence-classification head.
model_name = "ancerlop/ToxicBERTMultilabelTextClassification"
model = AutoModelForSequenceClassification.from_pretrained(model_name)
tokenizer = AutoTokenizer.from_pretrained(model_name)
# Prediction helper shared by the Gradio UI and the API route.
def predict(text):
    """Tokenize *text* and return the model's raw logits tensor.

    Runs under ``torch.no_grad()`` since this is pure inference —
    tracking gradients here would only waste memory and time.
    """
    inputs = tokenizer(text, return_tensors="pt")
    with torch.no_grad():
        outputs = model(**inputs)
    return outputs.logits
# Build the Gradio interface.
# `gr.inputs.Textboxbox()` was a typo (no such class exists), and the
# `gr.inputs` / `gr.outputs` namespaces were removed in Gradio 3.x+ —
# components now live directly on the `gr` module.
iface = gr.Interface(
    fn=predict,
    inputs=gr.Textbox(),
    outputs=gr.Label(num_top_classes=5),
    live=True,
    title="Modelo de Clasificación de Texto",
    description="Este modelo clasifica texto en diferentes categorías.",
)
# FastAPI route for programmatic predictions.
# The original function was never registered with the app — the route
# decorator was missing, so the endpoint did not actually exist.
@app.get("/predict")
def predict_api(text: str):
    """Return model predictions for *text* as a JSON-serializable dict.

    Raises:
        HTTPException: status 500 with the error message if inference fails.
    """
    try:
        result = predict(text)
        return {"predictions": result.tolist()}
    except Exception as e:  # surface inference errors to the client
        raise HTTPException(status_code=500, detail=str(e)) from e
# Mount Gradio onto FastAPI.
# `iface.launch()` would start Gradio's own standalone server, which
# conflicts with serving `app` through uvicorn below; `mount_gradio_app`
# attaches the UI to the existing ASGI application instead.
def gradio_interface():
    """Mount the Gradio interface on the FastAPI app and return the app."""
    return gr.mount_gradio_app(app, iface, path="/gradio")
if __name__ == "__main__":
    # Serve the FastAPI app with uvicorn when executed as a script
    # (not when the module is imported).
    import uvicorn

    uvicorn.run(app, host="0.0.0.0", port=8000)