# ===============================================
# 🌸 BLOOM CHECK-IN QUALITY CLASSIFIER API (FastAPI)
# ===============================================
from fastapi import FastAPI, Request
from fastapi.responses import JSONResponse
from transformers import pipeline
import uvicorn

app = FastAPI(
    title="Bloom Check-in Quality Classifier",
    description="A FastAPI app that classifies check-ins as vague, neutral, or descriptive.",
    version="1.0.0",
)

# Load the Hugging Face text classification model once at startup.
# This automatically downloads the public model from the Hub on first run
# (requires network access; subsequent runs use the local cache).
classifier = pipeline("text-classification", model="user6295018/checkin-quality-classifier")

# Map raw model output labels (LABEL_0/1/2) to human-friendly names.
# Defined at module level so it is built once, not per request.
LABEL_MAP = {
    "LABEL_0": "vague",
    "LABEL_1": "neutral",
    "LABEL_2": "descriptive",
}


@app.get("/")
def read_root():
    """
    Root endpoint – simple health check.

    Returns:
        dict: A static message confirming the service is up.
    """
    return {"message": "Bloom Check-in Quality Classifier is running!"}


@app.post("/api/predict")
async def predict(request: Request):
    """
    Predict endpoint – classify input text using the fine-tuned model.

    Accepts a JSON object with either a "text" or "inputs" key, e.g.
    {"text": "I felt good today"}.

    Returns:
        dict: {"label": <vague|neutral|descriptive>, "score": <float>} on success.
        JSONResponse: {"error": <message>} with an appropriate HTTP status
        code (400 malformed JSON, 422 missing/invalid text, 500 inference
        failure) on error. The error body shape matches the previous
        behavior so existing clients that inspect the "error" key still work.
    """
    try:
        data = await request.json()
    except Exception:
        # Body was absent or not valid JSON.
        return JSONResponse(status_code=400, content={"error": "Invalid JSON body."})

    # Guard against valid JSON that is not an object (e.g. a bare list or
    # string) — .get() would raise AttributeError and surface as a 500.
    if not isinstance(data, dict):
        return JSONResponse(status_code=400, content={"error": "Invalid JSON body."})

    # Accept either {"text": "..."} or {"inputs": "..."}.
    text = data.get("text") or data.get("inputs")
    # Reject missing, non-string, or whitespace-only input.
    if not isinstance(text, str) or not text.strip():
        return JSONResponse(status_code=422, content={"error": "No text provided."})

    try:
        # Run model inference; the pipeline returns a list with one
        # {"label": ..., "score": ...} dict for a single input string.
        result = classifier(text)[0]

        # Fall back to the raw label if the model emits an unexpected one.
        label = LABEL_MAP.get(result["label"], result["label"])
        score = round(float(result["score"]), 3)

        return {"label": label, "score": score}
    except Exception as e:
        return JSONResponse(status_code=500, content={"error": f"Inference failed: {str(e)}"})


# Optional: local testing entry point (ignored on Spaces, where the
# platform launches the ASGI app itself).
if __name__ == "__main__":
    uvicorn.run(app, host="0.0.0.0", port=7860)