Spaces:
Running
Running
File size: 4,502 Bytes
a9e4252 |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 133 134 135 136 137 138 139 140 141 |
from fastapi import FastAPI, UploadFile, File, HTTPException
from fastapi.middleware.cors import CORSMiddleware
from pydantic import BaseModel
from typing import List, Optional
import os
# Import your agent directly (same repo)
from nivra_agent import nivra_chat
# FastAPI application instance; metadata shows up in the auto-generated /docs UI.
app = FastAPI(
    title="Nivra AI Healthcare Assistant API",
    description="🩺 India-first AI Healthcare Assistant with ClinicalBERT + Groq",
    version="1.0.0"
)
# CORS for Flutter app (production-ready)
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],  # Lock this to your Flutter app domain in production
    allow_credentials=True,  # NOTE(review): credentials + wildcard origin is rejected by browsers — confirm intended
    allow_methods=["*"],
    allow_headers=["*"],
)
class SymptomInput(BaseModel):
    """Request body for POST /diagnose/text."""

    symptoms: List[str] = []  # free-text symptom strings; pydantic copies this default per instance
    language: str = "en"  # language code; embedded in the returned TTS audio_url
    age: Optional[int] = None  # patient age in years, if known
    gender: Optional[str] = None  # free-form gender string, if known
class DiagnosisResponse(BaseModel):
    """Response body for POST /diagnose/text."""

    diagnosis: str  # raw text reply from the nivra_chat agent
    confidence: float = 0.85  # static placeholder — not computed from the model
    recommendations: str = ""  # generic follow-up guidance
    urgency: str = "low"  # "low" | "medium" | "critical", keyword-derived
    audio_url: Optional[str] = None  # TTS endpoint URL for the requested language
    success: bool = True
def _classify_urgency(diagnosis: str) -> str:
    """Map keywords in the agent's reply to a coarse urgency level.

    Returns "critical" for emergency language, "medium" for referral
    language, and "low" otherwise. Matching is case-insensitive.
    """
    text = diagnosis.lower()  # lowercase once instead of per keyword scan
    if any(word in text for word in ("critical", "emergency", "severe")):
        return "critical"
    if any(word in text for word in ("consult doctor", "see specialist")):
        return "medium"
    return "low"


@app.post("/diagnose/text", response_model=DiagnosisResponse)
async def diagnose_text_symptoms(input: SymptomInput):
    """
    Main App endpoint - Text-based symptom diagnosis
    Calls Nivra AI Agent for diagnosis via text

    Raises:
        HTTPException(400): if no symptoms were supplied.
        HTTPException(500): if the agent call fails.
    """
    # NOTE: parameter name `input` shadows the builtin; kept for compatibility.
    if not input.symptoms:
        # Fail fast instead of sending a symptom-free prompt to the LLM.
        raise HTTPException(status_code=400, detail="At least one symptom is required.")
    try:
        # `is not None` so a valid age of 0 (newborn) is not reported as unknown.
        age_text = input.age if input.age is not None else "unknown"
        symptoms_text = (
            f"Patient age: {age_text} {input.gender or 'unknown'}, "
            f"symptoms: {', '.join(input.symptoms)}"
        )
        # Call the existing nivra_chat agent directly (no HTTP hop).
        diagnosis = nivra_chat(symptoms_text)
        return DiagnosisResponse(
            diagnosis=diagnosis,
            confidence=0.85,
            recommendations="Follow the guidance above. Consult a doctor if symptoms worsen.",
            urgency=_classify_urgency(diagnosis),
            audio_url=f"https://huggingface.co/spaces/nivra/tts/{input.language}",  # TTS endpoint
            success=True
        )
    except HTTPException:
        raise  # don't wrap our own 4xx in a 500
    except Exception as e:
        raise HTTPException(
            status_code=500,
            detail=f"Diagnosis failed: {str(e)}"
        )
@app.post("/diagnose/image")
async def diagnose_image_symptoms(
    file: UploadFile = File(...),
    age: Optional[int] = None,
    gender: Optional[str] = None
):
    """
    Image-based diagnosis endpoint
    Uses your image_symptom_tool.py

    Saves the upload to /tmp, passes its path to the agent, and removes the
    file afterwards.

    Raises:
        HTTPException(500): if saving the file or the agent call fails.
    """
    image_path: Optional[str] = None
    try:
        # basename() strips any client-supplied directory components
        # (e.g. "../../etc/x") — prevents path traversal outside /tmp.
        safe_name = os.path.basename(file.filename or "upload.bin")
        image_path = os.path.join("/tmp", safe_name)
        with open(image_path, "wb") as f:
            f.write(await file.read())
        # Call the agent with image context.
        prompt = f"Patient image analysis: {image_path}"
        # `is not None` so a valid age of 0 still adds patient context.
        if age is not None or gender:
            prompt += f"\nPatient: {age}yo {gender}"
        diagnosis = nivra_chat(prompt)
        return {
            "diagnosis": diagnosis,
            "type": "image_analysis",
            "success": True
        }
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e))
    finally:
        # Best-effort cleanup: the original leaked one file per request.
        if image_path and os.path.exists(image_path):
            try:
                os.remove(image_path)
            except OSError:
                pass
@app.get("/")
async def root():
    """Root endpoint - API info.

    Returns a static summary of the service and a map of available routes.
    """
    available_routes = {
        "text_diagnosis": "/diagnose/text",
        "image_diagnosis": "/diagnose/image",
        "health_check": "/health",
        "docs": "/docs",
    }
    return {
        "message": "🩺 Nivra AI Healthcare API",
        "version": "1.0.0",
        "endpoints": available_routes,
    }
@app.get("/health")
async def health_check():
    """Health check for monitoring.

    Always reports healthy with a static list of the models in the stack.
    """
    model_stack = ["ClinicalBERT", "Groq LLM", "Indic Parler-TTS"]
    return {
        "status": "healthy",
        "agent": "nivra_chat loaded",
        "models": model_stack,
    }
# Environment info (useful for debugging on HF Spaces)
@app.get("/info")
async def system_info():
    """System information.

    Reads Hugging Face Spaces metadata from environment variables,
    falling back to placeholder values when they are unset.
    """
    env = os.getenv
    return {
        "space_author": env("SPACE_AUTHOR_NAME", "unknown"),
        "space_repo": env("SPACE_REPO_NAME", "unknown"),
        "space_id": env("SPACE_ID", "unknown"),
        "host": env("SPACE_HOST", "localhost"),
    }
|