# chatbot/src/streamlit_app.py — Streamlit medical-chatbot demo
import streamlit as st
import torch
from transformers import pipeline
# —————————————————————
# Models
# —————————————————————
# Hugging Face Hub model IDs. The primary model is tried first; the
# fallback is only loaded if the primary fails (see load_pipeline).
PRIMARY_MODEL = "AventIQ-AI/t5-medical-chatbot"
FALLBACK_MODEL = "Anjanams04/healthbot"
@st.cache_resource
def load_pipeline(model_name: str):
    """Build a cached text2text-generation pipeline for *model_name*.

    Runs on GPU (device 0) when CUDA is available, otherwise on CPU.
    Returns None if loading fails; note that st.cache_resource caches
    that None as well, so a failed load is not retried until restart.
    """
    gpu_device = 0 if torch.cuda.is_available() else -1
    try:
        return pipeline(
            "text2text-generation",
            model=model_name,
            tokenizer=model_name,
            device=gpu_device,
        )
    except Exception as exc:
        # Surface the failure in the UI; caller decides on a fallback.
        st.error(f"Error loading {model_name}: {exc}")
        return None
# —————————————————————
# Streamlit UI
# —————————————————————
# Fix: user-facing strings contained mojibake ("β€”", "πŸ’‘") from a bad
# UTF-8 round-trip; restored to the intended "—" and "💡".
st.set_page_config(page_title="Medical Chatbot Demo", page_icon="🩺")
st.title("🩺 Medical Chatbot Demo")
st.caption("For demonstration only — not medical advice.")

symptoms = st.text_area("Describe your symptoms:", value="I have flu, body pain and runny nose")

if st.button("Get Suggestion"):
    if not symptoms.strip():
        st.warning("Please enter symptoms.")
    else:
        # Try the primary model; fall back only if it failed to load.
        med_pipe = load_pipeline(PRIMARY_MODEL)
        if med_pipe is None:
            med_pipe = load_pipeline(FALLBACK_MODEL)
        if med_pipe is None:
            st.error("Could not load any model.")
        else:
            # Deterministic generation (do_sample=False) so repeated
            # queries with the same symptoms give the same answer.
            prompt = f"Patient symptoms: {symptoms}\nProvide: 1) Likely condition(s) 2) Non-prescription remedies 3) Precautions & red flags 4) Disclaimer."
            try:
                result = med_pipe(prompt, max_length=200, do_sample=False)
                reply = result[0]["generated_text"].strip()
                st.subheader("💡 Suggested Response")
                st.write(reply)
            except Exception as e:
                st.error(f"Model inference failed: {e}")