# Source: Hugging Face Space "Medical Assistant AI" (commit d01c13e, verified)
# NOTE(review): the lines above were a web-page scraping artifact
# ("Anshini's picture / Update app.py"), commented out so the file parses.
import os
import streamlit as st
# set_page_config must be the first Streamlit command executed in the script,
# which is why it sits between the import groups rather than below them.
st.set_page_config(page_title="Medical Assistant AI", page_icon="🩺")
from langchain_huggingface import HuggingFaceEndpoint, ChatHuggingFace
from langchain_core.messages import SystemMessage, HumanMessage, AIMessage
# Load the Hugging Face token from the environment (Spaces secret named "hf").
# os.getenv returns None when the secret is missing, and assigning None into
# os.environ raises TypeError — so fail fast with a readable error instead.
hf_token = os.getenv("hf")
if not hf_token:
    st.error("Hugging Face token not found. Set the 'hf' environment variable / Space secret.")
    st.stop()
os.environ["HUGGINGFACEHUB_API_TOKEN"] = hf_token
# Custom CSS: full-page background image, a translucent "main" card,
# a styled text input, and chat bubbles (green/right for the user,
# grey/left for the assistant).
_CUSTOM_CSS = """
<style>
body {
background-image: url('https://images.unsplash.com/photo-1588776814546-bb1a479f4c25');
background-size: cover;
background-attachment: fixed;
}
.main {
background-color: rgba(255, 255, 255, 0.9);
padding: 2rem;
border-radius: 20px;
}
.stTextInput>div>div>input {
background-color: #ffffffcc;
padding: 12px;
border-radius: 8px;
border: 1px solid #ccc;
}
.chat-bubble-user {
background-color: #DCF8C6;
padding: 10px 15px;
margin: 10px 0;
border-radius: 10px;
max-width: 80%;
margin-left: auto;
text-align: right;
}
.chat-bubble-ai {
background-color: #F1F0F0;
padding: 10px 15px;
margin: 10px 0;
border-radius: 10px;
max-width: 80%;
text-align: left;
}
</style>
"""
st.markdown(_CUSTOM_CSS, unsafe_allow_html=True)
# Initialize the LLaMA 3.1 chat model served through the Hugging Face
# Inference API. All generation settings live on the endpoint object;
# ChatHuggingFace is only a chat-message adapter around it, so the
# duplicated repo_id/provider/temperature/... kwargs were removed from it.
llama_endpoint = HuggingFaceEndpoint(
    repo_id="meta-llama/Llama-3.1-8B-Instruct",
    provider="novita",
    temperature=0.8,          # fairly creative; lower for more deterministic answers
    max_new_tokens=256,       # hard cap on response length
    task="conversational",
)
llama_model = ChatHuggingFace(llm=llama_endpoint)
# --- Page chrome -----------------------------------------------------------
st.markdown('<div class="main">', unsafe_allow_html=True)
st.title("🩺 Medical Assistant (LLaMA 3.1)")
st.markdown("Ask your medical queries below and get AI-powered advice.")

# --- Conversation memory ---------------------------------------------------
# Seed the history with the system prompt exactly once; Streamlit reruns the
# whole script on every interaction, so the guard keeps history across runs.
if "messages" not in st.session_state:
    st.session_state.messages = [
        SystemMessage(content="You are a highly experienced and qualified medical assistant with 10 years of experience in a busy family practice clinic. You are known for your professionalism, empathy, attention to detail, and ability to anticipate the needs of both patients and the doctor.")
    ]

# --- Input box -------------------------------------------------------------
# (Label emoji repaired: original had mojibake-damaged UTF-8.)
user_input = st.text_input("💬 You:", placeholder="E.g. What is paracetamol?", key="input")
# --- Handle a new submission -----------------------------------------------
# Streamlit reruns this script on every widget interaction and st.text_input
# keeps its last value, so without the "last processed" guard the same
# question would be re-appended to history and re-sent to the model on every
# rerun. Only process input we have not already answered.
if user_input and user_input != st.session_state.get("last_processed_input"):
    st.session_state.last_processed_input = user_input
    st.session_state.messages.append(HumanMessage(content=user_input))
    with st.spinner("💡 Thinking..."):
        # Send the full history (system prompt + turns) for conversational context.
        response = llama_model.invoke(st.session_state.messages)
    st.session_state.messages.append(AIMessage(content=response.content))

# --- Render chat history ---------------------------------------------------
# Skip index 0 (the system prompt). Emojis repaired from mojibake originals.
for msg in st.session_state.messages[1:]:
    if isinstance(msg, HumanMessage):
        st.markdown(f'<div class="chat-bubble-user">🧑‍⚕️ {msg.content}</div>', unsafe_allow_html=True)
    elif isinstance(msg, AIMessage):
        st.markdown(f'<div class="chat-bubble-ai">🤖 {msg.content}</div>', unsafe_allow_html=True)

# Close the <div class="main"> opened above.
st.markdown('</div>', unsafe_allow_html=True)