Spaces:
Sleeping
Sleeping
import os

import streamlit as st

# set_page_config must be the first Streamlit command executed in the script.
st.set_page_config(page_title="Medical Assistant AI", page_icon="🩺")

from langchain_huggingface import HuggingFaceEndpoint, ChatHuggingFace
from langchain_core.messages import SystemMessage, HumanMessage, AIMessage

# ✅ Load Hugging Face token securely from the environment.
hf_token = os.getenv("hf")
if not hf_token:
    # Guard: os.environ[...] = None raises a confusing TypeError at startup.
    # Fail fast with a readable message instead.
    st.error("Hugging Face token not found — set the 'hf' environment variable.")
    st.stop()
os.environ["HUGGINGFACEHUB_API_TOKEN"] = hf_token
# ✅ Custom CSS: full-page background image, translucent main panel, styled
# text input, and chat bubbles (user right-aligned green, assistant
# left-aligned grey). Injected via st.markdown with HTML rendering enabled.
_PAGE_CSS = """
<style>
body {
    background-image: url('https://images.unsplash.com/photo-1588776814546-bb1a479f4c25');
    background-size: cover;
    background-attachment: fixed;
}
.main {
    background-color: rgba(255, 255, 255, 0.9);
    padding: 2rem;
    border-radius: 20px;
}
.stTextInput>div>div>input {
    background-color: #ffffffcc;
    padding: 12px;
    border-radius: 8px;
    border: 1px solid #ccc;
}
.chat-bubble-user {
    background-color: #DCF8C6;
    padding: 10px 15px;
    margin: 10px 0;
    border-radius: 10px;
    max-width: 80%;
    margin-left: auto;
    text-align: right;
}
.chat-bubble-ai {
    background-color: #F1F0F0;
    padding: 10px 15px;
    margin: 10px 0;
    border-radius: 10px;
    max-width: 80%;
    text-align: left;
}
</style>
"""
# NOTE(review): recent Streamlit versions wrap the page in .stApp rather than
# styling body directly — confirm the background actually renders. TODO verify.
st.markdown(_PAGE_CSS, unsafe_allow_html=True)
# ✅ Initialize the model: a HuggingFaceEndpoint wrapped for chat use.
llama_endpoint = HuggingFaceEndpoint(
    repo_id="meta-llama/Llama-3.1-8B-Instruct",
    provider="novita",
    temperature=0.8,
    max_new_tokens=256,
    task="conversational",
)
# ChatHuggingFace only needs the wrapped endpoint; repo_id / provider /
# temperature / max_new_tokens / task are endpoint settings and were
# redundantly duplicated here in the original.
llama_model = ChatHuggingFace(llm=llama_endpoint)
# ✅ Streamlit interface (emoji restored from mojibake in the extracted source
# — presumed 🩺/💬 from the UTF-8 byte patterns; confirm against the original).
st.markdown('<div class="main">', unsafe_allow_html=True)
st.title("🩺 Medical Assistant (LLaMA 3.1)")
st.markdown("Ask your medical queries below and get AI-powered advice.")

# ✅ Conversation memory: seed session state once with the system prompt so it
# survives Streamlit reruns.
if "messages" not in st.session_state:
    st.session_state.messages = [
        SystemMessage(content="You are a highly experienced and qualified medical assistant with 10 years of experience in a busy family practice clinic. You are known for your professionalism, empathy, attention to detail, and ability to anticipate the needs of both patients and the doctor.")
    ]

# ✅ Input box (keyed so the widget value persists in session state).
user_input = st.text_input("💬 You:", placeholder="E.g. What is paracetamol?", key="input")
# ✅ Handle input. st.text_input keeps its value across Streamlit reruns, so
# without this guard the same question would be re-appended and re-sent to the
# model on every subsequent widget interaction.
if user_input and user_input != st.session_state.get("last_processed_input"):
    st.session_state.last_processed_input = user_input
    st.session_state.messages.append(HumanMessage(content=user_input))
    with st.spinner("💡 Thinking..."):
        response = llama_model.invoke(st.session_state.messages)
        st.session_state.messages.append(AIMessage(content=response.content))

# ✅ Display chat history, skipping the system message at index 0.
# (Emoji restored from mojibake in the extracted source — confirm originals.)
for msg in st.session_state.messages[1:]:
    if isinstance(msg, HumanMessage):
        st.markdown(f'<div class="chat-bubble-user">🧑‍⚕️ {msg.content}</div>', unsafe_allow_html=True)
    elif isinstance(msg, AIMessage):
        st.markdown(f'<div class="chat-bubble-ai">🤖 {msg.content}</div>', unsafe_allow_html=True)

# Close the .main wrapper div opened above.
st.markdown('</div>', unsafe_allow_html=True)