# Source: Hugging Face Space by dntwaritag — "Update app.py" (commit be9a1d2, verified).
# Embedding model used to vectorize both the indexed documents and incoming queries.
from langchain_huggingface import HuggingFaceEmbeddings
embed_model = HuggingFaceEmbeddings(model_name="mixedbread-ai/mxbai-embed-large-v1")
from langchain_core.prompts import PromptTemplate
from langchain_core.output_parsers import StrOutputParser
from langchain_core.runnables import RunnablePassthrough
import gradio as gr
import pandas as pd
from langchain_groq import ChatGroq
# Chroma is the vector store backing retrieval.
from langchain_chroma import Chroma
import os
# Create (or reopen) the persistent Chroma collection used for retrieval.
vectorstore = Chroma(
    collection_name="medical_dataset_store",
    embedding_function=embed_model,
    persist_directory="./",
)
# Load the drug side-effects dataset that serves as the retrieval context.
context = pd.read_csv("./drugs_side_effects_drugs_com.csv")
# BUG FIX: iterating a DataFrame yields its COLUMN NAMES, so the original
# `vectorstore.add_texts(context)` indexed only the header strings and none
# of the actual drug data. Serialize each row into one text document instead.
row_texts = [
    " | ".join(f"{col}: {val}" for col, val in row.items())
    for _, row in context.iterrows()
]
vectorstore.add_texts(row_texts)
retriever = vectorstore.as_retriever()
# Instructions framing the LLM as a pharmacology expert that answers
# strictly from the retrieved context.
_PROMPT_TEXT = """
You are a medical expert specializing in pharmacology.
Your task is to use the provided context to answer questions about drug side effects for patients.
Please follow these guidelines:
- Provide accurate and detailed answers based on the context.
- If you don't know the answer, clearly state that you don't know.
- Do not reference the context directly in your response; just provide the answer.
- Ensure your answers are clear, concise, and informative.
Context: {context}
Question: {question}
Answer:
"""

# Compile the text into a template with `context` and `question` slots.
rag_prompt = PromptTemplate.from_template(_PROMPT_TEXT)
# Groq-hosted Llama model; the API key is read from the `medibot` env var.
llm_model = ChatGroq(
    model="llama-3.3-70b-versatile",
    api_key=os.environ.get("medibot"),
)

# RAG pipeline: fetch context for the question, render the prompt,
# query the LLM, then reduce the response to a plain string.
rag_chain = (
    {"context": retriever, "question": RunnablePassthrough()}
    | rag_prompt
    | llm_model
    | StrOutputParser()
)
def rag_memory_stream(message, history):
    """Stream the RAG chain's answer, yielding the cumulative text so far.

    `history` is required by gr.ChatInterface's callback signature but is
    not used by the chain.
    """
    pieces = []
    for chunk in rag_chain.stream(message):
        pieces.append(chunk)
        yield "".join(pieces)
# Sample questions shown as clickable examples in the chat UI.
examples = [
    "What is a drug ?",
    "What are the side effects of lisinopril?"
]
description = "Real-Time AI-Powered Medical Assistant: Drug Side Effect Queries Chatbot"
title = "AI-Powered Medical Chatbot :) Try me!"

# Streaming chat UI wired to the RAG pipeline.
demo = gr.ChatInterface(fn=rag_memory_stream,
                        type="messages",
                        title=title,
                        description=description,
                        fill_height=True,
                        examples=examples,
                        theme="glass",
                        )

# BUG FIX: the original launched the app twice — unconditionally at import
# time with share=True, then again (without share) under the __main__ guard.
# Launch exactly once, only when run as a script, keeping the shareable link.
if __name__ == "__main__":
    demo.launch(share=True)