|
|
import os
import pickle
from operator import itemgetter

import gradio as gr

from langchain.schema.output_parser import StrOutputParser
from langchain_community.chat_message_histories import SQLChatMessageHistory
from langchain_core.prompts import ChatPromptTemplate, MessagesPlaceholder
from langchain_core.runnables import RunnablePassthrough
from langchain_core.runnables.history import RunnableWithMessageHistory
from langchain_groq import ChatGroq
|
|
|
|
|
|
|
|
# Load the prebuilt Qdrant vector store serialized to disk.
# SECURITY NOTE(review): pickle.load executes arbitrary code embedded in the
# file — only ever load a pickle this process (or a trusted pipeline) wrote.
with open("qdrant_vectorstore.pkl", "rb") as f:
    qdrant_vectorstore = pickle.load(f)
|
|
|
|
|
|
|
|
def echo_user_input(*args):
    """Answer one chat turn with a RAG response plus the retrieved context.

    Gradio's ChatInterface invokes this with (message, history); only the
    message (``args[0]``) is used here — conversational memory is handled by
    LangChain's SQL-backed message history instead of Gradio's history list.

    Returns:
        A markdown string combining the LLM answer and the raw retrieved
        passages.

    Raises:
        KeyError: if the ``GROQ_API_KEY`` environment variable is not set.
    """
    user_input = args[0]

    # Retrieve supporting passages for display. NOTE(review): this
    # similarity_search duplicates the retrieval the chain performs below via
    # qdrant_retriever; it is kept so the raw context can be shown verbatim.
    qdrant_retriever = qdrant_vectorstore.as_retriever()
    found_docs = qdrant_vectorstore.similarity_search(user_input)
    context_str = "".join(doc.page_content + "\n\n" for doc in found_docs)

    # Prompt: system instruction with retrieved {context}, prior turns, then
    # the current user message.
    prompt = ChatPromptTemplate.from_messages([
        ("system", "Act as a helpful AI Assistant. Here is some {context}"),
        MessagesPlaceholder(variable_name="history"),
        ("human", "{human_input}"),
    ])

    def get_session_history(session_id):
        # Per-session messages persist in a local SQLite database.
        return SQLChatMessageHistory(session_id, "sqlite:///memory.db")

    # SECURITY FIX: the original hard-coded a live API secret in source.
    # Secrets must come from the environment, never from version control.
    groq_api_key = os.environ["GROQ_API_KEY"]
    llm = ChatGroq(groq_api_key=groq_api_key, model_name="Gemma2-9b-It")

    # Chain: inject retrieved context, format the prompt, call the LLM,
    # parse the reply to a plain string.
    context = itemgetter("human_input") | qdrant_retriever
    first_step = RunnablePassthrough.assign(context=context)
    llm_chain = first_step | prompt | llm | StrOutputParser()

    conv_chain = RunnableWithMessageHistory(
        llm_chain,
        get_session_history,
        input_messages_key="human_input",
        history_messages_key="history",
    )

    # Fixed session id: every visitor shares one conversation history.
    # NOTE(review): derive this per user/session for multi-user deployments.
    session_id = "bond007"

    llm_response = conv_chain.invoke(
        {"human_input": user_input},
        {"configurable": {"session_id": session_id}},
    )

    # BUGFIX: the original f-string omitted the separator between the
    # response and the "Retrieved Context" header, fusing them together.
    combined_output = (
        f"**Summarized Response:**\n{llm_response}\n\n"
        f"**Retrieved Context:**\n{context_str}\n\n"
    )
    return combined_output
|
|
|
|
|
|
|
|
# Chat UI: Gradio hands each submitted message (plus its own history list)
# to the handler and renders whatever markdown string it returns.
interface = gr.ChatInterface(
    echo_user_input,
    title="MCG Demo",
    description="Type your question and press enter to see a conversational response. 🤖",
)
|
|
|
|
|
|
|
|
if __name__ == "__main__":
    # share=True asks Gradio to open a temporary public URL in addition to
    # the local server — the app becomes reachable from the internet.
    interface.launch(share=True)
|
|
|