Spaces:
Running
Running
File size: 7,512 Bytes
import os
import gradio as gr
#from langchain_openai import ChatOpenAI
from langchain_groq import ChatGroq
from langchain_core.runnables import Runnable
from chat_engine import conversation_prompt
from chat_engine import chapter_index
from chat_engine import tree_index_list
from chat_engine import select_index
from langchain_core.messages import SystemMessage, HumanMessage, AIMessage
from pathlib import Path
#from chat_engine import prompt_query
# Groq-hosted LLaMA model used to generate the final grounded answer.
# NOTE(review): assumes GROQ_API_KEY is set in the environment — os.getenv
# returns None otherwise and the client will fail at request time; confirm.
groq_llm = ChatGroq(
model_name="llama-3.3-70b-versatile",
temperature=0.2,
api_key=os.getenv('GROQ_API_KEY')
)
# Compose the conversation prompt template with the model into one runnable
# chain: response_chain.invoke({...}) renders the prompt then calls the LLM.
response_chain: Runnable = conversation_prompt | groq_llm
#groq_user_engg=ChatGroq(
# model_name="llama3-70b-8192",
# temperature=0,
# api_key=os.getenv("GROQ_API")
#)
#query_chain: Runnable = prompt_query | groq_user_engg
def gradio_chat(user_query, chat_history, index,
                tree_index_list=tree_index_list, chapter_index=chapter_index,
                response_chain=response_chain):
    """Answer a user question with RAG over the selected chapter.

    Retrieves the top chunks from the chapter's vector index, augments them
    with tree-index content (all leaves — i.e. the whole text — for poems,
    a root-level summary for other sections), then invokes the LLM chain.

    Args:
        user_query: Question typed by the user.
        chat_history: List of LangChain messages; mutated in place and
            also returned.
        index: Chapter name selected in the UI, resolved via select_index.
        tree_index_list / chapter_index / response_chain: bound at import
            time from chat_engine; exposed as parameters for testability.

    Returns:
        Tuple of (answer_text, updated_chat_history).
    """
    if chat_history is None:
        chat_history = []
    # Reject empty AND whitespace-only queries (the original only caught "",
    # and its returned string was inconsistent with the history message).
    if not user_query.strip():
        reply = "Kindly ask a question from the selected chapter."
        chat_history.append(HumanMessage(user_query))
        chat_history.append(AIMessage(reply))
        return reply, chat_history

    vector_index = select_index(index)
    retrieved_nodes = vector_index.as_retriever(similarity_top_k=2).retrieve(user_query)
    meta = retrieved_nodes[0].metadata
    tree_index = tree_index_list[chapter_index[meta["chapter"]]]

    if meta["section"] == "poem":
        # Poems are short: pull every leaf node so the full poem is in context.
        retriever = tree_index.as_retriever(similarity_top_k=4,
                                            retriever_mode="all_leaf")
        body = " ".join(node.text.strip()
                        for node in retriever.retrieve("summarize the poem"))
    else:
        # Order the retrieved passages by page number so the context reads
        # in document order, then append a root-level chapter summary.
        passages = sorted(((n.metadata["page"], n.text) for n in retrieved_nodes),
                          key=lambda p: p[0])
        parts = [text for _, text in passages]
        retriever = tree_index.as_retriever(similarity_top_k=1,
                                            retriever_mode="root",
                                            search_kwargs={"num_children": 3})
        parts.extend(node.text.strip()
                     for node in retriever.retrieve("summarize this chapter"))
        body = "\n".join(parts)

    # Single context-header template (the original duplicated it per branch).
    context = ('Author: ' + meta['author'] + '\nSection: ' + meta['section'] +
               '\nChapter: ' + meta['chapter'] + '\nContext: ' + body)

    chat_history.append(HumanMessage(user_query))
    # Only the last 12 messages are sent to bound the prompt size.
    response = response_chain.invoke({"chat_history": chat_history[-12:],
                                      "user_query": user_query,
                                      "document_context": context})
    chat_history.append(AIMessage(response.content))
    return response.content, chat_history
#def prompt_engg(message, chain_history, index, previous_index="Broken Images"):
#if previous_index == index:
#index_change=0
#else:
#index_change=1
#question=query_chain.invoke({"user_query":message, "chat_history":chain_history, "index":index, "index_change":index_change})
#return question.content
def respond(message, chain_history, ui_history, index):
    """Gradio event handler: run one chat turn and update both histories.

    Args:
        message: Text from the query textbox.
        chain_history: gr.State list of LangChain messages.
        ui_history: gr.State list of {"role", "content"} dicts for display.
        index: Selected chapter name from the dropdown.

    Returns:
        ("", updated_chain_history, updated_ui_history) — the empty string
        clears the textbox; ui_history feeds the Chatbot component.
    """
    # Guard BEFORE appending: the original checked `ui_history is None` only
    # after calling ui_history.append, so the guard could never take effect
    # (a None history would already have raised AttributeError).
    if ui_history is None:
        ui_history = []
    ui_history.append({"role": "user", "content": message})
    response_text, updated_history = gradio_chat(message, chain_history, index=index)
    ui_history.append({"role": "assistant", "content": response_text})
    return "", updated_history, ui_history
def download_file(index):
    """Resolve the selected chapter name to its PDF path for the download button.

    `chapter_dir` is the module-level mapping defined inside the Blocks
    context below; it exists by the time this handler fires.
    """
    return chapter_dir[index]
# CSS injected into the app via gr.Blocks(css=custom_css) below; the ids
# match elem_id values given to the Chatbot and Markdown components.
custom_css = """
#chatbot_interface {
background: #f0f0f0;
padding: 20px;
border-radius: 10px;
}
/* Center the markdown text */
#welcome_markdown {
text-align: center;
margin: auto;
}
"""
# UI layout: welcome banner, chapter dropdown, chat window, query box,
# download button, and the two gr.State holders for the parallel histories.
with gr.Blocks(css=custom_css, fill_width=True) as demo:
    gr.Markdown("""
# I’m Shalini ☺️ #
This chatbot uses a Retrieval-Augmented Generation (RAG) pipeline, built for English textbook *Kaleidoscope* 📚.
How to use:
1️⃣ Pick a chapter 📂 2️⃣ Ask your question 💬 3️⃣ Get context-aware answers ⚡
Powered by LangChain 🛠️, Qdrant 🗄️, and LLaMA 🧠 .
""", elem_id="welcome_markdown")

    # Chapter name -> PDF path; also read by download_file() above.
    chapter_dir = {"Broken Images": "Dataset/Drama/Broken_images.pdf",
                   "Blood": "Dataset/Poems/Blood.pdf",
                   "Flim Making": "Dataset/non_fiction/Flim_making.pdf",
                   "Kubla Khan": "Dataset/Poems/Kubla_khan.pdf",
                   "One Centimeter": "Dataset/Stories/One_centimetre.pdf",
                   "I Sell My Dreams": "Dataset/Stories/I_sell_my_dreams.pdf",
                   "Poems By Blake": "Dataset/Poems/The_divine_image.pdf",
                   "Time and Time Again": "Dataset/Poems/Time_and_time_again.pdf",
                   "On Time": "Dataset/Poems/On_time.pdf",
                   "Trees": "Dataset/Poems/Trees_emily_dickinson.pdf",
                   "On Science Fiction": "Dataset/non_fiction/On_science_fiction.pdf",
                   "The Argumentative Indian": "Dataset/non_fiction/The_argumentative_indian.pdf",
                   "Why The Novel Matters": "Dataset/non_fiction/Why_the_novel_matters.pdf",
                   "Tomorrow": "Dataset/Stories/Tomorrow.pdf",
                   "A Lecture Upon The Shadow": "Dataset/Poems/A_lecture_upon_the_shadow.pdf",
                   "Freedom": "Dataset/non_fiction/Freedom_freedom.pdf",
                   "A Wedding in Brownsville": "Dataset/Stories/A_wedding_in_brownsville.pdf",
                   "Eveline": "Dataset/Stories/eveline.pdf",
                   "Chandalika": "Dataset/Drama/Chandalika.pdf",
                   "The Wild Swans At Coole": "Dataset/Poems/The_wild_swans_at_coole.pdf",
                   "The Mark On The Wall": "Dataset/non_fiction/The_mark_on_the_wall.pdf"}

    chatbot = gr.Chatbot(label="Chat Interface", elem_id="chatbot_interface",
                         type="messages")
    with gr.Row():
        # (Removed a dead `index = gr.State()` that was immediately
        # overwritten by this Dropdown in the original.)
        index = gr.Dropdown(
            choices=list(chapter_dir.keys()),
            label="Chapter",
            value="Broken Images",
            info="Select the chapter on which you would like to ask questions."
        )
        msg = gr.Textbox(label="Enter your query:",
                         placeholder="Type your question here...", lines=2)
        d = gr.DownloadButton("Download Selected Chapter", visible=True)
        index.change(fn=download_file, inputs=index, outputs=d)

    chain_history = gr.State([])  # LangChain message objects
    ui_history = gr.State([])     # display history: list of role/content dicts
    # NOTE(review): respond's third return value is routed to `chatbot`, not
    # back into the ui_history State — the State keeps working only because
    # respond mutates the same list object in place.
    gr.Button("Glide In🎨").click(respond, [msg, chain_history, ui_history, index],
                                  [msg, chain_history, chatbot])
    msg.submit(fn=respond, inputs=[msg, chain_history, ui_history, index],
               outputs=[msg, chain_history, chatbot])

# allowed_paths lets the DownloadButton serve files from the Dataset tree.
demo.launch(allowed_paths=["Dataset/Stories", "Dataset/Drama",
                           "Dataset/Poems", "Dataset/non_fiction"])