paulinusjua committed on
Commit
068d18a
·
verified ·
1 Parent(s): 3850a48

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +25 -25
app.py CHANGED
@@ -91,7 +91,22 @@ llm_chain = create_stuff_documents_chain(llm=llm, prompt=qa_prompt)
91
 
92
  #rag_chain = create_retrieval_chain(history_aware_retriever, llm_chain)
93
 
94
- def truncate_history(chat_history, max_chars=4000):
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
95
  total = 0
96
  trimmed = []
97
  for q, a in reversed(chat_history):
@@ -102,46 +117,31 @@ def truncate_history(chat_history, max_chars=4000):
102
  total += pair_len
103
  return trimmed
104
 
 
105
def rag_tool_func(input_question: str, chat_history: list = None) -> str:
    """Answer *input_question* via a history-aware RAG chain.

    The question's language is detected and the matching FAISS index
    (English or French) is loaded from disk for the request.

    Args:
        input_question: The user's question.
        chat_history: Optional list of (question, answer) pairs from
            previous turns; ``None`` is treated as an empty history.

    Returns:
        The answer string produced by the retrieval chain.
    """
    # Detect language; anything that is not French falls back to English.
    lang = detect(input_question)
    lang = "fr" if lang == "fr" else "en"

    # Load the language-specific vector store.
    # NOTE(review): reloading the index on every call is expensive —
    # consider caching the retrievers at module level.
    retriever = FAISS.load_local(
        folder_path=INDEX_PATHS[lang],
        embeddings=embedding,
        allow_dangerous_deserialization=True
    ).as_retriever(search_kwargs={"k": 2})

    # Build the history-aware retriever and the QA chain for this request.
    history_aware_retriever = create_history_aware_retriever(
        llm, retriever, condense_question_prompt
    )
    llm_chain = create_stuff_documents_chain(llm=llm, prompt=qa_prompt)
    rag_chain = create_retrieval_chain(history_aware_retriever, llm_chain)

    # Guard the None default: truncate_history iterates its argument,
    # so passing None through unchanged would raise a TypeError.
    chat_history = truncate_history(chat_history or [])

    # Convert (question, answer) pairs into (role, content) messages.
    messages = []
    if isinstance(chat_history, list):
        for q, a in chat_history:
            messages.append(("human", q))
            messages.append(("ai", a))

    result = rag_chain.invoke({
        "input": input_question,
        "chat_history": messages
    })
    return result["answer"]
141
 
142
 
143
 
144
 
 
145
  chat_history = [] # Global chat history
146
 
147
  def chatbot_interface(user_input, history):
 
91
 
92
  #rag_chain = create_retrieval_chain(history_aware_retriever, llm_chain)
93
 
94
# Preload the per-language retrievers once at module import time so each
# request skips the expensive FAISS index deserialization step.
retrievers = {
    lang_code: FAISS.load_local(
        folder_path=INDEX_PATHS[lang_code],
        embeddings=embedding,
        allow_dangerous_deserialization=True
    ).as_retriever(search_kwargs={"k": 2})
    for lang_code in ("en", "fr")
}
107
+
108
+ # Truncate long history
109
+ def truncate_history(chat_history, max_chars=1500):
110
  total = 0
111
  trimmed = []
112
  for q, a in reversed(chat_history):
 
117
  total += pair_len
118
  return trimmed
119
 
120
# Simpler, faster RAG function
def rag_tool_func(input_question: str, chat_history: list = None) -> str:
    """Answer a question using the language-matched, preloaded RAG chain.

    Args:
        input_question: The user's question, in English or French.
        chat_history: Optional list of (question, answer) pairs from
            earlier turns; ``None`` is treated as an empty history.

    Returns:
        The answer string produced by the retrieval chain.
    """
    # Detect language; anything that is not French falls back to English.
    lang = detect(input_question)
    lang = "fr" if lang == "fr" else "en"

    retriever = retrievers[lang]

    # Trim the history so the prompt stays within the context budget.
    # Guard the None default: truncate_history iterates its argument,
    # so passing None through unchanged would raise a TypeError.
    chat_history = truncate_history(chat_history or [])

    # Flatten the (question, answer) pairs into a plain transcript string
    # for prompt context.
    history_str = ""
    if isinstance(chat_history, list):
        for q, a in chat_history:
            history_str += f"User: {q}\nAssistant: {a}\n"

    rag_chain = create_retrieval_chain(
        retriever, create_stuff_documents_chain(llm=llm, prompt=qa_prompt)
    )
    result = rag_chain.invoke({
        "input": input_question,
        "chat_history": history_str
    })
    return result["answer"]
140
 
141
 
142
 
143
 
144
+
145
  chat_history = [] # Global chat history
146
 
147
  def chatbot_interface(user_input, history):