NavyDevilDoc committed on
Commit
161831e
·
verified ·
1 Parent(s): c174894

Update src/app.py

Browse files
Files changed (1) hide show
  1. src/app.py +26 -6
src/app.py CHANGED
@@ -237,19 +237,39 @@ with tab1:
237
 
238
  # RAG Search
239
  context_txt = ""
240
- sys_p = "You are a helpful assistant."
 
241
 
242
  if use_rag:
243
- with st.spinner("Searching DB..."):
244
  docs = rag_engine.search_knowledge_base(prompt, st.session_state.username)
245
  if docs:
 
 
 
246
  sys_p = (
247
- "You are a Navy Document Analyst. Answer using ONLY the Context below. "
248
- "If the answer is not in the context, say 'I cannot find that information'."
 
 
 
249
  )
250
- for d in docs: context_txt += f"\n---\n{d.page_content}"
251
 
252
- final_prompt = f"{prompt}\n\nCONTEXT:\n{context_txt}" if context_txt else prompt
 
 
 
 
 
 
 
 
 
 
 
 
 
 
253
 
254
  # Generation
255
  with st.chat_message("assistant"):
 
237
 
238
  # RAG Search
239
  context_txt = ""
240
+ # 1. Default System Prompt (No RAG)
241
+ sys_p = "You are a helpful AI assistant."
242
 
243
  if use_rag:
244
+ with st.spinner("Searching Knowledge Base..."):
245
  docs = rag_engine.search_knowledge_base(prompt, st.session_state.username)
246
  if docs:
247
+ # 2. Strict System Prompt (With RAG)
248
+ # We relax the strictness slightly to allow for inference,
249
+ # while still demanding evidence.
250
  sys_p = (
251
+ "You are a Navy Document Analyst. "
252
+ "You must answer the user's question based PRIMARILY on the provided Context. "
253
+ "If the Context contains the answer, output it clearly. "
254
+ "If the Context does NOT contain the answer, simply state: "
255
+ "'I cannot find that specific information in the documents provided.'"
256
  )
 
257
 
258
+ # 3. XML-Formatted Context Construction
259
+ # This helps the model "see" the start and end of each chunk clearly.
260
+ for i, d in enumerate(docs):
261
+ src = d.metadata.get('source', 'Unknown')
262
+ context_txt += f"<document index='{i+1}' source='{src}'>\n{d.page_content}\n</document>\n"
263
+
264
+ # 4. Construct Final User Payload
265
+ if context_txt:
266
+ final_prompt = (
267
+ f"User Question: {prompt}\n\n"
268
+ f"<context>\n{context_txt}\n</context>\n\n"
269
+ "Instruction: Answer the question using the context above."
270
+ )
271
+ else:
272
+ final_prompt = prompt
273
 
274
  # Generation
275
  with st.chat_message("assistant"):