IMHamza101 committed on
Commit
cb2694d
·
verified ·
1 Parent(s): cd4b788

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +6 -18
app.py CHANGED
@@ -7,7 +7,6 @@ from typing import List
7
  from langchain.agents.middleware import dynamic_prompt, ModelRequest
8
  from langchain.agents import create_agent
9
  from langchain_core.documents import Document
10
- from langchain_core.runnables import chain
11
 
12
  import gradio as gr
13
  import os
@@ -79,7 +78,7 @@ def initialize_vector_store(documents: List[Document]):
79
  vector_store = Milvus(
80
  embedding_function=embeddings,
81
  connection_args={"uri": uri},
82
- index_params={"index_type": "FLAT", "metric_type": "L2"},
83
  drop_old=True
84
  )
85
 
@@ -89,15 +88,8 @@ def initialize_vector_store(documents: List[Document]):
89
  return vector_store
90
 
91
  # -----------------------------
92
- # Retriever
93
  # -----------------------------
94
- @chain
95
- def create_retriever(vector_store):
96
- """Create a retriever function with the vector store."""
97
- def retriever(query: str) -> List[Document]:
98
- return vector_store.similarity_search(query, k=K_RETRIEVE)
99
- return retriever
100
-
101
  def format_context(docs: List[Document]) -> str:
102
  """
103
  Format retrieved documents with citations.
@@ -149,7 +141,7 @@ def create_prompt_middleware(vector_store):
149
  last_message = request.state["messages"][-1]
150
  last_query = getattr(last_message, "text", None) or getattr(last_message, "content", "")
151
 
152
- # Retrieve relevant documents
153
  retrieved_docs = vector_store.similarity_search(last_query, k=K_RETRIEVE)
154
  docs_content = format_context(retrieved_docs)
155
 
@@ -206,15 +198,11 @@ def create_chat_function(agent):
206
  last_message = step["messages"][-1]
207
  results.append(last_message)
208
 
209
- # Extract response content
210
- # Try the standard approach first
211
- if len(results) > 1 and hasattr(results[1], 'content'):
212
- return results[1].content
213
-
214
- # Fallback: search through results for content
215
  for msg in reversed(results):
216
  content = getattr(msg, "content", None)
217
- if content:
218
  return content
219
 
220
  return "I apologize, but I couldn't generate a response. Please try rephrasing your question."
 
7
  from langchain.agents.middleware import dynamic_prompt, ModelRequest
8
  from langchain.agents import create_agent
9
  from langchain_core.documents import Document
 
10
 
11
  import gradio as gr
12
  import os
 
78
  vector_store = Milvus(
79
  embedding_function=embeddings,
80
  connection_args={"uri": uri},
81
+ index_params={"index_type": "FLAT", "metric_type": "COSINE"}, # COSINE for semantic similarity
82
  drop_old=True
83
  )
84
 
 
88
  return vector_store
89
 
90
  # -----------------------------
91
+ # Context Formatting
92
  # -----------------------------
 
 
 
 
 
 
 
93
  def format_context(docs: List[Document]) -> str:
94
  """
95
  Format retrieved documents with citations.
 
141
  last_message = request.state["messages"][-1]
142
  last_query = getattr(last_message, "text", None) or getattr(last_message, "content", "")
143
 
144
+ # Retrieve relevant documents directly from vector store
145
  retrieved_docs = vector_store.similarity_search(last_query, k=K_RETRIEVE)
146
  docs_content = format_context(retrieved_docs)
147
 
 
198
  last_message = step["messages"][-1]
199
  results.append(last_message)
200
 
201
+ # Extract the latest assistant response
202
+ # Search from the end for the most recent content
 
 
 
 
203
  for msg in reversed(results):
204
  content = getattr(msg, "content", None)
205
+ if content and content.strip(): # Ensure non-empty content
206
  return content
207
 
208
  return "I apologize, but I couldn't generate a response. Please try rephrasing your question."