Spaces:
Build error
Build error
Update app.py
Browse files
app.py
CHANGED
|
@@ -457,17 +457,6 @@ def refresh_faiss_index(documents=None):
|
|
| 457 |
|
| 458 |
|
| 459 |
|
| 460 |
-
def load_user_data_source(user_id):
    """Load a user's uploaded knowledge-base entries as Document objects.

    Reads the ``users/<user_id>/KnowledgeBase`` node from the Firebase
    database and wraps each stored entry's ``content`` field in a
    ``Document``. Returns an empty list when the user has no entries or
    when the lookup fails; failures are surfaced to the UI via st.error.
    """
    try:
        # Single round-trip to Firebase; .val() yields a dict of entries or None.
        raw_entries = db.child("users").child(user_id).child("KnowledgeBase").get().val()
        if not raw_entries:
            return []
        # Wrap each stored entry's text so downstream RAG code can consume it.
        return [Document(page_content=entry["content"]) for entry in raw_entries.values()]
    except Exception as e:
        # Best-effort boundary: report to the Streamlit UI and degrade to "no docs".
        st.error(f"Error loading user data source: {e}")
        return []
|
| 470 |
-
|
| 471 |
|
| 472 |
def update_message_counter():
|
| 473 |
remaining_messages = st.session_state["message_limit"] - st.session_state["used_messages"]
|
|
@@ -1083,29 +1072,36 @@ def google_search(query):
|
|
| 1083 |
return ["Error occurred during Google search"]
|
| 1084 |
|
| 1085 |
|
| 1086 |
-
|
| 1087 |
def rag_response(query):
|
| 1088 |
"""
|
| 1089 |
-
Handle queries by searching both static and dynamically uploaded knowledge
|
| 1090 |
"""
|
| 1091 |
try:
|
| 1092 |
-
#
|
| 1093 |
results = []
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1094 |
if "vector_store" in st.session_state:
|
| 1095 |
for vector_store in st.session_state["vector_store"].values():
|
| 1096 |
-
|
|
|
|
| 1097 |
|
| 1098 |
-
# Combine results into a context
|
| 1099 |
-
context = "\n".join([
|
| 1100 |
-
if not context:
|
| 1101 |
return "No relevant information found in the knowledge base."
|
| 1102 |
|
| 1103 |
# Generate AI response with the retrieved context
|
| 1104 |
prompt = f"Context:\n{context}\n\nQuestion: {query}\nAnswer:"
|
| 1105 |
-
llm = ChatOpenAI(model="gpt-4o", temperature=0.
|
| 1106 |
response = llm.invoke(prompt)
|
| 1107 |
|
| 1108 |
-
return response.content
|
| 1109 |
except Exception as e:
|
| 1110 |
logger.error(f"Error generating RAG response: {e}")
|
| 1111 |
return "An error occurred during the RAG response generation process."
|
|
|
|
| 457 |
|
| 458 |
|
| 459 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 460 |
|
| 461 |
def update_message_counter():
|
| 462 |
remaining_messages = st.session_state["message_limit"] - st.session_state["used_messages"]
|
|
|
|
| 1072 |
return ["Error occurred during Google search"]
|
| 1073 |
|
| 1074 |
|
|
|
|
| 1075 |
def rag_response(query):
    """
    Handle queries by searching both static and dynamically uploaded knowledge bases.
    """
    try:
        # Collect matches from every available knowledge source.
        retrieved = []

        # Static knowledge base: only consulted when the FAISS index was built.
        if "faiss_db" in st.session_state:
            retrieved.extend(search_knowledge_base(query))

        # Dynamic knowledge base: one vector store per uploaded source.
        if "vector_store" in st.session_state:
            for store in st.session_state["vector_store"].values():
                matches = store.similarity_search(query, k=3)  # Adjust `k` as needed
                retrieved.extend(matches)

        # Stitch the retrieved passages into one context string.
        context = "\n".join(doc.page_content for doc in retrieved)
        if not context.strip():
            return "No relevant information found in the knowledge base."

        # Ask the LLM to answer strictly from the retrieved context.
        prompt = f"Context:\n{context}\n\nQuestion: {query}\nAnswer:"
        llm = ChatOpenAI(model="gpt-4o", temperature=0.3, api_key=openai_api_key)
        answer = llm.invoke(prompt)

        return answer.content.strip()
    except Exception as e:
        # Log the full failure server-side; give the user a generic message.
        logger.error(f"Error generating RAG response: {e}")
        return "An error occurred during the RAG response generation process."
|