vaishnav committed on
Commit
03d5ee5
·
1 Parent(s): d01d5bf

bug fix for respond

Browse files
Files changed (1) hide show
  1. app.py +1 -1
app.py CHANGED
@@ -20,7 +20,7 @@ service.scrape_and_get_store_vector_retriever(config.URLS)
20
  # Initialize the LLMService with logger, prompt, and store vector retriever
21
  llm_svc = LLMService(logger, config.SYSTEM_PROMPT, store.get_chroma_instance().as_retriever())
22
 
23
- def respond(user_input):
24
  if user_input == "clear_chat_history_aisdb_override":
25
  llm_svc.store={}
26
  response = llm_svc.conversational_rag_chain().invoke(
 
20
  # Initialize the LLMService with logger, prompt, and store vector retriever
21
  llm_svc = LLMService(logger, config.SYSTEM_PROMPT, store.get_chroma_instance().as_retriever())
22
 
23
+ def respond(user_input, history):
24
  if user_input == "clear_chat_history_aisdb_override":
25
  llm_svc.store={}
26
  response = llm_svc.conversational_rag_chain().invoke(