1MR committed on
Commit
4fe6caf
·
verified ·
1 Parent(s): ae9e9ac

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +19 -25
app.py CHANGED
@@ -121,31 +121,25 @@ def get_conversation_chain(vectorstore, tokenH):
121
  raise ValueError(f"Error generating response: {str(e)}")
122
 
123
  # Create conversational retrieval chain without memory
124
def conversation_chain(user_input):
    """Answer *user_input* using context retrieved by similarity search.

    Accepts either a raw question string or a dict payload of the form
    ``{"question": ...}``. Returns the assistant response produced by
    ``generate_response`` (closure from the enclosing scope).
    """
    # Normalize dict payloads down to the question string.
    if isinstance(user_input, dict):
        user_input = user_input.get('question', '')

    # Retrieve the top-5 most similar documents from the vector store.
    retriever = vectorstore.as_retriever(search_type="similarity", search_kwargs={"k": 5})  # Adjust the search arguments
    documents = retriever.get_relevant_documents(user_input)

    # FIX: LangChain retrievers return Document objects, not dicts —
    # doc['text'] raised TypeError; the text lives in doc.page_content.
    # Also removed the leftover st.write(documents) debug output and the
    # return of `chat_history`, which was never defined in this scope.
    documents_text = "\n".join(doc.page_content for doc in documents)

    # Prepare the messages for the InferenceClient: the user question plus
    # the retrieved context supplied as a system message.
    messages = [{"role": "user", "content": user_input}, {"role": "system", "content": documents_text}]
    response = generate_response(messages)

    # Return the response
    return response
149
 
150
  def handle_userinput(user_question):
151
  # λŒ€ν™” 체인을 μ‚¬μš©ν•˜μ—¬ μ‚¬μš©μž μ§ˆλ¬Έμ— λŒ€ν•œ 응닡을 μƒμ„±ν•©λ‹ˆλ‹€.
 
121
  raise ValueError(f"Error generating response: {str(e)}")
122
 
123
  # Create conversational retrieval chain without memory
124
def conversation_chain(user_input):
    """Generate a reply to *user_input* grounded in similarity-retrieved context.

    The input may be a plain question string or a dict shaped like
    ``{"question": ...}``; the retrieved document text is passed to the
    model as a system message alongside the user question.
    """
    # Unwrap dict payloads to the underlying question string.
    if isinstance(user_input, dict):
        user_input = user_input.get('question', '')

    # Fetch the five nearest documents from the vector store.
    retriever = vectorstore.as_retriever(search_type="similarity", search_kwargs={"k": 5})  # Adjust the search arguments
    documents = retriever.get_relevant_documents(user_input)

    # Collapse the retrieved chunks into one newline-separated context block.
    documents_text = "\n".join(doc.page_content for doc in documents)  # Use doc.page_content instead of doc['text']

    # Hand the question plus context to the InferenceClient and return its answer.
    messages = [{"role": "user", "content": user_input}, {"role": "system", "content": documents_text}]
    return generate_response(messages)
 
 
 
 
 
 
143
 
144
  def handle_userinput(user_question):
145
  # λŒ€ν™” 체인을 μ‚¬μš©ν•˜μ—¬ μ‚¬μš©μž μ§ˆλ¬Έμ— λŒ€ν•œ 응닡을 μƒμ„±ν•©λ‹ˆλ‹€.