1MR committed on
Commit
e0ef6cf
Β·
verified Β·
1 Parent(s): 40ffe05

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +36 -5
app.py CHANGED
@@ -143,6 +143,34 @@ from langchain.chains import ConversationalRetrievalChain
143
  # )
144
 
145
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
146
  # # Set up memory for conversation history
147
  # memory = ConversationBufferMemory(
148
  # memory_key='chat_history', return_messages=True
@@ -185,6 +213,7 @@ def get_conversation_chain(vectorstore, tokenH):
185
 
186
  return conversation_chain
187
 
 
188
  def handle_userinput(user_question):
189
  # λŒ€ν™” 체인을 μ‚¬μš©ν•˜μ—¬ μ‚¬μš©μž μ§ˆλ¬Έμ— λŒ€ν•œ 응닡을 μƒμ„±ν•©λ‹ˆλ‹€.
190
  response = st.session_state.conversation({'question': user_question})
@@ -215,15 +244,19 @@ def main():
215
  st.header("Chat with multiple Files :",)
216
  st.header("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21")
217
  tokenH = st.text_input("Paste your TOKEN key (sk-...)", key="token_input_1")
 
 
 
 
 
 
218
  user_question = st.text_input("Ask a question about your documents:")
219
  if user_question:
220
  handle_userinput(user_question)
221
 
222
  with st.sidebar:
223
  tokenH_sidebar = st.text_input("Paste your TOKEN key (sk-...)", key="token_input_2")
224
- tokenH=tokenH_sidebar
225
- # if openai_key:
226
- # os.environ["OPENAI_API_KEY"] = openai_key
227
 
228
  st.subheader("Your documents")
229
  docs = st.file_uploader(
@@ -257,7 +290,5 @@ def main():
257
  # create conversation chain
258
  st.session_state.conversation = get_conversation_chain(vectorstore, tokenH)
259
 
260
-
261
-
262
  if __name__ == '__main__':
263
  main()
 
143
  # )
144
 
145
 
146
+ # # Set up memory for conversation history
147
+ # memory = ConversationBufferMemory(
148
+ # memory_key='chat_history', return_messages=True
149
+ # )
150
+
151
+ # # Create conversational retrieval chain
152
+ # conversation_chain = ConversationalRetrievalChain.from_llm(
153
+ # llm=llm,
154
+ # retriever=vectorstore.as_retriever(),
155
+ # memory=memory
156
+ # )
157
+
158
+ # return conversation_chain
159
+
160
+ # def get_conversation_chain(vectorstore, tokenH):
161
+ # if not tokenH:
162
+ # raise ValueError("API token is required to initialize the HuggingFaceHub model")
163
+
164
+ # # Initialize the HuggingFaceHub model with the correct token
165
+ # try:
166
+ # llm = HuggingFaceHub(
167
+ # repo_id="Qwen/Qwen2.5-7B-Instruct",
168
+ # token=tokenH,
169
+ # use_auth_token=True
170
+ # )
171
+ # except Exception as e:
172
+ # raise ValueError(f"Error initializing HuggingFaceHub model: {str(e)}")
173
+
174
  # # Set up memory for conversation history
175
  # memory = ConversationBufferMemory(
176
  # memory_key='chat_history', return_messages=True
 
213
 
214
  return conversation_chain
215
 
216
+
217
  def handle_userinput(user_question):
218
  # λŒ€ν™” 체인을 μ‚¬μš©ν•˜μ—¬ μ‚¬μš©μž μ§ˆλ¬Έμ— λŒ€ν•œ 응닡을 μƒμ„±ν•©λ‹ˆλ‹€.
219
  response = st.session_state.conversation({'question': user_question})
 
244
  st.header("Chat with multiple Files :",)
245
  st.header("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21")
246
  tokenH = st.text_input("Paste your TOKEN key (sk-...)", key="token_input_1")
247
+
248
+ # Validate if the token is empty
249
+ if not tokenH:
250
+ st.warning("Please enter a valid HuggingFace API token.")
251
+ return
252
+
253
  user_question = st.text_input("Ask a question about your documents:")
254
  if user_question:
255
  handle_userinput(user_question)
256
 
257
  with st.sidebar:
258
  tokenH_sidebar = st.text_input("Paste your TOKEN key (sk-...)", key="token_input_2")
259
+ tokenH = tokenH_sidebar or tokenH # fallback to main input if sidebar is empty
 
 
260
 
261
  st.subheader("Your documents")
262
  docs = st.file_uploader(
 
290
  # create conversation chain
291
  st.session_state.conversation = get_conversation_chain(vectorstore, tokenH)
292
 
 
 
293
  if __name__ == '__main__':
294
  main()