andreska committed on
Commit
5ff5053
·
1 Parent(s): ed8a0c9

Bug 20523: Web.Help: AI Chat stopped working. Another try to get this working on HF Spaces

Browse files
Files changed (2) hide show
  1. .gitignore +1 -0
  2. app.py +7 -6
.gitignore CHANGED
@@ -1,2 +1,3 @@
1
  AdregaAiChat61_Environment/
2
  .env
 
 
1
  AdregaAiChat61_Environment/
2
  .env
3
+ venv/
app.py CHANGED
@@ -17,6 +17,7 @@ from langchain_together import Together
17
  from langchain.prompts.chat import (
18
  ChatPromptTemplate, SystemMessagePromptTemplate, HumanMessagePromptTemplate,
19
  )
 
20
 
21
  # Load environment variables
22
  load_dotenv()
@@ -257,10 +258,8 @@ def render_chat():
257
  else:
258
  if in_ul_list:
259
  processed_lines.append("</ul>")
260
- in_ul_list = False
261
  if in_ol_list:
262
  processed_lines.append("</ol>")
263
- in_ol_list = False
264
  processed_lines.append(line)
265
 
266
  if in_ul_list:
@@ -319,6 +318,11 @@ def handle_submit():
319
  full_prompt = f"{system_prompt}\n\nConversation History:\n{context}\n\nUser: {user_input}\n\nAnswer directly without any reasoning or explanation of your thought process:"
320
 
321
  def call_llm(llm, prompt):
 
 
 
 
 
322
  # prefer synchronous call
323
  try:
324
  return llm(prompt)
@@ -339,9 +343,6 @@ def handle_submit():
339
  answer = call_llm(llm, full_prompt)
340
 
341
 
342
- answer = call_llm(llm, full_prompt)
343
-
344
-
345
  # Extract content from response object
346
  if hasattr(answer, "content"):
347
  answer = answer.content
@@ -413,4 +414,4 @@ if st.button("Ask"):
413
 
414
  # 🔹 Display chat history in container
415
  with chat_container:
416
- render_chat()
 
17
  from langchain.prompts.chat import (
18
  ChatPromptTemplate, SystemMessagePromptTemplate, HumanMessagePromptTemplate,
19
  )
20
+ from langchain_core.messages import HumanMessage
21
 
22
  # Load environment variables
23
  load_dotenv()
 
258
  else:
259
  if in_ul_list:
260
  processed_lines.append("</ul>")
 
261
  if in_ol_list:
262
  processed_lines.append("</ol>")
 
263
  processed_lines.append(line)
264
 
265
  if in_ul_list:
 
318
  full_prompt = f"{system_prompt}\n\nConversation History:\n{context}\n\nUser: {user_input}\n\nAnswer directly without any reasoning or explanation of your thought process:"
319
 
320
  def call_llm(llm, prompt):
321
+ # Convert prompt to proper message format
322
+ if isinstance(prompt, str):
323
+ # For string prompts, create a proper message object
324
+ prompt = [HumanMessage(content=prompt)]
325
+
326
  # prefer synchronous call
327
  try:
328
  return llm(prompt)
 
343
  answer = call_llm(llm, full_prompt)
344
 
345
 
 
 
 
346
  # Extract content from response object
347
  if hasattr(answer, "content"):
348
  answer = answer.content
 
414
 
415
  # 🔹 Display chat history in container
416
  with chat_container:
417
+ render_chat()