korupolujayanth2004 committed on
Commit
0321a5b
·
1 Parent(s): b69f44b

Update main.py

Browse files
Files changed (1) hide show
  1. backend/main.py +6 -4
backend/main.py CHANGED
@@ -90,10 +90,12 @@ async def chat(question: str, session_id: str = ""):
90
  full_response = ""
91
  # Call the LLM client to get a streaming response
92
  for chunk in stream_llm_response(prompt=prompt_for_llm):
93
- if chunk.choices and chunk.choices[0].delta.content:
94
- token = chunk.choices.delta.content
95
- full_response += token
96
- yield token # Yield each token as it arrives
 
 
97
 
98
  # Store the full assistant response in chat history once complete
99
  store_chat_turn(session_id, "assistant", full_response, turn_number)
 
90
  full_response = ""
91
  # Call the LLM client to get a streaming response
92
  for chunk in stream_llm_response(prompt=prompt_for_llm):
93
+ if chunk.choices and len(chunk.choices) > 0:
94
+ choice = chunk.choices[0] # Get the first choice
95
+ if hasattr(choice, 'delta') and hasattr(choice.delta, 'content') and choice.delta.content:
96
+ token = choice.delta.content
97
+ full_response += token
98
+ yield token # Yield each token as it arrives
99
 
100
  # Store the full assistant response in chat history once complete
101
  store_chat_turn(session_id, "assistant", full_response, turn_number)