bibibi12345 committed on
Commit
2cdaad6
·
1 Parent(s): 84bf36e

added some logging

Browse files
Files changed (1) hide show
  1. main.py +2 -2
main.py CHANGED
@@ -212,7 +212,7 @@ async def chat_completions(
212
 
213
  # Get the plain text response directly
214
  flowith_text = response.text
215
-
216
  # 7. Handle response based on *client's* request.stream preference
217
  if not request.stream:
218
  # Client wants non-streaming: Construct OpenAI-compatible JSON from plain text
@@ -258,7 +258,7 @@ async def chat_completions(
258
  )
259
  response.raise_for_status() # Raise HTTPError for bad responses (4xx or 5xx)
260
  flowith_text_local = response.text # Store in local variable
261
- print(f"response preview: {flowith_text_local[:500]}")
262
  except Exception as e:
263
  # print(f"Error fetching from Flowith: {e}") # Optional debug
264
  error_occurred = e # Store error to yield later
 
212
 
213
  # Get the plain text response directly
214
  flowith_text = response.text
215
+ print(f"response preview: {flowith_text[:500]}")
216
  # 7. Handle response based on *client's* request.stream preference
217
  if not request.stream:
218
  # Client wants non-streaming: Construct OpenAI-compatible JSON from plain text
 
258
  )
259
  response.raise_for_status() # Raise HTTPError for bad responses (4xx or 5xx)
260
  flowith_text_local = response.text # Store in local variable
261
+
262
  except Exception as e:
263
  # print(f"Error fetching from Flowith: {e}") # Optional debug
264
  error_occurred = e # Store error to yield later