SarahXia0405 committed on
Commit
c2aa07b
·
verified ·
1 Parent(s): 73b3683

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +19 -8
app.py CHANGED
@@ -1,5 +1,7 @@
1
  import os
2
  import time
 
 
3
  import base64
4
  from collections import defaultdict
5
  from typing import List, Dict
@@ -999,7 +1001,8 @@ with gr.Blocks(
999
  segs["rag_retrieve_done"] = (time.perf_counter() - t_r0) * 1000.0
1000
 
1001
  # LLM (chat_with_clare must return 3 values)
1002
- t_llm0 = time.perf_counter()
 
1003
  answer, new_history, llm_stats = chat_with_clare(
1004
  message=message,
1005
  history=chat_history,
@@ -1012,8 +1015,15 @@ with gr.Blocks(
1012
  cognitive_state=cognitive_state,
1013
  rag_context=rag_context_text,
1014
  )
1015
- mark("llm_done")
1016
- segs["llm_done"] = (time.perf_counter() - t_llm0) * 1000.0
 
 
 
 
 
 
 
1017
 
1018
  # merge llm_stats into perf
1019
  perf = {
@@ -1239,17 +1249,18 @@ with gr.Blocks(
1239
  {
1240
  "experiment_id": experiment_id,
1241
  "student_id": student_id,
1242
- "event_type": "micro_quiz_start",
1243
- "timestamp": time.time(),
1244
- "latency_ms": perf["total_ms"],
1245
- "question": quiz_instruction,
1246
  "answer": answer,
1247
  "model_name": model_name_val,
1248
  "language": resolved_lang,
1249
  "learning_mode": mode_val,
1250
- "ttft_ms": perf.get("segments_ms", {}).get("llm_ttft_ms"),
1251
  }
1252
  )
 
1253
  except Exception as e:
1254
  print("log_event error:", e)
1255
 
 
1
  import os
2
  import time
3
+ import json
4
+
5
  import base64
6
  from collections import defaultdict
7
  from typing import List, Dict
 
1001
  segs["rag_retrieve_done"] = (time.perf_counter() - t_r0) * 1000.0
1002
 
1003
  # LLM (chat_with_clare must return 3 values)
1004
+ start_ts = time.time()
1005
+
1006
  answer, new_history, llm_stats = chat_with_clare(
1007
  message=message,
1008
  history=chat_history,
 
1015
  cognitive_state=cognitive_state,
1016
  rag_context=rag_context_text,
1017
  )
1018
+
1019
+ end_ts = time.time()
1020
+ latency_ms = (end_ts - start_ts) * 1000.0
1021
+
1022
+ # === optional: print profiled metrics locally ===
1023
+ try:
1024
+ print("[LLM_PROFILE] " + json.dumps(llm_stats, ensure_ascii=False))
1025
+ except Exception:
1026
+ pass
1027
 
1028
  # merge llm_stats into perf
1029
  perf = {
 
1249
  {
1250
  "experiment_id": experiment_id,
1251
  "student_id": student_id,
1252
+ "event_type": "chat_turn",
1253
+ "timestamp": end_ts,
1254
+ "latency_ms": latency_ms,
1255
+ "question": message,
1256
  "answer": answer,
1257
  "model_name": model_name_val,
1258
  "language": resolved_lang,
1259
  "learning_mode": mode_val,
1260
+ "llm_stats": llm_stats,
1261
  }
1262
  )
1263
+
1264
  except Exception as e:
1265
  print("log_event error:", e)
1266