Vrda committed on
Commit
a238159
·
verified ·
1 Parent(s): f19f4b8

Upload app.py with huggingface_hub

Browse files
Files changed (1) hide show
  1. app.py +36 -25
app.py CHANGED
@@ -17,6 +17,8 @@ FEEDBACK_FILE = Path(__file__).parent / "feedback_data.json"
17
  HF_DATASET_REPO = "Vrda/im-error-check-data"
18
  HF_DATASET_FILE = "feedback_data.json"
19
 
 
 
20
  # -------------------------------------------------------------------------
21
  # Feedback persistence (local + HF Hub sync)
22
  # -------------------------------------------------------------------------
@@ -101,27 +103,32 @@ st.set_page_config(
101
  st.markdown("""
102
  <style>
103
  .error-card {
104
- background: #fff5f5; border-left: 4px solid #e53e3e;
105
  border-radius: 8px; padding: 0.8rem 1rem; margin: 0.5rem 0;
 
106
  }
 
 
107
  .suggestion-card {
108
- background: #f0fff4; border-left: 4px solid #38a169;
109
  border-radius: 8px; padding: 0.8rem 1rem; margin: 0.5rem 0;
 
110
  }
 
111
  .model-header-a {
112
- background: #ebf8ff; border-left: 4px solid #3182ce;
113
  border-radius: 8px; padding: 0.6rem 1rem; margin-bottom: 0.5rem;
114
  }
115
  .model-header-b {
116
- background: #faf5ff; border-left: 4px solid #805ad5;
117
  border-radius: 8px; padding: 0.6rem 1rem; margin-bottom: 0.5rem;
118
  }
119
- .severity-critical { color: #c53030; font-weight: bold; }
120
- .severity-high { color: #dd6b20; font-weight: bold; }
121
- .severity-medium { color: #d69e2e; }
122
- .severity-low { color: #38a169; }
123
  .category-badge {
124
- display: inline-block; background: #edf2f7; color: #4a5568;
125
  padding: 2px 8px; border-radius: 12px; font-size: 0.8em; margin-right: 4px;
126
  }
127
  </style>
@@ -245,10 +252,14 @@ st.button("Analyze", type="primary", on_click=trigger_analysis)
245
  # Run analysis (progressive: show GPT-OSS first, DeepSeek when ready)
246
  # -------------------------------------------------------------------------
247
 
248
- def _run_deepseek_background(english_text: str):
249
- """Background thread: calls DeepSeek and stores result in session state."""
250
  result = call_model_a(english_text)
251
- st.session_state._deepseek_container["result"] = result
 
 
 
 
252
 
253
  if st.session_state.run_analysis and st.session_state.input_text.strip():
254
  st.session_state.run_analysis = False
@@ -257,6 +268,9 @@ if st.session_state.run_analysis and st.session_state.input_text.strip():
257
  st.session_state.total_elapsed = 0
258
  st.session_state._analysis_start = time.time()
259
 
 
 
 
260
  with st.spinner("Translating discharge letter..."):
261
  t0 = time.time()
262
  st.session_state.translated_text = translate_to_english(st.session_state.input_text)
@@ -264,10 +278,9 @@ if st.session_state.run_analysis and st.session_state.input_text.strip():
264
 
265
  english = st.session_state.translated_text
266
 
267
- container = {"result": None}
268
- st.session_state._deepseek_container = container
269
- thread = threading.Thread(target=_run_deepseek_background, args=(english,), daemon=True)
270
- st.session_state._deepseek_thread = thread
271
  thread.start()
272
 
273
  with st.spinner("GPT-OSS-120B responding (~5s)..."):
@@ -394,20 +407,18 @@ if has_any_result:
394
  else:
395
  @st.fragment(run_every=5)
396
  def _poll_deepseek():
397
- if (
398
- hasattr(st.session_state, "_deepseek_container")
399
- and st.session_state._deepseek_container["result"] is not None
400
- and st.session_state.model_a_result is None
401
- ):
402
- st.session_state.model_a_result = st.session_state._deepseek_container["result"]
403
  st.session_state.total_elapsed = round(
404
  time.time() - st.session_state._analysis_start, 2
405
  )
406
  st.rerun()
 
407
  st.markdown(
408
- '<div style="background:#f7fafc; border:2px dashed #cbd5e0; '
409
- 'border-radius:8px; padding:2rem; text-align:center; color:#718096;">'
410
- "<strong>DeepSeek Reasoner</strong> is still processing...<br>"
411
  "This typically takes 60-90 seconds.<br>"
412
  "Review and rate GPT-OSS results below while you wait."
413
  "</div>",
 
17
  HF_DATASET_REPO = "Vrda/im-error-check-data"
18
  HF_DATASET_FILE = "feedback_data.json"
19
 
20
+ _DEEPSEEK_RESULTS: dict[str, dict] = {}
21
+
22
  # -------------------------------------------------------------------------
23
  # Feedback persistence (local + HF Hub sync)
24
  # -------------------------------------------------------------------------
 
103
  st.markdown("""
104
  <style>
105
  .error-card {
106
+ background: #2d1f1f; border-left: 4px solid #e53e3e;
107
  border-radius: 8px; padding: 0.8rem 1rem; margin: 0.5rem 0;
108
+ color: #fde8e8;
109
  }
110
+ .error-card strong { color: #feb2b2; }
111
+ .error-card em { color: #fbd38d; }
112
  .suggestion-card {
113
+ background: #1a2e1a; border-left: 4px solid #38a169;
114
  border-radius: 8px; padding: 0.8rem 1rem; margin: 0.5rem 0;
115
+ color: #c6f6d5;
116
  }
117
+ .suggestion-card strong { color: #9ae6b4; }
118
  .model-header-a {
119
+ background: #1a2332; border-left: 4px solid #63b3ed;
120
  border-radius: 8px; padding: 0.6rem 1rem; margin-bottom: 0.5rem;
121
  }
122
  .model-header-b {
123
+ background: #2d1f3d; border-left: 4px solid #b794f4;
124
  border-radius: 8px; padding: 0.6rem 1rem; margin-bottom: 0.5rem;
125
  }
126
+ .severity-critical { color: #fc8181; font-weight: bold; }
127
+ .severity-high { color: #f6ad55; font-weight: bold; }
128
+ .severity-medium { color: #f6e05e; }
129
+ .severity-low { color: #68d391; }
130
  .category-badge {
131
+ display: inline-block; background: #2d3748; color: #e2e8f0;
132
  padding: 2px 8px; border-radius: 12px; font-size: 0.8em; margin-right: 4px;
133
  }
134
  </style>
 
252
  # Run analysis (progressive: show GPT-OSS first, DeepSeek when ready)
253
  # -------------------------------------------------------------------------
254
 
255
+ def _run_deepseek_background(session_key: str, english_text: str):
256
+ """Background thread: calls DeepSeek and stores result in module-level dict."""
257
  result = call_model_a(english_text)
258
+ _DEEPSEEK_RESULTS[session_key] = result
259
+
260
+ if "session_key" not in st.session_state:
261
+ import uuid
262
+ st.session_state.session_key = str(uuid.uuid4())
263
 
264
  if st.session_state.run_analysis and st.session_state.input_text.strip():
265
  st.session_state.run_analysis = False
 
268
  st.session_state.total_elapsed = 0
269
  st.session_state._analysis_start = time.time()
270
 
271
+ skey = st.session_state.session_key
272
+ _DEEPSEEK_RESULTS.pop(skey, None)
273
+
274
  with st.spinner("Translating discharge letter..."):
275
  t0 = time.time()
276
  st.session_state.translated_text = translate_to_english(st.session_state.input_text)
 
278
 
279
  english = st.session_state.translated_text
280
 
281
+ thread = threading.Thread(
282
+ target=_run_deepseek_background, args=(skey, english), daemon=True
283
+ )
 
284
  thread.start()
285
 
286
  with st.spinner("GPT-OSS-120B responding (~5s)..."):
 
407
  else:
408
  @st.fragment(run_every=5)
409
  def _poll_deepseek():
410
+ skey = st.session_state.session_key
411
+ if skey in _DEEPSEEK_RESULTS:
412
+ st.session_state.model_a_result = _DEEPSEEK_RESULTS.pop(skey)
 
 
 
413
  st.session_state.total_elapsed = round(
414
  time.time() - st.session_state._analysis_start, 2
415
  )
416
  st.rerun()
417
+ elapsed = round(time.time() - st.session_state._analysis_start)
418
  st.markdown(
419
+ '<div style="background:#1e293b; border:2px dashed #475569; '
420
+ 'border-radius:8px; padding:2rem; text-align:center; color:#e2e8f0;">'
421
+ f"<strong>DeepSeek Reasoner</strong> is still processing... ({elapsed}s)<br>"
422
  "This typically takes 60-90 seconds.<br>"
423
  "Review and rate GPT-OSS results below while you wait."
424
  "</div>",