NavyDevilDoc committed on
Commit
9938e85
·
verified ·
1 Parent(s): 4f42001

Update src/app.py

Browse files
Files changed (1) hide show
  1. src/app.py +9 -3
src/app.py CHANGED
@@ -329,8 +329,11 @@ with tab2:
329
  # --- CONTROLS AND METRICS ---
330
  c1, c2, c3 = st.columns([2, 1, 1])
331
  with c1:
332
- # Use the global model_choice
333
- selected_model_name = st.session_state.get('model_choice', 'Granite 4 (IBM)')
 
 
 
334
  with c2:
335
  use_rag = st.toggle("🔌 Enable Knowledge Base", value=False)
336
 
@@ -426,9 +429,12 @@ with tab2:
426
  model_id = val
427
  break
428
 
 
429
  if not model_id and "gpt" in selected_model_name.lower():
 
430
  response, usage = query_openai_model(messages_payload, max_len)
431
  elif model_id:
 
432
  response, usage = query_local_model(messages_payload, max_len, model_id)
433
  else:
434
  response, usage = "Error: Could not determine model to use.", None
@@ -455,7 +461,7 @@ with tab2:
455
  st.caption(f"Rank {i+1} (Source: {src}, Rel: {score})")
456
  st.text(doc.page_content)
457
  st.divider()
458
-
459
  # --- TAB 3: PROMPT ARCHITECT ---
460
  with tab3:
461
  st.header("🛠️ Mega-Prompt Factory")
 
329
  # --- CONTROLS AND METRICS ---
330
  c1, c2, c3 = st.columns([2, 1, 1])
331
  with c1:
332
+ # FIX: Access the correct key from the sidebar widget
333
+ # We default to the global variable 'model_choice' if state is missing
334
+ selected_model_name = st.session_state.get('model_selector_radio', model_choice)
335
+ st.caption(f"Active Model: **{selected_model_name}**")
336
+
337
  with c2:
338
  use_rag = st.toggle("🔌 Enable Knowledge Base", value=False)
339
 
 
429
  model_id = val
430
  break
431
 
432
+ # ROUTING CHECK
433
  if not model_id and "gpt" in selected_model_name.lower():
434
+ # If it's the GPT model choice
435
  response, usage = query_openai_model(messages_payload, max_len)
436
  elif model_id:
437
+ # If it's the local Ollama model
438
  response, usage = query_local_model(messages_payload, max_len, model_id)
439
  else:
440
  response, usage = "Error: Could not determine model to use.", None
 
461
  st.caption(f"Rank {i+1} (Source: {src}, Rel: {score})")
462
  st.text(doc.page_content)
463
  st.divider()
464
+
465
  # --- TAB 3: PROMPT ARCHITECT ---
466
  with tab3:
467
  st.header("🛠️ Mega-Prompt Factory")