NavyDevilDoc committed on
Commit
b62f4f4
·
verified ·
1 Parent(s): f8cbd02

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +24 -4
app.py CHANGED
@@ -190,13 +190,33 @@ if query:
190
 
191
  # --- AI SUMMARY SECTION ---
192
  with st.container():
193
- st.markdown("### 🤖 Executive Summary")
194
  st.caption(f"Analyzing primary source: {top_match['source']}")
195
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
196
  if st.button("✨ Generate Assessment"):
197
- with st.spinner("Sending Data to Selected Model..."):
198
- # Call our separated LLM client
199
- response = ask_granite(query, full_doc_text)
 
200
 
201
  st.markdown("---")
202
  st.markdown(response)
 
190
 
191
  # --- AI SUMMARY SECTION ---
192
  with st.container():
193
+ st.markdown("### 🤖 Intelligence Hub")
194
  st.caption(f"Analyzing primary source: {top_match['source']}")
195
 
196
+ # LAYOUT: Two columns for controls
197
+ col1, col2 = st.columns(2)
198
+
199
+ with col1:
200
+ # The "Deep Dive" Selector
201
+ analysis_mode = st.selectbox(
202
+ "Select Analysis Type:",
203
+ ["Executive Summary", "Action Plan", "Risk Assessment", "Socratic Review"]
204
+ )
205
+
206
+ with col2:
207
+ # The "Brain" Selector
208
+ model_choice = st.selectbox(
209
+ "Select Model:",
210
+ ["Gemini (Cloud - Smartest)", "Granite (Private Space)"]
211
+ )
212
+ # Map the UI text to the backend key
213
+ provider = "Gemini" if "Gemini" in model_choice else "Granite"
214
+
215
  if st.button("✨ Generate Assessment"):
216
+ with st.spinner(f"Consulting {provider} via {analysis_mode}..."):
217
+
218
+ # Call the client with the new parameters
219
+ response = ask_llm(query, full_doc_text, mode=analysis_mode, model_provider=provider)
220
 
221
  st.markdown("---")
222
  st.markdown(response)