Wall06 committed on
Commit
41478c4
·
verified ·
1 Parent(s): 322bd19

Update src/streamlit_app.py

Browse files
Files changed (1) hide show
  1. src/streamlit_app.py +72 -55
src/streamlit_app.py CHANGED
@@ -2,76 +2,93 @@ import streamlit as st
2
  import os
3
  from google import genai
4
  from google.genai import types
 
5
 
6
- st.set_page_config(page_title="Gemini 3 Hypothesis Engine", layout="wide")
 
7
  st.title("🔬 Advanced Scientific Hypothesis Engine")
 
8
 
9
- # 1. API Key Check
10
- API_KEY = os.environ.get("GOOGLE_API_KEY")
11
- if not API_KEY:
12
- st.error("API Key not found! Add GOOGLE_API_KEY to Space Secrets.")
13
- st.stop()
14
-
15
- # 2. Model Selection (Sidebar)
16
- # Switch to Flash if you hit 429 errors on Pro!
17
- with st.sidebar:
18
- st.header("Settings")
19
- selected_model = st.selectbox(
20
- "Select Model",
21
- ["gemini-3-flash-preview", "gemini-3-pro-preview"],
22
- help="Flash has higher quota. Use Pro for complex math."
23
- )
24
- st.divider()
25
 
26
- # 3. Persistent Client & Chat (Fixes NoneType Error)
27
- if "client" not in st.session_state:
28
- st.session_state.client = genai.Client(api_key=API_KEY)
29
 
30
- if "chat" not in st.session_state or st.session_state.get("current_model") != selected_model:
31
- st.session_state.current_model = selected_model
32
- config = types.GenerateContentConfig(
33
- system_instruction="You are a senior scientific auditor. Use Python tools to verify all hypotheses.",
34
- thinking_config=types.ThinkingConfig(include_thoughts=True, thinking_level="HIGH"),
35
- tools=[types.Tool(google_search=types.GoogleSearchRetrieval()),
36
- types.Tool(code_execution=types.ToolCodeExecution())]
37
- )
38
- st.session_state.chat = st.session_state.client.chats.create(model=selected_model, config=config)
39
 
40
- if "messages" not in st.session_state:
41
- st.session_state.messages = []
42
 
43
- # 4. File Ingestion
44
  with st.sidebar:
45
- st.header("Research Corpus")
 
 
 
46
  uploaded_files = st.file_uploader("Upload PDFs", type="pdf", accept_multiple_files=True)
 
47
  if st.button("Reset Lab State"):
48
- for key in list(st.session_state.keys()): del st.session_state[key]
 
49
  st.rerun()
50
 
51
- # 5. Chat Logic
 
 
 
 
52
  for msg in st.session_state.messages:
53
- with st.chat_message(msg["role"]): st.markdown(msg["content"])
 
54
 
55
  if prompt := st.chat_input("Enter your research objective..."):
56
  st.session_state.messages.append({"role": "user", "content": prompt})
57
- with st.chat_message("user"): st.markdown(prompt)
 
58
 
59
  with st.chat_message("assistant"):
60
- with st.status("Agent Reasoning...", expanded=True) as status:
61
- try:
62
- # Execution
63
- response = st.session_state.chat.send_message(prompt)
64
-
65
- if response.candidates[0].thought_summary:
66
- st.info(f"**Thought Path:** {response.candidates[0].thought_summary}")
67
-
68
- st.markdown(response.text)
69
- st.session_state.messages.append({"role": "assistant", "content": response.text})
70
- status.update(label="Analysis Complete", state="complete")
71
-
72
- except Exception as e:
73
- if "429" in str(e):
74
- st.error("Quota Exceeded (429)! Switch to the 'Flash' model in the sidebar.")
75
- else:
76
- st.error(f"Error: {e}")
77
- status.update(label="Failed", state="error")
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
2
  import os
3
  from google import genai
4
  from google.genai import types
5
+ from groq import Groq
6
 
7
# --- Configuration & Styling ---
# NOTE: st.set_page_config must be the first Streamlit call in the script,
# which is why it sits immediately after the imports.
st.set_page_config(page_title="Gemini 3 + Groq: Hypothesis Engine", layout="wide")
st.title("🔬 Advanced Scientific Hypothesis Engine")
st.caption("Dual-Engine: Gemini 3 for Research | Groq for Speed")
11
 
12
# --- SECURE API KEYS ---
# Credentials come from the environment (e.g. Hugging Face Space secrets);
# they are never hard-coded in the source.
GEMINI_KEY = os.environ.get("gemini_api")
GROQ_KEY = os.environ.get("groq_api")

# Bail out of this run early unless both credentials are present.
if not (GEMINI_KEY and GROQ_KEY):
    st.error("Please add 'gemini_api' and 'groq_api' to your Secrets.")
    st.stop()
19
 
20
# --- Initialize Clients ---
# Build each SDK client exactly once and cache it in session_state so that
# Streamlit reruns reuse the same connection objects.
for _slot, _factory in (
    ("gemini_client", lambda: genai.Client(api_key=GEMINI_KEY)),
    ("groq_client", lambda: Groq(api_key=GROQ_KEY)),
):
    if _slot not in st.session_state:
        st.session_state[_slot] = _factory()

SYSTEM_INSTRUCTIONS = "You are a Senior Scientific Discovery Agent. Be precise and ground claims in evidence."
 
27
 
28
# --- Sidebar: Engine Selection & Research Corpus ---
with st.sidebar:
    st.header("⚙️ Engine Settings")
    engine = st.radio("Select Primary Brain:", ["Groq (Fast/No Limits)", "Gemini 3 (Deep Search/Code)"])

    st.header("📚 Research Corpus")
    uploaded_files = st.file_uploader("Upload PDFs", type="pdf", accept_multiple_files=True)

    if st.button("Reset Lab State"):
        # Drop every cached object (clients, transcript, model choice) and
        # restart the script from the top with a clean slate.
        st.session_state.clear()
        st.rerun()
40
 
41
# --- Initialize Messages ---
# The chat transcript lives in session_state so it survives reruns.
st.session_state.setdefault("messages", [])

# --- Main Interaction Loop ---
# Replay the stored transcript so prior turns stay visible after each rerun.
for entry in st.session_state.messages:
    with st.chat_message(entry["role"]):
        st.markdown(entry["content"])
49
 
50
# --- Chat Handling ---
if prompt := st.chat_input("Enter your research objective..."):
    # Record and echo the user's turn before dispatching to an engine.
    st.session_state.messages.append({"role": "user", "content": prompt})
    with st.chat_message("user"):
        st.markdown(prompt)

    with st.chat_message("assistant"):
        if engine == "Groq (Fast/No Limits)":
            with st.spinner("Groq is thinking..."):
                try:
                    # FIX: send the system prompt plus the full transcript —
                    # previously only the latest message was sent, so Groq had
                    # no conversational context despite the app storing history.
                    chat_completion = st.session_state.groq_client.chat.completions.create(
                        messages=[{"role": "system", "content": SYSTEM_INSTRUCTIONS}]
                        + st.session_state.messages,
                        model="llama-3.3-70b-versatile",
                    )
                    response_text = chat_completion.choices[0].message.content
                    st.markdown(response_text)
                except Exception as e:
                    # FIX: an API failure here used to crash the script before
                    # response_text was bound; now it degrades gracefully like
                    # the Gemini branch does.
                    st.error(f"Groq Error: {e}")
                    response_text = "Error occurred."

        else:
            with st.status("Gemini 3 Researching...", expanded=True) as status:
                try:
                    config = types.GenerateContentConfig(
                        system_instruction=SYSTEM_INSTRUCTIONS,
                        thinking_config=types.ThinkingConfig(include_thoughts=True, thinking_level=types.ThinkingLevel.LOW),
                        tools=[types.Tool(google_search=types.GoogleSearchRetrieval()),
                               types.Tool(code_execution=types.ToolCodeExecution())],
                    )
                    # Stateless call: Gemini sees only the current prompt (plus
                    # its search/code tools), not the stored transcript.
                    response = st.session_state.gemini_client.models.generate_content(
                        model="gemini-3-flash-preview",
                        contents=prompt,
                        config=config,
                    )

                    # Surface intermediate reasoning and any tool-generated code
                    # before showing the final answer text.
                    for part in response.candidates[0].content.parts:
                        if part.thought:
                            st.info(f"**Reasoning:** {part.text}")
                        if part.executable_code:
                            st.code(part.executable_code.code, language="python")

                    response_text = response.text
                    st.markdown(response_text)
                    status.update(label="Research Complete", state="complete")
                except Exception as e:
                    st.error(f"Gemini Error: {e}")
                    response_text = "Error occurred."

    # Persist the assistant turn (real answer or error placeholder).
    st.session_state.messages.append({"role": "assistant", "content": response_text})