Wall06 committed on
Commit
41154b4
·
verified ·
1 Parent(s): d5e5efa

Update src/streamlit_app.py

Browse files
Files changed (1) hide show
  1. src/streamlit_app.py +48 -34
src/streamlit_app.py CHANGED
@@ -3,53 +3,60 @@ import os
3
  from google import genai
4
  from google.genai import types
5
 
6
- # 1. Page Configuration
7
- st.set_page_config(page_title="Advanced Scientific Hypothesis Engine", layout="wide")
8
  st.title("🔬 Advanced Scientific Hypothesis Engine")
9
- st.caption("Powered by Gemini 3 Pro Reasoning & Action Loops")
10
 
11
- # 2. Secure API Key Retrieval
12
- # Ensure you have 'GOOGLE_API_KEY' in your Hugging Face Space Secrets
13
  API_KEY = os.environ.get("GOOGLE_API_KEY")
14
  if not API_KEY:
15
- st.error("API Key not found. Please add GOOGLE_API_KEY to your Space Secrets.")
16
  st.stop()
17
 
18
- # 3. Persistent Client Initialization
19
- # We store the client in session_state so it isn't closed between runs
 
 
 
 
 
 
 
 
 
 
20
  if "client" not in st.session_state:
21
  st.session_state.client = genai.Client(api_key=API_KEY)
22
 
23
- # 4. Persistent Chat Session Initialization
24
- # This prevents the 'NoneType' error by ensuring 'chat' always exists
25
  if "chat" not in st.session_state:
26
- # Define your agent's core 'Plan -> Act -> Verify' instructions here
27
- sys_instruct = "You are a senior scientific auditor. Use Python tools to verify all hypotheses."
28
-
 
 
 
 
 
 
 
29
  st.session_state.chat = st.session_state.client.chats.create(
30
- model="gemini-3-flash-preview",
31
- config=types.GenerateContentConfig(
32
- system_instruction=sys_instruct,
33
- thinking_config=types.ThinkingConfig(include_thoughts=True, thinking_level="HIGH"),
34
- tools=[types.Tool(google_search=types.GoogleSearchRetrieval()),
35
- types.Tool(code_execution=types.ToolCodeExecution())]
36
- )
37
  )
38
 
39
  if "messages" not in st.session_state:
40
  st.session_state.messages = []
41
 
42
- # 5. Sidebar for File Management
43
  with st.sidebar:
44
  st.header("Research Corpus")
45
- uploaded_files = st.file_uploader("Upload PDFs (Max 1M Tokens)", type="pdf", accept_multiple_files=True)
46
  if st.button("Reset Lab State"):
47
- # Clear state and rerun to fix any 'Closed Client' issues
48
  for key in list(st.session_state.keys()):
49
  del st.session_state[key]
50
  st.rerun()
51
 
52
- # 6. Chat Interaction Logic
53
  for msg in st.session_state.messages:
54
  with st.chat_message(msg["role"]):
55
  st.markdown(msg["content"])
@@ -62,21 +69,28 @@ if prompt := st.chat_input("Enter your research objective..."):
62
  with st.chat_message("assistant"):
63
  with st.status("Agent Reasoning...", expanded=True) as status:
64
  try:
65
- # Use the persistent chat session
66
  response = st.session_state.chat.send_message(prompt)
67
 
68
- # Show thoughts and tool usage
69
  if response.candidates[0].thought_summary:
70
- st.info(f"**Thought Signature:** {response.candidates[0].thought_summary}")
 
 
 
 
 
 
 
71
 
72
- status.update(label="Analysis Complete", state="complete")
73
  st.markdown(response.text)
74
  st.session_state.messages.append({"role": "assistant", "content": response.text})
 
75
 
76
  except Exception as e:
77
- st.error(f"Reasoning Error: {e}")
78
- # If the client is closed, trigger a reset
79
- if "closed" in str(e).lower():
80
- st.warning("Client session expired. Re-initializing...")
81
- del st.session_state.client
82
- st.rerun()
 
3
  from google import genai
4
  from google.genai import types
5
 
6
+ # --- 1. Page Config ---
7
+ st.set_page_config(page_title="Gemini 3 Hypothesis Engine", layout="wide")
8
  st.title("🔬 Advanced Scientific Hypothesis Engine")
 
9
 
10
+ # --- 2. Secure API & Model Selection ---
 
11
  API_KEY = os.environ.get("GOOGLE_API_KEY")
12
  if not API_KEY:
13
+ st.error("API Key missing! Add 'GOOGLE_API_KEY' to Space Secrets.")
14
  st.stop()
15
 
16
+ # ADDED: Model toggle to help you manage your quota (429 errors)
17
+ with st.sidebar:
18
+ st.header("Settings")
19
+ model_choice = st.selectbox(
20
+ "Select Model",
21
+ ["gemini-3-flash-preview", "gemini-3-pro-preview"],
22
+ help="Use Flash for testing to avoid 429 Quota errors. Use Pro for final discovery."
23
+ )
24
+ st.divider()
25
+
26
+ # --- 3. Persistent Session Management ---
27
+ # Storing the client and chat in st.session_state prevents the NoneType error
28
  if "client" not in st.session_state:
29
  st.session_state.client = genai.Client(api_key=API_KEY)
30
 
 
 
31
  if "chat" not in st.session_state:
32
+ # Action Era Configuration
33
+ config = types.GenerateContentConfig(
34
+ system_instruction="You are a senior scientific auditor. Use Python tools to verify all hypotheses.",
35
+ thinking_config=types.ThinkingConfig(include_thoughts=True, thinking_level="HIGH"),
36
+ tools=[
37
+ types.Tool(google_search=types.GoogleSearchRetrieval()),
38
+ types.Tool(code_execution=types.ToolCodeExecution())
39
+ ],
40
+ temperature=1.0
41
+ )
42
  st.session_state.chat = st.session_state.client.chats.create(
43
+ model=model_choice,
44
+ config=config
 
 
 
 
 
45
  )
46
 
47
  if "messages" not in st.session_state:
48
  st.session_state.messages = []
49
 
50
+ # --- 4. Sidebar File Ingestion ---
51
  with st.sidebar:
52
  st.header("Research Corpus")
53
+ uploaded_files = st.file_uploader("Upload PDFs", type="pdf", accept_multiple_files=True)
54
  if st.button("Reset Lab State"):
 
55
  for key in list(st.session_state.keys()):
56
  del st.session_state[key]
57
  st.rerun()
58
 
59
+ # --- 5. Chat Interface ---
60
  for msg in st.session_state.messages:
61
  with st.chat_message(msg["role"]):
62
  st.markdown(msg["content"])
 
69
  with st.chat_message("assistant"):
70
  with st.status("Agent Reasoning...", expanded=True) as status:
71
  try:
72
+ # Execution loop
73
  response = st.session_state.chat.send_message(prompt)
74
 
75
+ # Show Thought Signatures (The "Thinking" process)
76
  if response.candidates[0].thought_summary:
77
+ st.info(f"**Reasoning Path:**\n{response.candidates[0].thought_summary}")
78
+
79
+ # Show Tool usage (Python Code)
80
+ for part in response.candidates[0].content.parts:
81
+ if part.executable_code:
82
+ st.code(part.executable_code.code, language="python", label="Generated Script")
83
+ if part.code_execution_result:
84
+ st.success(f"Execution Result: {part.code_execution_result.output}")
85
 
 
86
  st.markdown(response.text)
87
  st.session_state.messages.append({"role": "assistant", "content": response.text})
88
+ status.update(label="Analysis Complete", state="complete")
89
 
90
  except Exception as e:
91
+ # Handle the 429 Quota error gracefully
92
+ if "429" in str(e):
93
+ st.error("Quota Exceeded (429)! Switch to the 'Flash' model in the sidebar to continue.")
94
+ else:
95
+ st.error(f"Error: {e}")
96
+ status.update(label="Process Failed", state="error")