carolinacon commited on
Commit
0c44617
·
1 Parent(s): a605490

Modified the Gradio interface

Browse files
Files changed (5) hide show
  1. .env.example +6 -1
  2. README.md +2 -1
  3. app.py +82 -5
  4. config/settings.py +2 -0
  5. core/agent.py +2 -3
.env.example CHANGED
@@ -13,4 +13,9 @@ CHESS_ENGINE_PATH=stock-fish-engine-location-here
13
  # Configure this if you want to enable observability with LangSmith
14
  LANGSMITH_API_KEY=your-langsmith-key
15
  LANGSMITH_TRACING=false
16
- LANGSMITH_PROJECT=gaia_agent
 
 
 
 
 
 
13
  # Configure this if you want to enable observability with LangSmith
14
  LANGSMITH_API_KEY=your-langsmith-key
15
  LANGSMITH_TRACING=false
16
+ LANGSMITH_PROJECT=gaia_agent
17
+
18
+ # Flag indicating whether the agent will work in submission mode or not.
19
+ # If submission mode is on the agent will retrieve 20 questions and submit their answers
20
+ # If submission mode is off the agent will accept questions from the user
21
+ SUBMISSION_MODE_ON=False
README.md CHANGED
@@ -123,7 +123,8 @@ to the `stockfish` executable, otherwise the `stockfish` installation is automati
123
 
124
  The `LANGSMITH_*` properties need to be configured only if you want to enable observability with LangSmith.
125
 
126
-
 
127
 
128
  ## References 📚
129
  The math tool implementation was inspired by this repo https://github.com/langchain-ai/open_deep_research
 
123
 
124
  The `LANGSMITH_*` properties need to be configured only if you want to enable observability with LangSmith.
125
 
126
+ The `SUBMISSION_MODE_ON` flag indicates whether the application will run in submission mode (when the 20 questions are fetched and the answers are
127
+ submitted for agent evaluation) or not (the agent accepts a question and an attachment).
128
 
129
  ## References 📚
130
  The math tool implementation was inspired by this repo https://github.com/langchain-ai/open_deep_research
app.py CHANGED
@@ -1,11 +1,15 @@
1
  import os
 
 
2
  import time
 
3
  from pathlib import Path
4
 
5
  import gradio as gr
6
  import pandas as pd
7
  import requests
8
 
 
9
  from core.agent import GaiaAgent, Attachment
10
  from utils.cache_answers import AnswersCache
11
  from utils.dependencies_checker import check_dependencies
@@ -122,7 +126,7 @@ def run_and_submit_all(profile: gr.OAuthProfile | None):
122
  print(f"Agent received an attachment : {attachment.file_path}...")
123
  submitted_answer = agent(question_text, attachment)
124
  print(f"Agent returning fixed answer: {submitted_answer}")
125
- #sleep in
126
  time.sleep(30)
127
  cache.set(task_id, submitted_answer)
128
  answers_payload.append({"task_id": task_id, "submitted_answer": submitted_answer})
@@ -140,7 +144,7 @@ def run_and_submit_all(profile: gr.OAuthProfile | None):
140
  status_update = f"Agent finished. Submitting {len(answers_payload)} answers for user '{username}'..."
141
  print(status_update)
142
 
143
- #5. Submit
144
  print(f"Submitting {len(answers_payload)} answers to: {submit_url}")
145
  try:
146
  response = requests.post(submit_url, json=submission_data, timeout=60)
@@ -185,7 +189,7 @@ def run_and_submit_all(profile: gr.OAuthProfile | None):
185
 
186
 
187
  # --- Build Gradio Interface using Blocks ---
188
- with gr.Blocks() as demo:
189
  gr.Markdown("# Basic Agent Evaluation Runner")
190
  gr.Markdown(
191
  """
@@ -215,6 +219,75 @@ with gr.Blocks() as demo:
215
  outputs=[status_output, results_table]
216
  )
217
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
218
  if __name__ == "__main__":
219
  print("\n" + "-" * 30 + " App Starting " + "-" * 30)
220
  # Check for SPACE_HOST and SPACE_ID at startup for information
@@ -237,6 +310,10 @@ if __name__ == "__main__":
237
  print("-" * (60 + len(" App Starting ")) + "\n")
238
  print("-" * (60 + len(" Check dependencies ")) + "\n")
239
  check_dependencies()
 
240
 
241
- print("Launching Gradio Interface for Basic Agent Evaluation...")
242
- demo.launch(debug=True, share=False)
 
 
 
 
1
  import os
2
+ import shutil
3
+ import tempfile
4
  import time
5
+ import uuid
6
  from pathlib import Path
7
 
8
  import gradio as gr
9
  import pandas as pd
10
  import requests
11
 
12
+ from config.settings import config
13
  from core.agent import GaiaAgent, Attachment
14
  from utils.cache_answers import AnswersCache
15
  from utils.dependencies_checker import check_dependencies
 
126
  print(f"Agent received an attachment : {attachment.file_path}...")
127
  submitted_answer = agent(question_text, attachment)
128
  print(f"Agent returning fixed answer: {submitted_answer}")
129
+ # sleep in
130
  time.sleep(30)
131
  cache.set(task_id, submitted_answer)
132
  answers_payload.append({"task_id": task_id, "submitted_answer": submitted_answer})
 
144
  status_update = f"Agent finished. Submitting {len(answers_payload)} answers for user '{username}'..."
145
  print(status_update)
146
 
147
+ # 5. Submit
148
  print(f"Submitting {len(answers_payload)} answers to: {submit_url}")
149
  try:
150
  response = requests.post(submit_url, json=submission_data, timeout=60)
 
189
 
190
 
191
  # --- Build Gradio Interface using Blocks ---
192
+ with gr.Blocks() as demo_submit:
193
  gr.Markdown("# Basic Agent Evaluation Runner")
194
  gr.Markdown(
195
  """
 
219
  outputs=[status_output, results_table]
220
  )
221
 
222
+
223
+ def process_input(question: str, file: gr.File):
224
+ """
225
+ Process the user's question and attached file
226
+ """
227
+ if not question:
228
+ return "Please enter a question."
229
+
230
+ # Extract file information
231
+ attachment = None
232
+ if file is not None:
233
+ print(f"Received file {file.name} ")
234
+
235
+ # Save to disk
236
+ task_id = uuid.uuid4()
237
+ file_name = Path(file.name).name
238
+ file_path = Path("uploads") / f"{task_id}" / f"{file_name}"
239
+
240
+ # Create parent directories if they don't exist
241
+ file_path.parent.mkdir(parents=True, exist_ok=True)
242
+ shutil.copy(file, file_path)
243
+
244
+ content = file_path.read_bytes()
245
+ attachment = Attachment(content, file_path.as_posix())
246
+
247
+ response = agent(question, attachment)
248
+
249
+ return response
250
+
251
+
252
+ with gr.Blocks(title="🐉 GAIA Agent Demo", theme=gr.themes.Ocean()) as demo:
253
+ gr.Markdown("# 🐉 GAIA Agent")
254
+ gr.Markdown("Ask me a complex question")
255
+
256
+ with gr.Row():
257
+ with gr.Column(scale=1):
258
+ question_input = gr.Textbox(
259
+ label="Your Question",
260
+ placeholder="Type your question here",
261
+ lines=5
262
+ )
263
+
264
+ file_input = gr.File(
265
+ label="Attach File",
266
+ file_types=[
267
+ ".txt", ".pdf", ".png", ".jpg", ".jpeg",
268
+ ".csv", ".py", ".mp3", ".xslx"
269
+ ]
270
+ )
271
+
272
+ submit_btn = gr.Button("Submit", variant="primary")
273
+
274
+ with gr.Column(scale=2):
275
+ output = gr.Textbox(label="Response", lines=10)
276
+
277
+ # Set up the submission action
278
+ submit_btn.click(
279
+ fn=process_input,
280
+ inputs=[question_input, file_input],
281
+ outputs=output
282
+ )
283
+
284
+ # Also process when file is uploaded (optional)
285
+ file_input.upload(
286
+ fn=process_input,
287
+ inputs=[question_input, file_input],
288
+ outputs=output
289
+ )
290
+
291
  if __name__ == "__main__":
292
  print("\n" + "-" * 30 + " App Starting " + "-" * 30)
293
  # Check for SPACE_HOST and SPACE_ID at startup for information
 
310
  print("-" * (60 + len(" App Starting ")) + "\n")
311
  print("-" * (60 + len(" Check dependencies ")) + "\n")
312
  check_dependencies()
313
+ agent = GaiaAgent()
314
 
315
+ if config.submission_mode_on:
316
+ print("Launching Gradio Interface for Basic Agent Evaluation...")
317
+ demo_submit.launch(debug=True, share=False)
318
+ else:
319
+ demo.launch(debug=True, share=False)
config/settings.py CHANGED
@@ -25,6 +25,8 @@ class AgentConfig(BaseSettings):
25
  project_root: Path = Path(__file__).parent.parent
26
  prompts_location: Path = project_root / "config" / "prompts.yaml"
27
 
 
 
28
  class Config:
29
  env_file = ".env"
30
  case_sensitive = False
 
25
  project_root: Path = Path(__file__).parent.parent
26
  prompts_location: Path = project_root / "config" / "prompts.yaml"
27
 
28
+ submission_mode_on: bool = False
29
+
30
  class Config:
31
  env_file = ".env"
32
  case_sensitive = False
core/agent.py CHANGED
@@ -8,8 +8,7 @@ from langgraph.prebuilt import tools_condition
8
 
9
  from core.messages import Attachment
10
  from core.state import State
11
- from nodes.nodes import assistant, optimize_memory, response_processing, pre_processor
12
- from tools.tavily_tools import web_search_tools
13
 
14
 
15
  class GaiaAgent:
@@ -22,7 +21,7 @@ class GaiaAgent:
22
  # Define nodes: these do the work
23
  builder.add_node("pre_processor", pre_processor)
24
  builder.add_node("assistant", assistant)
25
- builder.add_node("tools", ToolNode(web_search_tools))
26
  builder.add_node("optimize_memory", optimize_memory)
27
  builder.add_node("response_processing", response_processing)
28
 
 
8
 
9
  from core.messages import Attachment
10
  from core.state import State
11
+ from nodes.nodes import assistant, optimize_memory, response_processing, pre_processor, agent_tools
 
12
 
13
 
14
  class GaiaAgent:
 
21
  # Define nodes: these do the work
22
  builder.add_node("pre_processor", pre_processor)
23
  builder.add_node("assistant", assistant)
24
+ builder.add_node("tools", ToolNode(agent_tools))
25
  builder.add_node("optimize_memory", optimize_memory)
26
  builder.add_node("response_processing", response_processing)
27