ATHARVA commited on
Commit
407c815
Β·
1 Parent(s): 984ac15

Fix LangChain imports and simplify requirements.txt

Browse files
Files changed (3) hide show
  1. README.md +1 -1
  2. app.py +30 -5
  3. requirements.txt +25 -23
README.md CHANGED
@@ -10,7 +10,7 @@ pinned: false
10
  license: mit
11
  hf_oauth: true
12
  hf_oauth_expiration_minutes: 480
13
- short_description: Advanced AI agent for GAIA benchmark evaluation with 30+ score target
14
  tags:
15
  - ai-agent
16
  - gaia-benchmark
 
10
  license: mit
11
  hf_oauth: true
12
  hf_oauth_expiration_minutes: 480
13
+ short_description: Advanced AI agent for GAIA benchmark - 30+ score target
14
  tags:
15
  - ai-agent
16
  - gaia-benchmark
app.py CHANGED
@@ -3,17 +3,32 @@ import os
3
  import gradio as gr
4
  import requests
5
  import pandas as pd
6
- from langchain_core.messages import HumanMessage
7
- from agent import build_graph
8
  import time
9
- import asyncio
10
- from concurrent.futures import ThreadPoolExecutor
11
  import warnings
12
 
13
  # Suppress warnings for cleaner output
14
  warnings.filterwarnings("ignore", category=UserWarning)
15
  os.environ["TOKENIZERS_PARALLELISM"] = "false"
16
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
17
  # --- Constants ---
18
  DEFAULT_API_URL = "https://agents-course-unit4-scoring.hf.space"
19
 
@@ -23,13 +38,19 @@ class OptimizedGAIAAgent:
23
 
24
  def __init__(self, provider="groq"):
25
  print("πŸ€– Initializing Optimized GAIA Agent...")
 
 
 
 
 
 
26
  try:
27
  self.graph = build_graph(provider=provider)
28
  self.provider = provider
29
  print(f"βœ… Agent initialized successfully with {provider} provider!")
30
  except Exception as e:
31
  print(f"❌ Error initializing agent: {e}")
32
- raise
33
 
34
  def extract_final_answer(self, content: str) -> str:
35
  """Extract clean final answer from agent response"""
@@ -54,6 +75,10 @@ class OptimizedGAIAAgent:
54
  """Process question and return clean answer"""
55
  print(f"πŸ” Processing: {question[:50]}{'...' if len(question) > 50 else ''}")
56
 
 
 
 
 
57
  try:
58
  start_time = time.time()
59
 
 
3
  import gradio as gr
4
  import requests
5
  import pandas as pd
 
 
6
  import time
 
 
7
  import warnings
8
 
9
  # Suppress warnings for cleaner output
10
  warnings.filterwarnings("ignore", category=UserWarning)
11
  os.environ["TOKENIZERS_PARALLELISM"] = "false"
12
 
13
+ # Try to import LangChain dependencies
14
+ try:
15
+ from langchain_core.messages import HumanMessage
16
+ from agent import build_graph
17
+ LANGCHAIN_AVAILABLE = True
18
+ print("βœ… LangChain dependencies loaded successfully")
19
+ except ImportError as e:
20
+ print(f"❌ LangChain import failed: {e}")
21
+ print("πŸ”„ The Space will need to restart after dependencies are installed")
22
+ LANGCHAIN_AVAILABLE = False
23
+
24
+ # Create fallback classes
25
+ class HumanMessage:
26
+ def __init__(self, content):
27
+ self.content = content
28
+
29
+ def build_graph(provider="groq"):
30
+ return None
31
+
32
  # --- Constants ---
33
  DEFAULT_API_URL = "https://agents-course-unit4-scoring.hf.space"
34
 
 
38
 
39
  def __init__(self, provider="groq"):
40
  print("πŸ€– Initializing Optimized GAIA Agent...")
41
+ if not LANGCHAIN_AVAILABLE:
42
+ print("❌ LangChain not available - using fallback mode")
43
+ self.graph = None
44
+ self.provider = provider
45
+ return
46
+
47
  try:
48
  self.graph = build_graph(provider=provider)
49
  self.provider = provider
50
  print(f"βœ… Agent initialized successfully with {provider} provider!")
51
  except Exception as e:
52
  print(f"❌ Error initializing agent: {e}")
53
+ self.graph = None
54
 
55
  def extract_final_answer(self, content: str) -> str:
56
  """Extract clean final answer from agent response"""
 
75
  """Process question and return clean answer"""
76
  print(f"πŸ” Processing: {question[:50]}{'...' if len(question) > 50 else ''}")
77
 
78
+ # Check if agent is available
79
+ if not self.graph:
80
+ return f"LangChain agent not available. Question: {question[:100]}..."
81
+
82
  try:
83
  start_time = time.time()
84
 
requirements.txt CHANGED
@@ -1,23 +1,25 @@
1
- gradio==5.25.2
2
- requests>=2.31.0
3
- langchain>=0.2.0
4
- langchain-community>=0.2.0
5
- langchain-core>=0.2.0
6
- langchain-google-genai>=1.0.0
7
- langchain-huggingface>=0.0.3
8
- langchain-groq>=0.1.0
9
- langchain-tavily>=0.1.0
10
- langchain-chroma>=0.1.0
11
- langgraph>=0.2.0
12
- huggingface_hub>=0.20.0
13
- supabase>=2.0.0
14
- arxiv>=2.1.0
15
- pymupdf>=1.23.0
16
- wikipedia>=1.4.0
17
- python-dotenv>=1.0.0
18
- pandas>=2.0.0
19
- numpy>=1.24.0
20
- aiohttp>=3.8.0
21
- beautifulsoup4>=4.12.0
22
- lxml>=4.9.0
23
- sentence-transformers>=2.2.0
 
 
 
1
+ # Core dependencies
2
+ gradio==4.44.0
3
+ requests==2.31.0
4
+ pandas==2.0.3
5
+ python-dotenv==1.0.0
6
+
7
+ # LangChain core (essential)
8
+ langchain-core==0.3.15
9
+ langchain==0.3.7
10
+ langchain-community==0.3.7
11
+
12
+ # LLM providers
13
+ langchain-groq==0.2.1
14
+ langchain-google-genai==2.0.5
15
+
16
+ # LangGraph for agent workflows
17
+ langgraph==0.2.34
18
+
19
+ # Tools and utilities
20
+ tavily-python==0.5.0
21
+ wikipedia==1.4.0
22
+ arxiv==2.1.3
23
+
24
+ # Optional dependencies (lightweight)
25
+ aiohttp==3.9.5