# Prj2/app/agent_fallback.py
"""
LangGraph-based agent fallback for complex quiz solving.
Used when structured strategies fail or for novel quiz types.
"""
import asyncio
import logging
import os
import time
from typing import Dict, Any, Optional, List, Annotated

from typing_extensions import TypedDict
logger = logging.getLogger(__name__)
# Try to import LangGraph components (optional dependency)
try:
from langgraph.graph import StateGraph, END, START
from langgraph.prebuilt import ToolNode
from langchain_core.messages import trim_messages, HumanMessage
from langchain.chat_models import init_chat_model
from langgraph.graph.message import add_messages
from langchain_core.rate_limiters import InMemoryRateLimiter
LANGGRAPH_AVAILABLE = True
except ImportError:
LANGGRAPH_AVAILABLE = False
logger.warning("LangGraph not available - agent fallback disabled")
if LANGGRAPH_AVAILABLE:
    class AgentState(TypedDict):
        """State for LangGraph agent.

        ``messages`` uses the ``add_messages`` reducer so node return values
        are merged (upserted by id) into the conversation history.
        """
        messages: Annotated[List, add_messages]
else:
    # Without LangGraph the ``add_messages`` name does not exist; evaluating
    # the annotation above would raise NameError at import time and defeat
    # the LANGGRAPH_AVAILABLE guard. Provide a plain-typed stand-in so the
    # module stays importable (the agent is disabled anyway in this case).
    class AgentState(TypedDict):
        """Fallback state definition used when LangGraph is unavailable."""
        messages: List
class AgentFallback:
    """Agent-based fallback solver using LangGraph.

    Builds a minimal single-node LangGraph state machine and uses it to
    attempt free-form quiz questions when structured strategies fail.
    Every failure path is soft: initialization and solving degrade to
    ``self.agent = None`` / a ``None`` answer rather than raising.
    """

    def __init__(self, email: str, secret: str):
        """Store credentials and, if LangGraph is importable, build the agent.

        Args:
            email: Account email injected into the agent prompt.
            secret: Account secret injected into the agent prompt.
        """
        self.email = email
        self.secret = secret
        self.agent = None   # compiled LangGraph graph, or None when disabled
        self.tools = []     # placeholder for future tool bindings
        if LANGGRAPH_AVAILABLE:
            self._initialize_agent()

    def _initialize_agent(self):
        """Build and compile the LangGraph agent; on any error disable it."""
        try:
            # Throttle the LLM to ~4 requests/minute to stay inside quota.
            rate_limiter = InMemoryRateLimiter(
                requests_per_second=4 / 60,
                check_every_n_seconds=1,
                max_bucket_size=4,
            )
            # NOTE(review): ``llm`` is constructed but not yet wired into the
            # graph below -- the agent node is currently a pass-through stub.
            llm = init_chat_model(
                model_provider="google_genai",
                model="gemini-2.5-flash",
                rate_limiter=rate_limiter,
            )
            # Minimal graph: START -> agent -> END. Tool nodes would be
            # registered here once the tools module is integrated.
            graph = StateGraph(AgentState)
            graph.add_node("agent", self._agent_node)
            graph.add_edge(START, "agent")
            graph.add_conditional_edges("agent", self._route, {END: END})
            self.agent = graph.compile()
            logger.info("Agent fallback initialized")
        except Exception as e:
            # Initialization is best-effort; record the traceback and disable.
            logger.exception("Error initializing agent: %s", e)
            self.agent = None

    def _agent_node(self, state: AgentState):
        """Agent node that processes messages.

        Stub implementation: echoes the current messages back; the
        ``add_messages`` reducer upserts them by id, so state is unchanged.
        A real implementation would call the LLM here.
        """
        return {"messages": state["messages"]}

    def _route(self, state: AgentState):
        """Routing logic for the agent node: always terminate."""
        return END

    async def solve(self, question: str, page_content: Dict[str, Any],
                    remaining_time: float) -> Optional[Any]:
        """Attempt to solve a question using the agent-based approach.

        Args:
            question: Question text.
            page_content: Page content dict; ``page_content['text']`` is
                truncated to 2000 chars for the prompt.
            remaining_time: Time remaining in seconds; used both as a gate
                and as the execution budget.

        Returns:
            The answer string if solved, ``None`` otherwise.
        """
        if not LANGGRAPH_AVAILABLE or not self.agent:
            return None
        # Only worth invoking the (slow, rate-limited) agent with >= 30s left.
        if remaining_time < 30.0:
            logger.debug("Skipping agent fallback - insufficient time")
            return None
        try:
            logger.info("Attempting agent-based solution...")
            # NOTE(review): the secret is embedded in the prompt and may be
            # echoed by the model in its answer -- confirm this is intended.
            system_prompt = f"""
You are a quiz-solving agent. Solve this question:
Question: {question}
Page Content: {page_content.get('text', '')[:2000]}
Email: {self.email}
Secret: {self.secret}
Provide a clear, concise answer.
"""
            initial_messages = [
                {"role": "system", "content": system_prompt},
                {"role": "user", "content": question},
            ]
            # Use the async entry point so the event loop is not blocked by
            # a synchronous invoke(), and bound the run by the remaining
            # budget (minus a small buffer for answer submission).
            budget = max(remaining_time - 5.0, 5.0)
            result = await asyncio.wait_for(
                self.agent.ainvoke(
                    {"messages": initial_messages},
                    config={"recursion_limit": 100},
                ),
                timeout=budget,
            )
            # Extract the final answer; messages may be LangChain message
            # objects (``.content``) or plain dicts (``["content"]``).
            if result and "messages" in result:
                last_message = result["messages"][-1]
                if hasattr(last_message, "content"):
                    return last_message.content
                if isinstance(last_message, dict) and "content" in last_message:
                    return last_message["content"]
            return None
        except Exception as e:
            # Includes asyncio.TimeoutError when the budget is exceeded.
            logger.exception("Error in agent fallback: %s", e)
            return None
def get_agent_fallback(email: str, secret: str) -> Optional[AgentFallback]:
    """Get or create agent fallback instance.

    Returns ``None`` when LangGraph is not installed, otherwise a fresh
    :class:`AgentFallback` bound to the given credentials.
    """
    return AgentFallback(email, secret) if LANGGRAPH_AVAILABLE else None