# BirdScopeAI/langgraph_agent/subagent_factory.py
"""
Subagent Factory
Creates specialized agents with filtered tool subsets.
"""
from typing import List, Dict, Any
from langchain_core.language_models import BaseChatModel
from langchain.agents import create_agent
from langgraph.checkpoint.memory import InMemorySaver
from .subagent_config import SubAgentConfig
from .config import AgentConfig
class SubAgentFactory:
    """Factory for creating specialized subagents.

    Each subagent is a LangGraph agent built from a provider-specific
    system prompt and a filtered subset of the full tool list, as
    declared by ``SubAgentConfig.get_subagent_definitions``.
    """

    @staticmethod
    async def create_subagent(
        subagent_name: str,
        all_tools: List[Any],
        llm: BaseChatModel,
        provider: str = "openai"
    ):
        """
        Create a specialized subagent with filtered tools.

        Args:
            subagent_name: Name of the subagent (e.g., "image_identifier")
            all_tools: Full list of available tools
            llm: Language model instance
            provider: LLM provider name ("openai", "anthropic", "huggingface")

        Returns:
            LangGraph agent configured for the subagent

        Raises:
            ValueError: If ``subagent_name`` is not a known subagent.
        """
        # Get subagent configuration with provider-specific prompts
        definitions = SubAgentConfig.get_subagent_definitions(provider=provider)
        if subagent_name not in definitions:
            raise ValueError(f"Unknown subagent: {subagent_name}")
        config = definitions[subagent_name]

        # Filter tools for this subagent; build the set once so each
        # membership test in the comprehension is O(1).
        allowed_tool_names = set(config["tools"])
        subagent_tools = [
            tool for tool in all_tools
            if tool.name in allowed_tool_names
        ]
        if not subagent_tools:
            # An empty tool list is almost certainly a config typo —
            # surface it here rather than failing silently downstream.
            print(f"[SUBAGENT]: Warning — no tools matched for {subagent_name}")

        print(f"[SUBAGENT]: Creating {config['name']}")
        print(f" • Tools: {', '.join(t.name for t in subagent_tools)}")
        print(f" • Prompt preview: {config['prompt'][:80]}...")

        # Create specialized agent with filtered tools and name
        # Note: create_agent auto-compiles, so we pass name directly
        agent = create_agent(
            model=llm,
            tools=subagent_tools,
            system_prompt=config["prompt"],
            name=subagent_name
        )
        return agent

    @staticmethod
    async def create_all_subagents(
        all_tools: List[Any],
        llm: BaseChatModel,
        provider: str = "openai"
    ) -> Dict[str, Any]:
        """
        Create all specialized subagents.

        Args:
            all_tools: Full list of available tools
            llm: Language model instance
            provider: LLM provider name ("openai", "anthropic", "huggingface")

        Returns:
            Dict mapping subagent names to agent instances
        """
        definitions = SubAgentConfig.get_subagent_definitions(provider=provider)
        # Built sequentially (not gathered concurrently) so log output
        # from create_subagent stays grouped per agent.
        subagents: Dict[str, Any] = {}
        for name in definitions:  # iterating a dict yields its keys
            subagents[name] = await SubAgentFactory.create_subagent(
                name, all_tools, llm, provider=provider
            )
        return subagents