File size: 3,918 Bytes
ff0e97f
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
68723f3
ff0e97f
 
 
 
 
 
 
 
 
 
 
 
68723f3
 
 
ff0e97f
 
 
 
 
 
68723f3
ff0e97f
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
"""
Agent creation and configuration.

Unified agent factory using subagent architecture for all modes.
"""
from langchain_openai import ChatOpenAI

from .config import AgentConfig
from .mcp_clients import MCPClientManager


class AgentFactory:
    """Factory for creating agents using unified subagent architecture."""

    @staticmethod
    async def create_subagent_orchestrator(
        model: str,
        api_key: str,
        provider: str,
        mode: str = "Single Agent (All Tools)"
    ):
        """
        Create agent using subagent architecture (always uses subagent system).

        Args:
            model: LLM model name
            api_key: API key for the provider
            provider: LLM provider ("openai", "anthropic", or "huggingface")
            mode: Agent mode (e.g., "Single Agent (All Tools)",
                "Specialized Subagents (3 Specialists)")

        Returns:
            Configured agent (single subagent or router workflow)
        """
        # Local imports keep optional heavy dependencies out of module import time.
        from .subagent_config import SubAgentConfig
        from .subagent_supervisor import create_supervisor_workflow
        from langchain_anthropic import ChatAnthropic

        # Get mode configuration
        mode_config = SubAgentConfig.get_mode_config(mode)
        print(f"[AGENT]: Creating agent in '{mode}' mode")

        # Create LLM based on provider. HuggingFace is reached through its
        # OpenAI-compatible router endpoint, so it reuses ChatOpenAI.
        if provider == "huggingface":
            llm = ChatOpenAI(
                base_url="https://router.huggingface.co/v1",
                api_key=api_key,
                model=model,
                temperature=AgentConfig.HF_TEMPERATURE,
                streaming=True
            )
        elif provider == "anthropic":
            llm = ChatAnthropic(
                model=model,
                api_key=api_key,
                temperature=AgentConfig.ANTHROPIC_TEMPERATURE,
                streaming=True
            )
        else:  # openai (default)
            llm = ChatOpenAI(
                model=model,
                api_key=api_key,
                temperature=AgentConfig.OPENAI_TEMPERATURE,
                streaming=True
            )

        # Get all MCP tools once; single-agent mode filters them below.
        client = await MCPClientManager.create_multi_server_client()
        tools = await MCPClientManager.get_tools(client)

        if mode_config["use_router"]:
            # Multi-agent mode: supervisor routes between specialist subagents.
            print(f"[AGENT]: Creating supervisor with subagents: {mode_config['subagents']}")
            return await create_supervisor_workflow(tools, llm, provider=provider)

        # Single agent mode: create the one configured subagent directly.
        subagent_name = mode_config["subagents"][0]
        print(f"[AGENT]: Creating single subagent: {subagent_name}")

        # Create agent with memory for streaming support
        from langchain.agents import create_agent
        from langgraph.checkpoint.memory import InMemorySaver

        # Pass provider to get provider-specific prompts.
        # FIX: look up the configured subagent by name instead of the
        # hard-coded "generalist" key, so non-generalist single-agent modes
        # get their own tool list and prompt. (Behavior is unchanged for the
        # default mode, whose sole subagent is "generalist".)
        subagent_defs = SubAgentConfig.get_subagent_definitions(provider=provider)
        subagent_def = subagent_defs[subagent_name]

        # Filter the full MCP tool set down to this subagent's allow-list.
        allowed_tool_names = set(subagent_def["tools"])
        filtered_tools = [tool for tool in tools if tool.name in allowed_tool_names]
        print(f"[AGENT]: Filtered {len(filtered_tools)} tools for {subagent_name}: {[t.name for t in filtered_tools]}")

        # create_agent auto-compiles, so pass checkpointer and name directly.
        agent = create_agent(
            model=llm,
            tools=filtered_tools,
            system_prompt=subagent_def["prompt"],
            checkpointer=InMemorySaver(),
            name=subagent_name
        )
        return agent