"""
Tools for agents.

If an agent has tools — they are ALWAYS used on every LLM call.
Tools are passed via the API (the `tools` parameter), not in the prompt text.

Supported tools:

- shell: Shell command execution
- code_interpreter: Python code execution in a sandbox
- file_search: File and content search
- web_search: Information search on the internet (DuckDuckGo, Serper, etc.)
- Any custom functions via the @tool decorator

Usage example:

    from tools import tool, get_registry, CodeInterpreterTool
    from core.agent import AgentProfile
    from execution import MACPRunner

    # 1. Register tools (globally or via the registry)
    @tool
    def fibonacci(n: int) -> str:
        '''Calculate n-th Fibonacci number.'''
        a, b = 0, 1
        for _ in range(n):
            a, b = b, a + b
        return str(a)

    # 2. Create an agent with tools
    agent = AgentProfile(
        agent_id="math",
        display_name="Math Agent",
        persona="a helpful math assistant",
        tools=["fibonacci", "code_interpreter"],  # <-- tools here!
    )

    # 3. Run via runner — tools are used automatically
    runner = MACPRunner(llm_caller=my_caller)
    result = runner.run_round(graph)
"""
from .base import (
BaseTool,
ToolCall,
ToolRegistry,
ToolResult,
create_tool_from_config,
get_registry,
register_tool,
register_tool_factory,
tool,
)
from .code_interpreter import CodeInterpreterTool
from .file_search import FileSearchTool
from .function_calling import FunctionTool, FunctionWrapper
from .llm_integration import (
LLMResponse,
LLMToolCall,
# New unified caller (recommended)
OpenAICaller,
# Aliases for backward compatibility
OpenAIToolsCaller,
create_openai_caller,
create_openai_tools_caller,
parse_anthropic_response,
parse_openai_response,
)
from .shell import ShellTool
from .web_search import (
DuckDuckGoProvider,
SearchProvider,
SeleniumFetcher,
SerperProvider,
TavilyProvider,
URLFetcher,
WebSearchTool,
)
# Public API of the tools package, grouped by submodule of origin.
# Every name imported above is re-exported here (29 names total).
__all__ = [
    # .base — core abstractions and registry helpers
    "BaseTool",
    "ToolCall",
    "ToolRegistry",
    "ToolResult",
    "create_tool_from_config",
    "get_registry",
    "register_tool",
    "register_tool_factory",
    "tool",
    # Built-in tools
    "CodeInterpreterTool",
    "FileSearchTool",
    "ShellTool",
    # Custom-function wrapping
    "FunctionTool",
    "FunctionWrapper",
    # .llm_integration — native function calling (recommended)
    "LLMResponse",
    "LLMToolCall",
    "OpenAICaller",  # Unified caller — works with and without tools
    "OpenAIToolsCaller",  # Backward-compat alias: = OpenAICaller
    "create_openai_caller",  # Recommended way to create a caller
    "create_openai_tools_caller",  # = create_openai_caller
    "parse_anthropic_response",
    "parse_openai_response",
    # .web_search — providers and fetch utilities
    "DuckDuckGoProvider",
    "SearchProvider",
    "SeleniumFetcher",
    "SerperProvider",
    "TavilyProvider",
    "URLFetcher",
    "WebSearchTool",
]