"""Constructs prompts for servers and tools, formats tool information for OpenAI API."""
from typing import Dict, Optional, List, Tuple, Any
from loguru import logger
from .types import FormattedTool
from .mcp_client import MCPClient
from .server_registry import ServerRegistry
class ToolAdapter:
    """Dynamically fetches tool information from enabled MCP servers and formats it.

    Produces three artifacts for a given set of enabled servers:

    - a nested ``servers_info`` mapping (``server -> tool -> info``) used to
      build a human-readable prompt string,
    - ``FormattedTool`` records keyed by tool name,
    - OpenAI- and Claude-compatible function-calling schemas.
    """

    def __init__(self, server_registery: Optional[ServerRegistry] = None) -> None:
        """Initialize with a ServerRegistry (a default one is created if omitted).

        NOTE(review): the name ``server_registery`` is a historical misspelling
        of "registry". It is part of the public interface (keyword argument and
        attribute), so it is kept as-is for backward compatibility.
        """
        self.server_registery = server_registery or ServerRegistry()

    async def get_server_and_tool_info(
        self, enabled_servers: List[str]
    ) -> Tuple[Dict[str, Dict[str, Dict[str, Any]]], Dict[str, FormattedTool]]:
        """Fetch tool information from specified enabled MCP servers.

        Args:
            enabled_servers: Names of servers to query; each must be registered
                in the ServerRegistry or it is skipped with a warning.

        Returns:
            A tuple ``(servers_info, formatted_tools)`` where ``servers_info``
            maps ``server_name -> tool_name -> {description, parameters,
            required}`` and ``formatted_tools`` maps ``tool_name ->
            FormattedTool``. Servers that fail still get an (empty) entry in
            ``servers_info`` so callers can tell they were attempted.
        """
        servers_info: Dict[str, Dict[str, Dict[str, Any]]] = {}
        formatted_tools: Dict[str, FormattedTool] = {}
        if not enabled_servers:
            logger.warning(
                "MC: No enabled MCP servers specified. Cannot fetch tool info."
            )
            return servers_info, formatted_tools
        logger.debug(f"MC: Fetching tool info for enabled servers: {enabled_servers}")
        # Use a single client instance for efficiency
        async with MCPClient(self.server_registery) as client:
            for server_name in enabled_servers:
                if server_name not in self.server_registery.servers:
                    logger.warning(
                        f"MC: Enabled server '{server_name}' not found in Server Manager. Skipping."
                    )
                    continue
                try:
                    servers_info[server_name] = {}
                    tools = await client.list_tools(server_name)
                    logger.debug(
                        f"MC: Found {len(tools)} tools on server '{server_name}'"
                    )
                    for tool in tools:
                        servers_info[server_name][tool.name] = {
                            "description": tool.description,
                            "parameters": tool.inputSchema.get("properties", {}),
                            "required": tool.inputSchema.get("required", []),
                        }
                        # Store the tool info in FormattedTool format
                        formatted_tools[tool.name] = FormattedTool(
                            input_schema=tool.inputSchema,
                            related_server=server_name,
                            description=tool.description,
                            # Generic schema will be generated later if needed
                            generic_schema=None,
                        )
                except (ValueError, RuntimeError, ConnectionError) as e:
                    logger.error(
                        f"MC: Failed to get info for server '{server_name}': {e}"
                    )
                    if (
                        server_name not in servers_info
                    ):  # Ensure entry exists even on error
                        servers_info[server_name] = {}
                    continue  # Continue to next server
                except Exception as e:
                    logger.error(
                        f"MC: Unexpected error for server '{server_name}': {e}"
                    )
                    if server_name not in servers_info:
                        servers_info[server_name] = {}
                    continue  # Continue to next server
        logger.debug(
            f"MC: Finished fetching tool info. Found {len(formatted_tools)} tools across enabled servers."
        )
        return servers_info, formatted_tools

    def construct_mcp_prompt_string(
        self, servers_info: Dict[str, Dict[str, Dict[str, Any]]]
    ) -> str:
        """Build a single prompt string describing enabled servers and their tools.

        Args:
            servers_info: Nested mapping produced by
                :meth:`get_server_and_tool_info`.

        Returns:
            A human-readable description of every server/tool, or an empty
            string when ``servers_info`` is empty.
        """
        full_prompt_content = ""
        if not servers_info:
            logger.warning(
                "MC: Cannot construct MCP prompt string, servers_info is empty."
            )
            return full_prompt_content
        logger.debug(
            f"MC: Constructing MCP prompt string for {len(servers_info)} server(s)."
        )
        for server_name, tools in servers_info.items():
            if not tools:  # Skip servers where info couldn't be fetched
                logger.warning(
                    f"MC: No tool info available for server '{server_name}', skipping in prompt."
                )
                continue
            prompt_content = f"Server: {server_name}\n"
            prompt_content += "  Tools:\n"
            for tool_name, tool_info in tools.items():
                prompt_content += f"    {tool_name}:\n"
                # BUG FIX: the key always exists, so dict.get's *default* never
                # applied when the stored description was None; use `or` so a
                # None/empty description falls back to the placeholder.
                description = (
                    tool_info.get("description") or "No description available."
                )
                prompt_content += f"      Description: {description}\n"
                parameters = tool_info.get("parameters", {})
                if parameters:
                    prompt_content += "      Parameters:\n"
                    for param_name, param_info in parameters.items():
                        param_desc = param_info.get("description") or param_info.get(
                            "title", "No description provided."
                        )
                        param_type = param_info.get(
                            "type", "string"
                        )  # Default to string if type missing
                        prompt_content += f"        {param_name}:\n"
                        prompt_content += f"          Type: {param_type}\n"
                        prompt_content += (
                            f"          Description: {param_desc}\n"
                        )
                required = tool_info.get("required", [])
                if required:
                    prompt_content += f"      Required: {', '.join(required)}\n"
            full_prompt_content += prompt_content + "\n"  # Add newline between servers
        logger.debug("MC: Finished constructing MCP prompt string.")
        return full_prompt_content.strip()  # Remove trailing newline

    def format_tools_for_api(
        self, formatted_tools_dict: Dict[str, FormattedTool]
    ) -> Tuple[List[Dict[str, Any]], List[Dict[str, Any]]]:
        """Format tools to OpenAI and Claude function-calling compatible schemas.

        Args:
            formatted_tools_dict: Mapping of tool name to ``FormattedTool``;
                entries that are not ``FormattedTool`` instances are skipped.

        Returns:
            A tuple ``(openai_tools, claude_tools)`` of schema lists, one entry
            per valid tool, in the input's iteration order.
        """
        openai_tools: List[Dict[str, Any]] = []
        claude_tools: List[Dict[str, Any]] = []
        if not formatted_tools_dict:
            logger.warning(
                "MC: Cannot format tools for API, input dictionary is empty."
            )
            return openai_tools, claude_tools
        logger.debug(f"MC: Formatting {len(formatted_tools_dict)} tools for API usage.")
        for tool_name, data_object in formatted_tools_dict.items():
            if not isinstance(data_object, FormattedTool):
                logger.warning(f"MC: Skipping invalid tool format for '{tool_name}'")
                continue
            input_schema = data_object.input_schema
            properties: Dict[str, Dict[str, Any]] = input_schema.get("properties", {})
            tool_description = data_object.description or "No description provided."
            required_params = input_schema.get("required", [])
            # Format for OpenAI
            openai_function_params: Dict[str, Any] = {
                "type": "object",
                "properties": {},
                "required": required_params,
                "additionalProperties": False,  # Disallow extra properties
            }
            for param_name, param_info in properties.items():
                param_schema: Dict[str, Any] = {
                    "type": param_info.get("type", "string"),
                    "description": param_info.get("description")
                    or param_info.get("title", "No description provided."),
                }
                # Add enum if present
                if "enum" in param_info:
                    param_schema["enum"] = param_info["enum"]
                # Handle array type correctly: OpenAI requires an 'items'
                # definition for array parameters.
                if param_schema["type"] == "array" and "items" in param_info:
                    param_schema["items"] = param_info["items"]
                elif param_schema["type"] == "array" and "items" not in param_info:
                    logger.warning(
                        f"MC: Array parameter '{param_name}' in tool '{tool_name}' is missing 'items' definition. Assuming items are strings."
                    )
                    param_schema["items"] = {"type": "string"}  # Default or log warning
                openai_function_params["properties"][param_name] = param_schema
            openai_tools.append(
                {
                    "type": "function",
                    "function": {
                        "name": tool_name,
                        "description": tool_description,
                        "parameters": openai_function_params,
                    },
                }
            )
            # Format for Claude (passes the original properties through)
            claude_input_schema = {
                "type": "object",
                "properties": properties,
                "required": required_params,
            }
            claude_tools.append(
                {
                    "name": tool_name,
                    "description": tool_description,
                    "input_schema": claude_input_schema,
                }
            )
        logger.debug(
            f"MC: Finished formatting tools. OpenAI: {len(openai_tools)}, Claude: {len(claude_tools)}."
        )
        return openai_tools, claude_tools

    async def get_tools(
        self, enabled_servers: List[str]
    ) -> Tuple[str, List[Dict[str, Any]], List[Dict[str, Any]]]:
        """Run the dynamic fetching and formatting process.

        Args:
            enabled_servers: Names of servers to query.

        Returns:
            ``(mcp_prompt_string, openai_tools, claude_tools)`` — see
            :meth:`construct_mcp_prompt_string` and :meth:`format_tools_for_api`.
        """
        logger.info(
            f"MC: Running dynamic tool construction for servers: {enabled_servers}"
        )
        servers_info, formatted_tools_dict = await self.get_server_and_tool_info(
            enabled_servers
        )
        mcp_prompt_string = self.construct_mcp_prompt_string(servers_info)
        openai_tools, claude_tools = self.format_tools_for_api(formatted_tools_dict)
        logger.info("MC: Dynamic tool construction complete.")
        return mcp_prompt_string, openai_tools, claude_tools
|