geronimo-pericoli committed on
Commit
b14749d
·
verified ·
1 Parent(s): 30ee4ff

Update mcp_agent.py

Browse files
Files changed (1) hide show
  1. mcp_agent.py +3 -3
mcp_agent.py CHANGED
@@ -1,11 +1,11 @@
1
  # mcp_agent.py
2
  import asyncio
3
- from typing import List, Dict, Optional
4
  from fastmcp import Client
5
  from fastmcp.client.transports import SSETransport
6
  from llama_index.core.workflow import Context
7
  from llama_index.tools.mcp import McpToolSpec
8
- from llama_index.core.agent.workflow import FunctionAgent, ToolCall
9
  from mcp.client.sse import sse_client as original_sse_client
10
  from mcp.client.session import ClientSession
11
  from datetime import timedelta
@@ -91,7 +91,7 @@ class MCPAgent:
91
  handler = self.agent.run(message, ctx=self.agent_context)
92
  return str(await handler)
93
 
94
- async def stream_response(self, message: str):
95
  """Genera la respuesta del agente en formato de streaming"""
96
  if not self.agent:
97
  await self.initialize()
 
1
  # mcp_agent.py
2
  import asyncio
3
+ from typing import List, Dict, Optional, Generator, AsyncGenerator
4
  from fastmcp import Client
5
  from fastmcp.client.transports import SSETransport
6
  from llama_index.core.workflow import Context
7
  from llama_index.tools.mcp import McpToolSpec
8
+ from llama_index.core.agent.workflow import FunctionAgent, ToolCall, ToolCallResult
9
  from mcp.client.sse import sse_client as original_sse_client
10
  from mcp.client.session import ClientSession
11
  from datetime import timedelta
 
91
  handler = self.agent.run(message, ctx=self.agent_context)
92
  return str(await handler)
93
 
94
+ async def stream_response(self, message: str) -> AsyncGenerator[Dict, None]:
95
  """Genera la respuesta del agente en formato de streaming"""
96
  if not self.agent:
97
  await self.initialize()