# Author: vikaskapur — "update log msg" (commit 818d0b1)
import gradio as gr
import json
import logging
import os
from dotenv import load_dotenv
from smolagents import InferenceClientModel, CodeAgent, MCPClient
# Configure root logging: INFO level, timestamped records, emitted to the console.
_console = logging.StreamHandler()
logging.basicConfig(
    format="%(asctime)s - %(levelname)s - %(message)s",
    level=logging.INFO,
    handlers=[_console],
)

# Load environment variables (e.g. HUGGINGFACE_API_TOKEN) from a local .env file.
load_dotenv()
def run(message, agent):
    """Forward *message* to *agent*, log the exchange, and return the reply as a string.

    The agent's raw return value is coerced with ``str`` so the Gradio text
    output always receives plain text.
    """
    reply = str(agent.run(message))
    logging.info(f"Input: {message}, Output: {reply}")
    return reply
if __name__ == "__main__":
    logging.info("Starting the agent with MCP tools...")
    # Pre-bind so the finally clause is safe even if MCPClient() itself raises;
    # otherwise the NameError in `finally` would mask the original exception.
    mcp_client = None
    try:
        # Connect to the remote MCP sentiment server over SSE.
        mcp_client = MCPClient(
            {
                "url": "https://vikaskapur-mcp-server-sentiment.hf.space/gradio_api/mcp/sse",
                "transport": 'sse'
            }
        )
        tools = mcp_client.get_tools()
        # Print tools in a readable format
        logging.info("Tools retrieved:")
        logging.info("\n".join(f"{t.name}: {t.description}" for t in tools))
        # NOTE(review): assumes HUGGINGFACE_API_TOKEN is set in the environment
        # (loaded via .env above); InferenceClientModel receives None otherwise.
        model = InferenceClientModel(token=os.getenv("HUGGINGFACE_API_TOKEN"))
        logging.info(f"Model: {model.model_id}")
        agent = CodeAgent(tools=[*tools], model=model, additional_authorized_imports=["json", "ast", "urllib", "base64"], )
        # Minimal text-in/text-out UI wrapping the agent.
        demo = gr.Interface(
            fn=lambda message: run(message, agent),
            inputs="text",
            outputs="text",
            examples=["Analyze the sentiment of the following text 'This is awesome'"],
            title="Agent with MCP Tools",
            description="This is a simple agent that uses MCP tools to answer questions.",
        )
        demo.launch()
    finally:
        # Only disconnect if the client was actually created.
        if mcp_client is not None:
            mcp_client.disconnect()