Spaces:
Runtime error
Runtime error
| import gradio as gr | |
| import os | |
| from huggingface_hub import login | |
| from huggingface_hub import InferenceClient | |
| from smolagents import InferenceClientModel, CodeAgent, MCPClient # Only import what's available | |
# Authenticate with the Hugging Face Hub.
# Read HF_TOKEN first, then fall back to HUGGINGFACE_API_TOKEN so the same
# variable consulted by the InferenceClient below also works for login
# (the original mixed the two names, so setting only one of them left the
# other code path unauthenticated).
token = os.environ.get("HF_TOKEN") or os.environ.get("HUGGINGFACE_API_TOKEN")
if token:
    login(token=token)
else:
    print("Hugging Face token not found (set HF_TOKEN or HUGGINGFACE_API_TOKEN)")
# Connect to the MCP tool server, build a CodeAgent over its tools, and serve
# a Gradio chat UI. The finally block guarantees the MCP connection is closed
# even if launch() is interrupted.
mcp_client = None  # pre-bind so the finally clause is safe if the constructor raises
try:
    mcp_client = MCPClient(
        {"url": "https://abidlabs-mcp-tool-http.hf.space/gradio_api/mcp/sse"}
    )
    tools = mcp_client.get_tools()

    # Use the same token fallback as the login step above; the original read
    # only HUGGINGFACE_API_TOKEN here while the login code read HF_TOKEN,
    # which silently produced an unauthenticated client when only HF_TOKEN
    # was set.
    hf_client = InferenceClient(
        model="Qwen/Qwen2.5-Coder-32B-Instruct",
        token=os.environ.get("HF_TOKEN") or os.environ.get("HUGGINGFACE_API_TOKEN"),
    )
    # Wrap the pre-configured client in the smolagents model adapter.
    model = InferenceClientModel(client=hf_client)

    agent = CodeAgent(
        tools=[*tools],
        model=model,
        additional_authorized_imports=["json", "ast", "urllib", "base64"],
    )

    demo = gr.ChatInterface(
        fn=lambda message, history: str(agent.run(message)),
        type="messages",
        examples=["Analyze the sentiment of the following text 'This is awesome'"],
        title="Agent with MCP Tools",
        description="This is a simple agent that uses MCP tools to answer questions.",
    )
    demo.launch()
finally:
    # Disconnect only if the client was actually created; the original called
    # disconnect() unconditionally, raising NameError (and masking the real
    # error) whenever MCPClient() itself failed.
    if mcp_client is not None:
        mcp_client.disconnect()