"""Gradio chat app: a smolagents CodeAgent backed by tools from a remote MCP server."""

import atexit
import os

import gradio as gr
from dotenv import load_dotenv
from smolagents import CodeAgent, InferenceClientModel, MCPClient

load_dotenv()

# Connect to the remote MCP server and fetch its tool list. On any failure we
# fall back to an empty tool set so the app still starts without the server.
try:
    mcp_client = MCPClient(
        {"url": "https://darkgenius01-mcp-sentiment.hf.space/gradio_api/mcp/sse"}
    )
    # The client holds an open SSE connection; make sure it is closed when the
    # interpreter exits (smolagents' recommended pattern outside a `with` block).
    atexit.register(mcp_client.disconnect)
    tools = mcp_client.get_tools()
except Exception as e:
    print(f"Ошибка подключения к MCP серверу: {e}")
    tools = []

# On Hugging Face Spaces the token is provided as HF_TOKEN; locally we also
# accept HUGGINGFACE_API_TOKEN.
hf_token = os.getenv("HF_TOKEN") or os.getenv("HUGGINGFACE_API_TOKEN")
if not hf_token:
    raise ValueError("Необходимо установить переменную окружения HF_TOKEN или HUGGINGFACE_API_TOKEN")

model = InferenceClientModel(token=hf_token)
agent = CodeAgent(
    tools=list(tools),  # defensive copy; agent gets its own list
    model=model,
    additional_authorized_imports=["json", "ast", "urllib", "base64"],
)


def chat_function(message, history):
    """Run the agent on *message* and return its answer as a string.

    *history* is supplied by gr.ChatInterface but is unused: the agent keeps
    no conversational state between calls. Agent errors are returned as text
    so the UI does not crash.
    """
    try:
        return str(agent.run(message))
    except Exception as e:
        return f"Ошибка: {str(e)}"


demo = gr.ChatInterface(
    fn=chat_function,
    type="messages",
    examples=["Analyze the sentiment of the following text 'This is awesome'"],
    title="Agent with MCP Tools",
    description="This is a simple agent that uses MCP tools to answer questions.",
)

demo.launch()