File size: 1,342 Bytes
9401146
 
 
 
 
 
 
 
 
 
 
 
94df82c
 
 
9401146
94df82c
 
 
 
9401146
94df82c
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
9401146
94df82c
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
import gradio as gr
import os
from dotenv import load_dotenv
from smolagents import InferenceClientModel, CodeAgent, MCPClient

# Load environment variables from a local .env file (no-op if absent).
load_dotenv()

# Connect to the remote MCP (Model Context Protocol) server exposed by a
# Gradio Space and fetch its tool definitions. On any failure, fall back to
# an empty tool list so the agent can still start without MCP tools.
try:
    mcp_client = MCPClient(
        {"url": "https://darkgenius01-mcp-sentiment.hf.space/gradio_api/mcp/sse"}
    )
    tools = mcp_client.get_tools()
except Exception as e:
    # Runtime message is Russian for "Error connecting to the MCP server".
    print(f"Ошибка подключения к MCP серверу: {e}")
    tools = []

# On Hugging Face Spaces the token is exposed as HF_TOKEN; locally this
# project uses HUGGINGFACE_API_TOKEN — accept either, preferring HF_TOKEN.
hf_token = os.getenv("HF_TOKEN") or os.getenv("HUGGINGFACE_API_TOKEN")
if not hf_token:
    # Russian for "The HF_TOKEN or HUGGINGFACE_API_TOKEN environment variable must be set".
    raise ValueError("Необходимо установить переменную окружения HF_TOKEN или HUGGINGFACE_API_TOKEN")

# Build the agent: default Inference API model plus whatever MCP tools loaded.
# additional_authorized_imports whitelists modules the CodeAgent's generated
# code is allowed to import at runtime.
model = InferenceClientModel(token=hf_token)
agent = CodeAgent(tools=[*tools], model=model, additional_authorized_imports=["json", "ast", "urllib", "base64"])

def chat_function(message, history):
    """Run the agent on *message* and return its answer as a string.

    *history* is the Gradio chat history; it is accepted for interface
    compatibility but not used. Any exception raised while running the
    agent is converted into an error string ("Ошибка: ..." — Russian for
    "Error: ...") so the chat UI never crashes.
    """
    try:
        answer = str(agent.run(message))
    except Exception as err:
        # f-string stringifies err identically to str(e) in the original.
        return f"Ошибка: {err}"
    return answer

# Wire the agent into a Gradio chat UI. type="messages" selects the
# OpenAI-style message-dict history format (vs the legacy tuple pairs).
demo = gr.ChatInterface(
    fn=chat_function,
    type="messages",
    examples=["Analyze the sentiment of the following text 'This is awesome'"],
    title="Agent with MCP Tools",
    description="This is a simple agent that uses MCP tools to answer questions.",
)

# Start the web server (blocks until the process is stopped).
demo.launch()