Spaces:
Sleeping
Sleeping
File size: 1,119 Bytes
4558342 552487c 426cc60 a17521d 4558342 552487c 4558342 691ca73 4558342 552487c 4558342 691ca73 4558342 691ca73 9bc79ad 691ca73 f3bc14f |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 |
import gradio as gr
import requests
import sseclient
import json
def chat(message, history):
    """Stream a sentiment response for *message* from a remote MCP server.

    Posts the user's message to the Space's MCP SSE endpoint and re-yields
    the accumulated output after every SSE event, so that
    ``gr.ChatInterface(type="messages")`` renders partial output live.

    Parameters
    ----------
    message : str
        The user's latest chat message.
    history : list
        Prior conversation in OpenAI messages format (unused here — the
        remote endpoint is called statelessly with only the new message).

    Yields
    ------
    list[dict]
        A single assistant message dict with the output accumulated so far,
        or a warning/error message on empty response or failure.
    """
    url = "https://wlchee-mcp-sentiment.hf.space/gradio_api/mcp/sse"
    headers = {
        "Accept": "text/event-stream",
        "Content-Type": "application/json",
    }
    payload = json.dumps({"input": message})
    output = ""
    try:
        # timeout bounds connect/read so a dead server can't hang the UI;
        # the `with` block guarantees the streamed connection is closed even
        # if the consumer stops iterating this generator early.
        with requests.post(
            url, data=payload, headers=headers, stream=True, timeout=(10, 60)
        ) as response:
            # Surface HTTP errors (4xx/5xx) through the except branch below
            # instead of silently parsing an error page as SSE.
            response.raise_for_status()
            client = sseclient.SSEClient(response)
            for event in client.events():
                if event.data == "[DONE]":
                    break
                output += event.data
                # Always yield messages in OpenAI format
                yield [{"role": "assistant", "content": output}]
        # If no output was produced at all
        if output.strip() == "":
            yield [{"role": "assistant", "content": "⚠️ No response received from the MCP server."}]
    except Exception as e:
        yield [{"role": "assistant", "content": f"❌ Error: {str(e)}"}]
# Launch without share=True in a Space
demo = gr.ChatInterface(fn=chat, title="Sentiment Tiny Agent", type="messages")
demo.launch()
|