Spaces:
Sleeping
Sleeping
Robin Chiu committed on
Commit ·
9f63939
1
Parent(s): 4c29c7e
Add the agent mcp
Browse files- app.py +39 -8
- pyproject.toml +2 -0
- requirements.txt +1 -0
- uv.lock +0 -0
app.py
CHANGED
|
@@ -1,5 +1,7 @@
|
|
| 1 |
# %%
|
|
|
|
| 2 |
from bs4 import BeautifulSoup
|
|
|
|
| 3 |
|
| 4 |
def parse_news_item(html: str) -> dict:
|
| 5 |
soup = BeautifulSoup(html, "html.parser")
|
|
@@ -29,9 +31,6 @@ def parse_news_item(html: str) -> dict:
|
|
| 29 |
|
| 30 |
|
| 31 |
# %%
|
| 32 |
-
import requests
|
| 33 |
-
from bs4 import BeautifulSoup
|
| 34 |
-
|
| 35 |
def search_news(keyword, page=1):
|
| 36 |
"""
|
| 37 |
Fetch news articles related to a keyword from udn.com.
|
|
@@ -98,6 +97,35 @@ def get_content(url):
|
|
| 98 |
'text': text_content
|
| 99 |
}
|
| 100 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 101 |
|
| 102 |
# get_content('https://money.udn.com/money/story/5612/8832289?from=edn_search_result') # Example usage to fetch content from a specific URL
|
| 103 |
|
|
@@ -105,7 +133,6 @@ def get_content(url):
|
|
| 105 |
# using the gradio to create two tab
|
| 106 |
# 1. search news
|
| 107 |
# 2. get content from url
|
| 108 |
-
import gradio as gr
|
| 109 |
def main():
|
| 110 |
with gr.Blocks() as demo:
|
| 111 |
gr.Markdown("# News Search and Content Fetcher")
|
|
@@ -121,11 +148,15 @@ def main():
|
|
| 121 |
with gr.Tab("Get Content from URL"):
|
| 122 |
url_input = gr.Textbox(label="URL", placeholder="Enter URL to fetch content")
|
| 123 |
content_output = gr.JSON(label="Content Output")
|
| 124 |
-
|
| 125 |
url_input.submit(get_content, inputs=url_input, outputs=content_output)
|
| 126 |
-
demo.launch(mcp_server=True, server_name="0.0.0.0",allowed_paths=["/"])
|
| 127 |
|
| 128 |
-
|
| 129 |
-
|
|
|
|
|
|
|
|
|
|
| 130 |
|
|
|
|
| 131 |
|
|
|
|
|
|
|
|
|
| 1 |
# %%
|
| 2 |
+
import requests
|
| 3 |
from bs4 import BeautifulSoup
|
| 4 |
+
import gradio as gr
|
| 5 |
|
| 6 |
def parse_news_item(html: str) -> dict:
|
| 7 |
soup = BeautifulSoup(html, "html.parser")
|
|
|
|
| 31 |
|
| 32 |
|
| 33 |
# %%
|
|
|
|
|
|
|
|
|
|
| 34 |
def search_news(keyword, page=1):
|
| 35 |
"""
|
| 36 |
Fetch news articles related to a keyword from udn.com.
|
|
|
|
| 97 |
'text': text_content
|
| 98 |
}
|
| 99 |
|
| 100 |
+
# %%
from smolagents import Tool, CodeAgent, LiteLLMModel, ToolCollection, ActionStep, FinalAnswerStep
import os

# Fail fast with an actionable message instead of a bare KeyError when the
# API key is missing (the module is imported at app startup, so this is the
# first thing a misconfigured deployment will hit).
_api_key = os.environ.get("OPENROUTER_API_KEY")
if not _api_key:
    raise RuntimeError(
        "OPENROUTER_API_KEY environment variable is not set; "
        "it is required to reach the OpenRouter LLM backend."
    )

# Free-tier Qwen coder model served through OpenRouter via LiteLLM.
model = LiteLLMModel("openrouter/qwen/qwen-2.5-coder-32b-instruct:free", api_key=_api_key)

# MCP endpoint of the news tools (SSE transport) that the agent will call.
url = "https://robin0307-newsmcp.hf.space/gradio_api/mcp/sse"
server_parameters = {"url": url, "transport": "sse"}
|
| 107 |
+
|
| 108 |
+
def newsAgent(task: str) -> str:
    """
    Run a CodeAgent, equipped with the news MCP tools, on a news task.

    Args:
        task: Natural-language description of the news task.

    Returns:
        A transcript of the agent's intermediate steps followed by the
        final answer.

    Note:
        Previously each streamed event overwrote ``result``, so every
        intermediate step trace was silently discarded, and when the agent
        exhausted ``max_steps`` without producing a FinalAnswerStep the
        caller got only the last step's raw code with no final answer.
        The transcript is now accumulated instead (the commented-out
        ``yield`` calls in the original showed this was the intent).
    """
    parts = []
    # Open the MCP tool connection only for the duration of this task.
    with ToolCollection.from_mcp(server_parameters, trust_remote_code=True) as mcp_tools:
        agent = CodeAgent(tools=[*mcp_tools.tools], model=model)
        # Stream events; cap at 5 steps to bound cost on the free-tier model.
        for event in agent.run(task, stream=True, max_steps=5):
            if isinstance(event, ActionStep):
                parts.append(f"\n======Step {event.step_number}======\n{event.code_action}")
            elif isinstance(event, FinalAnswerStep):
                parts.append(f"\n======Final======\n{event.output}")
    return "".join(parts)
|
| 129 |
|
| 130 |
# get_content('https://money.udn.com/money/story/5612/8832289?from=edn_search_result') # Example usage to fetch content from a specific URL
|
| 131 |
|
|
|
|
| 133 |
# using the gradio to create two tab
|
| 134 |
# 1. search news
|
| 135 |
# 2. get content from url
|
|
|
|
| 136 |
def main():
|
| 137 |
with gr.Blocks() as demo:
|
| 138 |
gr.Markdown("# News Search and Content Fetcher")
|
|
|
|
| 148 |
with gr.Tab("Get Content from URL"):
|
| 149 |
url_input = gr.Textbox(label="URL", placeholder="Enter URL to fetch content")
|
| 150 |
content_output = gr.JSON(label="Content Output")
|
|
|
|
| 151 |
url_input.submit(get_content, inputs=url_input, outputs=content_output)
|
|
|
|
| 152 |
|
| 153 |
+
with gr.Tab("News Agent"):
|
| 154 |
+
agent_input = gr.Textbox(label="Task", placeholder="Enter the task")
|
| 155 |
+
run_button = gr.Button("Run")
|
| 156 |
+
result_output = gr.Textbox(label="Result", lines=10)
|
| 157 |
+
run_button.click(newsAgent, inputs=agent_input, outputs=result_output)
|
| 158 |
|
| 159 |
+
demo.launch(mcp_server=True, server_name="0.0.0.0",allowed_paths=["/"], share=True)
|
| 160 |
|
| 161 |
+
if __name__ == "__main__":
|
| 162 |
+
main()
|
pyproject.toml
CHANGED
|
@@ -7,4 +7,6 @@ requires-python = ">=3.13"
|
|
| 7 |
dependencies = [
|
| 8 |
"bs4>=0.0.2",
|
| 9 |
"gradio[mcp]>=5.33.1",
|
|
|
|
|
|
|
| 10 |
]
|
|
|
|
| 7 |
dependencies = [
|
| 8 |
"bs4>=0.0.2",
|
| 9 |
"gradio[mcp]>=5.33.1",
|
| 10 |
+
"litellm>=1.74.1",
|
| 11 |
+
"smolagents[litellm,mcp]>=1.20.0",
|
| 12 |
]
|
requirements.txt
CHANGED
|
@@ -1,2 +1,3 @@
|
|
| 1 |
gradio[mcp]
|
| 2 |
bs4
|
|
|
|
|
|
| 1 |
gradio[mcp]
|
| 2 |
bs4
|
| 3 |
+
smolagents[litellm,mcp]
|
uv.lock
CHANGED
|
The diff for this file is too large to render.
See raw diff
|
|
|