Spaces:
Runtime error
Runtime error
Upload folder using huggingface_hub
Browse files- .env.example +2 -0
- .github/workflows/update_space.yml +28 -0
- .gitignore +4 -0
- .pytest_cache/.gitignore +2 -0
- .pytest_cache/CACHEDIR.TAG +4 -0
- .pytest_cache/README.md +8 -0
- .pytest_cache/v/cache/lastfailed +3 -0
- .pytest_cache/v/cache/nodeids +1 -0
- .pytest_cache/v/cache/stepwise +1 -0
- README.md +103 -7
- gradio_ui.py +79 -0
- integration_test.py +39 -0
- main.py +108 -0
- requirements.txt +4 -0
- swarm_config.json +16 -0
- swarm_editor.py +44 -0
- user_interface.py +68 -0
.env.example
ADDED
|
@@ -0,0 +1,2 @@
|
|
|
|
|
|
|
|
|
|
| 1 |
+
OPENAI_API_KEY=
|
| 2 |
+
FIRECRAWL_API_KEY=
|
.github/workflows/update_space.yml
ADDED
|
@@ -0,0 +1,28 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
name: Run Python script
|
| 2 |
+
|
| 3 |
+
on:
|
| 4 |
+
push:
|
| 5 |
+
branches:
|
| 6 |
+
- main
|
| 7 |
+
|
| 8 |
+
jobs:
|
| 9 |
+
build:
|
| 10 |
+
runs-on: ubuntu-latest
|
| 11 |
+
|
| 12 |
+
steps:
|
| 13 |
+
- name: Checkout
|
| 14 |
+
uses: actions/checkout@v2
|
| 15 |
+
|
| 16 |
+
- name: Set up Python
|
| 17 |
+
uses: actions/setup-python@v2
|
| 18 |
+
with:
|
| 19 |
+
python-version: '3.9'
|
| 20 |
+
|
| 21 |
+
- name: Install Gradio
|
| 22 |
+
run: python -m pip install gradio
|
| 23 |
+
|
| 24 |
+
- name: Log in to Hugging Face
|
| 25 |
+
run: python -c 'import huggingface_hub; huggingface_hub.login(token="${{ secrets.hf_token }}")'
|
| 26 |
+
|
| 27 |
+
- name: Deploy to Spaces
|
| 28 |
+
run: gradio deploy
|
.gitignore
ADDED
|
@@ -0,0 +1,4 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
project_snapshot_no_images.json
|
| 2 |
+
.venv
|
| 3 |
+
.env
|
| 4 |
+
__pycache__
|
.pytest_cache/.gitignore
ADDED
|
@@ -0,0 +1,2 @@
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Created by pytest automatically.
|
| 2 |
+
*
|
.pytest_cache/CACHEDIR.TAG
ADDED
|
@@ -0,0 +1,4 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Signature: 8a477f597d28d172789f06886806bc55
|
| 2 |
+
# This file is a cache directory tag created by pytest.
|
| 3 |
+
# For information about cache directory tags, see:
|
| 4 |
+
# https://bford.info/cachedir/spec.html
|
.pytest_cache/README.md
ADDED
|
@@ -0,0 +1,8 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# pytest cache directory #
|
| 2 |
+
|
| 3 |
+
This directory contains data from the pytest's cache plugin,
|
| 4 |
+
which provides the `--lf` and `--ff` options, as well as the `cache` fixture.
|
| 5 |
+
|
| 6 |
+
**Do not** commit this to version control.
|
| 7 |
+
|
| 8 |
+
See [the docs](https://docs.pytest.org/en/stable/how-to/cache.html) for more information.
|
.pytest_cache/v/cache/lastfailed
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{
|
| 2 |
+
"tests/integration_test.py": true
|
| 3 |
+
}
|
.pytest_cache/v/cache/nodeids
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
[]
|
.pytest_cache/v/cache/stepwise
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
[]
|
README.md
CHANGED
|
@@ -1,12 +1,108 @@
|
|
| 1 |
---
|
| 2 |
-
title:
|
| 3 |
-
|
| 4 |
-
colorFrom: yellow
|
| 5 |
-
colorTo: pink
|
| 6 |
sdk: gradio
|
| 7 |
sdk_version: 5.1.0
|
| 8 |
-
app_file: app.py
|
| 9 |
-
pinned: false
|
| 10 |
---
|
|
|
|
| 11 |
|
| 12 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
---
|
| 2 |
+
title: openai_swarm_firecrawl
|
| 3 |
+
app_file: user_interface.py
|
|
|
|
|
|
|
| 4 |
sdk: gradio
|
| 5 |
sdk_version: 5.1.0
|
|
|
|
|
|
|
| 6 |
---
|
| 7 |
+
# Swarm Firecrawl Marketing Agent
|
| 8 |
|
| 9 |
+
A multi-agent system using OpenAI's Swarm for AI-powered content analysis and generation, integrated with Firecrawl for web scraping. This project features a Gradio-based user interface for easy interaction and configuration of the agent swarm.
|
| 10 |
+
|
| 11 |
+
## Features
|
| 12 |
+
|
| 13 |
+
- Web scraping using Firecrawl API
|
| 14 |
+
- Configurable multi-agent system for content analysis and generation
|
| 15 |
+
- Interactive Gradio-based graphical user interface
|
| 16 |
+
- Real-time updates on scraping and agent progress
|
| 17 |
+
- Ability to modify agent configurations on-the-fly
|
| 18 |
+
|
| 19 |
+
## Requirements
|
| 20 |
+
|
| 21 |
+
- Python 3.7+
|
| 22 |
+
- Firecrawl API key
|
| 23 |
+
- OpenAI API key
|
| 24 |
+
|
| 25 |
+
## Setup
|
| 26 |
+
|
| 27 |
+
1. Clone the repository:
|
| 28 |
+
```
|
| 29 |
+
git clone https://github.com/your-username/swarm-firecrawl-marketing-agent.git
|
| 30 |
+
cd swarm-firecrawl-marketing-agent
|
| 31 |
+
```
|
| 32 |
+
|
| 33 |
+
2. Install the required packages:
|
| 34 |
+
```
|
| 35 |
+
pip install -r requirements.txt
|
| 36 |
+
```
|
| 37 |
+
|
| 38 |
+
3. Set up your environment variables in a `.env` file:
|
| 39 |
+
```
|
| 40 |
+
OPENAI_API_KEY=your_openai_api_key
|
| 41 |
+
FIRECRAWL_API_KEY=your_firecrawl_api_key
|
| 42 |
+
```
|
| 43 |
+
|
| 44 |
+
## Usage
|
| 45 |
+
|
| 46 |
+
### User Interface
|
| 47 |
+
|
| 48 |
+
To use the Gradio-based user interface:
|
| 49 |
+
|
| 50 |
+
1. Run the user interface:
|
| 51 |
+
```
|
| 52 |
+
python user_interface.py
|
| 53 |
+
```
|
| 54 |
+
|
| 55 |
+
2. In the Gradio interface:
|
| 56 |
+
- The URL is pre-filled with https://www.lazzloe.com/, but you can change it if needed.
|
| 57 |
+
- Click "Scrape Website" to fetch the content.
|
| 58 |
+
- Modify agent configurations in the respective tabs if desired.
|
| 59 |
+
- Click "Run Workflow" to process the scraped content through the SwarmEditor.
|
| 60 |
+
|
| 61 |
+
### Configuration
|
| 62 |
+
|
| 63 |
+
The system uses a configuration file to define the agents in the swarm. You can modify the `swarm_config.json` file to change the number of agents, their names, and instructions.
|
| 64 |
+
|
| 65 |
+
Example configuration:
|
| 66 |
+
```json
|
| 67 |
+
{
|
| 68 |
+
"agents": [
|
| 69 |
+
{
|
| 70 |
+
"name": "Agent 1",
|
| 71 |
+
"instructions": "Process the input data and provide initial insights."
|
| 72 |
+
},
|
| 73 |
+
{
|
| 74 |
+
"name": "Agent 2",
|
| 75 |
+
"instructions": "Analyze the insights from Agent 1 and generate recommendations."
|
| 76 |
+
},
|
| 77 |
+
{
|
| 78 |
+
"name": "Agent 3",
|
| 79 |
+
"instructions": "Create a final report based on the recommendations from Agent 2."
|
| 80 |
+
}
|
| 81 |
+
]
|
| 82 |
+
}
|
| 83 |
+
```
|
| 84 |
+
|
| 85 |
+
### Integration Test
|
| 86 |
+
|
| 87 |
+
To run the integration test:
|
| 88 |
+
|
| 89 |
+
```
|
| 90 |
+
python integration_test.py
|
| 91 |
+
```
|
| 92 |
+
|
| 93 |
+
This will use Firecrawl to scrape https://www.okgo.app/, then pass the scraped content through the SwarmEditor workflow, and finally output the result to stdout.
|
| 94 |
+
|
| 95 |
+
## Project Structure
|
| 96 |
+
|
| 97 |
+
- `swarm_editor.py`: Contains the SwarmEditor class for managing the agent swarm.
|
| 98 |
+
- `user_interface.py`: Implements the Gradio-based user interface.
|
| 99 |
+
- `integration_test.py`: Provides an end-to-end test of the system.
|
| 100 |
+
- `swarm_config.json`: Configuration file for defining the agent swarm.
|
| 101 |
+
|
| 102 |
+
## Contributing
|
| 103 |
+
|
| 104 |
+
Contributions are welcome! Please feel free to submit a Pull Request.
|
| 105 |
+
|
| 106 |
+
## License
|
| 107 |
+
|
| 108 |
+
This project is licensed under the MIT License - see the [LICENSE](LICENSE) file for details.
|
gradio_ui.py
ADDED
|
@@ -0,0 +1,79 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import gradio as gr
|
| 2 |
+
from main import user_interface_agent, scrape_website, analyze_website_content, create_campaign_idea, generate_copy
|
| 3 |
+
from swarm import Swarm, Agent, Response
|
| 4 |
+
from typing import Dict, Any
|
| 5 |
+
|
| 6 |
+
class GradioSwarmApp:
    """Stateful bridge between the Gradio UI and the Swarm client.

    Keeps the running conversation, the currently active agent, and the
    accumulated context variables alive across calls to process_message().
    """

    def __init__(self):
        self.client = Swarm()
        self.messages = []                 # full conversation history
        self.agent = user_interface_agent  # agent that handles the next turn
        self.context_variables = {}        # shared state handed between agents

    def process_message(self, message: str) -> Dict[str, Any]:
        """Append a user message, run the swarm once, and return a formatted reply."""
        self.messages.append({"role": "user", "content": message})
        run_result = self.client.run(
            agent=self.agent,
            messages=self.messages,
            context_variables=self.context_variables,
            stream=False,
        )
        # Carry the swarm's state forward so the next turn resumes where
        # this one left off (possibly with a different active agent).
        self.messages.extend(run_result.messages)
        self.agent = run_result.agent
        self.context_variables.update(run_result.context_variables)
        return self.format_response(run_result)

    def format_response(self, response: Response) -> Dict[str, Any]:
        """Split swarm messages into assistant text, scraper output, and other tool outputs."""
        out: Dict[str, Any] = {
            "assistant_message": "",
            "scraper_output": "",
            "agent_outputs": [],
        }
        for msg in response.messages:
            role = msg["role"]
            if role == "assistant":
                out["assistant_message"] += f"{msg['sender']}: {msg['content']}\n"
            elif role == "tool":
                # The scraper gets its own panel; every other tool result
                # is collected into the agent-outputs list.
                if msg["tool_name"] == "scrape_website":
                    out["scraper_output"] = msg["content"]
                else:
                    out["agent_outputs"].append(f"{msg['tool_name']}:\n{msg['content']}")
        return out
|
| 43 |
+
|
| 44 |
+
def create_ui():
    """Build the Gradio Blocks interface wired to a fresh GradioSwarmApp."""
    swarm_app = GradioSwarmApp()

    with gr.Blocks() as demo:
        gr.Markdown("# Swarm Firecrawl Marketing Agent")

        with gr.Row():
            input_text = gr.Textbox(label="Enter URL or message")
            submit_btn = gr.Button("Submit")

        with gr.Row():
            assistant_output = gr.Textbox(label="Assistant Output", lines=10)

        with gr.Row():
            scraper_output = gr.Textbox(label="Scraper Output", lines=10)
            agent_outputs = gr.Textbox(label="Agent Outputs", lines=10)

        def process_input(message):
            # Run one conversation turn and fan the pieces out to the panels.
            reply = swarm_app.process_message(message)
            joined_tools = "\n\n".join(reply["agent_outputs"])
            return reply["assistant_message"], reply["scraper_output"], joined_tools

        submit_btn.click(
            process_input,
            inputs=[input_text],
            outputs=[assistant_output, scraper_output, agent_outputs],
        )

    return demo


if __name__ == "__main__":
    ui = create_ui()
    ui.launch()
|
integration_test.py
ADDED
|
@@ -0,0 +1,39 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from swarm_editor import SwarmEditor
|
| 2 |
+
from firecrawl import FirecrawlApp
|
| 3 |
+
from dotenv import load_dotenv
|
| 4 |
+
import os
|
| 5 |
+
|
| 6 |
+
load_dotenv()
|
| 7 |
+
|
| 8 |
+
def scrape_website(url):
    """Scrape *url* via Firecrawl and return its markdown rendering.

    Raises:
        ValueError: if FIRECRAWL_API_KEY is not set in the environment.
    """
    api_key = os.getenv("FIRECRAWL_API_KEY")
    if not api_key:
        raise ValueError("FIRECRAWL_API_KEY environment variable not set")

    firecrawl = FirecrawlApp(api_key=api_key)
    result = firecrawl.scrape_url(
        url,
        params={'formats': ['markdown']}
    )
    # Fall back to a placeholder when the scrape produced no markdown.
    return result.get('markdown', 'No content scraped')
|
| 19 |
+
|
| 20 |
+
def integration_test():
    """End-to-end check: scrape a live site and push it through the swarm.

    Uses the on-disk swarm_config.json and prints the last agent's message,
    so a human can eyeball the full pipeline output.
    """
    editor = SwarmEditor()
    editor.load_configuration('swarm_config.json')

    # Fetch real content so every agent sees realistic input.
    scraped_content = scrape_website("https://www.okgo.app/")

    result = editor.run_workflow(scraped_content)

    # The final agent's message is the product of the whole pipeline.
    print("Final output:")
    print(result.messages[-1]["content"])


if __name__ == "__main__":
    integration_test()
|
main.py
ADDED
|
@@ -0,0 +1,108 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import os
|
| 2 |
+
from firecrawl import FirecrawlApp
|
| 3 |
+
from swarm import Agent
|
| 4 |
+
from swarm.repl import run_demo_loop
|
| 5 |
+
import dotenv
|
| 6 |
+
from openai import OpenAI
|
| 7 |
+
|
| 8 |
+
dotenv.load_dotenv()
|
| 9 |
+
|
| 10 |
+
# Initialize FirecrawlApp and OpenAI
|
| 11 |
+
app = FirecrawlApp(api_key=os.getenv("FIRECRAWL_API_KEY"))
|
| 12 |
+
client = OpenAI(api_key=os.getenv("OPENAI_API_KEY"))
|
| 13 |
+
|
| 14 |
+
def scrape_website(url):
    """Scrape a website using Firecrawl."""
    # NOTE: the docstring above doubles as the tool description Swarm shows
    # the model — keep it stable. Only the markdown rendering is requested;
    # the full Firecrawl response dict is returned to the caller.
    return app.scrape_url(
        url,
        params={'formats': ['markdown']}
    )
|
| 21 |
+
|
| 22 |
+
def generate_completion(role, task, content):
    """Ask the chat model to act as *role* performing *task* on *content*.

    Returns the model's reply text. Shared helper for every specialist tool
    in this module.
    """
    system_prompt = f"You are a {role}. {task}"
    chat = client.chat.completions.create(
        model="gpt-4o-mini",
        messages=[
            {"role": "system", "content": system_prompt},
            {"role": "user", "content": content},
        ],
    )
    return chat.choices[0].message.content
|
| 32 |
+
|
| 33 |
+
def analyze_website_content(content):
    """Analyze the scraped website content using OpenAI."""
    # Wrap the completion in a dict so the swarm records a named tool result.
    return {
        "analysis": generate_completion(
            "marketing analyst",
            "Analyze the following website content and provide key insights for marketing strategy.",
            content,
        )
    }
|
| 41 |
+
|
| 42 |
+
def generate_copy(brief):
    """Generate marketing copy based on a brief using OpenAI."""
    # Wrap the completion in a dict so the swarm records a named tool result.
    return {
        "copy": generate_completion(
            "copywriter",
            "Create compelling marketing copy based on the following brief.",
            brief,
        )
    }
|
| 50 |
+
|
| 51 |
+
def create_campaign_idea(target_audience, goals):
    """Create a campaign idea based on target audience and goals using OpenAI."""
    # Fold both inputs into a single brief for the completion helper.
    brief = f"Target Audience: {target_audience}\nGoals: {goals}"
    return {
        "campaign_idea": generate_completion(
            "marketing strategist",
            "Create an innovative campaign idea based on the target audience and goals provided.",
            brief,
        )
    }
|
| 59 |
+
|
| 60 |
+
# Handoff tools: returning an Agent from a tool call tells Swarm to make
# that agent the active one. The docstrings are shown to the model as tool
# descriptions, so they are kept verbatim.

def handoff_to_copywriter():
    """Hand off the campaign idea to the copywriter agent."""
    return copywriter_agent


def handoff_to_analyst():
    """Hand off the website content to the analyst agent."""
    return analyst_agent


def handoff_to_campaign_idea():
    """Hand off the target audience and goals to the campaign idea agent."""
    return campaign_idea_agent


def handoff_to_website_scraper():
    """Hand off the url to the website scraper agent."""
    return website_scraper_agent
|
| 75 |
+
|
| 76 |
+
# --- Agent definitions ------------------------------------------------------
# The swarm forms a pipeline: user interface -> website scraper -> analyst ->
# campaign idea -> copywriter. Each agent lists the tools it may call,
# including the handoff that advances the pipeline.

user_interface_agent = Agent(
    name="User Interface Agent",
    instructions="You are a user interface agent that handles all interactions with the user. You need to always start with a URL that the user wants to create a marketing strategy for. Ask clarification questions if needed. Be concise.",
    functions=[handoff_to_website_scraper],
)

website_scraper_agent = Agent(
    name="Website Scraper Agent",
    instructions="You are a website scraper agent specialized in scraping website content.",
    functions=[scrape_website, handoff_to_analyst],
)

analyst_agent = Agent(
    name="Analyst Agent",
    instructions="You are an analyst agent that examines website content and provides insights for marketing strategies. Be concise.",
    functions=[analyze_website_content, handoff_to_campaign_idea],
)

campaign_idea_agent = Agent(
    name="Campaign Idea Agent",
    instructions="You are a campaign idea agent that creates innovative marketing campaign ideas based on website content and target audience. Be concise.",
    functions=[create_campaign_idea, handoff_to_copywriter],
)

copywriter_agent = Agent(
    name="Copywriter Agent",
    instructions="You are a copywriter agent specialized in creating compelling marketing copy based on website content and campaign ideas. Be concise.",
    functions=[generate_copy],
)

if __name__ == "__main__":
    # Run the demo loop with the user interface agent
    run_demo_loop(user_interface_agent, stream=True)
|
requirements.txt
ADDED
|
@@ -0,0 +1,4 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
firecrawl-py
|
| 2 |
+
openai
|
| 3 |
+
git+https://github.com/openai/swarm.git
|
| 4 |
+
gradio
|
swarm_config.json
ADDED
|
@@ -0,0 +1,16 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{
|
| 2 |
+
"agents": [
|
| 3 |
+
{
|
| 4 |
+
"name": "Analyst",
|
| 5 |
+
"instructions": "Analyze the scraped content and provide insights."
|
| 6 |
+
},
|
| 7 |
+
{
|
| 8 |
+
"name": "Campaign Idea Generator",
|
| 9 |
+
"instructions": "Generate campaign ideas based on the analysis."
|
| 10 |
+
},
|
| 11 |
+
{
|
| 12 |
+
"name": "Copywriter",
|
| 13 |
+
"instructions": "Create compelling copy based on the campaign idea."
|
| 14 |
+
}
|
| 15 |
+
]
|
| 16 |
+
}
|
swarm_editor.py
ADDED
|
@@ -0,0 +1,44 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from swarm import Swarm, Agent
|
| 2 |
+
from swarm.core import Result
|
| 3 |
+
import json
|
| 4 |
+
from typing import Dict, List
|
| 5 |
+
|
| 6 |
+
class SwarmEditor:
    """Manage an ordered pipeline of Swarm agents loaded from a JSON config.

    Agents run sequentially in run_workflow(); each agent sees the message
    history produced by its predecessor.
    """

    def __init__(self):
        self.agents: List[Agent] = []
        self.swarm = Swarm()

    def add_agent(self, name: str, instructions: str):
        """Append a new agent to the end of the pipeline."""
        self.agents.append(Agent(
            name=name,
            instructions=instructions
        ))

    def update_agent(self, index: int, name: str, instructions: str):
        """Replace the agent at *index*; out-of-range indices are silently ignored."""
        if 0 <= index < len(self.agents):
            self.agents[index] = Agent(
                name=name,
                instructions=instructions
            )

    def load_configuration(self, config_file: str):
        """Rebuild the pipeline from a JSON file shaped like
        {"agents": [{"name": ..., "instructions": ...}, ...]}.
        """
        with open(config_file, 'r') as f:
            config = json.load(f)

        self.agents = []
        for agent_config in config['agents']:
            self.add_agent(agent_config['name'], agent_config['instructions'])

    def run_workflow(self, initial_input: str):
        """Run each agent once, in order, threading messages and context through.

        Returns the final swarm response (None if the pipeline is empty).
        The first agent sees *initial_input* as a user message; every later
        agent sees the full message history from the previous run.
        """
        # Fix: the original reassigned initial_input from the last message on
        # every iteration, but that value was never read again (subsequent
        # iterations use response.messages). The message threading is now
        # explicit and the dead assignment is gone.
        messages = [{"role": "user", "content": initial_input}]
        context_variables = {}
        response = None
        for agent in self.agents:
            response = self.swarm.run(
                agent=agent,
                messages=messages,
                context_variables=context_variables,
                max_turns=1
            )
            context_variables.update(response.context_variables)
            messages = response.messages
        return response
|
user_interface.py
ADDED
|
@@ -0,0 +1,68 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import gradio as gr
|
| 2 |
+
import json
|
| 3 |
+
from swarm_editor import SwarmEditor
|
| 4 |
+
from firecrawl import FirecrawlApp
|
| 5 |
+
from dotenv import load_dotenv
|
| 6 |
+
import os
|
| 7 |
+
|
| 8 |
+
load_dotenv()
|
| 9 |
+
|
| 10 |
+
CONFIG_FILE = 'swarm_config.json'
|
| 11 |
+
|
| 12 |
+
class UserInterface:
    """Gradio front-end for editing the agent swarm and running the workflow."""

    def __init__(self):
        self.swarm_editor = SwarmEditor()
        self.firecrawl_app = FirecrawlApp(api_key=os.getenv("FIRECRAWL_API_KEY"))
        self.load_config()

    def load_config(self):
        """Load swarm_config.json into self.config and into the SwarmEditor."""
        with open(CONFIG_FILE, 'r') as f:
            self.config = json.load(f)
        self.swarm_editor.load_configuration(CONFIG_FILE)
        return self.config

    def scrape_website(self, url):
        """Scrape *url* with Firecrawl and return its markdown (or a placeholder)."""
        scrape_status = self.firecrawl_app.scrape_url(
            url,
            params={'formats': ['markdown']}
        )
        return scrape_status.get('markdown', 'No content scraped')

    def update_agent(self, index, name, instructions):
        """Update the agent at *index* with new name/instructions.

        Fix: gr.Number delivers the index as a float; SwarmEditor's bounds
        check passes for e.g. 0.0 but list indexing with a float raises
        TypeError. Cast to int before delegating.
        """
        index = int(index)
        self.swarm_editor.update_agent(index, name, instructions)
        return f"Agent {index} updated"

    def run_workflow(self, scraped_content):
        """Run the editor's workflow on *scraped_content*; return the final message."""
        response = self.swarm_editor.run_workflow(scraped_content)
        return response.messages[-1]["content"]

    def launch(self):
        """Build and launch the Gradio Blocks interface."""
        with gr.Blocks() as interface:
            gr.Markdown("# Agent Workflow Editor")

            url = gr.Textbox(label="URL to Scrape", value="https://www.lazzloe.com/")
            scrape_button = gr.Button("Scrape Website")
            scraped_content = gr.Textbox(label="Scraped Content")

            agent_tabs = gr.Tabs()

            with agent_tabs:
                # One editable tab per configured agent; the hidden Number
                # carries the agent's index into the click handler.
                for i, agent in enumerate(self.config['agents']):
                    with gr.Tab(f"Agent {i}"):
                        name = gr.Textbox(label="Name", value=agent['name'])
                        instructions = gr.Textbox(label="Instructions", value=agent['instructions'], lines=3)
                        update_button = gr.Button(f"Update Agent {i}")
                        update_output = gr.Textbox(label="Update Status")
                        update_button.click(self.update_agent, inputs=[gr.Number(value=i, visible=False), name, instructions], outputs=[update_output])

            run_button = gr.Button("Run Workflow")
            workflow_output = gr.Textbox(label="Workflow Output")

            scrape_button.click(self.scrape_website, inputs=[url], outputs=[scraped_content])
            run_button.click(self.run_workflow, inputs=[scraped_content], outputs=[workflow_output])

        interface.launch()

if __name__ == "__main__":
    ui = UserInterface()
    ui.launch()
|