Spaces:
Running
Running
Update to new app icon (1024px)
Browse files- .gitattributes +1 -0
- logo.png +0 -0
- reachy_ios_bridge/database.py +157 -0
- reachy_ios_bridge/openai_realtime.py +19 -1
- reachy_ios_bridge/routes/__init__.py +2 -0
- reachy_ios_bridge/routes/conversation.py +76 -0
- reachy_ios_bridge/routes/websites.py +221 -0
- reachy_ios_bridge/server.py +25 -0
- reachy_ios_bridge/tools/__init__.py +32 -1
- reachy_ios_bridge/tools/website_generator.py +474 -0
.gitattributes
CHANGED
|
@@ -33,3 +33,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
|
|
| 33 |
*.zip filter=lfs diff=lfs merge=lfs -text
|
| 34 |
*.zst filter=lfs diff=lfs merge=lfs -text
|
| 35 |
*tfevents* filter=lfs diff=lfs merge=lfs -text
|
|
|
|
|
|
| 33 |
*.zip filter=lfs diff=lfs merge=lfs -text
|
| 34 |
*.zst filter=lfs diff=lfs merge=lfs -text
|
| 35 |
*tfevents* filter=lfs diff=lfs merge=lfs -text
|
| 36 |
+
logo.png filter=lfs diff=lfs merge=lfs -text
|
logo.png
CHANGED
|
|
Git LFS Details
|
reachy_ios_bridge/database.py
CHANGED
|
@@ -103,6 +103,23 @@ class DatabaseService:
|
|
| 103 |
)
|
| 104 |
""")
|
| 105 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 106 |
logger.debug("Database tables created/verified")
|
| 107 |
|
| 108 |
async def _migrate_add_enabled_tools(self, db: aiosqlite.Connection) -> None:
|
|
@@ -583,6 +600,129 @@ class DatabaseService:
|
|
| 583 |
rows = await cursor.fetchall()
|
| 584 |
return {row[0]: row[1] for row in rows}
|
| 585 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 586 |
# =========================================================================
|
| 587 |
# Helper Methods
|
| 588 |
# =========================================================================
|
|
@@ -636,6 +776,23 @@ class DatabaseService:
|
|
| 636 |
"updated_at": row["updated_at"],
|
| 637 |
}
|
| 638 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 639 |
async def close(self) -> None:
|
| 640 |
"""Close database connection (if using persistent connection)."""
|
| 641 |
if self._connection:
|
|
|
|
| 103 |
)
|
| 104 |
""")
|
| 105 |
|
| 106 |
+
# Websites table for generated websites
|
| 107 |
+
await db.execute("""
|
| 108 |
+
CREATE TABLE IF NOT EXISTS websites (
|
| 109 |
+
id TEXT PRIMARY KEY,
|
| 110 |
+
title TEXT NOT NULL,
|
| 111 |
+
description TEXT DEFAULT '',
|
| 112 |
+
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
| 113 |
+
updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
| 114 |
+
)
|
| 115 |
+
""")
|
| 116 |
+
|
| 117 |
+
# Index for faster queries by creation date
|
| 118 |
+
await db.execute("""
|
| 119 |
+
CREATE INDEX IF NOT EXISTS idx_websites_created
|
| 120 |
+
ON websites(created_at)
|
| 121 |
+
""")
|
| 122 |
+
|
| 123 |
logger.debug("Database tables created/verified")
|
| 124 |
|
| 125 |
async def _migrate_add_enabled_tools(self, db: aiosqlite.Connection) -> None:
|
|
|
|
| 600 |
rows = await cursor.fetchall()
|
| 601 |
return {row[0]: row[1] for row in rows}
|
| 602 |
|
| 603 |
+
# =========================================================================
|
| 604 |
+
# Websites CRUD Operations
|
| 605 |
+
# =========================================================================
|
| 606 |
+
|
| 607 |
+
async def get_all_websites(self) -> list[dict]:
|
| 608 |
+
"""Get all saved websites from the database.
|
| 609 |
+
|
| 610 |
+
Returns:
|
| 611 |
+
List of website dictionaries.
|
| 612 |
+
"""
|
| 613 |
+
async with aiosqlite.connect(self.db_path) as db:
|
| 614 |
+
db.row_factory = aiosqlite.Row
|
| 615 |
+
cursor = await db.execute(
|
| 616 |
+
"SELECT * FROM websites ORDER BY created_at DESC"
|
| 617 |
+
)
|
| 618 |
+
rows = await cursor.fetchall()
|
| 619 |
+
return [self._row_to_website(row) for row in rows]
|
| 620 |
+
|
| 621 |
+
async def get_website(self, website_id: str) -> Optional[dict]:
|
| 622 |
+
"""Get a single website by ID.
|
| 623 |
+
|
| 624 |
+
Args:
|
| 625 |
+
website_id: The ID of the website.
|
| 626 |
+
|
| 627 |
+
Returns:
|
| 628 |
+
The website dictionary, or None if not found.
|
| 629 |
+
"""
|
| 630 |
+
async with aiosqlite.connect(self.db_path) as db:
|
| 631 |
+
db.row_factory = aiosqlite.Row
|
| 632 |
+
cursor = await db.execute(
|
| 633 |
+
"SELECT * FROM websites WHERE id = ?", (website_id,)
|
| 634 |
+
)
|
| 635 |
+
row = await cursor.fetchone()
|
| 636 |
+
return self._row_to_website(row) if row else None
|
| 637 |
+
|
| 638 |
+
async def create_website(self, website_data: dict) -> dict:
|
| 639 |
+
"""Create a new website record.
|
| 640 |
+
|
| 641 |
+
Args:
|
| 642 |
+
website_data: Dictionary containing website data (must include 'id', 'title').
|
| 643 |
+
|
| 644 |
+
Returns:
|
| 645 |
+
The created website dictionary.
|
| 646 |
+
"""
|
| 647 |
+
now = datetime.utcnow().isoformat()
|
| 648 |
+
created_at = website_data.get("created_at") or now
|
| 649 |
+
|
| 650 |
+
async with aiosqlite.connect(self.db_path) as db:
|
| 651 |
+
await db.execute(
|
| 652 |
+
"""
|
| 653 |
+
INSERT INTO websites (id, title, description, created_at, updated_at)
|
| 654 |
+
VALUES (?, ?, ?, ?, ?)
|
| 655 |
+
""",
|
| 656 |
+
(
|
| 657 |
+
website_data["id"],
|
| 658 |
+
website_data["title"],
|
| 659 |
+
website_data.get("description", ""),
|
| 660 |
+
created_at,
|
| 661 |
+
now,
|
| 662 |
+
),
|
| 663 |
+
)
|
| 664 |
+
await db.commit()
|
| 665 |
+
|
| 666 |
+
logger.info(f"Created website: {website_data['title']} ({website_data['id']})")
|
| 667 |
+
return await self.get_website(website_data["id"])
|
| 668 |
+
|
| 669 |
+
async def update_website(self, website_id: str, website_data: dict) -> Optional[dict]:
|
| 670 |
+
"""Update an existing website record.
|
| 671 |
+
|
| 672 |
+
Args:
|
| 673 |
+
website_id: The ID of the website to update.
|
| 674 |
+
website_data: Dictionary containing updated website data.
|
| 675 |
+
|
| 676 |
+
Returns:
|
| 677 |
+
The updated website dictionary, or None if not found.
|
| 678 |
+
"""
|
| 679 |
+
existing = await self.get_website(website_id)
|
| 680 |
+
if not existing:
|
| 681 |
+
return None
|
| 682 |
+
|
| 683 |
+
now = datetime.utcnow().isoformat()
|
| 684 |
+
|
| 685 |
+
async with aiosqlite.connect(self.db_path) as db:
|
| 686 |
+
await db.execute(
|
| 687 |
+
"""
|
| 688 |
+
UPDATE websites SET
|
| 689 |
+
title = ?,
|
| 690 |
+
description = ?,
|
| 691 |
+
updated_at = ?
|
| 692 |
+
WHERE id = ?
|
| 693 |
+
""",
|
| 694 |
+
(
|
| 695 |
+
website_data.get("title", existing["title"]),
|
| 696 |
+
website_data.get("description", existing["description"]),
|
| 697 |
+
now,
|
| 698 |
+
website_id,
|
| 699 |
+
),
|
| 700 |
+
)
|
| 701 |
+
await db.commit()
|
| 702 |
+
|
| 703 |
+
logger.info(f"Updated website: {website_id}")
|
| 704 |
+
return await self.get_website(website_id)
|
| 705 |
+
|
| 706 |
+
async def delete_website(self, website_id: str) -> bool:
|
| 707 |
+
"""Delete a website record.
|
| 708 |
+
|
| 709 |
+
Args:
|
| 710 |
+
website_id: The ID of the website to delete.
|
| 711 |
+
|
| 712 |
+
Returns:
|
| 713 |
+
True if deleted, False if not found.
|
| 714 |
+
"""
|
| 715 |
+
async with aiosqlite.connect(self.db_path) as db:
|
| 716 |
+
cursor = await db.execute(
|
| 717 |
+
"DELETE FROM websites WHERE id = ?", (website_id,)
|
| 718 |
+
)
|
| 719 |
+
await db.commit()
|
| 720 |
+
deleted = cursor.rowcount > 0
|
| 721 |
+
|
| 722 |
+
if deleted:
|
| 723 |
+
logger.info(f"Deleted website: {website_id}")
|
| 724 |
+
return deleted
|
| 725 |
+
|
| 726 |
# =========================================================================
|
| 727 |
# Helper Methods
|
| 728 |
# =========================================================================
|
|
|
|
| 776 |
"updated_at": row["updated_at"],
|
| 777 |
}
|
| 778 |
|
| 779 |
+
def _row_to_website(self, row: aiosqlite.Row) -> dict:
|
| 780 |
+
"""Convert a database row to a website dictionary.
|
| 781 |
+
|
| 782 |
+
Args:
|
| 783 |
+
row: The database row.
|
| 784 |
+
|
| 785 |
+
Returns:
|
| 786 |
+
Website dictionary with proper types.
|
| 787 |
+
"""
|
| 788 |
+
return {
|
| 789 |
+
"id": row["id"],
|
| 790 |
+
"title": row["title"],
|
| 791 |
+
"description": row["description"],
|
| 792 |
+
"created_at": row["created_at"],
|
| 793 |
+
"updated_at": row["updated_at"],
|
| 794 |
+
}
|
| 795 |
+
|
| 796 |
async def close(self) -> None:
|
| 797 |
"""Close database connection (if using persistent connection)."""
|
| 798 |
if self._connection:
|
reachy_ios_bridge/openai_realtime.py
CHANGED
|
@@ -138,6 +138,7 @@ class OpenAIRealtimeService:
|
|
| 138 |
self.on_error: Optional[Callable[[str], None]] = None
|
| 139 |
self.on_app_change: Optional[Callable[[dict], None]] = None # App activation changes
|
| 140 |
self.on_tool_usage: Optional[Callable[[str, str], None]] = None # (tool_name, status)
|
|
|
|
| 141 |
|
| 142 |
# Setup tools handler callbacks
|
| 143 |
self._setup_tools_handler()
|
|
@@ -264,7 +265,14 @@ Before using ANY tool, you MUST tell the user what you're about to do. This help
|
|
| 264 |
|
| 265 |
6. PERSONALIZATION: Remember user details for personal interactions.
|
| 266 |
- When user says their name: Use remember_user_name to save it
|
| 267 |
-
- To check if you know their name: Use get_user_name
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 268 |
|
| 269 |
def set_custom_personality(
|
| 270 |
self,
|
|
@@ -691,6 +699,16 @@ Before using ANY tool, you MUST tell the user what you're about to do. This help
|
|
| 691 |
|
| 692 |
logger.info(f"🔧 Tool result: {result}")
|
| 693 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 694 |
# Send result back to OpenAI
|
| 695 |
result_message = {
|
| 696 |
"type": "conversation.item.create",
|
|
|
|
| 138 |
self.on_error: Optional[Callable[[str], None]] = None
|
| 139 |
self.on_app_change: Optional[Callable[[dict], None]] = None # App activation changes
|
| 140 |
self.on_tool_usage: Optional[Callable[[str, str], None]] = None # (tool_name, status)
|
| 141 |
+
self.on_website_ready: Optional[Callable[[dict], None]] = None # Website generation result
|
| 142 |
|
| 143 |
# Setup tools handler callbacks
|
| 144 |
self._setup_tools_handler()
|
|
|
|
| 265 |
|
| 266 |
6. PERSONALIZATION: Remember user details for personal interactions.
|
| 267 |
- When user says their name: Use remember_user_name to save it
|
| 268 |
+
- To check if you know their name: Use get_user_name
|
| 269 |
+
|
| 270 |
+
7. WEBSITE GENERATION: Create beautiful websites with voice commands!
|
| 271 |
+
- Use generate_website when user asks to create a website, landing page, portfolio, etc.
|
| 272 |
+
- Examples: "Create a website for my bakery", "Make me a portfolio", "Build a landing page"
|
| 273 |
+
- You can also EDIT existing websites: "Change the colors", "Add a contact section"
|
| 274 |
+
- ALWAYS say "Let me create that website for you..." before using this tool.
|
| 275 |
+
- The website will be shown as a live preview while being built!""" + language_instruction
|
| 276 |
|
| 277 |
def set_custom_personality(
|
| 278 |
self,
|
|
|
|
| 699 |
|
| 700 |
logger.info(f"🔧 Tool result: {result}")
|
| 701 |
|
| 702 |
+
# Check if this is a website generation result
|
| 703 |
+
if tool_name == "generate_website" and result.get("success"):
|
| 704 |
+
if self.on_website_ready:
|
| 705 |
+
self.on_website_ready({
|
| 706 |
+
"website_id": result.get("website_id"),
|
| 707 |
+
"url": result.get("url"),
|
| 708 |
+
"title": result.get("title"),
|
| 709 |
+
"is_edit": result.get("is_edit", False),
|
| 710 |
+
})
|
| 711 |
+
|
| 712 |
# Send result back to OpenAI
|
| 713 |
result_message = {
|
| 714 |
"type": "conversation.item.create",
|
reachy_ios_bridge/routes/__init__.py
CHANGED
|
@@ -13,6 +13,7 @@ from .tools import router as tools_router
|
|
| 13 |
from .user_settings import router as user_settings_router
|
| 14 |
from .voice import router as voice_router
|
| 15 |
from .volume import router as volume_router
|
|
|
|
| 16 |
|
| 17 |
__all__ = [
|
| 18 |
"animations_router",
|
|
@@ -28,5 +29,6 @@ __all__ = [
|
|
| 28 |
"user_settings_router",
|
| 29 |
"voice_router",
|
| 30 |
"volume_router",
|
|
|
|
| 31 |
]
|
| 32 |
|
|
|
|
| 13 |
from .user_settings import router as user_settings_router
|
| 14 |
from .voice import router as voice_router
|
| 15 |
from .volume import router as volume_router
|
| 16 |
+
from .websites import router as websites_router
|
| 17 |
|
| 18 |
__all__ = [
|
| 19 |
"animations_router",
|
|
|
|
| 29 |
"user_settings_router",
|
| 30 |
"voice_router",
|
| 31 |
"volume_router",
|
| 32 |
+
"websites_router",
|
| 33 |
]
|
| 34 |
|
reachy_ios_bridge/routes/conversation.py
CHANGED
|
@@ -15,6 +15,7 @@ from ..animation_coordinator import AnimationCoordinator
|
|
| 15 |
from ..motion_service import MotionService
|
| 16 |
from ..openai_realtime import ConnectionState, OpenAIRealtimeService, SpeakingState
|
| 17 |
from ..speaking_gestures import SpeakingGesturesService
|
|
|
|
| 18 |
|
| 19 |
from .audio_stream_manager import AudioStreamManager, ConversationTimings
|
| 20 |
from .conversation_messages import (
|
|
@@ -84,6 +85,13 @@ def init_services() -> None:
|
|
| 84 |
openai_service.on_error = _on_error
|
| 85 |
openai_service.on_app_change = _on_app_change
|
| 86 |
openai_service.on_tool_usage = _on_tool_usage
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 87 |
|
| 88 |
logger.info("Conversation services initialized")
|
| 89 |
|
|
@@ -370,6 +378,74 @@ def _on_tool_usage(tool_name: str, status: str) -> None:
|
|
| 370 |
}))
|
| 371 |
|
| 372 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 373 |
def _on_app_change(data: dict) -> None:
|
| 374 |
"""Handle app activation/deactivation changes from voice commands.
|
| 375 |
|
|
|
|
| 15 |
from ..motion_service import MotionService
|
| 16 |
from ..openai_realtime import ConnectionState, OpenAIRealtimeService, SpeakingState
|
| 17 |
from ..speaking_gestures import SpeakingGesturesService
|
| 18 |
+
from ..tools import set_website_streaming_callbacks
|
| 19 |
|
| 20 |
from .audio_stream_manager import AudioStreamManager, ConversationTimings
|
| 21 |
from .conversation_messages import (
|
|
|
|
| 85 |
openai_service.on_error = _on_error
|
| 86 |
openai_service.on_app_change = _on_app_change
|
| 87 |
openai_service.on_tool_usage = _on_tool_usage
|
| 88 |
+
openai_service.on_website_ready = _on_website_ready
|
| 89 |
+
|
| 90 |
+
# Wire up website streaming callbacks for live preview
|
| 91 |
+
set_website_streaming_callbacks(
|
| 92 |
+
on_generating=_on_website_generating,
|
| 93 |
+
on_chunk=_on_website_chunk,
|
| 94 |
+
)
|
| 95 |
|
| 96 |
logger.info("Conversation services initialized")
|
| 97 |
|
|
|
|
| 378 |
}))
|
| 379 |
|
| 380 |
|
| 381 |
+
def _on_website_ready(data: dict) -> None:
|
| 382 |
+
"""Handle website generation completion.
|
| 383 |
+
|
| 384 |
+
Broadcasts to iOS clients so they can display the generated website.
|
| 385 |
+
|
| 386 |
+
Args:
|
| 387 |
+
data: Website data containing id, url, title, is_edit
|
| 388 |
+
"""
|
| 389 |
+
website_id = data.get("website_id", "")
|
| 390 |
+
url = data.get("url", "")
|
| 391 |
+
title = data.get("title", "Generated Website")
|
| 392 |
+
is_edit = data.get("is_edit", False)
|
| 393 |
+
|
| 394 |
+
action = "updated" if is_edit else "created"
|
| 395 |
+
logger.info(f"🌐 Website {action}: {title} ({website_id})")
|
| 396 |
+
|
| 397 |
+
asyncio.create_task(_broadcast({
|
| 398 |
+
"type": "website_ready",
|
| 399 |
+
"website_id": website_id,
|
| 400 |
+
"url": url,
|
| 401 |
+
"title": title,
|
| 402 |
+
"is_edit": is_edit,
|
| 403 |
+
}))
|
| 404 |
+
|
| 405 |
+
|
| 406 |
+
def _on_website_generating(website_id: str, description: str, is_edit: bool) -> None:
|
| 407 |
+
"""Handle website generation start - enables live preview on iOS.
|
| 408 |
+
|
| 409 |
+
Broadcasts to iOS clients so they can show a preview sheet.
|
| 410 |
+
|
| 411 |
+
Args:
|
| 412 |
+
website_id: Unique ID for this website.
|
| 413 |
+
description: User's description of the website.
|
| 414 |
+
is_edit: Whether this is editing an existing website.
|
| 415 |
+
"""
|
| 416 |
+
action = "Editing" if is_edit else "Creating"
|
| 417 |
+
logger.info(f"🎨 {action} website {website_id}: {description[:50]}...")
|
| 418 |
+
|
| 419 |
+
asyncio.create_task(_broadcast({
|
| 420 |
+
"type": "website_generating",
|
| 421 |
+
"website_id": website_id,
|
| 422 |
+
"description": description,
|
| 423 |
+
"is_edit": is_edit,
|
| 424 |
+
}))
|
| 425 |
+
|
| 426 |
+
|
| 427 |
+
def _on_website_chunk(website_id: str, accumulated_html: str, is_complete: bool) -> None:
|
| 428 |
+
"""Handle website HTML chunk for live preview streaming.
|
| 429 |
+
|
| 430 |
+
Broadcasts accumulated HTML to iOS clients for real-time preview.
|
| 431 |
+
|
| 432 |
+
Args:
|
| 433 |
+
website_id: The website being generated.
|
| 434 |
+
accumulated_html: All HTML content so far.
|
| 435 |
+
is_complete: Whether generation is complete.
|
| 436 |
+
"""
|
| 437 |
+
# Only log completion to avoid spam
|
| 438 |
+
if is_complete:
|
| 439 |
+
logger.info(f"📄 Website {website_id} complete: {len(accumulated_html)} chars")
|
| 440 |
+
|
| 441 |
+
asyncio.create_task(_broadcast({
|
| 442 |
+
"type": "website_chunk",
|
| 443 |
+
"website_id": website_id,
|
| 444 |
+
"html": accumulated_html,
|
| 445 |
+
"is_complete": is_complete,
|
| 446 |
+
}))
|
| 447 |
+
|
| 448 |
+
|
| 449 |
def _on_app_change(data: dict) -> None:
|
| 450 |
"""Handle app activation/deactivation changes from voice commands.
|
| 451 |
|
reachy_ios_bridge/routes/websites.py
ADDED
|
@@ -0,0 +1,221 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Websites API endpoints for managing generated websites.
|
| 2 |
+
|
| 3 |
+
Provides CRUD operations for websites and serves static website files.
|
| 4 |
+
"""
|
| 5 |
+
|
| 6 |
+
import logging
|
| 7 |
+
import shutil
|
| 8 |
+
from pathlib import Path
|
| 9 |
+
from typing import Optional
|
| 10 |
+
|
| 11 |
+
from fastapi import APIRouter, HTTPException
|
| 12 |
+
from fastapi.responses import FileResponse
|
| 13 |
+
from pydantic import BaseModel
|
| 14 |
+
|
| 15 |
+
from ..database import get_database
|
| 16 |
+
|
| 17 |
+
logger = logging.getLogger(__name__)
|
| 18 |
+
|
| 19 |
+
router = APIRouter(prefix="/websites", tags=["Websites"])
|
| 20 |
+
|
| 21 |
+
# Directory to store generated websites (alongside the database)
|
| 22 |
+
WEBSITES_DIR = Path.home() / ".reachy" / "websites"
|
| 23 |
+
|
| 24 |
+
|
| 25 |
+
# =============================================================================
|
| 26 |
+
# Response Models
|
| 27 |
+
# =============================================================================
|
| 28 |
+
|
| 29 |
+
|
| 30 |
+
class WebsiteInfo(BaseModel):
|
| 31 |
+
"""Website metadata for API responses."""
|
| 32 |
+
id: str
|
| 33 |
+
title: str
|
| 34 |
+
description: str
|
| 35 |
+
url: str
|
| 36 |
+
created_at: str
|
| 37 |
+
updated_at: str
|
| 38 |
+
|
| 39 |
+
|
| 40 |
+
class WebsitesListResponse(BaseModel):
|
| 41 |
+
"""Response for listing all websites."""
|
| 42 |
+
success: bool
|
| 43 |
+
websites: list[WebsiteInfo]
|
| 44 |
+
count: int
|
| 45 |
+
|
| 46 |
+
|
| 47 |
+
class WebsiteResponse(BaseModel):
|
| 48 |
+
"""Response for single website operations."""
|
| 49 |
+
success: bool
|
| 50 |
+
website: Optional[WebsiteInfo] = None
|
| 51 |
+
message: Optional[str] = None
|
| 52 |
+
|
| 53 |
+
|
| 54 |
+
# =============================================================================
|
| 55 |
+
# Helper Functions
|
| 56 |
+
# =============================================================================
|
| 57 |
+
|
| 58 |
+
|
| 59 |
+
def _get_website_url(website_id: str) -> str:
|
| 60 |
+
"""Get the URL path for a website."""
|
| 61 |
+
return f"/websites/{website_id}/index.html"
|
| 62 |
+
|
| 63 |
+
|
| 64 |
+
def _website_to_info(website: dict) -> WebsiteInfo:
|
| 65 |
+
"""Convert a database website dict to WebsiteInfo."""
|
| 66 |
+
return WebsiteInfo(
|
| 67 |
+
id=website["id"],
|
| 68 |
+
title=website["title"],
|
| 69 |
+
description=website["description"],
|
| 70 |
+
url=_get_website_url(website["id"]),
|
| 71 |
+
created_at=website["created_at"],
|
| 72 |
+
updated_at=website["updated_at"],
|
| 73 |
+
)
|
| 74 |
+
|
| 75 |
+
|
| 76 |
+
# =============================================================================
|
| 77 |
+
# CRUD Endpoints
|
| 78 |
+
# =============================================================================
|
| 79 |
+
|
| 80 |
+
|
| 81 |
+
@router.get("", response_model=WebsitesListResponse)
|
| 82 |
+
async def list_websites() -> WebsitesListResponse:
|
| 83 |
+
"""List all saved websites."""
|
| 84 |
+
try:
|
| 85 |
+
db = get_database()
|
| 86 |
+
websites = await db.get_all_websites()
|
| 87 |
+
|
| 88 |
+
# Filter to only include websites that have files on disk
|
| 89 |
+
valid_websites = []
|
| 90 |
+
for website in websites:
|
| 91 |
+
website_dir = WEBSITES_DIR / website["id"]
|
| 92 |
+
if website_dir.exists() and (website_dir / "index.html").exists():
|
| 93 |
+
valid_websites.append(_website_to_info(website))
|
| 94 |
+
else:
|
| 95 |
+
# Clean up orphaned database entry
|
| 96 |
+
logger.warning(f"Website {website['id']} has no files, removing from database")
|
| 97 |
+
await db.delete_website(website["id"])
|
| 98 |
+
|
| 99 |
+
return WebsitesListResponse(
|
| 100 |
+
success=True,
|
| 101 |
+
websites=valid_websites,
|
| 102 |
+
count=len(valid_websites),
|
| 103 |
+
)
|
| 104 |
+
except Exception as e:
|
| 105 |
+
logger.error(f"Error listing websites: {e}")
|
| 106 |
+
raise HTTPException(status_code=500, detail=str(e))
|
| 107 |
+
|
| 108 |
+
|
| 109 |
+
@router.get("/{website_id}", response_model=WebsiteResponse)
|
| 110 |
+
async def get_website(website_id: str) -> WebsiteResponse:
|
| 111 |
+
"""Get a single website by ID."""
|
| 112 |
+
try:
|
| 113 |
+
db = get_database()
|
| 114 |
+
website = await db.get_website(website_id)
|
| 115 |
+
|
| 116 |
+
if not website:
|
| 117 |
+
raise HTTPException(status_code=404, detail="Website not found")
|
| 118 |
+
|
| 119 |
+
# Verify files exist
|
| 120 |
+
website_dir = WEBSITES_DIR / website_id
|
| 121 |
+
if not website_dir.exists() or not (website_dir / "index.html").exists():
|
| 122 |
+
# Clean up orphaned database entry
|
| 123 |
+
await db.delete_website(website_id)
|
| 124 |
+
raise HTTPException(status_code=404, detail="Website files not found")
|
| 125 |
+
|
| 126 |
+
return WebsiteResponse(
|
| 127 |
+
success=True,
|
| 128 |
+
website=_website_to_info(website),
|
| 129 |
+
)
|
| 130 |
+
except HTTPException:
|
| 131 |
+
raise
|
| 132 |
+
except Exception as e:
|
| 133 |
+
logger.error(f"Error getting website {website_id}: {e}")
|
| 134 |
+
raise HTTPException(status_code=500, detail=str(e))
|
| 135 |
+
|
| 136 |
+
|
| 137 |
+
@router.delete("/{website_id}")
|
| 138 |
+
async def delete_website(website_id: str) -> dict:
|
| 139 |
+
"""Delete a website (both database record and files)."""
|
| 140 |
+
try:
|
| 141 |
+
db = get_database()
|
| 142 |
+
|
| 143 |
+
# Delete from database
|
| 144 |
+
deleted = await db.delete_website(website_id)
|
| 145 |
+
|
| 146 |
+
# Delete files
|
| 147 |
+
website_dir = WEBSITES_DIR / website_id
|
| 148 |
+
if website_dir.exists():
|
| 149 |
+
shutil.rmtree(website_dir)
|
| 150 |
+
logger.info(f"Deleted website files: {website_dir}")
|
| 151 |
+
|
| 152 |
+
if not deleted and not website_dir.exists():
|
| 153 |
+
raise HTTPException(status_code=404, detail="Website not found")
|
| 154 |
+
|
| 155 |
+
return {
|
| 156 |
+
"success": True,
|
| 157 |
+
"message": f"Website {website_id} deleted successfully",
|
| 158 |
+
}
|
| 159 |
+
except HTTPException:
|
| 160 |
+
raise
|
| 161 |
+
except Exception as e:
|
| 162 |
+
logger.error(f"Error deleting website {website_id}: {e}")
|
| 163 |
+
raise HTTPException(status_code=500, detail=str(e))
|
| 164 |
+
|
| 165 |
+
|
| 166 |
+
# =============================================================================
|
| 167 |
+
# Static File Serving
|
| 168 |
+
# =============================================================================
|
| 169 |
+
|
| 170 |
+
|
| 171 |
+
@router.get("/{website_id}/index.html")
|
| 172 |
+
async def serve_website(website_id: str) -> FileResponse:
|
| 173 |
+
"""Serve the index.html file for a website."""
|
| 174 |
+
website_file = WEBSITES_DIR / website_id / "index.html"
|
| 175 |
+
|
| 176 |
+
if not website_file.exists():
|
| 177 |
+
raise HTTPException(status_code=404, detail="Website not found")
|
| 178 |
+
|
| 179 |
+
return FileResponse(
|
| 180 |
+
path=website_file,
|
| 181 |
+
media_type="text/html",
|
| 182 |
+
filename="index.html",
|
| 183 |
+
)
|
| 184 |
+
|
| 185 |
+
|
| 186 |
+
@router.get("/{website_id}/{file_path:path}")
|
| 187 |
+
async def serve_website_asset(website_id: str, file_path: str) -> FileResponse:
|
| 188 |
+
"""Serve additional assets for a website (CSS, JS, images, etc.)."""
|
| 189 |
+
# Security: Prevent directory traversal
|
| 190 |
+
if ".." in file_path:
|
| 191 |
+
raise HTTPException(status_code=400, detail="Invalid file path")
|
| 192 |
+
|
| 193 |
+
asset_file = WEBSITES_DIR / website_id / file_path
|
| 194 |
+
|
| 195 |
+
if not asset_file.exists():
|
| 196 |
+
raise HTTPException(status_code=404, detail="Asset not found")
|
| 197 |
+
|
| 198 |
+
# Determine media type based on extension
|
| 199 |
+
suffix = asset_file.suffix.lower()
|
| 200 |
+
media_types = {
|
| 201 |
+
".html": "text/html",
|
| 202 |
+
".css": "text/css",
|
| 203 |
+
".js": "application/javascript",
|
| 204 |
+
".json": "application/json",
|
| 205 |
+
".png": "image/png",
|
| 206 |
+
".jpg": "image/jpeg",
|
| 207 |
+
".jpeg": "image/jpeg",
|
| 208 |
+
".gif": "image/gif",
|
| 209 |
+
".svg": "image/svg+xml",
|
| 210 |
+
".ico": "image/x-icon",
|
| 211 |
+
".woff": "font/woff",
|
| 212 |
+
".woff2": "font/woff2",
|
| 213 |
+
".ttf": "font/ttf",
|
| 214 |
+
}
|
| 215 |
+
media_type = media_types.get(suffix, "application/octet-stream")
|
| 216 |
+
|
| 217 |
+
return FileResponse(
|
| 218 |
+
path=asset_file,
|
| 219 |
+
media_type=media_type,
|
| 220 |
+
)
|
| 221 |
+
|
reachy_ios_bridge/server.py
CHANGED
|
@@ -6,11 +6,13 @@ Individual endpoint handlers are organized in the routes/ package.
|
|
| 6 |
|
| 7 |
import logging
|
| 8 |
from contextlib import asynccontextmanager
|
|
|
|
| 9 |
from typing import AsyncGenerator
|
| 10 |
|
| 11 |
import uvicorn
|
| 12 |
from fastapi import FastAPI
|
| 13 |
from fastapi.middleware.cors import CORSMiddleware
|
|
|
|
| 14 |
|
| 15 |
from .database import init_database
|
| 16 |
from .models import RobotStatus
|
|
@@ -30,6 +32,7 @@ from .routes import (
|
|
| 30 |
user_settings_router,
|
| 31 |
voice_router,
|
| 32 |
volume_router,
|
|
|
|
| 33 |
)
|
| 34 |
from .routes import speech as speech_route
|
| 35 |
from .routes import motion as motion_route
|
|
@@ -117,6 +120,7 @@ app.include_router(tools_router)
|
|
| 117 |
app.include_router(user_settings_router)
|
| 118 |
app.include_router(voice_router)
|
| 119 |
app.include_router(volume_router)
|
|
|
|
| 120 |
|
| 121 |
|
| 122 |
# Health and status endpoints
|
|
@@ -131,6 +135,27 @@ async def root() -> dict:
|
|
| 131 |
}
|
| 132 |
|
| 133 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 134 |
@app.get("/status", response_model=RobotStatus, tags=["Status"])
|
| 135 |
async def get_status() -> RobotStatus:
|
| 136 |
"""Get current robot status."""
|
|
|
|
| 6 |
|
| 7 |
import logging
|
| 8 |
from contextlib import asynccontextmanager
|
| 9 |
+
from pathlib import Path
|
| 10 |
from typing import AsyncGenerator
|
| 11 |
|
| 12 |
import uvicorn
|
| 13 |
from fastapi import FastAPI
|
| 14 |
from fastapi.middleware.cors import CORSMiddleware
|
| 15 |
+
from fastapi.staticfiles import StaticFiles
|
| 16 |
|
| 17 |
from .database import init_database
|
| 18 |
from .models import RobotStatus
|
|
|
|
| 32 |
user_settings_router,
|
| 33 |
voice_router,
|
| 34 |
volume_router,
|
| 35 |
+
websites_router,
|
| 36 |
)
|
| 37 |
from .routes import speech as speech_route
|
| 38 |
from .routes import motion as motion_route
|
|
|
|
| 120 |
app.include_router(user_settings_router)
|
| 121 |
app.include_router(voice_router)
|
| 122 |
app.include_router(volume_router)
|
| 123 |
+
app.include_router(websites_router)
|
| 124 |
|
| 125 |
|
| 126 |
# Health and status endpoints
|
|
|
|
| 135 |
}
|
| 136 |
|
| 137 |
|
| 138 |
+
# Website endpoints
|
| 139 |
+
|
| 140 |
+
@app.get("/api/websites", tags=["Websites"])
|
| 141 |
+
async def list_generated_websites() -> dict:
|
| 142 |
+
"""List all generated websites."""
|
| 143 |
+
from .tools.website_generator import list_websites
|
| 144 |
+
return {
|
| 145 |
+
"websites": list_websites(),
|
| 146 |
+
}
|
| 147 |
+
|
| 148 |
+
|
| 149 |
+
@app.get("/api/websites/{website_id}", tags=["Websites"])
|
| 150 |
+
async def get_website_details(website_id: str) -> dict:
|
| 151 |
+
"""Get details about a specific generated website."""
|
| 152 |
+
from .tools.website_generator import get_website_info
|
| 153 |
+
info = get_website_info(website_id)
|
| 154 |
+
if not info:
|
| 155 |
+
return {"error": "Website not found"}
|
| 156 |
+
return info
|
| 157 |
+
|
| 158 |
+
|
| 159 |
@app.get("/status", response_model=RobotStatus, tags=["Status"])
|
| 160 |
async def get_status() -> RobotStatus:
|
| 161 |
"""Get current robot status."""
|
reachy_ios_bridge/tools/__init__.py
CHANGED
|
@@ -163,6 +163,7 @@ def _register_default_tools() -> None:
|
|
| 163 |
from .vision import register_vision_tools
|
| 164 |
from .weather import register_weather_tool
|
| 165 |
from .web_search import register_web_search_tool
|
|
|
|
| 166 |
|
| 167 |
registry = _registry
|
| 168 |
if registry:
|
|
@@ -170,9 +171,39 @@ def _register_default_tools() -> None:
|
|
| 170 |
register_weather_tool(registry)
|
| 171 |
register_web_search_tool(registry)
|
| 172 |
register_vision_tools(registry)
|
|
|
|
| 173 |
|
| 174 |
|
| 175 |
# Default tools for Reachy (all tools enabled)
|
| 176 |
# IDs must match the function names in the OpenAI definitions
|
| 177 |
-
DEFAULT_ENABLED_TOOLS = [
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 178 |
|
|
|
|
| 163 |
from .vision import register_vision_tools
|
| 164 |
from .weather import register_weather_tool
|
| 165 |
from .web_search import register_web_search_tool
|
| 166 |
+
from .website_generator import register_website_generator_tool
|
| 167 |
|
| 168 |
registry = _registry
|
| 169 |
if registry:
|
|
|
|
| 171 |
register_weather_tool(registry)
|
| 172 |
register_web_search_tool(registry)
|
| 173 |
register_vision_tools(registry)
|
| 174 |
+
register_website_generator_tool(registry)
|
| 175 |
|
| 176 |
|
| 177 |
# Default tools for Reachy (all tools enabled).
# IDs must match the function names in the OpenAI tool definitions
# (the "name" field of each registered tool's definition).
DEFAULT_ENABLED_TOOLS = [
    "get_current_datetime",
    "get_weather",
    "web_search",
    "test_camera",
    "recognize_object",
    "generate_website",  # registered by register_website_generator_tool()
]
|
| 187 |
+
|
| 188 |
+
|
| 189 |
+
# Re-export website streaming callbacks for easy access
|
| 190 |
+
def set_website_streaming_callbacks(
    on_generating: Optional[Callable[[str, str, bool], None]] = None,
    on_chunk: Optional[Callable[[str, str, bool], None]] = None,
) -> None:
    """Wire up both website-streaming callbacks in a single call.

    Args:
        on_generating: Invoked when generation starts, as
            ``(website_id, description, is_edit)``.
        on_chunk: Invoked for each streamed update, as
            ``(website_id, accumulated_html, is_complete)``.
    """
    # Lazy import: the generator module is only needed once callbacks are set.
    from .website_generator import (
        set_website_chunk_callback,
        set_website_generating_callback,
    )

    set_website_chunk_callback(on_chunk)
    set_website_generating_callback(on_generating)
|
| 209 |
|
reachy_ios_bridge/tools/website_generator.py
ADDED
|
@@ -0,0 +1,474 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Website generator tool using GPT-5.2 Responses API.
|
| 2 |
+
|
| 3 |
+
Generates HTML/CSS/Tailwind websites through voice commands.
|
| 4 |
+
Supports iterative editing through conversation context.
|
| 5 |
+
Supports streaming for real-time preview on iOS.
|
| 6 |
+
"""
|
| 7 |
+
|
| 8 |
+
import asyncio
|
| 9 |
+
import json
|
| 10 |
+
import logging
|
| 11 |
+
import os
|
| 12 |
+
import uuid
|
| 13 |
+
from datetime import datetime
|
| 14 |
+
from pathlib import Path
|
| 15 |
+
from typing import TYPE_CHECKING, Callable, Optional
|
| 16 |
+
|
| 17 |
+
import aiohttp
|
| 18 |
+
|
| 19 |
+
from ..database import get_database
|
| 20 |
+
|
| 21 |
+
if TYPE_CHECKING:
|
| 22 |
+
from . import ToolRegistry
|
| 23 |
+
|
| 24 |
+
logger = logging.getLogger(__name__)
|
| 25 |
+
|
| 26 |
+
# OpenAI Responses API endpoint
|
| 27 |
+
OPENAI_RESPONSES_URL = "https://api.openai.com/v1/responses"
|
| 28 |
+
GPT52_MODEL = "gpt-5.2"
|
| 29 |
+
|
| 30 |
+
# Streaming chunk callback type
|
| 31 |
+
# Callback signature: (website_id: str, html_chunk: str, is_complete: bool) -> None
|
| 32 |
+
WebsiteChunkCallback = Callable[[str, str, bool], None]
|
| 33 |
+
|
| 34 |
+
# Global callback for streaming website chunks to iOS
|
| 35 |
+
_website_chunk_callback: Optional[WebsiteChunkCallback] = None
|
| 36 |
+
_website_generating_callback: Optional[Callable[[str, str, bool], None]] = None
|
| 37 |
+
|
| 38 |
+
|
| 39 |
+
def set_website_chunk_callback(callback: Optional[WebsiteChunkCallback]) -> None:
    """Install (or clear) the callback that receives streamed website HTML.

    Args:
        callback: Receives ``(website_id, html_chunk, is_complete)``;
            pass ``None`` to disable streaming notifications.
    """
    global _website_chunk_callback

    _website_chunk_callback = callback
|
| 47 |
+
|
| 48 |
+
|
| 49 |
+
def set_website_generating_callback(
    callback: Optional[Callable[[str, str, bool], None]]
) -> None:
    """Install (or clear) the generation-started callback.

    Args:
        callback: Receives ``(website_id, description, is_edit)``;
            pass ``None`` to disable the notification.
    """
    global _website_generating_callback

    _website_generating_callback = callback
|
| 59 |
+
|
| 60 |
+
# Directory to store generated websites (alongside the database)
|
| 61 |
+
WEBSITES_DIR = Path.home() / ".reachy" / "websites"
|
| 62 |
+
|
| 63 |
+
# System prompt for website generation
|
| 64 |
+
WEBSITE_SYSTEM_PROMPT = """You are an expert web developer who creates beautiful, modern websites.
|
| 65 |
+
|
| 66 |
+
IMPORTANT RULES:
|
| 67 |
+
1. Generate a SINGLE, complete HTML file with embedded CSS and inline Tailwind classes
|
| 68 |
+
2. Use Tailwind CSS via CDN: <script src="https://cdn.tailwindcss.com"></script>
|
| 69 |
+
3. Create responsive, mobile-first designs
|
| 70 |
+
4. Use modern design patterns: gradients, shadows, rounded corners, smooth transitions
|
| 71 |
+
5. Include beautiful typography and proper spacing
|
| 72 |
+
6. Add subtle animations and hover effects for interactivity
|
| 73 |
+
7. Use a cohesive color palette that matches the website's purpose
|
| 74 |
+
8. Include placeholder images from https://picsum.photos/ or similar
|
| 75 |
+
9. Make the website feel professional and polished
|
| 76 |
+
|
| 77 |
+
OUTPUT FORMAT:
|
| 78 |
+
- Return ONLY the HTML code, nothing else
|
| 79 |
+
- Start with <!DOCTYPE html> and end with </html>
|
| 80 |
+
- Do not include any explanations or markdown code blocks
|
| 81 |
+
- The HTML must be valid and complete
|
| 82 |
+
|
| 83 |
+
When editing an existing website:
|
| 84 |
+
- Make the requested changes while preserving the overall design
|
| 85 |
+
- Keep the same color scheme unless asked to change it
|
| 86 |
+
- Maintain responsive behavior
|
| 87 |
+
"""
|
| 88 |
+
|
| 89 |
+
# Store active website sessions for iterative editing
|
| 90 |
+
_website_sessions: dict[str, dict] = {}
|
| 91 |
+
|
| 92 |
+
|
| 93 |
+
# OpenAI function definition for the website generator tool
|
| 94 |
+
WEBSITE_GENERATOR_TOOL_DEFINITION = {
|
| 95 |
+
"type": "function",
|
| 96 |
+
"name": "generate_website",
|
| 97 |
+
"description": (
|
| 98 |
+
"Generate or edit a website using HTML, CSS, and Tailwind. "
|
| 99 |
+
"Use this when the user asks to create a website, landing page, portfolio, "
|
| 100 |
+
"or any web page. Also use this to make changes to an existing website."
|
| 101 |
+
),
|
| 102 |
+
"parameters": {
|
| 103 |
+
"type": "object",
|
| 104 |
+
"properties": {
|
| 105 |
+
"description": {
|
| 106 |
+
"type": "string",
|
| 107 |
+
"description": (
|
| 108 |
+
"Detailed description of the website to create or the changes to make. "
|
| 109 |
+
"Include: purpose, style preferences, colors, sections, and any specific features."
|
| 110 |
+
),
|
| 111 |
+
},
|
| 112 |
+
"website_id": {
|
| 113 |
+
"type": "string",
|
| 114 |
+
"description": (
|
| 115 |
+
"Optional. The ID of an existing website to edit. "
|
| 116 |
+
"Leave empty to create a new website."
|
| 117 |
+
),
|
| 118 |
+
},
|
| 119 |
+
},
|
| 120 |
+
"required": ["description"],
|
| 121 |
+
},
|
| 122 |
+
}
|
| 123 |
+
|
| 124 |
+
|
| 125 |
+
async def generate_website(arguments: dict) -> dict:
    """Generate or edit a website using GPT-5.2 with streaming support.

    Streams accumulated HTML to iOS via the module-level chunk callback so the
    client can render a live preview while generation is in progress.

    Args:
        arguments: Tool arguments with 'description' and optional 'website_id'.
            If 'website_id' matches an in-memory session, the call is treated
            as an edit of that site; otherwise a new site (and ID) is created.
            NOTE(review): sessions live only in `_website_sessions`, so after a
            server restart an edit request silently becomes a new website —
            confirm this is intended.

    Returns:
        Result dictionary with website URL and metadata on success, or
        {"success": False, "error": ...} on any failure.
    """
    description = arguments.get("description", "").strip()
    website_id = arguments.get("website_id", "").strip()

    if not description:
        return {"success": False, "error": "Website description is required"}

    # Get API key from the environment; fail fast if missing.
    api_key = os.environ.get("OPENAI_API_KEY")
    if not api_key:
        return {"success": False, "error": "OpenAI API key not configured"}

    # Ensure websites directory exists before any file writes.
    WEBSITES_DIR.mkdir(parents=True, exist_ok=True)

    # Determine if this is a new website or an edit of a known session.
    is_edit = bool(website_id) and website_id in _website_sessions

    if is_edit:
        session_data = _website_sessions[website_id]
        # previous_response_id threads conversation context through the
        # Responses API for iterative edits.
        previous_response_id = session_data.get("last_response_id")
        prompt = f"Edit the website with the following changes:\n\n{description}"
        logger.info(f"🎨 Editing website {website_id}: {description[:50]}...")
    else:
        # Create new website with a short 8-char ID derived from a UUID.
        website_id = str(uuid.uuid4())[:8]
        previous_response_id = None
        prompt = f"Create a website with the following requirements:\n\n{description}"
        logger.info(f"🎨 Creating new website {website_id}: {description[:50]}...")

    # Notify iOS that website generation is starting.
    if _website_generating_callback:
        _website_generating_callback(website_id, description, is_edit)

    headers = {
        "Authorization": f"Bearer {api_key}",
        "Content-Type": "application/json",
    }

    # Build the Responses API payload with streaming enabled.
    payload = {
        "model": GPT52_MODEL,
        "input": prompt,
        "instructions": WEBSITE_SYSTEM_PROMPT,
        "reasoning": {"effort": "medium"},
        "text": {"format": {"type": "text"}},
        "stream": True,  # Enable streaming for real-time preview
    }

    # Add previous response ID for iterative editing.
    if previous_response_id:
        payload["previous_response_id"] = previous_response_id

    html_content = ""   # full HTML accumulated from all deltas
    response_id = ""    # captured from the response.created event
    # NOTE(review): asyncio.get_event_loop() inside a coroutine returns the
    # running loop but is deprecated; get_running_loop().time() or
    # time.monotonic() is preferred.
    last_chunk_time = asyncio.get_event_loop().time()
    chunk_buffer = ""   # deltas received since the last callback flush
    CHUNK_THROTTLE_MS = 200  # Throttle updates to avoid flickering

    try:
        async with aiohttp.ClientSession() as session:
            async with session.post(
                OPENAI_RESPONSES_URL,
                headers=headers,
                json=payload,
                timeout=aiohttp.ClientTimeout(total=300),  # 5 min timeout for streaming
            ) as response:
                if response.status != 200:
                    error_text = await response.text()
                    logger.error(f"GPT-5.2 API error: {error_text}")
                    return {
                        "success": False,
                        "error": f"Website generation failed: {response.status}",
                    }

                # Process streaming response (Server-Sent Events format):
                # each event arrives as a "data: {json}" line.
                async for line in response.content:
                    line_text = line.decode("utf-8").strip()

                    # Skip blank keep-alive lines between SSE events.
                    if not line_text:
                        continue

                    if line_text.startswith("data: "):
                        json_str = line_text[6:]  # Remove "data: " prefix

                        if json_str == "[DONE]":
                            # Stream complete
                            break

                        try:
                            event = json.loads(json_str)
                            event_type = event.get("type", "")

                            # Capture response ID for future iterative edits.
                            if event_type == "response.created":
                                response_id = event.get("response", {}).get("id", "")

                            # Handle text deltas (the HTML content).
                            elif event_type == "response.output_text.delta":
                                delta = event.get("delta", "")
                                if delta:
                                    html_content += delta
                                    chunk_buffer += delta

                                    # Throttle streaming updates to avoid UI
                                    # flickering on the iOS preview.
                                    current_time = asyncio.get_event_loop().time()
                                    if (current_time - last_chunk_time) * 1000 >= CHUNK_THROTTLE_MS:
                                        if _website_chunk_callback and chunk_buffer:
                                            _website_chunk_callback(
                                                website_id,
                                                html_content,  # Send accumulated HTML
                                                False  # Not complete yet
                                            )
                                            chunk_buffer = ""
                                            last_chunk_time = current_time

                            # Handle completion of the text output.
                            elif event_type == "response.output_text.done":
                                # Final chunk - send complete HTML.
                                if _website_chunk_callback:
                                    _website_chunk_callback(
                                        website_id,
                                        html_content,
                                        True  # Complete
                                    )
                                logger.info(f"📄 Streaming complete: {len(html_content)} chars")

                        except json.JSONDecodeError:
                            # Non-JSON payloads are expected for some SSE
                            # lines; log at debug and keep streaming.
                            logger.debug(f"Non-JSON SSE line: {json_str[:50]}")

    except aiohttp.ClientError as e:
        logger.error(f"Website generation request failed: {e}")
        return {"success": False, "error": f"Request failed: {str(e)}"}
    except asyncio.TimeoutError:
        # Raised by aiohttp when the 300 s ClientTimeout elapses.
        logger.error("Website generation timed out after 5 minutes")
        return {"success": False, "error": "Generation timed out after 5 minutes - try a simpler request"}
    except Exception as e:
        logger.error(f"Website generation error: {e}", exc_info=True)
        return {"success": False, "error": str(e)}

    # Clean up the HTML (remove markdown code blocks if present).
    html_content = _clean_html_content(html_content)

    if not html_content:
        return {
            "success": False,
            "error": "No HTML content generated",
        }

    # Extract title from HTML (falls back to a generic title).
    title = _extract_title(html_content) or "Generated Website"

    # Save the website as a single index.html under its own directory.
    website_dir = WEBSITES_DIR / website_id
    website_dir.mkdir(parents=True, exist_ok=True)

    index_file = website_dir / "index.html"
    index_file.write_text(html_content, encoding="utf-8")

    # Store session for future edits.
    # NOTE(review): on edits this replaces the session wholesale, so
    # created_at is reset to "now" — confirm whether the original creation
    # time should be preserved.
    _website_sessions[website_id] = {
        "last_response_id": response_id,
        "title": title,
        "created_at": datetime.now().isoformat(),
        "updated_at": datetime.now().isoformat(),
        "description": description,
    }

    # Save to database for persistent storage (best-effort: DB failure is
    # logged but does not fail the tool call, since files are on disk).
    try:
        db = get_database()
        existing = await db.get_website(website_id)
        if existing:
            await db.update_website(website_id, {
                "title": title,
                "description": description,
            })
        else:
            await db.create_website({
                "id": website_id,
                "title": title,
                "description": description,
            })
        logger.info(f"💾 Website saved to database: {website_id}")
    except Exception as e:
        logger.warning(f"Failed to save website to database: {e}")

    # Build the URL (will be served by FastAPI static files).
    # The actual host will be determined by the server.
    relative_url = f"/websites/{website_id}/"

    logger.info(f"✅ Website {'updated' if is_edit else 'created'}: {website_id} - {title}")

    return {
        "success": True,
        "website_id": website_id,
        "url": relative_url,
        "title": title,
        "is_edit": is_edit,
        "message": f"Website {'updated' if is_edit else 'created'} successfully! You can view it at the provided URL.",
    }
|
| 337 |
+
|
| 338 |
+
|
| 339 |
+
def _clean_html_content(html: str) -> str:
|
| 340 |
+
"""Clean up HTML content by removing markdown code blocks if present.
|
| 341 |
+
|
| 342 |
+
Args:
|
| 343 |
+
html: The raw HTML string from the API.
|
| 344 |
+
|
| 345 |
+
Returns:
|
| 346 |
+
Cleaned HTML string.
|
| 347 |
+
"""
|
| 348 |
+
text = html.strip()
|
| 349 |
+
if text.startswith("```html"):
|
| 350 |
+
text = text[7:]
|
| 351 |
+
elif text.startswith("```"):
|
| 352 |
+
text = text[3:]
|
| 353 |
+
if text.endswith("```"):
|
| 354 |
+
text = text[:-3]
|
| 355 |
+
text = text.strip()
|
| 356 |
+
|
| 357 |
+
# Validate it's HTML
|
| 358 |
+
if text.startswith("<!DOCTYPE") or text.startswith("<html"):
|
| 359 |
+
return text
|
| 360 |
+
|
| 361 |
+
return ""
|
| 362 |
+
|
| 363 |
+
|
| 364 |
+
def _extract_html_from_response(data: dict) -> Optional[str]:
    """Extract HTML content from a non-streaming GPT-5.2 Responses API body.

    Note: the streaming path in generate_website() assembles the HTML from
    deltas directly; this helper only applies to a full response payload.

    Args:
        data: The API response dictionary.

    Returns:
        The HTML string, or None if no valid HTML output was found.
    """
    for item in data.get("output", []):
        if item.get("type") != "message":
            continue
        for content_item in item.get("content", []):
            if content_item.get("type") not in ("output_text", "text"):
                continue
            # Reuse the shared cleaner so fence-stripping and HTML
            # validation stay consistent with the streaming path
            # (previously this logic was duplicated inline).
            cleaned = _clean_html_content(content_item.get("text", ""))
            if cleaned:
                return cleaned

    return None
|
| 396 |
+
|
| 397 |
+
|
| 398 |
+
def _extract_title(html: str) -> Optional[str]:
|
| 399 |
+
"""Extract the title from HTML content.
|
| 400 |
+
|
| 401 |
+
Args:
|
| 402 |
+
html: The HTML string.
|
| 403 |
+
|
| 404 |
+
Returns:
|
| 405 |
+
The title, or None if not found.
|
| 406 |
+
"""
|
| 407 |
+
import re
|
| 408 |
+
match = re.search(r"<title[^>]*>([^<]+)</title>", html, re.IGNORECASE)
|
| 409 |
+
if match:
|
| 410 |
+
return match.group(1).strip()
|
| 411 |
+
return None
|
| 412 |
+
|
| 413 |
+
|
| 414 |
+
def get_website_info(website_id: str) -> Optional[dict]:
    """Get information about a generated website.

    Args:
        website_id: The website ID.

    Returns:
        Website metadata, or None if the ID is unknown to this process or
        the site's files are missing on disk.
    """
    session = _website_sessions.get(website_id)
    if session is None:
        return None

    # The session may outlive the files (e.g. manual cleanup on disk).
    if not (WEBSITES_DIR / website_id).exists():
        return None

    return {
        "id": website_id,
        "title": session.get("title", "Untitled"),
        "description": session.get("description", ""),
        "created_at": session.get("created_at"),
        "updated_at": session.get("updated_at"),
        "url": f"/websites/{website_id}/",
    }
|
| 440 |
+
|
| 441 |
+
|
| 442 |
+
def list_websites() -> list[dict]:
    """List all generated websites.

    Returns:
        List of website metadata dictionaries; sessions whose files are
        missing on disk are skipped.
    """
    # Iterate keys only: the session dict value was previously unpacked via
    # .items() but never used (get_website_info re-reads the session itself).
    websites = []
    for website_id in _website_sessions:
        info = get_website_info(website_id)
        if info is not None:
            websites.append(info)
    return websites
|
| 454 |
+
|
| 455 |
+
|
| 456 |
+
def register_website_generator_tool(registry: "ToolRegistry") -> None:
    """Register the website generator tool with the registry.

    Args:
        registry: The tool registry to register with.
    """
    # Imported here to avoid a circular import with the package __init__.
    from . import ToolInfo

    registry.register(
        ToolInfo(
            id="generate_website",
            name="Website Generator",
            description="Create and edit websites using voice commands",
            icon="globe",
            definition=WEBSITE_GENERATOR_TOOL_DEFINITION,
            handler=generate_website,
        )
    )
|
| 474 |
+
|