# Hugging Face Space source (status at capture: Runtime error)
# Standard library
import os
import threading

# Third-party
import gradio as gr

# Local agents
from coordinator.task_parser import parse_brief_with_reasoning
from coordinator.task_assigner import assign_tasks_with_reasoning
from frontend_agent.ui_generator import generate_react_component_llm
from frontend_agent.live_frontend import save_react_component
from backend_agent.api_generator import generate_backend_code_llm
from backend_agent.live_api import app as fastapi_app, add_dynamic_route, run_server

# Make sure the output directories for generated code exist before any
# agent tries to write into them.
os.makedirs("frontend_live", exist_ok=True)
os.makedirs("backend_live", exist_ok=True)

# Run the FastAPI server for dynamically generated routes alongside the
# Gradio UI; daemon=True so the thread dies with the main process.
threading.Thread(target=run_server, daemon=True).start()
def process_brief_live(project_brief):
    """Run the full coordination pipeline for a free-text project brief.

    Pipeline: (1) parse the brief into discrete tasks, (2) assign each
    task to an agent, (3) generate live frontend components and backend
    API stubs for the assigned tasks.

    Args:
        project_brief: Free-text project description entered in the UI.

    Returns:
        A markdown-formatted processing log (LLM reasoning plus the
        locations of generated artifacts) for the Gradio output textbox.
    """
    output_text = ""

    # Step 1: parse the brief into tasks.
    output_text += "### Step 1: Parsing Tasks\n"
    parsing_result = parse_brief_with_reasoning(project_brief)
    output_text += "**LLM Reasoning:**\n" + parsing_result["reasoning"] + "\n\n"
    output_text += "**Parsed Tasks:**\n"
    for t in parsing_result["tasks"]:
        output_text += f"- {t}\n"

    # Step 2: assign each parsed task to an agent.
    output_text += "\n### Step 2: Assigning Tasks\n"
    assignment_result = assign_tasks_with_reasoning(parsing_result["tasks"])
    output_text += "**LLM Reasoning:**\n" + assignment_result["reasoning"] + "\n\n"
    output_text += "**Assigned Tasks:**\n"
    for task, agent in assignment_result["assignments"].items():
        output_text += f"- {task} -> {agent}\n"

    # Step 3: generate live code per assignment. Agent routing is by
    # substring match on the agent name ("frontend" / "backend").
    output_text += "\n### Step 3: Live Code Generation\n"
    for task, agent in assignment_result["assignments"].items():
        output_text += f"\n#### {task} ({agent})\n"
        if "frontend" in agent.lower():
            code = generate_react_component_llm(task)
            filename = save_react_component(task, code)
            # BUG FIX: `filename` was computed but never interpolated into
            # the log (the f-strings contained no placeholder fields).
            output_text += f"**Frontend Component Generated:** {filename}\n"
            # NOTE(review): assumes saved components are served at '/<filename>'
            # by the live server — confirm against frontend_agent.live_frontend.
            output_text += (
                f"Preview: <iframe src='/{filename}' width='100%' height='300'></iframe>\n"
            )
        elif "backend" in agent.lower():
            code = generate_backend_code_llm(task)
            backend_file = f"backend_live/{task.replace(' ', '_')}.py"
            with open(backend_file, "w") as f:
                f.write(code)
            add_dynamic_route(task, "pass")  # placeholder handler body
            output_text += (
                "**Backend API Created:** "
                f"http://127.0.0.1:8000/{task.replace(' ', '_').lower()}\n"
            )
    return output_text
# --- Gradio UI -------------------------------------------------------------
with gr.Blocks() as demo:
    gr.Markdown("## AI Project Coordinator - Live Frontend & Backend")

    # Input: multi-line textbox for the project brief.
    project_brief_input = gr.Textbox(lines=3, placeholder="Enter your project brief here...")
    submit_button = gr.Button("Submit")

    # Output: large textbox showing the pipeline's markdown log.
    console_output = gr.Textbox(
        label="Processing Log",
        placeholder="LLM reasoning and live code output will appear here...",
        lines=30,
    )

    # Wire the submit button to the coordination pipeline.
    submit_button.click(
        fn=process_brief_live,
        inputs=project_brief_input,
        outputs=console_output,
    )

if __name__ == "__main__":
    demo.launch()