# NOTE(review): the Hugging Face Spaces status banner ("Spaces:" /
# "Runtime error") was pasted into this source file; kept here as a
# comment so the file parses — it is not part of the application code.
| import gradio as gr | |
| import requests | |
| import json | |
| import os | |
| import re | |
| import zipfile | |
| import tempfile | |
| from datetime import datetime | |
| from pathlib import Path | |
| # ============================================ | |
| # CONFIGURATION | |
| # ============================================ | |
# Registry of supported LLM backends.
# Each entry holds the HTTP endpoint plus the model ids offered in the UI;
# the FIRST model in the list is the provider's default (see configure()).
API_PROVIDERS = {
    "openai": {
        "url": "https://api.openai.com/v1/chat/completions",
        "models": ["gpt-4-turbo-preview", "gpt-4", "gpt-3.5-turbo"],
    },
    "anthropic": {
        "url": "https://api.anthropic.com/v1/messages",
        "models": ["claude-3-opus-20240229", "claude-3-sonnet-20240229"],
    },
    "gemini": {
        "url": ("https://generativelanguage.googleapis.com/v1beta/"
                "models/gemini-1.5-pro:generateContent"),
        "models": ["gemini-1.5-pro"],
    },
}
| # ============================================ | |
| # CORE AGENT ENGINE (Elixir-inspired architecture) | |
| # ============================================ | |
class ScaffoldEngine:
    """In-memory project-scaffolding agent (single state holder, GenServer-style).

    Holds provider credentials, the running conversation, and the current
    generated project as a ``{path: content}`` dict. The Gradio layer only
    reads and mutates state through this object's methods; LLM failures are
    returned as ``{"error": ...}`` dicts, never raised to the UI.
    """

    def __init__(self):
        self.api_key = None                # set by configure(); required before any LLM call
        self.provider = "openai"
        self.model = "gpt-4-turbo-preview"
        self.conversation_history = []     # [{"role": str, "content": str}, ...]
        self.current_project = None        # {"description", "tech_stack", "files", "created"}
        self.session_id = self._generate_session_id()
        # Lifecycle: idle -> ready -> generating/scaffolding -> project_ready | error
        self.status = "idle"

    def _generate_session_id(self):
        """Return a 32-character cryptographically random hex session id."""
        import secrets  # local import: only needed once, at construction
        return secrets.token_hex(16)

    def configure(self, api_key, provider, model=None):
        """Store credentials and select a model.

        Uses the explicit ``model`` when given, otherwise the provider's
        first (default) model. Returns ``{"status", "message"}``.
        """
        if not api_key or not str(api_key).strip():
            # Fail fast with a clear message instead of a 401 on the first call.
            return {"status": "error", "message": "API key is empty"}
        self.api_key = str(api_key).strip()
        self.provider = provider
        if model:
            self.model = model
        elif provider in API_PROVIDERS:
            self.model = API_PROVIDERS[provider]["models"][0]
        self.status = "ready"
        return {"status": "success", "message": f"Configured {provider} with {self.model}"}

    @staticmethod
    def _safe_json(resp):
        """Decode a JSON response body defensively.

        Gateways/proxies often return HTML error pages; surface those as a
        structured error instead of letting ``resp.json()`` raise.
        """
        try:
            return resp.json()
        except ValueError:
            return {"error": {"message": f"HTTP {resp.status_code}: non-JSON response body"}}

    def _call_llm(self, system_prompt, user_prompt, max_tokens=4000):
        """Dispatch one completion request to the configured provider.

        Returns ``{"content": str}`` on success or ``{"error": str}`` on
        failure; transport exceptions are captured at this boundary.
        """
        if not self.api_key:
            return {"error": "API key not configured"}
        self.status = "generating"
        dispatch = {
            "openai": self._call_openai,
            "anthropic": self._call_anthropic,
            "gemini": self._call_gemini,
        }
        handler = dispatch.get(self.provider)
        if handler is None:
            return {"error": "Unknown provider"}
        try:
            return handler(system_prompt, user_prompt, max_tokens)
        except Exception as e:  # boundary: report any transport/parse error as a message
            self.status = "error"
            return {"error": str(e)}

    def _call_openai(self, system, user, max_tokens):
        """OpenAI chat-completions call; prior turns are replayed before the new one."""
        messages = [{"role": "system", "content": system}]
        messages.extend({"role": h["role"], "content": h["content"]}
                        for h in self.conversation_history)
        messages.append({"role": "user", "content": user})
        resp = requests.post(
            API_PROVIDERS["openai"]["url"],
            headers={"Authorization": f"Bearer {self.api_key}",
                     "Content-Type": "application/json"},
            json={"model": self.model, "messages": messages,
                  "temperature": 0.3, "max_tokens": max_tokens},
            timeout=120,
        )
        data = self._safe_json(resp)
        if "choices" in data:
            return {"content": data["choices"][0]["message"]["content"]}
        return {"error": data.get("error", {}).get("message", "Unknown error")}

    def _call_anthropic(self, system, user, max_tokens):
        """Anthropic Messages API call; note the system prompt is a top-level field."""
        messages = [{"role": h["role"], "content": h["content"]}
                    for h in self.conversation_history]
        messages.append({"role": "user", "content": user})
        resp = requests.post(
            API_PROVIDERS["anthropic"]["url"],
            headers={"x-api-key": self.api_key,
                     "Content-Type": "application/json",
                     "anthropic-version": "2023-06-01"},
            json={"model": self.model, "max_tokens": max_tokens,
                  "system": system, "messages": messages},
            timeout=120,
        )
        data = self._safe_json(resp)
        if "content" in data:
            return {"content": data["content"][0]["text"]}
        return {"error": data.get("error", {}).get("message", "Unknown error")}

    def _call_gemini(self, system, user, max_tokens):
        """Gemini generateContent call; key travels as a query parameter.

        NOTE: history is not replayed here — system+user are folded into a
        single prompt (matches the original behavior).
        """
        url = f"{API_PROVIDERS['gemini']['url']}?key={self.api_key}"
        resp = requests.post(
            url,
            headers={"Content-Type": "application/json"},
            json={
                "contents": [{"parts": [{"text": f"{system}\n\n{user}"}]}],
                "generationConfig": {"temperature": 0.3, "maxOutputTokens": max_tokens},
            },
            timeout=120,
        )
        data = self._safe_json(resp)
        if "candidates" in data:
            return {"content": data["candidates"][0]["content"]["parts"][0]["text"]}
        return {"error": str(data.get("error", "Unknown error"))}

    def scaffold_project(self, description, tech_stack, features):
        """Generate a complete multi-file project and store it as current_project.

        Returns ``{"files", "file_count", "file_list"}`` or ``{"error"}``.
        """
        if not self.api_key:
            return {"error": "API key not configured"}
        self.status = "scaffolding"
        system_prompt = f"""You are an expert software architect. Generate a multi-file project scaffold.
Tech Stack: {tech_stack}
Features: {features}
Output format - use EXACTLY this structure:
=== FILE: path/to/file.ext ===
[complete file content]
=== ENDFILE ===
Rules:
1. Include ALL necessary files (config, source, tests, docs, README)
2. Each file must be complete and production-ready
3. Add proper error handling and logging
4. Include comments for complex logic
5. Follow best practices for {tech_stack}
6. Generate at minimum: main entry point, config, 2-3 modules, tests, README.md
7. Ensure files have proper relative paths"""
        user_prompt = f"Project: {description}\nTech: {tech_stack}\nFeatures: {features}"
        result = self._call_llm(system_prompt, user_prompt, 4000)
        if "error" in result:
            self.status = "error"
            return result
        files = self._parse_project_files(result["content"])
        if not files:
            # Guard: the model may ignore the FILE/ENDFILE protocol entirely;
            # don't store an empty project (the UI indexes file_list[0]).
            self.status = "error"
            return {"error": "Model response contained no parseable files"}
        self.current_project = {
            "description": description,
            "tech_stack": tech_stack,
            "files": files,
            "created": datetime.now().isoformat(),
        }
        self.conversation_history.extend([
            {"role": "user", "content": user_prompt},
            {"role": "assistant", "content": result["content"]},
        ])
        self.status = "project_ready"
        return {"files": files, "file_count": len(files), "file_list": list(files.keys())}

    def _parse_project_files(self, response):
        """Extract ``{path: content}`` from '=== FILE: p ===' ... '=== ENDFILE ===' blocks.

        Whitespace around the path is tolerated (superset of the strict format).
        """
        pattern = r'=== FILE:\s*(.+?)\s*===\n(.*?)=== ENDFILE ==='
        return {path.strip(): content.strip()
                for path, content in re.findall(pattern, response, re.DOTALL)}

    @staticmethod
    def _strip_fences(text):
        """Drop one wrapping markdown code fence if the model added it anyway."""
        m = re.match(r"^```[\w.+-]*\n(.*?)\n?```\s*$", text.strip(), re.DOTALL)
        return m.group(1) if m else text

    def generate_file(self, file_path, requirements):
        """Generate a single additional file into the current project.

        Returns ``{"content": str}`` or ``{"error": str}``.
        """
        if not self.current_project:
            return {"error": "No project exists"}
        system = "Generate a complete, production-ready file. Only output the code content, no markdown."
        user = (f"File: {file_path}\nRequirements: {requirements}\n"
                f"Project context: {self.current_project['description']}")
        result = self._call_llm(system, user, 2000)
        if "error" in result:
            return result
        # Models frequently wrap output in ``` fences despite the instruction.
        content = self._strip_fences(result["content"])
        self.current_project["files"][file_path] = content
        return {"content": content}

    def get_file_content(self, path):
        """Return the stored content for ``path``, or None if absent/no project."""
        if self.current_project and path in self.current_project["files"]:
            return self.current_project["files"][path]
        return None

    def update_file(self, path, content):
        """Overwrite (or add) one file; True on success, False if no project."""
        if self.current_project:
            self.current_project["files"][path] = content
            return True
        return False

    def get_project_structure(self):
        """Return the project's files as a nested dict tree ({} if no project)."""
        if not self.current_project:
            return {}
        return self._build_tree(self.current_project["files"].keys())

    def _build_tree(self, paths):
        """Fold '/'-separated paths into nested dicts; leaves are the string 'file'."""
        tree = {}
        for path in paths:
            parts = path.split("/")
            current = tree
            for i, part in enumerate(parts):
                if i == len(parts) - 1:
                    current[part] = "file"
                else:
                    current = current.setdefault(part, {})
        return tree

    def create_zip(self):
        """Write the project into a fresh temp-dir ZIP and return its path.

        Returns None when there is no project. NOTE: the temp directory is
        deliberately not deleted here — Gradio still needs to serve the file.
        """
        if not self.current_project:
            return None
        temp_dir = tempfile.mkdtemp()
        zip_path = os.path.join(temp_dir, f"project_{self.session_id[:8]}.zip")
        with zipfile.ZipFile(zip_path, "w", zipfile.ZIP_DEFLATED) as zf:
            for file_path, content in self.current_project["files"].items():
                # Paths come from an LLM: normalize separators and drop empty,
                # '.' and '..' segments so the archive cannot escape its root
                # when extracted.
                safe = "/".join(p for p in file_path.replace("\\", "/").split("/")
                                if p not in ("", ".", ".."))
                if safe:
                    zf.writestr(safe, content)
        return zip_path
| # ============================================ | |
| # GRADIO UI | |
| # ============================================ | |
# Module-level singleton: one engine instance shared by every Gradio callback
# in this process (all visitors of a single Space instance share this state).
engine = ScaffoldEngine()
def get_language_from_path(path):
    """Map a file path to a syntax-highlighting language name for gr.Code.

    Falls back to 'text' for unknown extensions. Fix: 'Dockerfile' has no
    suffix, so the old '.dockerfile' map entry could never match — the file
    is now recognized by its full name instead.
    """
    # Extensionless files identified by their whole (case-insensitive) name.
    if Path(path).name.lower() == "dockerfile":
        return "dockerfile"
    ext = Path(path).suffix.lower()
    lang_map = {
        '.ex': 'elixir', '.exs': 'elixir', '.py': 'python',
        '.js': 'javascript', '.ts': 'typescript', '.jsx': 'jsx',
        '.tsx': 'tsx', '.rs': 'rust', '.go': 'go',
        '.rb': 'ruby', '.java': 'java', '.c': 'c',
        '.cpp': 'cpp', '.h': 'c', '.cs': 'csharp',
        '.php': 'php', '.swift': 'swift', '.kt': 'kotlin',
        '.scala': 'scala', '.r': 'r', '.m': 'matlab',
        '.json': 'json', '.yaml': 'yaml', '.yml': 'yaml',
        '.toml': 'toml', '.md': 'markdown', '.sh': 'bash',
        '.dockerfile': 'dockerfile', '.html': 'html', '.css': 'css',
        '.sql': 'sql', '.graphql': 'graphql'
    }
    return lang_map.get(ext, 'text')
# GitHub-dark themed styling injected via gr.Blocks(css=...): fonts,
# gradient header, card-style panels, and the four status-dot states
# (idle/ready/generating/error) referenced by the status_indicator HTML.
custom_css = """
@import url('https://fonts.googleapis.com/css2?family=JetBrains+Mono:wght@400;600&family=Inter:wght@400;500;600;700&display=swap');
body {
    font-family: 'Inter', sans-serif !important;
    background: #0d1117 !important;
}
.gradio-container {
    background: #0d1117 !important;
    color: #c9d1d9 !important;
}
.header-box {
    background: linear-gradient(135deg, #161b22 0%, #0d1117 100%);
    border: 1px solid #30363d;
    border-radius: 16px;
    padding: 24px;
    margin-bottom: 20px;
    text-align: center;
}
.header-title {
    font-size: 32px;
    font-weight: 700;
    background: linear-gradient(90deg, #58a6ff, #a371f7);
    -webkit-background-clip: text;
    -webkit-text-fill-color: transparent;
    margin-bottom: 8px;
}
.header-subtitle {
    color: #8b949e;
    font-size: 14px;
}
.api-box {
    background: #161b22;
    border: 1px solid #30363d;
    border-radius: 12px;
    padding: 20px;
}
.config-btn {
    background: linear-gradient(135deg, #238636, #2ea043) !important;
    border: none !important;
    border-radius: 8px !important;
    font-weight: 600 !important;
}
.scaffold-btn {
    background: linear-gradient(135deg, #1f6feb, #58a6ff) !important;
    border: none !important;
    border-radius: 8px !important;
    font-weight: 600 !important;
    font-size: 16px !important;
    padding: 12px 24px !important;
}
.sidebar-box {
    background: #161b22;
    border: 1px solid #30363d;
    border-radius: 12px;
    padding: 16px;
    height: 100%;
}
.file-tree-box {
    background: #0d1117;
    border: 1px solid #30363d;
    border-radius: 8px;
    padding: 12px;
    font-family: 'JetBrains Mono', monospace;
    font-size: 13px;
    max-height: 500px;
    overflow-y: auto;
}
.status-bar {
    background: #161b22;
    border: 1px solid #30363d;
    border-radius: 8px;
    padding: 8px 16px;
    display: flex;
    align-items: center;
    gap: 8px;
}
.status-dot {
    width: 8px;
    height: 8px;
    border-radius: 50%;
    display: inline-block;
}
.status-idle { background: #8b949e; }
.status-ready { background: #3fb950; box-shadow: 0 0 8px #3fb950; }
.status-generating { background: #d29922; animation: pulse 1s infinite; }
.status-error { background: #f85149; }
@keyframes pulse {
    0%, 100% { opacity: 1; }
    50% { opacity: 0.5; }
}
.code-editor {
    border-radius: 8px;
    border: 1px solid #30363d;
}
.download-btn {
    background: #21262d !important;
    border: 1px solid #30363d !important;
    color: #c9d1d9 !important;
}
.download-btn:hover {
    border-color: #58a6ff !important;
    background: #30363d !important;
}
"""
# NOTE(review): the original source was mojibake-garbled ("π", "β", "β’");
# emoji in user-facing strings below are reconstructed best-effort.
with gr.Blocks(css=custom_css, title="🚀 CodeForge AI") as demo:
    # --- Cross-callback state ---
    current_files = gr.State({})
    selected_file = gr.State("")

    # --- Header banner ---
    gr.HTML("""
    <div class="header-box">
        <div class="header-title">🚀 CodeForge AI</div>
        <div class="header-subtitle">
            Multi-File Project Scaffolding Agent • Elixir Architecture • HF Spaces Ready
        </div>
    </div>
    """)

    # --- Step 1: API configuration (hidden once configured) ---
    with gr.Row() as api_row:
        with gr.Column(scale=1):
            with gr.Group(elem_classes=["api-box"]):
                gr.Markdown("### 🔑 API Configuration")
                api_key = gr.Textbox(label="API Key", placeholder="sk-...", type="password")
                provider = gr.Dropdown(
                    choices=["openai", "anthropic", "gemini"],
                    value="openai",
                    label="Provider",
                )
                model = gr.Dropdown(
                    choices=["gpt-4-turbo-preview", "gpt-4", "gpt-3.5-turbo"],
                    value="gpt-4-turbo-preview",
                    label="Model",
                )
                config_btn = gr.Button("⚡ Configure API", variant="primary",
                                       elem_classes=["config-btn"])
                config_status = gr.Textbox(label="Status", interactive=False)

    # --- Step 2: main workspace (revealed after configuration) ---
    with gr.Row(visible=False) as main_row:
        # Left sidebar: project definition + single-file generation.
        with gr.Column(scale=1, min_width=300):
            with gr.Group(elem_classes=["sidebar-box"]):
                gr.Markdown("### 📁 Project Config")
                project_desc = gr.Textbox(
                    label="Description",
                    placeholder="A REST API for task management with JWT auth and WebSocket notifications",
                    lines=3,
                )
                tech_stack = gr.Dropdown(
                    choices=[
                        "Elixir/Phoenix",
                        "Python/FastAPI",
                        "Node.js/Express",
                        "React/TypeScript",
                        "Rust/Actix",
                        "Go/Gin",
                        "Ruby on Rails",
                        "Java/Spring Boot",
                        "Next.js/Prisma",
                        "Vue.js/Nuxt",
                    ],
                    label="Tech Stack",
                    value="Elixir/Phoenix",
                )
                features = gr.Textbox(
                    label="Features",
                    placeholder="auth, crud, websockets, caching, tests, docker",
                    value="authentication, CRUD operations, error handling, tests, README",
                )
                scaffold_btn = gr.Button("🚀 Generate Project", elem_classes=["scaffold-btn"])
                gr.Markdown("---")
                gr.Markdown("### ➕ Add File")
                new_file_path = gr.Textbox(label="File Path",
                                           placeholder="lib/my_app/context.ex")
                new_file_reqs = gr.Textbox(
                    label="Requirements",
                    placeholder="Generate a context module for users with CRUD...",
                    lines=2,
                )
                add_file_btn = gr.Button("Generate File", variant="secondary")

        # Center: status bar, download, file picker, editor.
        with gr.Column(scale=2, min_width=500):
            with gr.Row():
                status_indicator = gr.HTML("""
                <div class="status-bar">
                    <span class="status-dot status-idle"></span>
                    <span>Idle - Configure API to start</span>
                </div>
                """)
                download_btn = gr.Button("📦 Download ZIP", elem_classes=["download-btn"])
            # Fix: the File component is now declared in the layout; it was
            # previously instantiated inline inside the click() outputs list.
            download_file = gr.File(label="Download Project", visible=False)
            file_selector = gr.Dropdown(label="📄 Select File", choices=[], interactive=True)
            code_editor = gr.Code(label="📝 Editor", language="elixir", lines=28,
                                  elem_classes=["code-editor"])
            with gr.Row():
                save_btn = gr.Button("💾 Save", variant="secondary")
                save_status = gr.Textbox(show_label=False, interactive=False)

        # Right sidebar: tree view + stats.
        with gr.Column(scale=1, min_width=250):
            with gr.Group(elem_classes=["sidebar-box"]):
                gr.Markdown("### 🌲 File Tree")
                file_tree = gr.JSON(label="Structure", value={})
                gr.Markdown("---")
                gr.Markdown("### 📊 Stats")
                file_count = gr.Number(label="Files", value=0, interactive=False)
                total_lines = gr.Number(label="Lines", value=0, interactive=False)

    # ---------------- Event handlers ----------------

    def on_configure(key, prov, mod):
        """Swap the config panel for the workspace once the engine accepts the key."""
        result = engine.configure(key, prov, mod)
        if result.get("status") == "success":
            return (
                gr.update(visible=False),
                gr.update(visible=True),
                f"✅ {result['message']}",
                """<div class="status-bar"><span class="status-dot status-ready"></span><span>Ready - Agent configured</span></div>""",
            )
        return gr.update(), gr.update(), f"❌ {result.get('message', 'Error')}", ""

    def on_scaffold(desc, stack, feats, progress=gr.Progress()):
        """Run full project generation and refresh every workspace widget."""

        def error_state(msg):
            # Single place that builds the 8-output failure tuple, so both
            # failure modes below stay in sync with the wired outputs list.
            return (
                f"❌ {msg}",
                gr.update(),
                "",
                "",
                {},
                0,
                0,
                """<div class="status-bar"><span class="status-dot status-error"></span><span>Error occurred</span></div>""",
            )

        progress(0.1, desc="Initializing...")
        progress(0.3, desc="Calling AI...")
        result = engine.scaffold_project(desc, stack, feats)
        progress(0.8, desc="Processing...")
        if "error" in result:
            progress(1.0, desc="Error!")
            return error_state(result["error"])
        files = result["files"]
        if not files:
            # Fix: the original indexed file_list[0] and crashed when the
            # model produced zero parseable files.
            progress(1.0, desc="Error!")
            return error_state("Model returned no parseable files")
        file_list = list(files.keys())
        first_file = file_list[0]
        total = sum(len(f.split("\n")) for f in files.values())
        tree = engine.get_project_structure()
        progress(1.0, desc="Done!")
        return (
            f"✅ Generated {result['file_count']} files!",
            gr.update(choices=file_list, value=first_file),
            first_file,
            files[first_file],
            tree,
            len(files),
            total,
            """<div class="status-bar"><span class="status-dot status-ready"></span><span>Project ready</span></div>""",
        )

    def on_file_select(path):
        """Load the selected file into the editor with matching highlighting.

        Fix: returns ONE update carrying both value and language — the
        editor was previously listed twice in the outputs, which Gradio
        rejects as a duplicate output component.
        """
        content = engine.get_file_content(path)
        if content is None:
            return gr.update()
        return gr.update(value=content, language=get_language_from_path(path))

    def on_save(path, content):
        """Persist an editor change back into the in-memory project."""
        if engine.update_file(path, content):
            return "💾 Saved!"
        return "❌ Error"

    def on_add_file(path, reqs):
        """Generate one extra file, then refresh selector/editor/tree/stats."""
        result = engine.generate_file(path, reqs)
        if "error" in result:
            # Fix: must return one value per wired output (6); the original
            # returned only 3 here, breaking the callback on error.
            return (f"❌ {result['error']}", gr.update(), gr.update(),
                    gr.update(), gr.update(), gr.update())
        files = engine.current_project["files"]
        file_list = list(files.keys())
        tree = engine.get_project_structure()
        total = sum(len(f.split("\n")) for f in files.values())
        return (
            f"✅ Generated {path}",
            gr.update(choices=file_list, value=path),
            result["content"],
            tree,
            len(files),
            total,
        )

    def on_download():
        """Build the ZIP and reveal the layout-declared download component."""
        zip_path = engine.create_zip()
        if zip_path:
            return gr.update(value=zip_path, visible=True)
        return gr.update(visible=False)

    # ---------------- Wiring ----------------
    config_btn.click(
        on_configure,
        inputs=[api_key, provider, model],
        outputs=[api_row, main_row, config_status, status_indicator],
    )
    scaffold_btn.click(
        on_scaffold,
        inputs=[project_desc, tech_stack, features],
        outputs=[config_status, file_selector, selected_file, code_editor,
                 file_tree, file_count, total_lines, status_indicator],
    )
    file_selector.change(
        on_file_select,
        inputs=[file_selector],
        outputs=[code_editor],  # fix: was [code_editor, code_editor]
    )
    save_btn.click(
        on_save,
        inputs=[file_selector, code_editor],
        outputs=[save_status],
    )
    add_file_btn.click(
        on_add_file,
        inputs=[new_file_path, new_file_reqs],
        outputs=[config_status, file_selector, code_editor, file_tree,
                 file_count, total_lines],
    )
    download_btn.click(
        on_download,
        inputs=[],
        outputs=[download_file],
    )

if __name__ == "__main__":
    # 0.0.0.0:7860 is the bind address/port Hugging Face Spaces expects.
    demo.launch(server_name="0.0.0.0", server_port=7860)