# (removed: Hugging Face Spaces page-header scrape residue — "Spaces / Running")
# Standard library
import html
import json
import os
import re
import shutil
import tempfile
import time
import uuid
import zipfile
from pathlib import Path
from typing import Any, Dict, List, Tuple

# Third-party
import gradio as gr
import torch
from huggingface_hub import HfApi, InferenceClient
from transformers import AutoModelForCausalLM, AutoTokenizer, pipeline
# ============================================================
# AI Website Builder for Hugging Face Spaces (Gradio)
# Python 3
# Third-party dependencies: gradio, transformers, torch, huggingface_hub
# (zipfile is part of the standard library)
# ============================================================
#
# Features:
# - Free-prompt website generation
# - 10+ selectable code models from Hugging Face Hub
# - Side-by-side code editor and live preview
# - Multi-page website generation
# - Download as ZIP
# - Save project locally
# - Deploy generated site to a public Hugging Face Space
# - Error handling with visible messages
#
# Notes:
# - Some models may be too large for CPU Spaces if loaded locally.
# - Therefore the app prefers Hugging Face InferenceClient if HF_TOKEN exists.
# - A local transformers fallback is included for smaller models when possible.
# - Deploying to a new HF Space requires the HF_TOKEN secret.
# ============================================================
# ------------------------------------------------------------
# Application constants, workspace layout, and model catalog
# ------------------------------------------------------------
APP_TITLE = "AI Website Builder"

# Workspace directory tree, created eagerly at import time.
BASE_DIR = Path(".").resolve()
WORK_DIR = BASE_DIR / "workspace"
PROJECTS_DIR = WORK_DIR / "projects"  # freshly generated sites
EXPORTS_DIR = WORK_DIR / "exports"    # ZIP archives offered for download
SAVED_DIR = WORK_DIR / "saved"        # user-saved project snapshots
DEPLOY_DIR = WORK_DIR / "deploy"      # staging area for Space deployment

for _dir in (WORK_DIR, PROJECTS_DIR, EXPORTS_DIR, SAVED_DIR, DEPLOY_DIR):
    _dir.mkdir(parents=True, exist_ok=True)

# ------------------------------------------------------------
# Model catalog: code-capable models on the Hugging Face Hub.
# Keys are UI labels, values are Hub model IDs. Availability on
# serverless inference may vary over time.
# ------------------------------------------------------------
MODELS = {
    "Qwen2.5-Coder-7B-Instruct (Default)": "Qwen/Qwen2.5-Coder-7B-Instruct",
    "Qwen2.5-Coder-3B-Instruct": "Qwen/Qwen2.5-Coder-3B-Instruct",
    "CodeQwen1.5-7B-Chat": "Qwen/CodeQwen1.5-7B-Chat",
    "StarCoder2-7B": "bigcode/starcoder2-7b",
    "StarCoder2-3B": "bigcode/starcoder2-3b",
    "CodeGemma-7B-it": "google/codegemma-7b-it",
    "CodeGemma-2B": "google/codegemma-2b",
    "DeepSeek-Coder-6.7B-Instruct": "deepseek-ai/deepseek-coder-6.7b-instruct",
    "DeepSeek-Coder-1.3B-Instruct": "deepseek-ai/deepseek-coder-1.3b-instruct",
    "Phi-3-mini-4k-instruct": "microsoft/Phi-3-mini-4k-instruct",
    "SmolLM2-1.7B-Instruct": "HuggingFaceTB/SmolLM2-1.7B-Instruct",
    "CodeLlama-7B-Instruct": "codellama/CodeLlama-7b-Instruct-hf",
}
DEFAULT_MODEL_LABEL = "Qwen2.5-Coder-7B-Instruct (Default)"
DEFAULT_MODEL_ID = MODELS[DEFAULT_MODEL_LABEL]

# Small models that have a chance of running locally on a CPU Space.
LOCAL_FALLBACK_CANDIDATES = [
    "HuggingFaceTB/SmolLM2-1.7B-Instruct",
    "google/codegemma-2b",
    "deepseek-ai/deepseek-coder-1.3b-instruct",
]

HF_TOKEN = os.getenv("HF_TOKEN", "").strip()
HF_USERNAME = os.getenv("HF_USERNAME", "").strip()  # optional, can help deployment UX

# Cache of loaded local transformers pipelines, keyed by model ID.
LOCAL_PIPELINE_CACHE: Dict[str, Any] = {}
# ------------------------------------------------------------
# Prompting
# ------------------------------------------------------------
# System prompt: instructs the model to emit a strict JSON site description
# (site_name / pages / shared_css / shared_js) with no surrounding markdown.
SYSTEM_PROMPT = """You are an expert senior web developer and product designer.
Create a complete production-style website from a user's free text prompt.
Rules:
1. Return ONLY valid JSON.
2. The JSON root must have this exact schema:
{
"site_name": "string",
"pages": [
{
"filename": "index.html",
"title": "string",
"html": "full html page string"
}
],
"shared_css": "string",
"shared_js": "string"
}
3. Create multiple pages when appropriate, such as:
- index.html
- about.html
- contact.html
- blog.html
- services.html
4. Every HTML page must:
- be complete HTML5
- include <meta charset="UTF-8">
- include <meta name="viewport" content="width=device-width, initial-scale=1.0">
- link to style.css
- link to script.js
- contain navigation links between pages when multiple pages exist
- contain meaningful English content, not lorem ipsum
5. Design everything automatically:
- layout
- colors
- typography
- sections
- buttons
- forms
- cards
- responsive design
6. Use plain HTML/CSS/JS only.
7. Do not use external CDNs.
8. Put all shared CSS into "shared_css".
9. Put all shared JS into "shared_js".
10. Keep JS safe and browser-friendly.
11. If the user asks for interactions, implement them in JavaScript.
12. Make the result polished and visually impressive.
Return JSON only. No markdown. No explanations.
"""
def build_user_prompt(user_prompt: str) -> str:
    """Wrap the raw user request in the instruction template sent to the model."""
    message = f"""
User request:
{user_prompt}
Generate a complete multi-page website as JSON according to the required schema.
"""
    return message
# ------------------------------------------------------------
# Utility helpers
# ------------------------------------------------------------
def slugify(text: str) -> str:
    """Reduce *text* to a lowercase hyphen-separated slug.

    Returns a random ``site-xxxxxxxx`` slug when nothing usable survives.
    """
    collapsed = re.sub(r"[^a-z0-9]+", "-", text.lower().strip())
    slug = re.sub(r"-+", "-", collapsed).strip("-")
    if slug:
        return slug
    return f"site-{uuid.uuid4().hex[:8]}"
def safe_filename(name: str) -> str:
    """Strip any directory components and replace unsafe characters with '_'."""
    base = os.path.basename(name.strip())
    cleaned = re.sub(r"[^a-zA-Z0-9._-]", "_", base)
    return cleaned if cleaned else "file.txt"
def now_id() -> str:
    """Return a local-time timestamp identifier in ``YYYYMMDD-HHMMSS`` form."""
    return time.strftime("%Y%m%d-%H%M%S")
def json_extract(text: str) -> Dict[str, Any]:
    """Extract and parse a JSON object from raw model output.

    Tries, in order: a direct parse, a ```json fenced block, and finally the
    substring between the first '{' and the last '}'.

    Raises:
        ValueError: if no strategy yields valid JSON.
    """
    text = text.strip()
    # 1) The whole output is already valid JSON.
    try:
        return json.loads(text)
    except ValueError:
        pass
    # 2) JSON inside a markdown code fence.
    fence_match = re.search(r"```(?:json)?\s*(\{.*\})\s*```", text, re.DOTALL)
    if fence_match:
        try:
            return json.loads(fence_match.group(1))
        except ValueError:
            pass
    # 3) Greedy span from the first '{' to the last '}'.
    start = text.find("{")
    end = text.rfind("}")
    if start != -1 and end > start:
        # BUG FIX: previously a raw JSONDecodeError escaped here with a
        # cryptic message; now we fall through to the uniform error below.
        # (JSONDecodeError subclasses ValueError, so callers are unaffected.)
        try:
            return json.loads(text[start:end + 1])
        except ValueError:
            pass
    raise ValueError("Could not parse valid JSON from model output.")
def normalize_site_json(data: Dict[str, Any]) -> Dict[str, Any]:
    """Validate and normalize the model's site JSON.

    Guarantees: every page has a unique ``*.html`` filename, a title, and a
    non-empty HTML body linked to the shared assets; the first page is renamed
    to ``index.html`` when no page claims that name.

    Raises:
        ValueError: if *data* is not a dict or contains no usable pages.
    """
    if not isinstance(data, dict):
        raise ValueError("Generated data is not a JSON object.")
    site_name = data.get("site_name", "Generated Website")
    pages = data.get("pages", [])
    shared_css = data.get("shared_css", "")
    shared_js = data.get("shared_js", "")
    if not isinstance(pages, list) or len(pages) == 0:
        raise ValueError("No pages were generated.")
    normalized_pages = []
    seen = set()
    for i, page in enumerate(pages):
        if not isinstance(page, dict):
            continue  # skip malformed entries instead of failing the whole site
        filename = safe_filename(page.get("filename", f"page{i+1}.html"))
        if not filename.endswith(".html"):
            filename += ".html"
        if filename in seen:
            # De-duplicate colliding filenames by suffixing the page index.
            stem = filename[:-5]
            filename = f"{stem}_{i+1}.html"
        seen.add(filename)
        title = str(page.get("title", filename.replace(".html", "").title()))
        html_content = str(page.get("html", "")).strip()
        if not html_content:
            html_content = build_fallback_html(title=title, body=f"<h1>{html.escape(title)}</h1><p>Page generated with missing content.</p>")
        normalized_pages.append({
            "filename": filename,
            "title": title,
            "html": ensure_assets_linked(html_content, title),
        })
    # BUG FIX: if every entry was malformed an IndexError escaped below;
    # raise the documented ValueError instead.
    if not normalized_pages:
        raise ValueError("No pages were generated.")
    if "index.html" not in [p["filename"] for p in normalized_pages]:
        first = normalized_pages[0]
        normalized_pages[0] = {**first, "filename": "index.html"}
    return {
        "site_name": str(site_name),
        "pages": normalized_pages,
        "shared_css": str(shared_css),
        "shared_js": str(shared_js),
    }
def ensure_assets_linked(html_content: str, title: str) -> str:
    """Return *html_content* as a full HTML page wired to style.css/script.js.

    Missing charset/viewport metas, <title>, stylesheet link, and script tag
    are injected; fragments without an <html> element are first wrapped in the
    fallback page template. Checks are case-insensitive against the input.
    """
    if "<html" not in html_content.lower():
        html_content = build_fallback_html(title, html_content)
    lower = html_content.lower()

    def _after_head(snippet: str, doc: str) -> str:
        # Insert *snippet* immediately after the opening <head ...> tag.
        return re.sub(r"<head([^>]*)>", r"<head\1>\n" + snippet, doc, count=1, flags=re.IGNORECASE)

    if '<meta charset="utf-8">' not in lower and '<meta charset="utf-8"/>' not in lower:
        html_content = _after_head('<meta charset="UTF-8">', html_content)
    if 'name="viewport"' not in lower:
        html_content = _after_head('<meta name="viewport" content="width=device-width, initial-scale=1.0">', html_content)
    if "<title>" not in lower:
        html_content = _after_head(f"<title>{html.escape(title)}</title>", html_content)
    if 'href="style.css"' not in lower:
        html_content = re.sub(r"</head>", '\n<link rel="stylesheet" href="style.css">\n</head>', html_content, count=1, flags=re.IGNORECASE)
    if 'src="script.js"' not in lower:
        html_content = re.sub(r"</body>", '\n<script src="script.js"></script>\n</body>', html_content, count=1, flags=re.IGNORECASE)
    return html_content
def build_fallback_html(title: str, body: str) -> str:
    """Wrap *body* in a minimal standalone HTML5 page linked to the shared assets."""
    escaped_title = html.escape(title)
    return (
        "<!DOCTYPE html>\n"
        '<html lang="en">\n'
        "<head>\n"
        '<meta charset="UTF-8">\n'
        '<meta name="viewport" content="width=device-width, initial-scale=1.0">\n'
        f"<title>{escaped_title}</title>\n"
        '<link rel="stylesheet" href="style.css">\n'
        "</head>\n"
        "<body>\n"
        f"{body}\n"
        '<script src="script.js"></script>\n'
        "</body>\n"
        "</html>\n"
    )
def build_preview_html(project_dir: Path, selected_page: str) -> str:
    """Render *selected_page* as an iframe (srcdoc) preview snippet.

    Shared style.css / script.js are inlined because srcdoc cannot resolve
    relative asset URLs. Falls back to the first ``*.html`` file when the
    requested page is missing, and to an error <div> when none exists.
    """
    target = project_dir / selected_page
    if not target.exists():
        candidates = list(project_dir.glob("*.html"))
        if not candidates:
            return "<div style='padding:1rem;color:red;'>Error: No HTML page found.</div>"
        target = candidates[0]
    html_text = target.read_text(encoding="utf-8", errors="ignore")
    css_text = ""
    js_text = ""
    css_path = project_dir / "style.css"
    js_path = project_dir / "script.js"
    if css_path.exists():
        css_text = css_path.read_text(encoding="utf-8", errors="ignore")
    if js_path.exists():
        js_text = js_path.read_text(encoding="utf-8", errors="ignore")
    # Inline shared assets for preview.
    # BUG FIX: the asset text is injected via a callable replacement. Passing
    # it as a re.sub template string made backslashes in CSS/JS (e.g.
    # content: '\2022') raise re.error or corrupt the output.
    html_text = re.sub(r'<link[^>]+href="style\.css"[^>]*>', lambda _m: f"<style>\n{css_text}\n</style>", html_text, flags=re.IGNORECASE)
    html_text = re.sub(r'<script[^>]+src="script\.js"[^>]*>\s*</script>', lambda _m: f"<script>\n{js_text}\n</script>", html_text, flags=re.IGNORECASE)
    # Cross-page navigation does not work inside a srcdoc iframe, so the
    # header simply shows which page is being previewed.
    preview_wrapper = f"""
<div style="border:1px solid #333;border-radius:12px;overflow:hidden;background:white;">
<div style="padding:8px 12px;background:#111;color:#fff;font-family:Arial,sans-serif;font-size:12px;">
Live Preview: {html.escape(target.name)}
</div>
<iframe
style="width:100%;height:900px;border:none;background:white;"
sandbox="allow-scripts allow-forms allow-modals"
srcdoc='{html.escape(html_text)}'
></iframe>
</div>
"""
    return preview_wrapper
def write_project_files(site_data: Dict[str, Any], project_id: str) -> Path:
    """Materialize *site_data* under PROJECTS_DIR/<project_id>.

    Any previous directory with the same project_id is removed first, so the
    result always reflects exactly the given site data.
    """
    project_dir = PROJECTS_DIR / project_id
    if project_dir.exists():
        shutil.rmtree(project_dir)  # start from a clean slate
    project_dir.mkdir(parents=True, exist_ok=True)
    for page in site_data["pages"]:
        (project_dir / page["filename"]).write_text(page["html"], encoding="utf-8")
    shared_files = {
        "style.css": site_data["shared_css"],
        "script.js": site_data["shared_js"],
        "project.json": json.dumps(site_data, ensure_ascii=False, indent=2),
    }
    for name, content in shared_files.items():
        (project_dir / name).write_text(content, encoding="utf-8")
    return project_dir
def zip_project(project_dir: Path, zip_name: str) -> str:
    """Zip the project directory into EXPORTS_DIR/<zip_name>.

    Any stale archive with the same name is replaced. Returns the archive
    path as a string (suitable for a Gradio File component).
    """
    zip_path = EXPORTS_DIR / zip_name
    if zip_path.exists():
        zip_path.unlink()
    with zipfile.ZipFile(zip_path, "w", zipfile.ZIP_DEFLATED) as zf:
        for entry in project_dir.rglob("*"):
            if entry.is_file():
                zf.write(entry, arcname=entry.relative_to(project_dir))
    return str(zip_path)
def save_project_copy(project_dir: Path, site_name: str) -> str:
    """Snapshot the project into SAVED_DIR under a slug+timestamp name."""
    destination = SAVED_DIR / f"{slugify(site_name)}-{now_id()}"
    shutil.copytree(project_dir, destination)
    return str(destination)
def load_project(saved_name: str) -> Dict[str, Any]:
    """Load the project.json metadata of a saved project snapshot.

    Raises:
        FileNotFoundError: if the snapshot has no project.json.
    """
    meta_file = SAVED_DIR / saved_name / "project.json"
    if not meta_file.exists():
        raise FileNotFoundError("Saved project.json not found.")
    return json.loads(meta_file.read_text(encoding="utf-8"))
def list_saved_projects() -> List[str]:
    """Return the names of saved project snapshots, sorted alphabetically."""
    if not SAVED_DIR.exists():
        return []
    return sorted(entry.name for entry in SAVED_DIR.iterdir() if entry.is_dir())
def list_project_pages(project_dir: Path) -> List[str]:
    """Sorted ``*.html`` filenames in *project_dir* (empty if it does not exist)."""
    if not project_dir.exists():
        return []
    return sorted(page.name for page in project_dir.glob("*.html"))
def collect_code_view(project_dir: Path, selected_page: str) -> Tuple[str, str, str]:
    """Return (html, css, js) source text for the editor panes.

    Missing files yield empty strings rather than errors.
    """
    def _read(path: Path) -> str:
        return path.read_text(encoding="utf-8", errors="ignore") if path.exists() else ""

    html_code = _read(project_dir / selected_page)
    css_code = _read(project_dir / "style.css")
    js_code = _read(project_dir / "script.js")
    return html_code, css_code, js_code
def save_edited_files(project_dir: Path, selected_page: str, html_code: str, css_code: str, js_code: str) -> None:
    """Persist editor contents back to the project files (None becomes "").

    Raises:
        FileNotFoundError: if the project directory is gone.
    """
    if not project_dir.exists():
        raise FileNotFoundError("Project directory does not exist.")
    targets = (
        (selected_page, html_code),
        ("style.css", css_code),
        ("script.js", js_code),
    )
    for name, content in targets:
        (project_dir / name).write_text(content or "", encoding="utf-8")
# ------------------------------------------------------------
# Generation backends
# ------------------------------------------------------------
def try_inference_client(model_id: str, user_prompt: str) -> str:
    """Generate site JSON text via the HF serverless Inference API.

    Tries the chat-completions endpoint first, then plain text generation,
    since supported tasks differ per provider/model.

    Raises:
        RuntimeError: if HF_TOKEN is missing or both endpoints fail.
    """
    if not HF_TOKEN:
        raise RuntimeError("HF_TOKEN is not configured for serverless inference.")
    client = InferenceClient(token=HF_TOKEN)
    user_message = build_user_prompt(user_prompt)
    try:
        completion = client.chat.completions.create(
            model=model_id,
            messages=[
                {"role": "system", "content": SYSTEM_PROMPT},
                {"role": "user", "content": user_message},
            ],
            temperature=0.2,
            max_tokens=4096,
        )
        return completion.choices[0].message.content
    except Exception:
        pass  # chat task unsupported for this model — fall back to raw text generation
    try:
        return client.text_generation(
            f"{SYSTEM_PROMPT}\n\n{user_message}",
            model=model_id,
            max_new_tokens=4096,
            temperature=0.2,
            return_full_text=False,
        )
    except Exception as e:
        raise RuntimeError(f"Inference API failed for model {model_id}: {e}")
def load_local_pipeline(model_id: str):
    """Build (or fetch from cache) a CPU text-generation pipeline for *model_id*.

    float32 on CPU is intended for the small fallback models only; large
    models would exhaust memory on a CPU Space.
    """
    cached = LOCAL_PIPELINE_CACHE.get(model_id)
    if cached is not None:
        return cached
    tokenizer = AutoTokenizer.from_pretrained(model_id)
    model = AutoModelForCausalLM.from_pretrained(
        model_id,
        torch_dtype=torch.float32,
        device_map="cpu",
    )
    pipe = pipeline("text-generation", model=model, tokenizer=tokenizer)
    LOCAL_PIPELINE_CACHE[model_id] = pipe
    return pipe
def try_local_generation(model_id: str, user_prompt: str) -> str:
    """Generate site JSON text with a locally loaded transformers pipeline."""
    pipe = load_local_pipeline(model_id)
    prompt = f"{SYSTEM_PROMPT}\n\n{build_user_prompt(user_prompt)}"
    generations = pipe(
        prompt,
        max_new_tokens=2048,
        do_sample=True,
        temperature=0.2,
        top_p=0.95,
        pad_token_id=pipe.tokenizer.eos_token_id,
    )
    text = generations[0]["generated_text"]
    # Strip the echoed prompt so only the completion remains.
    if text.startswith(prompt):
        text = text[len(prompt):].strip()
    return text
def generate_site_data(model_label: str, user_prompt: str) -> Dict[str, Any]:
    """Produce normalized site data, cascading through generation strategies.

    Order: chosen model via the Inference API, then fallback serverless
    models, then small local models. Raises RuntimeError with the accumulated
    error log if every strategy fails.
    """
    if not user_prompt or not user_prompt.strip():
        raise ValueError("Please enter a website description.")
    model_id = MODELS.get(model_label, DEFAULT_MODEL_ID)
    errors = []

    def _attempt(runner, target_id):
        # Run one backend and normalize its JSON output.
        return normalize_site_json(json_extract(runner(target_id, user_prompt)))

    # 1) Chosen model via HF Inference.
    try:
        return _attempt(try_inference_client, model_id)
    except Exception as e:
        errors.append(f"Primary model inference failed: {e}")
    # 2) Fallback serverless models.
    for fid in (DEFAULT_MODEL_ID, "Qwen/Qwen2.5-Coder-3B-Instruct", "HuggingFaceTB/SmolLM2-1.7B-Instruct"):
        if fid == model_id:
            continue
        try:
            return _attempt(try_inference_client, fid)
        except Exception as e:
            errors.append(f"Fallback inference failed for {fid}: {e}")
    # 3) Small local models as a last resort.
    for local_model in LOCAL_FALLBACK_CANDIDATES:
        try:
            return _attempt(try_local_generation, local_model)
        except Exception as e:
            errors.append(f"Local fallback failed for {local_model}: {e}")
    raise RuntimeError("All generation strategies failed.\n" + "\n".join(errors))
# ------------------------------------------------------------
# Deployment to Hugging Face Space
# ------------------------------------------------------------
def make_static_space_readme(space_title: str) -> str:
    """README.md with the YAML front matter that marks a Space as ``sdk: static``."""
    # NOTE(review): the emoji was mojibake (a stray 0xF0 lead byte) in the
    # original source; restored to a plausible rocket emoji — confirm choice.
    return f"""---
title: {space_title}
emoji: 🚀
colorFrom: blue
colorTo: indigo
sdk: static
pinned: false
---
# {space_title}
This Space hosts a generated static website.
"""
def deploy_to_hf_space(project_dir: Path, desired_space_name: str, make_public: bool = True) -> str:
    """Publish the generated site as a static Hugging Face Space.

    Returns the public Space URL.

    Raises:
        RuntimeError: if HF_TOKEN is not configured.
    """
    if not HF_TOKEN:
        raise RuntimeError("Deployment requires HF_TOKEN in Space secrets.")
    api = HfApi(token=HF_TOKEN)
    # Resolve the owner: explicit env override, else the token's account.
    username = HF_USERNAME or api.whoami()["name"]
    repo_id = f"{username}/{slugify(desired_space_name)}"
    temp_dir = Path(tempfile.mkdtemp(prefix="hf_static_space_"))
    try:
        # Stage the website files plus the static-SDK README.
        for item in project_dir.iterdir():
            if item.is_file():
                shutil.copy2(item, temp_dir / item.name)
        (temp_dir / "README.md").write_text(make_static_space_readme(desired_space_name), encoding="utf-8")
        # Create (or reuse) the Space, then push the staged folder.
        api.create_repo(
            repo_id=repo_id,
            repo_type="space",
            space_sdk="static",
            private=not make_public,
            exist_ok=True,
        )
        api.upload_folder(
            repo_id=repo_id,
            repo_type="space",
            folder_path=str(temp_dir),
        )
        return f"https://huggingface.co/spaces/{repo_id}"
    finally:
        shutil.rmtree(temp_dir, ignore_errors=True)
# ------------------------------------------------------------
# UI event handlers
# ------------------------------------------------------------
def generate_action(user_prompt: str, model_label: str):
    """Generate a site and return updates for all output widgets.

    Returns a 9-tuple: (state dict, page-dropdown update, html, css, js,
    preview HTML, zip path, status message, raw project JSON).
    """
    try:
        site_data = generate_site_data(model_label, user_prompt)
        site_name = site_data["site_name"] or "Generated Website"
        project_id = f"{slugify(site_name)}-{uuid.uuid4().hex[:8]}"
        project_dir = write_project_files(site_data, project_id)
        pages = list_project_pages(project_dir)
        selected_page = "index.html" if "index.html" in pages else pages[0]
        html_code, css_code, js_code = collect_code_view(project_dir, selected_page)
        preview = build_preview_html(project_dir, selected_page)
        zip_path = zip_project(project_dir, f"{project_id}.zip")
        state = {
            "project_id": project_id,
            "project_dir": str(project_dir),
            "site_name": site_name,
            "pages": pages,
        }
        # NOTE(review): status prefixes were mojibake in the original source;
        # restored to "✅"/"❌ שגיאה" ("error") — confirm wording.
        msg = f"✅ Site generated successfully: {site_name}"
        return (
            state,
            gr.update(choices=pages, value=selected_page),
            html_code,
            css_code,
            js_code,
            preview,
            zip_path,
            msg,
            json.dumps(site_data, ensure_ascii=False, indent=2),
        )
    except Exception as e:
        err = f"❌ שגיאה: {e}"
        return (
            {},
            gr.update(choices=[], value=None),
            "",
            "",
            "",
            f"<div style='padding:1rem;color:red;font-weight:bold;'>{html.escape(err)}</div>",
            None,
            err,
            "",
        )
def page_change_action(state: Dict[str, Any], selected_page: str):
    """Reload the editors and preview when the user switches pages.

    Returns (html, css, js, preview HTML, status message).
    """
    try:
        project_dir = Path(state.get("project_dir", ""))
        if not project_dir.exists():
            raise FileNotFoundError("No generated project is loaded.")
        html_code, css_code, js_code = collect_code_view(project_dir, selected_page)
        preview = build_preview_html(project_dir, selected_page)
        return html_code, css_code, js_code, preview, "✅ Page loaded."
    except Exception as e:
        # NOTE(review): "שגיאה" ("error") restored from mojibake in the original.
        err = f"❌ שגיאה: {e}"
        return "", "", "", f"<div style='padding:1rem;color:red;'>{html.escape(err)}</div>", err
def save_edits_action(state: Dict[str, Any], selected_page: str, html_code: str, css_code: str, js_code: str):
    """Write editor contents to disk, refresh the preview, and rebuild the ZIP.

    Returns (preview HTML, zip path, status message).
    """
    try:
        project_dir = Path(state.get("project_dir", ""))
        if not project_dir.exists():
            raise FileNotFoundError("No generated project is loaded.")
        save_edited_files(project_dir, selected_page, html_code, css_code, js_code)
        preview = build_preview_html(project_dir, selected_page)
        zip_path = zip_project(project_dir, f"{state.get('project_id', 'project')}.zip")
        return preview, zip_path, "✅ Changes saved and preview updated."
    except Exception as e:
        # NOTE(review): "שגיאה" ("error") restored from mojibake in the original.
        err = f"❌ שגיאה: {e}"
        return f"<div style='padding:1rem;color:red;'>{html.escape(err)}</div>", None, err
def save_project_action(state: Dict[str, Any]):
    """Snapshot the current project into SAVED_DIR and refresh the saved list.

    Returns (saved-projects dropdown update, status message).
    """
    try:
        project_dir = Path(state.get("project_dir", ""))
        site_name = state.get("site_name", "generated-site")
        if not project_dir.exists():
            raise FileNotFoundError("No generated project to save.")
        saved_path = save_project_copy(project_dir, site_name)
        choices = list_saved_projects()
        return gr.update(choices=choices, value=Path(saved_path).name), f"✅ Project saved: {saved_path}"
    except Exception as e:
        # NOTE(review): "שגיאה" ("error") restored from mojibake in the original.
        return gr.update(choices=list_saved_projects()), f"❌ שגיאה: {e}"
def load_project_action(saved_name: str):
    """Rehydrate a saved snapshot into the working area and refresh all widgets.

    Returns the same 9-tuple shape as generate_action.
    """
    try:
        if not saved_name:
            raise ValueError("Please choose a saved project.")
        site_data = load_project(saved_name)
        project_id = f"loaded-{saved_name}"
        project_dir = write_project_files(site_data, project_id)
        pages = list_project_pages(project_dir)
        selected_page = "index.html" if "index.html" in pages else pages[0]
        html_code, css_code, js_code = collect_code_view(project_dir, selected_page)
        preview = build_preview_html(project_dir, selected_page)
        zip_path = zip_project(project_dir, f"{project_id}.zip")
        state = {
            "project_id": project_id,
            "project_dir": str(project_dir),
            "site_name": site_data.get("site_name", saved_name),
            "pages": pages,
        }
        return (
            state,
            gr.update(choices=pages, value=selected_page),
            html_code,
            css_code,
            js_code,
            preview,
            zip_path,
            f"✅ Loaded saved project: {saved_name}",
            json.dumps(site_data, ensure_ascii=False, indent=2),
        )
    except Exception as e:
        # NOTE(review): "שגיאה" ("error") restored from mojibake in the original.
        err = f"❌ שגיאה: {e}"
        return (
            {},
            gr.update(choices=[], value=None),
            "",
            "",
            "",
            f"<div style='padding:1rem;color:red;'>{html.escape(err)}</div>",
            None,
            err,
            "",
        )
def deploy_action(state: Dict[str, Any], deploy_name: str):
    """Deploy the current project to a public static HF Space; return a status string."""
    try:
        project_dir = Path(state.get("project_dir", ""))
        site_name = state.get("site_name", "generated-site")
        if not project_dir.exists():
            raise FileNotFoundError("No generated project to deploy.")
        # Fall back to the site name when no explicit Space name was given.
        deploy_name = (deploy_name or site_name).strip()
        url = deploy_to_hf_space(project_dir, deploy_name, make_public=True)
        return f"✅ Site deployed successfully: {url}"
    except Exception as e:
        # NOTE(review): "שגיאה" ("error") restored from mojibake in the original.
        return f"❌ שגיאה: {e}"
# ------------------------------------------------------------
# Gradio UI
# ------------------------------------------------------------
# Cosmetic overrides: full-width layout, taller code editors, bold status text.
CUSTOM_CSS = """
#app-root {
max-width: 100% !important;
}
.code-panel .cm-editor, .code-panel textarea {
min-height: 280px !important;
}
.status-box textarea {
font-weight: 600;
}
"""
# NOTE(review): every Hebrew UI string below was mojibake in the original
# source (UTF-8 Hebrew rendered through a Thai codepage). The wording has
# been reconstructed on a best-effort basis from the surviving byte pattern
# and the surrounding English context — please verify against the intent.
with gr.Blocks(css=CUSTOM_CSS, title=APP_TITLE, fill_height=True) as demo:
    gr.Markdown(
        """
# 🚀 AI Website Builder
כתוב תיאור חופשי של האתר שאתה רוצה — ה-AI ייצור עבורך אתר שלם, כולל כמה דפים, CSS, JavaScript, תצוגה מקדימה חיה, הורדה כ-ZIP ושמירה/פריסה.
"""
    )
    # Holds {project_id, project_dir, site_name, pages} for the loaded project.
    project_state = gr.State({})
    with gr.Row():
        prompt_box = gr.Textbox(
            label="תיאור חופשי של האתר",  # "free-form site description"
            placeholder="לדוגמה: Build a modern startup website for an AI productivity company with home, about, pricing, blog and contact pages. Use dark mode, animated hero, testimonials, FAQ and contact form.",
            lines=6,
            scale=6,
        )
        model_dropdown = gr.Dropdown(
            label="מודל AI",  # "AI model"
            choices=list(MODELS.keys()),
            value=DEFAULT_MODEL_LABEL,
            scale=2,
        )
    with gr.Row():
        generate_btn = gr.Button("צור אתר", variant="primary")  # "create site"
        save_project_btn = gr.Button("שמור פרויקט")  # "save project"
        deploy_btn = gr.Button("פרוס אתר")  # "deploy site"
        deploy_name_box = gr.Textbox(label="שם ה-Space לפריסה", placeholder="my-generated-website")
    with gr.Row():
        saved_projects_dropdown = gr.Dropdown(
            label="פרויקטים שמורים",  # "saved projects"
            choices=list_saved_projects(),
            value=None,
            allow_custom_value=False,
        )
        load_project_btn = gr.Button("טען פרויקט שמור")  # "load saved project"
    status_box = gr.Textbox(label="סטטוס", interactive=False)  # "status"
    with gr.Row(equal_height=True):
        with gr.Column(scale=5):
            preview_html = gr.HTML(label="תצוגה מקדימה")  # "live preview"
        with gr.Column(scale=5):
            page_dropdown = gr.Dropdown(label="דף נבחר", choices=[], value=None)  # "selected page"
            html_editor = gr.Code(label="HTML", language="html", interactive=True, lines=18, elem_classes=["code-panel"])
            css_editor = gr.Code(label="CSS", language="css", interactive=True, lines=14, elem_classes=["code-panel"])
            js_editor = gr.Code(label="JavaScript", language="javascript", interactive=True, lines=14, elem_classes=["code-panel"])
            save_edits_btn = gr.Button("שמור שינויים בעורך")  # "save changes in the editor"
    with gr.Row():
        zip_file = gr.File(label="הורדת האתר כ-ZIP")  # "download the site as ZIP"
        raw_json = gr.Code(label="Project JSON", language="json", interactive=False, lines=18)

    # Event wiring: each handler returns values matching its outputs list.
    generate_btn.click(
        fn=generate_action,
        inputs=[prompt_box, model_dropdown],
        outputs=[project_state, page_dropdown, html_editor, css_editor, js_editor, preview_html, zip_file, status_box, raw_json],
        show_progress="full"
    )
    page_dropdown.change(
        fn=page_change_action,
        inputs=[project_state, page_dropdown],
        outputs=[html_editor, css_editor, js_editor, preview_html, status_box]
    )
    save_edits_btn.click(
        fn=save_edits_action,
        inputs=[project_state, page_dropdown, html_editor, css_editor, js_editor],
        outputs=[preview_html, zip_file, status_box]
    )
    save_project_btn.click(
        fn=save_project_action,
        inputs=[project_state],
        outputs=[saved_projects_dropdown, status_box]
    )
    load_project_btn.click(
        fn=load_project_action,
        inputs=[saved_projects_dropdown],
        outputs=[project_state, page_dropdown, html_editor, css_editor, js_editor, preview_html, zip_file, status_box, raw_json]
    )
    deploy_btn.click(
        fn=deploy_action,
        inputs=[project_state, deploy_name_box],
        outputs=[status_box]
    )

if __name__ == "__main__":
    demo.launch()