# (Hugging Face page-status text "Spaces: Sleeping" was captured with this
# paste; kept here as a comment so the file remains valid Python.)
import gradio as gr
from PIL import Image
import numpy as np
import cv2
import os
import tempfile
import time
from gradio_client import Client, handle_file

# --- CONFIGURATION: FALLBACK LIST ---
# Remote Hugging Face Spaces tried in order; if one is down, we jump to the
# next. Each entry holds the Space id, the named API endpoint, and a "type"
# tag used to pick the positional-argument layout for client.predict().
MODELS = [
    # 1. LGM (ashawkey/LGM) - Very fast, usually online.
    {"id": "ashawkey/LGM", "api": "/process", "type": "lgm"},
    # 2. InstantMesh (TencentARC) - High quality, try again.
    {"id": "TencentARC/InstantMesh", "api": "/generate", "type": "instantmesh"},
    # 3. Zero123++ (sudo-ai) - Good alternative architecture.
    {"id": "sudo-ai/zero123plus-v1.2", "api": "/generate", "type": "zero123"},
    # 4. Shap-E (OpenAI) - The reliable backup.
    {"id": "hysts/Shap-E", "api": "/image-to-3d", "type": "shape"}
]
def photo_to_sketch(image):
    """Turn an uploaded photo into a clean edge sketch, entirely locally.

    Pipeline: normalize to a 512x512 PIL image, grayscale, Gaussian blur,
    Canny edge detection, then invert so edges read dark-on-white.
    Returns an RGB PIL.Image, or None when no image was provided.
    """
    if image is None:
        return None
    # Gradio may hand us a raw numpy array; normalize it to a PIL image.
    pil_img = (
        Image.fromarray(image.astype('uint8'))
        if isinstance(image, np.ndarray)
        else image
    )
    # Resize to safe dimensions (512x512) to prevent downstream API crashes.
    grayscale = np.array(pil_img.resize((512, 512)).convert("L"))
    smoothed = cv2.GaussianBlur(grayscale, (5, 5), 0)
    outline = cv2.Canny(smoothed, 60, 150)
    # Canny marks edges white-on-black; invert for a sketch look.
    return Image.fromarray(255 - outline).convert("RGB")
def generate_3d_avatar(sketch_image, height, weight, muscle, gender, breast):
    """Generate a 3D model from a sketch by trying remote Spaces in order.

    Walks MODELS and returns the first successful result as a
    (model_path, model_path) pair — one for the Model3D viewer, one for the
    download widget.

    Raises:
        gr.Error: if no image is supplied, or if every backend fails.

    NOTE(review): height/weight/muscle/gender/breast are accepted to match
    the UI wiring but are never forwarded to any backend — confirm whether
    they are meant to influence generation.
    """
    print("-> Starting 3D Generation Process...")
    if sketch_image is None:
        raise gr.Error("Please upload an image first!")

    # --- Sanitize input: many 3D APIs crash unless given 512x512 RGB. ---
    if isinstance(sketch_image, np.ndarray):
        sketch_image = Image.fromarray(sketch_image.astype('uint8'))
    sketch_image = sketch_image.convert("RGB").resize((512, 512))

    # Persist to a temp file so gradio_client can upload it.
    temp_dir = tempfile.gettempdir()
    sketch_path = os.path.join(temp_dir, f"sketch_{int(time.time())}.png")
    sketch_image.save(sketch_path)
    print(f"-> Saved clean input to {sketch_path}")

    # Positional arguments each backend expects AFTER the image file.
    extra_args = {
        "lgm": [],                      # LGM: image only
        "instantmesh": [True, 30, 42],  # remove background, steps, seed
        "zero123": [True],              # remove background
        "shape": ["", 0, 15, 64],       # prompt, seed, guidance, steps
    }

    last_error = ""
    # --- RETRY LOOP: first backend that answers wins. ---
    for model in MODELS:
        model_id = model["id"]
        try:
            print("------------------------------------------")
            print(f"-> Attempting Connection to: {model_id}...")

            # Fail fast on a misconfigured MODELS entry; previously an
            # unknown type left `result` unbound and raised a confusing
            # NameError that the broad except silently swallowed.
            if model["type"] not in extra_args:
                raise ValueError(f"Unknown model type: {model['type']}")

            client = Client(model_id)
            print(f"-> Sending request ({model['type']} format)...")
            result = client.predict(
                handle_file(sketch_path),
                *extra_args[model["type"]],
                api_name=model["api"],
            )

            # If we get here, it worked!
            print(f"-> SUCCESS! Model generated by {model_id}")
            final_model = _extract_model_path(result)
            return final_model, final_model
        except Exception as e:
            print(f"-> FAILED: {model_id} | Error: {e}")
            last_error = str(e)
            continue  # Try next model

    # Every backend failed.
    raise gr.Error(
        f"CRITICAL OUTAGE: All {len(MODELS)} backup models failed. "
        "The Hugging Face inference cloud is severely degraded right now. "
        f"Last Error: {last_error}"
    )


def _extract_model_path(result):
    """Pick the 3D file path out of a predict() result (path or list of paths)."""
    if isinstance(result, (list, tuple)):
        # Guard: an empty result previously raised IndexError on result[0].
        if not result:
            raise ValueError("Backend returned an empty result list")
        # Prefer the first recognizable 3D asset; fall back to the first item.
        return next(
            (item for item in result
             if isinstance(item, str)
             and item.endswith(('.glb', '.obj', '.gltf', '.ply'))),
            result[0],
        )
    return result
# =============== UI ===============
# Two-step flow: (1) local photo->sketch conversion, (2) remote 3D generation
# with the multi-Space fallback chain defined in MODELS.
with gr.Blocks(title="SketchToLife") as demo:
    gr.Markdown("# SketchToLife – Emergency Backup Mode")
    gr.Markdown("**Status:** Trying LGM → InstantMesh → Zero123 → Shap-E")
    with gr.Row():
        with gr.Column():
            # Left column: photo upload and sketch preview.
            inp = gr.Image(label="Upload Photo", type="pil", height=420)
            btn1 = gr.Button("Generate Clean Sketch", variant="secondary", size="lg")
            out_sketch = gr.Image(label="Your Sketch", height=420, type="pil")
        with gr.Column():
            # Right column: body-shape options. NOTE(review): these are
            # passed into generate_3d_avatar but that function does not
            # currently forward them to any backend.
            gr.Markdown("### Customize Body")
            h = gr.Dropdown(["short", "average", "tall", "giant"], value="average", label="Height")
            w = gr.Dropdown(["slim", "average", "curvy", "heavy"], value="average", label="Weight")
            m = gr.Dropdown(["slim", "fit", "muscular", "bodybuilder"], value="fit", label="Muscle")
            g = gr.Radio(["male", "female", "neutral"], value="neutral", label="Gender")
            b = gr.Dropdown(["small", "medium", "large"], value="medium", label="Breast/Form")
            btn2 = gr.Button("Generate 3D Model", variant="primary", size="lg")
    with gr.Row():
        # Output row: interactive 3D viewer plus a downloadable file.
        view3d = gr.Model3D(label="3D Result", height=520, interactive=True)
        download = gr.File(label="Download .GLB")
    # Wiring: sketch button feeds the sketch preview; the 3D button consumes
    # the sketch plus the body-shape controls and fills viewer + download.
    btn1.click(photo_to_sketch, inputs=inp, outputs=out_sketch)
    btn2.click(generate_3d_avatar, inputs=[out_sketch, h, w, m, g, b], outputs=[view3d, download])
if __name__ == "__main__":
    demo.launch(
        server_name="0.0.0.0",  # bind all interfaces (required in containers)
        server_port=7860,       # fixed port expected by the hosting setup
        ssr_mode=False
    )