File size: 6,543 Bytes
e6a1522
 
3646ce9
fbf7bc1
0cc6913
51dcd8e
 
325970c
0cc6913
46e1f05
 
325970c
46e1f05
 
 
 
 
 
 
 
 
 
325970c
 
1c912ef
e6a1522
325970c
 
5bb11b6
 
46e1f05
 
 
 
5bb11b6
 
 
fbf7bc1
39f8848
325970c
5bb11b6
39f8848
325970c
 
0cc6913
e6a1522
03bb86c
fbf7bc1
46e1f05
 
c323882
 
46e1f05
 
 
51dcd8e
 
 
46e1f05
325970c
 
51dcd8e
325970c
 
 
 
 
 
 
 
 
46e1f05
 
 
933118b
46e1f05
933118b
 
 
46e1f05
 
 
 
 
 
 
 
 
 
 
 
325970c
46e1f05
 
325970c
 
 
 
 
46e1f05
325970c
46e1f05
 
 
 
 
325970c
 
 
 
 
 
46e1f05
325970c
46e1f05
 
325970c
933118b
325970c
933118b
325970c
 
 
 
 
 
46e1f05
 
5bb11b6
c323882
39f8848
46e1f05
 
fbf7bc1
e6a1522
fbf7bc1
5bb11b6
 
c323882
fbf7bc1
 
325970c
39f8848
 
 
 
a391056
51dcd8e
5bb11b6
fbf7bc1
e6a1522
325970c
0cc6913
39f8848
5bb11b6
a391056
39f8848
c323882
 
 
 
0cc6913
c323882
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
import gradio as gr
from PIL import Image
import numpy as np
import cv2
import os
import tempfile
import time
from gradio_client import Client, handle_file

# --- CONFIGURATION: FALLBACK LIST ---
# Ordered fallback chain of Hugging Face Spaces for image-to-3D generation.
# generate_3d_avatar() walks this list top-to-bottom; the first Space that
# answers wins. Each entry:
#   "id"   - HF Space repo id passed to gradio_client.Client(...)
#   "api"  - endpoint name passed as api_name= to client.predict(...)
#   "type" - dispatch key selecting the argument layout that Space expects
MODELS = [
    # 1. LGM (ashawkey/LGM) - Very fast, usually online.
    {"id": "ashawkey/LGM", "api": "/process", "type": "lgm"},
    
    # 2. InstantMesh (TencentARC) - High quality, try again.
    {"id": "TencentARC/InstantMesh", "api": "/generate", "type": "instantmesh"},

    # 3. Zero123++ (sudo-ai) - Good alternative architecture.
    {"id": "sudo-ai/zero123plus-v1.2", "api": "/generate", "type": "zero123"},
    
    # 4. Shap-E (OpenAI) - The reliable backup.
    {"id": "hysts/Shap-E", "api": "/image-to-3d", "type": "shape"}
]

def photo_to_sketch(image):
    """Turn an uploaded photo into a clean line sketch, entirely locally.

    Pipeline: normalize input to a 512x512 PIL image, convert to grayscale,
    Gaussian-blur, run Canny edge detection, then invert so edges render
    dark on a white background.

    Returns an RGB PIL.Image, or None when no image was provided.
    """
    if image is None:
        return None

    # Gradio may hand us a numpy array; normalize to a PIL image first.
    if isinstance(image, np.ndarray):
        image = Image.fromarray(image.astype('uint8'))

    # Fixed 512x512 keeps the downstream 3D APIs from choking on odd sizes.
    resized = image.resize((512, 512))

    pixels = np.array(resized.convert("L"))
    smoothed = cv2.GaussianBlur(pixels, (5, 5), 0)
    outlines = cv2.Canny(smoothed, 60, 150)

    # Invert: white paper, dark pencil lines.
    inverted = 255 - outlines
    return Image.fromarray(inverted).convert("RGB")

def generate_3d_avatar(sketch_image, height, weight, muscle, gender, breast):
    """Generate a 3D model from a sketch by trying each remote Space in MODELS.

    Walks the MODELS fallback list in order; the first Space that responds
    successfully wins. Returns a (model_path, model_path) tuple so the same
    file feeds both the Model3D viewer and the download component.

    NOTE(review): height/weight/muscle/gender/breast come from the UI but are
    not forwarded to any backend here — presumably a planned feature; confirm.

    Raises:
        gr.Error: if no image was supplied, or every model in MODELS failed.
    """
    print("-> Starting 3D Generation Process...")

    if sketch_image is None:
        raise gr.Error("Please upload an image first!")

    # --- CRITICAL FIX: Sanitize Image ---
    # Many 3D APIs crash if the image is not 256x256 or 512x512 RGB.
    if isinstance(sketch_image, np.ndarray):
        sketch_image = Image.fromarray(sketch_image.astype('uint8'))
    sketch_image = sketch_image.convert("RGB").resize((512, 512))

    # Persist to a temp file because gradio_client uploads from a path.
    temp_dir = tempfile.gettempdir()
    sketch_path = os.path.join(temp_dir, f"sketch_{int(time.time())}.png")
    sketch_image.save(sketch_path)
    print(f"-> Saved clean input to {sketch_path}")

    last_error = ""

    # --- RETRY LOOP: first responsive Space wins ---
    for model in MODELS:
        model_id = model["id"]
        try:
            print("------------------------------------------")
            print(f"-> Attempting Connection to: {model_id}...")

            client = Client(model_id)
            result = _call_space(client, model, sketch_path)

            # If we get here, it worked!
            print(f"-> SUCCESS! Model generated by {model_id}")
            final_model = _extract_model_path(result)
            return final_model, final_model

        except Exception as e:
            print(f"-> FAILED: {model_id} | Error: {e}")
            last_error = str(e)
            continue  # Try next model

    # All fallbacks exhausted — surface the last error to the UI.
    # FIX: report the real model count instead of a hard-coded "4".
    raise gr.Error(
        f"CRITICAL OUTAGE: All {len(MODELS)} backup models failed. "
        f"The Hugging Face inference cloud is severely degraded right now. "
        f"Last Error: {last_error}"
    )


def _call_space(client, model, sketch_path):
    """Dispatch one predict() call with the argument layout this Space expects."""
    kind = model["type"]
    api = model["api"]

    if kind == "lgm":
        print("-> Sending request (LGM format)...")
        return client.predict(handle_file(sketch_path), api_name=api)

    if kind == "instantmesh":
        print("-> Sending request (InstantMesh format)...")
        return client.predict(
            handle_file(sketch_path),  # Image
            True,                      # Remove Background
            30,                        # Steps
            42,                        # Seed
            api_name=api,
        )

    if kind == "zero123":
        print("-> Sending request (Zero123 format)...")
        return client.predict(
            handle_file(sketch_path),  # Image
            True,                      # Remove Background
            api_name=api,
        )

    if kind == "shape":
        print("-> Sending request (Shap-E format)...")
        # Shap-E is strictly: Image, Prompt, Seed, Guidance, Steps
        return client.predict(
            handle_file(sketch_path),
            "",  # Prompt must be string (empty is fine)
            0,   # Seed
            15,  # Guidance
            64,  # Steps
            api_name=api,
        )

    # Unknown type: raise so the retry loop logs it and moves on.
    raise ValueError(f"Unknown model type: {kind!r}")


def _extract_model_path(result):
    """Pick the mesh file out of a predict() result (single path or sequence)."""
    if isinstance(result, (list, tuple)):
        # Prefer the first recognizable mesh file; otherwise first element.
        mesh_exts = ('.glb', '.obj', '.gltf', '.ply')
        return next(
            (item for item in result
             if isinstance(item, str) and item.endswith(mesh_exts)),
            result[0],
        )
    return result

# =============== UI ===============
# Declarative Gradio layout. Component handles created here are wired to the
# two callbacks via .click() at the bottom of the block.
with gr.Blocks(title="SketchToLife") as demo:
    gr.Markdown("# SketchToLife – Emergency Backup Mode")
    gr.Markdown("**Status:** Trying LGM → InstantMesh → Zero123 → Shap-E")

    with gr.Row():
        # Left column: photo upload -> local sketch preview.
        with gr.Column():
            inp = gr.Image(label="Upload Photo", type="pil", height=420)
            btn1 = gr.Button("Generate Clean Sketch", variant="secondary", size="lg")
            out_sketch = gr.Image(label="Your Sketch", height=420, type="pil")

        # Right column: body customization controls.
        # NOTE(review): these values are passed to generate_3d_avatar but are
        # not currently used by any backend call — see that function.
        with gr.Column():
            gr.Markdown("### Customize Body")
            h = gr.Dropdown(["short", "average", "tall", "giant"], value="average", label="Height")
            w = gr.Dropdown(["slim", "average", "curvy", "heavy"], value="average", label="Weight")
            m = gr.Dropdown(["slim", "fit", "muscular", "bodybuilder"], value="fit", label="Muscle")
            g = gr.Radio(["male", "female", "neutral"], value="neutral", label="Gender")
            b = gr.Dropdown(["small", "medium", "large"], value="medium", label="Breast/Form")
            
            btn2 = gr.Button("Generate 3D Model", variant="primary", size="lg")

    # Bottom row: interactive 3D viewer plus downloadable mesh file.
    with gr.Row():
        view3d = gr.Model3D(label="3D Result", height=520, interactive=True)
        download = gr.File(label="Download .GLB")

    # Wiring: sketch button runs locally; 3D button feeds the SKETCH output
    # (not the raw upload) into the remote generation pipeline.
    btn1.click(photo_to_sketch, inputs=inp, outputs=out_sketch)
    btn2.click(generate_3d_avatar, inputs=[out_sketch, h, w, m, g, b], outputs=[view3d, download])

# Entry point: start the Gradio server when run as a script.
if __name__ == "__main__":
    launch_options = {
        "server_name": "0.0.0.0",  # bind all interfaces (container-friendly)
        "server_port": 7860,
        "ssr_mode": False,
    }
    demo.launch(**launch_options)