Spaces:
Sleeping
Sleeping
CORS
Browse files
app.py
CHANGED
|
@@ -6,6 +6,7 @@ import cv2
|
|
| 6 |
import numpy as np
|
| 7 |
from PIL import Image
|
| 8 |
from io import BytesIO
|
|
|
|
| 9 |
import os
|
| 10 |
|
| 11 |
# ===============================
|
|
@@ -48,7 +49,7 @@ def depth_to_normal(depth):
|
|
| 48 |
# CORE PROCESSING FUNCTION
|
| 49 |
# ===============================
|
| 50 |
def _process_saree_core(base_image: Image.Image, pattern_image: Image.Image):
|
| 51 |
-
#
|
| 52 |
img_pil = base_image.convert("RGB")
|
| 53 |
img_np = np.array(img_pil)
|
| 54 |
|
|
@@ -136,26 +137,25 @@ def _process_saree_core(base_image: Image.Image, pattern_image: Image.Image):
|
|
| 136 |
return Image.fromarray(pattern_rgba, mode="RGBA")
|
| 137 |
|
| 138 |
# ===============================
|
| 139 |
-
# WRAPPER: ACCEPT
|
| 140 |
# ===============================
|
| 141 |
def process_saree(data):
|
| 142 |
"""
|
| 143 |
-
Accepts [base_blob, pattern_blob] as bytes
|
| 144 |
"""
|
| 145 |
if not isinstance(data, (list, tuple)) or len(data) != 2:
|
| 146 |
raise ValueError("Expected an array with two elements: [base_blob, pattern_blob]")
|
| 147 |
|
| 148 |
base_blob, pattern_blob = data
|
| 149 |
|
| 150 |
-
|
| 151 |
-
|
| 152 |
-
|
| 153 |
-
|
|
|
|
| 154 |
|
| 155 |
-
|
| 156 |
-
|
| 157 |
-
else:
|
| 158 |
-
raise ValueError("Pattern image must be provided as bytes")
|
| 159 |
|
| 160 |
return _process_saree_core(base_image, pattern_image)
|
| 161 |
|
|
@@ -167,8 +167,9 @@ iface = gr.Interface(
|
|
| 167 |
inputs=gr.Dataframe(headers=["Base Blob", "Pattern Blob"], type="array"),
|
| 168 |
outputs=gr.Image(type="pil", label="Final Saree Output"),
|
| 169 |
title="Saree Depth + Pattern Draping (Blob API Compatible)",
|
| 170 |
-
description="Send image blobs as array [base, pattern] or use
|
| 171 |
)
|
| 172 |
|
| 173 |
if __name__ == "__main__":
|
| 174 |
-
|
|
|
|
|
|
| 6 |
import numpy as np
|
| 7 |
from PIL import Image
|
| 8 |
from io import BytesIO
|
| 9 |
+
import base64
|
| 10 |
import os
|
| 11 |
|
| 12 |
# ===============================
|
|
|
|
| 49 |
# CORE PROCESSING FUNCTION
|
| 50 |
# ===============================
|
| 51 |
def _process_saree_core(base_image: Image.Image, pattern_image: Image.Image):
|
| 52 |
+
# (Your existing depth estimation + pattern blending logic unchanged)
|
| 53 |
img_pil = base_image.convert("RGB")
|
| 54 |
img_np = np.array(img_pil)
|
| 55 |
|
|
|
|
| 137 |
return Image.fromarray(pattern_rgba, mode="RGBA")
|
| 138 |
|
| 139 |
# ===============================
|
| 140 |
+
# WRAPPER: ACCEPT BYTES OR BASE64
|
| 141 |
# ===============================
|
| 142 |
def process_saree(data):
    """
    Gradio entry point: build the final saree image from two input blobs.

    Parameters
    ----------
    data : list | tuple
        Exactly two elements, ``[base_blob, pattern_blob]``. Each element may
        be raw image bytes or a base64-encoded string; a data-URL prefix such
        as ``"data:image/png;base64,"`` is tolerated and stripped.

    Returns
    -------
    PIL.Image.Image
        The composited image produced by ``_process_saree_core``.

    Raises
    ------
    ValueError
        If ``data`` is not a two-element sequence, a blob is not valid
        base64, or a blob is neither bytes nor a string.
    """
    if not isinstance(data, (list, tuple)) or len(data) != 2:
        raise ValueError("Expected an array with two elements: [base_blob, pattern_blob]")

    base_blob, pattern_blob = data

    def _to_bytes(blob, label):
        # Normalize one blob to raw bytes. Base64 text (optionally a data
        # URL) is decoded; anything that is not str/bytes is rejected with a
        # clear ValueError instead of letting BytesIO raise an opaque
        # TypeError downstream.
        if isinstance(blob, str):
            try:
                # Keep only the payload after a data-URL comma, if present.
                return base64.b64decode(blob.split(",")[-1])
            except ValueError as exc:  # binascii.Error subclasses ValueError
                raise ValueError(f"{label} is not valid base64 data") from exc
        if isinstance(blob, (bytes, bytearray)):
            return bytes(blob)
        raise ValueError(f"{label} must be provided as bytes or a base64 string")

    base_blob = _to_bytes(base_blob, "Base image")
    pattern_blob = _to_bytes(pattern_blob, "Pattern image")

    # Decode to RGBA so the core blend logic always sees an alpha channel.
    base_image = Image.open(BytesIO(base_blob)).convert("RGBA")
    pattern_image = Image.open(BytesIO(pattern_blob)).convert("RGBA")

    return _process_saree_core(base_image, pattern_image)
|
| 161 |
|
|
|
|
| 167 |
inputs=gr.Dataframe(headers=["Base Blob", "Pattern Blob"], type="array"),
|
| 168 |
outputs=gr.Image(type="pil", label="Final Saree Output"),
|
| 169 |
title="Saree Depth + Pattern Draping (Blob API Compatible)",
|
| 170 |
+
description="Send image blobs (bytes or base64) as array [base, pattern] or use UI for testing."
|
| 171 |
)
|
| 172 |
|
| 173 |
if __name__ == "__main__":
    # Bind on all interfaces so the Space/container is reachable externally;
    # share=True also creates a public Gradio tunnel URL.
    # NOTE(review): nothing here configures CORS explicitly — this relies on
    # Gradio's defaults; confirm that is sufficient for the Vite frontend.
    iface.launch(server_name="0.0.0.0", share=True)
|