# MeshPalettizer / app.py — initial commit 346b70f (dylanebert)
#!/usr/bin/env python3
import gradio as gr
import trimesh
import numpy as np
import tempfile
import zipfile
import requests
import os
from pathlib import Path
from typing import List, Tuple, Optional, Dict, Any
from src import convert_meshes
def load_mesh(
    file_path: str,
) -> Optional[Tuple[List[Tuple[str, trimesh.Trimesh]], Optional[Dict]]]:
    """Load a GLB/GLTF file into a list of named meshes plus optional scene data.

    Returns:
        ``(mesh_list, scene_data)`` where ``mesh_list`` is a list of
        ``(geometry_name, trimesh.Trimesh)`` pairs. For multi-geometry scenes,
        ``scene_data`` carries the scene graph and a per-geometry 4x4 transform
        so the scene can be reconstructed after processing; for a single mesh
        it is ``None``. Returns ``None`` entirely when nothing with faces could
        be loaded or loading raised.
    """
    try:
        loaded = trimesh.load(str(file_path))
        if isinstance(loaded, trimesh.Scene):
            mesh_list = []
            scene_data = {"graph": loaded.graph, "transforms": {}}
            for geom_name, geom in loaded.geometry.items():
                # Skip face-less geometry (e.g. point clouds / paths).
                if hasattr(geom, "faces") and len(geom.faces) > 0:
                    mesh_list.append((geom_name, geom))
                    # Store transform for this geometry
                    nodes = loaded.graph.geometry_nodes.get(geom_name, [])
                    if nodes:
                        # graph.get(node) returns (transform, geometry_name);
                        # NOTE(review): only the first node instancing this
                        # geometry is kept — multiple instances collapse to one.
                        scene_data["transforms"][geom_name] = loaded.graph.get(
                            nodes[0]
                        )[0]
                    else:
                        # No scene-graph node: fall back to identity transform.
                        scene_data["transforms"][geom_name] = np.eye(4)
            return (mesh_list, scene_data) if mesh_list else None
        elif hasattr(loaded, "faces"):
            # Single mesh case
            return ([("mesh", loaded)], None)
        else:
            return None
    except Exception as e:
        print(f"Error loading {file_path}: {e}")
        return None
def export_processed_meshes(result, output_path, progress, total_files):
    """Export processed meshes, reconstructing scenes when appropriate.

    Args:
        result: Output of ``convert_meshes``; exposes ``.meshes`` (list of
            ``(combined_name, mesh)``) and ``.scene_metadata`` (dict keyed by
            original file stem) — assumed shape, confirm against ``src``.
        output_path: Directory (``Path``) that receives the ``.glb`` files.
        progress: Gradio progress callback.
        total_files: Number of input files; used to scale the progress bar.

    Returns:
        List of exported model file paths (str).
    """
    processed_models = []
    # Group meshes by their original file
    file_groups = {}
    for name, mesh in result.meshes:
        # Extract file name from combined name (e.g., "Lantern_LanternPole_Body" -> "Lantern")
        # NOTE(review): splitting on the FIRST "_" assumes file stems contain
        # no underscores; a stem like "My_Model" would be mis-grouped. This
        # mirrors the naming scheme used in process_batch — verify together.
        if "_" in name and result.scene_metadata:
            parts = name.split("_", 1)
            file_name = parts[0]
            mesh_name = parts[1] if len(parts) > 1 else "mesh"
        else:
            file_name = name
            mesh_name = "mesh"
        if file_name not in file_groups:
            file_groups[file_name] = []
        file_groups[file_name].append((mesh_name, mesh))
    # Export each file group
    for i, (file_name, meshes) in enumerate(file_groups.items()):
        progress_desc = f"Saving {file_name}..."
        # Progress budget: loading took the first total_files steps, the atlas
        # one step; export distributes the final step across the groups.
        progress(
            (total_files + 1 + i / len(file_groups)) / (total_files + 2),
            desc=progress_desc,
        )
        # Check if this file had scene metadata
        has_scene = result.scene_metadata and file_name in result.scene_metadata
        if has_scene and len(meshes) > 1:
            # Reconstruct and export as Scene
            scene = trimesh.Scene()
            scene_data = result.scene_metadata[file_name]
            for mesh_name, mesh in meshes:
                # Get transform for this mesh
                transform = scene_data["transforms"].get(mesh_name, np.eye(4))
                # Add to scene with proper naming
                scene.add_geometry(
                    mesh, node_name=mesh_name, geom_name=mesh_name, transform=transform
                )
            # Export the scene
            model_path = output_path / f"{file_name}_palettized.glb"
            scene.export(str(model_path))
            processed_models.append(str(model_path))
        else:
            # Export individual meshes
            for mesh_name, mesh in meshes:
                if len(meshes) > 1:
                    model_path = output_path / f"{file_name}_{mesh_name}_palettized.glb"
                else:
                    model_path = output_path / f"{file_name}_palettized.glb"
                mesh.export(str(model_path), include_normals=True)
                processed_models.append(str(model_path))
    return processed_models
def download_from_urls(
    urls_text: str, progress=gr.Progress()
) -> Tuple[List[str], List[str]]:
    """Download GLB/GLTF files from newline-separated URLs into a temp dir.

    Args:
        urls_text: Text-box contents; one URL per line, blank lines ignored.
        progress: Gradio progress callback.

    Returns:
        ``(downloaded_file_paths, failed_urls)``.
    """
    if not urls_text or not urls_text.strip():
        return [], []
    urls = [url.strip() for url in urls_text.strip().split("\n") if url.strip()]
    downloaded_files = []
    failed_urls = []
    temp_dir = tempfile.mkdtemp(prefix="glb_downloads_")
    used_names = set()
    for i, url in enumerate(urls):
        progress((i + 1) / len(urls), desc=f"Downloading {i + 1}/{len(urls)}...")
        try:
            # Derive a filename from the URL path (query string stripped).
            filename = os.path.basename(url.split("?")[0])
            if not filename or not filename.endswith((".glb", ".gltf")):
                filename = f"model_{i + 1}.glb"
            # Bug fix: two URLs sharing a basename used to overwrite each
            # other in temp_dir; disambiguate repeats with the URL index.
            if filename in used_names:
                stem, ext = os.path.splitext(filename)
                filename = f"{stem}_{i + 1}{ext}"
            used_names.add(filename)
            file_path = os.path.join(temp_dir, filename)
            response = requests.get(url, timeout=30)
            response.raise_for_status()
            with open(file_path, "wb") as f:
                f.write(response.content)
            downloaded_files.append(file_path)
        except Exception as e:
            # Best-effort batch: record the failure and keep downloading.
            print(f"Failed to download {url}: {e}")
            failed_urls.append(url)
    return downloaded_files, failed_urls
def process_batch(
    files: List[Any],
    atlas_size: int,
    sample_rate: float,
    simplify_details: bool,
    detail_filter_diameter: int,
    detail_color_sigma: int,
    detail_space_sigma: int,
    progress=gr.Progress(),
) -> Tuple[Optional[str], List[str], Optional[str], str, Dict]:
    """Load all inputs, build one shared palette atlas, and export the results.

    Args:
        files: Uploaded file objects (with ``.name``) or plain path strings.
        atlas_size: Side length in pixels of the square palette atlas.
        sample_rate: Fraction of faces sampled when extracting colors.
        simplify_details: Whether to bilateral-filter textures first.
        detail_filter_diameter / detail_color_sigma / detail_space_sigma:
            Bilateral-filter parameters, only used when simplify_details is on.
        progress: Gradio progress callback.

    Returns:
        ``(atlas_path, processed_model_paths, first_model_path, status_text,
        metadata)`` where ``metadata`` feeds the viewer/download callbacks.
        On failure the first/third elements are ``None`` and metadata is ``{}``.
    """
    if not files:
        return None, [], None, "No files to process.", {}
    progress(0, desc="Starting batch processing...")
    output_dir = tempfile.mkdtemp(prefix="glb_atlas_")
    output_path = Path(output_dir)
    mesh_list = []
    failed_files = []
    scene_metadata = {}
    for i, file in enumerate(files):
        # Gradio may hand us file objects or plain path strings.
        if hasattr(file, "name"):
            file_path = file.name
            display_name = Path(file.name).name
        else:
            file_path = file
            display_name = Path(file).name
        # Progress budget: len(files) load steps + 1 atlas step + 1 export step.
        progress((i + 1) / (len(files) + 2), desc=f"Loading {display_name}...")
        file_name = Path(file_path).stem
        loaded_data = load_mesh(file_path)
        if loaded_data is not None:
            meshes, scene_data = loaded_data
            # Store scene data if present
            if scene_data:
                scene_metadata[file_name] = scene_data
            # Add all meshes from this file to the list
            for mesh_name, mesh in meshes:
                # Create unique name combining file and mesh names
                if len(meshes) > 1:
                    combined_name = f"{file_name}_{mesh_name}"
                else:
                    combined_name = file_name
                mesh_list.append((combined_name, mesh))
        else:
            failed_files.append(display_name)
    if not mesh_list:
        return (
            None,
            [],
            None,
            "No valid meshes could be loaded from the uploaded files.",
            {},
        )
    try:
        progress(len(files) / (len(files) + 2), desc="Generating texture atlas...")
        detail_sensitivity = (
            (detail_filter_diameter, detail_color_sigma, detail_space_sigma)
            if simplify_details
            else None
        )
        result = convert_meshes(
            mesh_list,
            atlas_size=atlas_size,
            face_sampling_ratio=sample_rate,
            simplify_details=simplify_details,
            detail_sensitivity=detail_sensitivity,
            scene_metadata=scene_metadata,
        )
        atlas_path = output_path / "shared_palette.png"
        result.atlas.save(atlas_path)
        # Export processed meshes, reconstructing scenes when appropriate
        processed_models = export_processed_meshes(
            result, output_path, progress, len(files)
        )
        status = f"✓ Processed {len(result.meshes)} model(s)\n📊 Atlas: {atlas_size}×{atlas_size} pixels"
        if failed_files:
            status += f"\n⚠ Failed: {len(failed_files)} file(s)"
        # Extract display names for the processed models
        display_names = []
        for model_path in processed_models:
            model_name = Path(model_path).stem
            if model_name.endswith("_palettized"):
                model_name = model_name[:-11]  # Remove "_palettized" suffix (11 chars)
            display_names.append(model_name)
        metadata = {
            "models": processed_models,
            "names": display_names,
            "atlas_path": str(atlas_path),
            "output_dir": output_dir,
            "total": len(processed_models),
        }
        progress(1.0, desc="Processing complete!")
        first_model = processed_models[0] if processed_models else None
        return str(atlas_path), processed_models, first_model, status, metadata
    except Exception as e:
        # Surface the error to the status textbox rather than crashing the UI.
        return None, [], None, f"Error during processing: {str(e)}", {}
def update_model_viewer(
    direction: str, current_index: int, metadata: Dict
) -> Tuple[Optional[str], int, str]:
    """Compute the next model to show in the 3D preview carousel.

    Args:
        direction: ``"next"``, ``"prev"``, or anything else to reset to 0.
        current_index: Index of the model currently shown.
        metadata: Processing metadata with "models", "names", "total" keys.

    Returns:
        ``(model_path_or_None, new_index, human_readable_label)``.
    """
    if not metadata or "models" not in metadata:
        return None, 0, "No models to display"
    models = metadata["models"]
    # Bug fix: guard the empty case BEFORE touching "names"/"total" so a
    # partial metadata dict cannot raise KeyError.
    if not models:
        return None, 0, "No models available"
    names = metadata.get("names", [])
    total = metadata.get("total", len(models))
    if direction == "next":
        new_index = (current_index + 1) % total
    elif direction == "prev":
        new_index = (current_index - 1) % total
    else:
        # Unknown direction: reset to the first model.
        new_index = 0
    model_path = models[new_index]
    # Fall back to the file stem if names are missing or too short.
    model_name = names[new_index] if new_index < len(names) else Path(model_path).stem
    label = f"Model {new_index + 1} of {total}: {model_name}"
    return model_path, new_index, label
def create_download_zip(metadata: Dict) -> Optional[str]:
    """Bundle the palette atlas and all processed models into one ZIP.

    Args:
        metadata: Processing metadata; must contain "output_dir" and may
            contain "atlas_path" and "models".

    Returns:
        Path to the created archive, or ``None`` if metadata is unusable
        or writing the archive failed.
    """
    if not metadata or "output_dir" not in metadata:
        return None
    out_dir = Path(metadata["output_dir"])
    archive = out_dir / "glb_atlas_output.zip"
    # Collect candidate artifacts first; only existing files are archived.
    candidates = []
    if "atlas_path" in metadata:
        candidates.append(Path(metadata["atlas_path"]))
    for model_path in metadata.get("models", []):
        candidates.append(Path(model_path))
    try:
        with zipfile.ZipFile(archive, "w", zipfile.ZIP_DEFLATED) as zipf:
            for path in candidates:
                if path.exists():
                    # Flatten into the archive root using just the filename.
                    zipf.write(path, path.name)
        return str(archive)
    except Exception as e:
        print(f"Error creating ZIP: {e}")
        return None
# Build the Gradio UI. The custom CSS forces nearest-neighbor scaling on the
# palette preview (#atlas-display) so individual palette pixels stay crisp.
with gr.Blocks(
    title="Mesh Palettizer",
    theme=gr.themes.Soft(),
    css="""
    #atlas-display img {
        width: 100%;
        height: 100%;
        object-fit: contain;
        image-rendering: pixelated;
        image-rendering: -moz-crisp-edges;
        image-rendering: crisp-edges;
    }
    """,
) as demo:
    # Cross-callback state: index of the model shown in the 3D preview, and
    # the metadata dict produced by process_batch.
    model_index = gr.State(value=0)
    processing_metadata = gr.State(value={})
    gr.Markdown(
        """
        # 🎨 Mesh Palettizer
        Simplify 3D model textures using optimized color palettes.
        Upload GLB/GLTF models to create clean, palettized textures for stylized rendering.
        """
    )
    with gr.Row():
        # --- Input column -----------------------------------------------
        with gr.Column(scale=1):
            with gr.Tabs() as input_tabs:
                with gr.Tab("📁 Upload Files"):
                    file_input = gr.File(
                        label="Select GLB/GLTF Files",
                        file_count="multiple",
                        file_types=[".glb", ".gltf"],
                        type="filepath",
                    )
                    gr.Examples(
                        examples=[[["examples/Duck.glb", "examples/Lantern.glb"]]],
                        inputs=file_input,
                        label="Example Models",
                    )
                with gr.Tab("🔗 Load from URLs"):
                    url_input = gr.Textbox(
                        label="Enter URLs (one per line)",
                        placeholder="https://example.com/model1.glb\nhttps://example.com/model2.glb",
                        lines=5,
                        interactive=True,
                    )
            atlas_size = gr.Dropdown(
                choices=[8, 16, 32, 64, 128, 256, 512, 1024],
                value=32,
                label="Atlas Size",
                info="N×N pixels",
            )
            with gr.Accordion("Advanced", open=False):
                sample_rate = gr.Slider(
                    minimum=0.01,
                    maximum=1.0,
                    value=0.1,
                    step=0.01,
                    label="Sampling Rate",
                    info="% of faces to sample",
                )
                simplify_details = gr.Checkbox(
                    value=True,
                    label="Remove Texture Details",
                    info="Apply bilateral filter to remove fine details (scales, fur, etc.)",
                )
                # Bilateral-filter sliders; toggled by toggle_detail_controls
                # when the checkbox above changes.
                with gr.Row(visible=True) as detail_controls:
                    detail_filter_diameter = gr.Slider(
                        minimum=5,
                        maximum=15,
                        value=9,
                        step=2,
                        label="Filter Diameter",
                        info="Pixel neighborhood diameter (higher = stronger smoothing)",
                    )
                    detail_color_sigma = gr.Slider(
                        minimum=25,
                        maximum=150,
                        value=75,
                        step=5,
                        label="Color Sensitivity",
                        info="Color difference threshold (higher = more colors mixed)",
                    )
                    detail_space_sigma = gr.Slider(
                        minimum=25,
                        maximum=150,
                        value=75,
                        step=5,
                        label="Spatial Sensitivity",
                        info="Spatial extent (higher = pixels farther apart influence each other)",
                    )
            process_btn = gr.Button("🚀 Process", variant="primary", size="lg")
            status_text = gr.Textbox(
                label="Status", lines=2, interactive=False, show_label=False
            )
        # --- Output column ----------------------------------------------
        with gr.Column(scale=2):
            with gr.Tabs():
                with gr.Tab("📊 Palette"):
                    atlas_image = gr.Image(
                        label="Color Palette",
                        type="filepath",
                        show_download_button=True,
                        height=400,
                        container=True,
                        elem_id="atlas-display",
                    )
                with gr.Tab("🎮 3D Preview"):
                    model_label = gr.Markdown("")
                    model_viewer = gr.Model3D(
                        label="Model", height=400, clear_color=[0.95, 0.95, 0.95, 1.0]
                    )
                    # Carousel controls for stepping through processed models.
                    with gr.Row():
                        prev_btn = gr.Button("◀", size="sm")
                        model_counter = gr.Markdown(
                            "Model 1 of 1", elem_id="model-counter"
                        )
                        next_btn = gr.Button("▶", size="sm")
            with gr.Row():
                download_btn = gr.Button(
                    "📦 Download All", variant="secondary", size="lg"
                )
                # Hidden until prepare_download produces a ZIP.
                download_file = gr.File(label="Package", visible=False)

    def toggle_detail_controls(enabled):
        # Show the bilateral-filter sliders only while detail removal is on.
        return gr.update(visible=enabled)

    simplify_details.change(
        fn=toggle_detail_controls, inputs=[simplify_details], outputs=[detail_controls]
    )
    def process_from_files(
        files,
        atlas_size,
        sample_rate,
        simplify_details,
        detail_filter_diameter,
        detail_color_sigma,
        detail_space_sigma,
    ):
        """Run the batch pipeline on uploaded files.

        Returns the 8-tuple wired to the process button's outputs:
        (atlas image path, first model path, status text, metadata dict,
        model index, viewer label, counter text, download-file visibility).
        """
        if not files:
            # Keep the tuple shape identical to the success path so Gradio
            # can map every output component.
            return (
                None,
                None,
                "Please upload files first.",
                {},
                0,
                "",
                "",
                gr.update(visible=False),
            )
        atlas_path, models, first_model, status, metadata = process_batch(
            files,
            atlas_size,
            sample_rate,
            simplify_details,
            detail_filter_diameter,
            detail_color_sigma,
            detail_space_sigma,
        )
        if models:
            viewer_label = metadata["names"][0]
            counter_text = f"Model 1 of {len(models)}"
        else:
            viewer_label = ""
            counter_text = ""
        return (
            atlas_path,
            first_model,
            status,
            metadata,
            0,
            viewer_label,
            counter_text,
            gr.update(visible=False),
        )
    def process_from_urls(
        urls_text,
        atlas_size,
        sample_rate,
        simplify_details,
        detail_filter_diameter,
        detail_color_sigma,
        detail_space_sigma,
    ):
        """Download models from newline-separated URLs, then run the pipeline.

        Returns the same 8-tuple shape as process_from_files so both handlers
        can feed the same Gradio output components.
        """
        if not urls_text or not urls_text.strip():
            return (
                None,
                None,
                "Please enter URLs first.",
                {},
                0,
                "",
                "",
                gr.update(visible=False),
            )
        downloaded_files, failed_urls = download_from_urls(urls_text)
        if not downloaded_files:
            error_msg = "Failed to download any files."
            if failed_urls:
                error_msg += f" URLs that failed: {len(failed_urls)}"
            return None, None, error_msg, {}, 0, "", "", gr.update(visible=False)
        atlas_path, models, first_model, status, metadata = process_batch(
            downloaded_files,
            atlas_size,
            sample_rate,
            simplify_details,
            detail_filter_diameter,
            detail_color_sigma,
            detail_space_sigma,
        )
        # Partial failures are reported but do not abort the batch.
        if failed_urls:
            status += f"\n⚠ Failed to download {len(failed_urls)} URL(s)"
        if models:
            viewer_label = metadata["names"][0]
            counter_text = f"Model 1 of {len(models)}"
        else:
            viewer_label = ""
            counter_text = ""
        return (
            atlas_path,
            first_model,
            status,
            metadata,
            0,
            viewer_label,
            counter_text,
            gr.update(visible=False),
        )
def process_wrapper(
files,
urls_text,
atlas_size,
sample_rate,
simplify_details,
detail_filter_diameter,
detail_color_sigma,
detail_space_sigma,
):
if files and len(files) > 0:
return process_from_files(
files,
atlas_size,
sample_rate,
simplify_details,
detail_filter_diameter,
detail_color_sigma,
detail_space_sigma,
)
elif urls_text and urls_text.strip():
return process_from_urls(
urls_text,
atlas_size,
sample_rate,
simplify_details,
detail_filter_diameter,
detail_color_sigma,
detail_space_sigma,
)
else:
return (
None,
None,
"Please provide files or URLs.",
{},
0,
"",
"",
gr.update(visible=False),
)
process_btn.click(
fn=process_wrapper,
inputs=[
file_input,
url_input,
atlas_size,
sample_rate,
simplify_details,
detail_filter_diameter,
detail_color_sigma,
detail_space_sigma,
],
outputs=[
atlas_image,
model_viewer,
status_text,
processing_metadata,
model_index,
model_label,
model_counter,
download_file,
],
)
def navigate_prev(current_index, metadata):
model_path, new_index, _ = update_model_viewer("prev", current_index, metadata)
counter_text = (
f"Model {new_index + 1} of {metadata['total']}"
if metadata and "total" in metadata
else ""
)
name_text = (
metadata["names"][new_index] if metadata and "names" in metadata else ""
)
return model_path, new_index, name_text, counter_text
def navigate_next(current_index, metadata):
model_path, new_index, _ = update_model_viewer("next", current_index, metadata)
counter_text = (
f"Model {new_index + 1} of {metadata['total']}"
if metadata and "total" in metadata
else ""
)
name_text = (
metadata["names"][new_index] if metadata and "names" in metadata else ""
)
return model_path, new_index, name_text, counter_text
prev_btn.click(
fn=navigate_prev,
inputs=[model_index, processing_metadata],
outputs=[model_viewer, model_index, model_label, model_counter],
)
next_btn.click(
fn=navigate_next,
inputs=[model_index, processing_metadata],
outputs=[model_viewer, model_index, model_label, model_counter],
)
def prepare_download(metadata):
zip_path = create_download_zip(metadata)
if zip_path:
return gr.update(value=zip_path, visible=True)
return gr.update(visible=False)
download_btn.click(
fn=prepare_download, inputs=[processing_metadata], outputs=[download_file]
)
# Entry point: bind to all interfaces on the standard HF Spaces port (7860).
if __name__ == "__main__":
    demo.launch(share=False, server_name="0.0.0.0", server_port=7860, show_error=True)