File size: 4,300 Bytes
77e37fc
 
 
 
 
 
 
 
 
af54811
 
77e37fc
 
af54811
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
77e37fc
af54811
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
77e37fc
 
af54811
 
77e37fc
 
af54811
 
 
 
77e37fc
 
af54811
 
77e37fc
 
af54811
 
 
 
77e37fc
 
 
 
 
af54811
 
 
 
77e37fc
af54811
 
 
77e37fc
af54811
77e37fc
 
 
af54811
77e37fc
 
af54811
77e37fc
 
 
 
 
af54811
77e37fc
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
from __future__ import annotations

import tempfile
from pathlib import Path
from typing import Generator

import numpy as np
import trimesh

from generator import build_particle_blueprint, export_point_cloud_as_ply, points_to_mesh
from viewer import point_cloud_viewer_html


def _normalize_mesh_to_glb(mesh: trimesh.Trimesh, out_path: Path) -> str:
    """Center a copy of *mesh* at the origin, scale to unit extent, and export it.

    The input mesh is never mutated; all cleanup and normalization happens on
    a copy. The export format is inferred by trimesh from *out_path*'s suffix
    (callers pass a ``.glb`` path).

    Args:
        mesh: Source mesh to normalize and export.
        out_path: Destination file path for the exported mesh.

    Returns:
        ``str(out_path)`` for convenience in JSON-serializable payloads.
    """
    mesh = mesh.copy()
    mesh.remove_unreferenced_vertices()
    # Best-effort cleanup: these helpers were renamed/removed across trimesh
    # releases (newer versions use update_faces(...)), so failures are
    # deliberately swallowed rather than aborting the export.
    try:
        mesh.remove_degenerate_faces()
    except Exception:
        pass
    try:
        mesh.remove_duplicate_faces()
    except Exception:
        pass
    if len(mesh.vertices):
        # Only normalize when geometry exists: bounding_box raises on an
        # empty mesh, which previously crashed before the scale guard below
        # could ever take effect.
        mesh.apply_translation(-mesh.bounding_box.centroid)
        scale = float(max(mesh.extents))
        if scale <= 0:
            # Degenerate extents (all points coincident): skip scaling.
            scale = 1.0
        mesh.apply_scale(1.0 / scale)
    mesh.export(out_path)
    return str(out_path)


def iter_blueprint_session(prompt: str, detail: int = 22, parser_mode: str = "heuristic") -> Generator[dict, None, dict]:
    """Build a point-cloud blueprint for *prompt*, yielding progress payloads.

    Each yielded dict carries a human-readable ``status`` plus (after the
    blueprint is built) a ``summary``, the ``blueprint_path``, and a ``state``
    dict that later feeds :func:`iter_meshify_session`. The final ``state`` is
    also the generator's return value.

    Args:
        prompt: Free-text description of the object to scaffold.
        detail: Detail level forwarded to the blueprint builder.
        parser_mode: Prompt-parser backend selector (default "heuristic").

    Yields:
        Progress dicts suitable for streaming to a UI.

    Returns:
        The final session ``state`` dict.
    """
    session_dir = Path(tempfile.mkdtemp(prefix="pb3d_fallback_"))
    yield {"status": "Building scaffold plan…", "session_dir": str(session_dir)}

    # normals are produced by the builder but unused by this session flow.
    points, _normals, labels, spec, parser_backend = build_particle_blueprint(
        prompt=prompt,
        detail=int(detail),
        parser_mode=parser_mode,
    )

    blueprint_path = export_point_cloud_as_ply(points, labels, str(session_dir / "blueprint.ply"))

    # Persist the arrays *before* any state referencing their paths is
    # yielded. Previously points.npy/labels.npy paths were advertised during
    # the staging loop but only written after it, so a consumer resuming from
    # a mid-stream state would find the files missing.
    points_path = session_dir / "points.npy"
    labels_path = session_dir / "labels.npy"
    np.save(points_path, points)
    np.save(labels_path, labels)

    spec_dict = spec.to_dict() if hasattr(spec, "to_dict") else {}
    base_state = {
        "session_dir": str(session_dir),
        "blueprint_path": blueprint_path,
        "points_path": str(points_path),
        "labels_path": str(labels_path),
        "prompt": prompt,
        "parser_backend": parser_backend,
        "spec": spec_dict,
    }

    # Staged "forming" updates: progressively larger point counts so the UI
    # can animate the blueprint filling in.
    stages = [0.18, 0.42, 0.68, 1.0]
    for i, frac in enumerate(stages, start=1):
        count = max(180, int(len(points) * frac))
        yield {
            "status": f"Blueprint forming ({i}/{len(stages)})…",
            "summary": {
                "prompt": prompt,
                "parser_backend": parser_backend,
                "spec": spec_dict,
                "point_count": int(count),
                "stage": i,
                "stage_count": len(stages),
                "mode": "fallback_scaffold",
            },
            "blueprint_path": blueprint_path,
            # Copy so downstream mutation of one payload cannot leak into
            # later ones.
            "state": dict(base_state),
        }

    state = {**base_state, "point_count": int(len(points))}
    yield {
        "status": "Blueprint ready. Inspect it, then make the mesh when happy.",
        "viewer_html": point_cloud_viewer_html(points, status=f"Blueprint • {len(points)} points"),
        "summary": {**state, "mode": "fallback_scaffold"},
        "blueprint_path": blueprint_path,
        "state": state,
    }
    return state


def iter_meshify_session(state: dict, voxel_pitch: float = 0.085, use_target_model_cache: bool = True) -> Generator[dict, None, dict]:
    """Convert a blueprint (from :func:`iter_blueprint_session`) into a GLB mesh.

    Args:
        state: Session state dict; must contain valid ``points_path`` and
            ``session_dir`` entries.
        voxel_pitch: Voxel size forwarded to the point-to-mesh converter.
        use_target_model_cache: Accepted for interface compatibility;
            currently unused by this fallback mesher.

    Yields:
        Progress dicts; the final one carries ``mesh_path``/``mesh_file``.

    Returns:
        The final summary dict.

    Raises:
        RuntimeError: If the blueprint state is incomplete (raised when the
            generator is first advanced, as with all generator bodies).
    """
    points_path = state.get("points_path")
    if not points_path or not Path(points_path).exists():
        raise RuntimeError("Blueprint points were not found. Generate the blueprint again.")
    # Validate up front, matching the friendly error above, instead of a bare
    # KeyError from state["session_dir"] halfway through meshing.
    session_dir_value = state.get("session_dir")
    if not session_dir_value:
        raise RuntimeError("Blueprint session directory is missing. Generate the blueprint again.")

    yield {"status": "Converting blueprint into a mesh…"}
    points = np.load(points_path)
    mesh = points_to_mesh(points, pitch=float(voxel_pitch))
    glb_path = _normalize_mesh_to_glb(mesh, Path(session_dir_value) / "fallback_mesh.glb")

    summary = {
        **state,
        "mesh_path": glb_path,
        "vertex_count": int(len(mesh.vertices)),
        "face_count": int(len(mesh.faces)),
        "mesh_source": "fallback_voxel_mesher",
    }
    yield {
        "status": "Mesh ready.",
        "mesh_path": glb_path,
        # Duplicate key kept for consumers expecting either name.
        "mesh_file": glb_path,
        "summary": summary,
    }
    return summary