daidedou committed on
Commit
4af2d41
·
1 Parent(s): d1a23f1

Added examples (files incoming)

Browse files
Files changed (1) hide show
  1. app.py +54 -13
app.py CHANGED
@@ -42,6 +42,18 @@ def _safe_ext(path: str) -> str:
42
  return ext
43
  return os.path.splitext(path)[1].lower()
44
 
 
 
 
 
 
 
 
 
 
 
 
 
45
 
46
  def normalize_vertices(vertices: np.ndarray) -> np.ndarray:
47
  v = vertices.astype(np.float64)
@@ -78,9 +90,9 @@ def export_for_view(surf: Surface, colors: np.ndarray, basename: str, outdir: st
78
  # -----------------------------
79
  DEFAULT_SETTINGS = {
80
  "deepfeat_conf.fmap.lambda_": 1,
81
- "sds_conf.zoomout": 40.0,
82
  "diffusion.time": 1.0,
83
- "opt.n_loop": 300,
84
  "loss.sds": 1.0,
85
  "loss.proper": 1.0,
86
  }
@@ -211,28 +223,57 @@ def run_clicked(mesh1_path, mesh2_path, yaml_path, lambda_val, zoomout_val, time
211
  evecs1, evecs2 = datadicts.shape_dict["evecs"], datadicts.target_dict["evecs"]
212
  evecs_2trans = evecs2.t() @ torch.diag(datadicts.target_dict["mass"])
213
  with torch.no_grad():
214
- C12_end_zo = torch_zoomout(evecs1, evecs2, evecs_2trans, C12_new.squeeze()[:15, :15], matcher.cfg.sds_conf.zoomout)
215
  p2p_zo, _ = extract_p2p_torch_fmap(C12_end_zo, datadicts.shape_dict["evecs"], datadicts.target_dict["evecs"])
216
  return build_outputs(datadicts.shape_surf, datadicts.target_surf, datadicts.cmap1, p2p_zo, tag="run")
217
 
218
 
219
  with gr.Blocks(title="DiffuMatch demo") as demo:
220
- text_in = "Upload two meshes and try our ICCV zero-shot method DiffuMatch! \n"
221
- text_in += "*Init* will give you a rough correspondence, and you can click on *Run* to see if our method is able to match the two shapes! \n"
222
- text_in += "*Recommended*: The method requires that the meshes are aligned (rotation-wise) to work well. Also might not work with scans (but try it out!)."
223
- gr.Markdown(text_in)
 
 
 
 
 
 
 
224
  with gr.Row():
225
- mesh1 = gr.File(label="Mesh A (.ply/.obj/.off)")
226
- mesh2 = gr.File(label="Mesh B (.ply/.obj/.off)")
 
 
 
 
 
 
 
 
227
  yaml_file = gr.File(label="Optional YAML config", file_types=[".yaml", ".yml"], visible=True)
 
 
 
 
 
 
 
 
 
 
 
 
 
 
228
  # except Exception:
229
  with gr.Accordion("Settings", open=True):
230
  with gr.Row():
231
- lambda_val = gr.Slider(minimum=FLOAT_SLIDERS["deepfeat_conf.fmap.lambda_"][0], maximum=FLOAT_SLIDERS["deepfeat_conf.fmap.lambda_"][1], step=FLOAT_SLIDERS["deepfeat_conf.fmap.lambda_"][2], value=1, label="deepfeat_conf.fmap.lambda_")
232
- zoomout_val = gr.Slider(minimum=INT_SLIDERS["sds_conf.zoomout"][0], maximum=INT_SLIDERS["sds_conf.zoomout"][1], step=INT_SLIDERS["sds_conf.zoomout"][2], value=40, label="sds_conf.zoomout")
233
- time_val = gr.Slider(minimum=FLOAT_SLIDERS["diffusion.time"][0], maximum=FLOAT_SLIDERS["diffusion.time"][1], step=FLOAT_SLIDERS["diffusion.time"][2], value=1, label="diffusion.time")
234
  with gr.Row():
235
- nloop_val = gr.Slider(minimum=INT_SLIDERS["opt.n_loop"][0], maximum=INT_SLIDERS["opt.n_loop"][1], step=INT_SLIDERS["opt.n_loop"][2], value=300, label="opt.n_loop")
236
  sds_val = gr.Slider(minimum=FLOAT_SLIDERS["loss.sds"][0], maximum=FLOAT_SLIDERS["loss.sds"][1], step=FLOAT_SLIDERS["loss.sds"][2], value=1, label="loss.sds")
237
  proper_val = gr.Slider(minimum=FLOAT_SLIDERS["loss.proper"][0], maximum=FLOAT_SLIDERS["loss.proper"][1], step=FLOAT_SLIDERS["loss.proper"][2], value=1, label="loss.proper")
238
 
 
42
  return ext
43
  return os.path.splitext(path)[1].lower()
44
 
45
+ def convert_and_show(mesh_file):
46
+ os.makedirs("tmp/glbs", exist_ok=True)
47
+ if mesh_file is None:
48
+ return None
49
+ mesh = trimesh.load(mesh_file.name)
50
+ tn = int(np.random.rand()*1e10)
51
+ f_name = f"tmp/glbs/mesh_{tn}.glb"
52
+ mesh.export(f_name)
53
+ return f_name
54
+
55
+ def convert_and_show_twice(mesh_file_1, mesh_file_2):
56
+ return convert_and_show(mesh_file_1), convert_and_show(mesh_file_2)
57
 
58
  def normalize_vertices(vertices: np.ndarray) -> np.ndarray:
59
  v = vertices.astype(np.float64)
 
90
  # -----------------------------
91
  DEFAULT_SETTINGS = {
92
  "deepfeat_conf.fmap.lambda_": 1,
93
+ "sds_conf.zoomout": 32,
94
  "diffusion.time": 1.0,
95
+ "opt.n_loop": 250,
96
  "loss.sds": 1.0,
97
  "loss.proper": 1.0,
98
  }
 
223
  evecs1, evecs2 = datadicts.shape_dict["evecs"], datadicts.target_dict["evecs"]
224
  evecs_2trans = evecs2.t() @ torch.diag(datadicts.target_dict["mass"])
225
  with torch.no_grad():
226
+ C12_end_zo = torch_zoomout(evecs1, evecs2, evecs_2trans, C12_new.squeeze(), matcher.cfg.sds_conf.zoomout)
227
  p2p_zo, _ = extract_p2p_torch_fmap(C12_end_zo, datadicts.shape_dict["evecs"], datadicts.target_dict["evecs"])
228
  return build_outputs(datadicts.shape_surf, datadicts.target_surf, datadicts.cmap1, p2p_zo, tag="run")
229
 
230
 
231
  with gr.Blocks(title="DiffuMatch demo") as demo:
232
+ gr.Markdown(
233
+ """
234
+ <div align="center">
235
+ <h1>DiffuMatch: Category-Agnostic Spectral Diffusion Priors for Robust Non-rigid Shape Matching</h1>
236
+ </div>
237
+ <br/>
238
+ Upload two meshes and try our ICCV zero-shot method <a href="https://daidedou.github.io/publication/nonrigiddiff">DiffuMatch</a> <br/>
239
+ <b>Init</b> will give you a rough correspondence, and you can click on <b>Run</b> to see if our method is able to match the two shapes! <br/>
240
+ <b>Recommended</b>: The method requires that the meshes are aligned (rotation-wise) to work well. Also might not work with scans (but try it out!). <br/>
241
+ """
242
+ )
243
  with gr.Row():
244
+ with gr.Column():
245
+ mesh1 = gr.File(label="Source Mesh (.ply, .off, .obj)", file_types=[".ply", ".off", ".obj"])
246
+ mesh1_viewer = gr.Model3D(label="Preview Source")
247
+ mesh1.upload(fn=convert_and_show, inputs=mesh1, outputs=mesh1_viewer)
248
+
249
+ with gr.Column():
250
+ mesh2 = gr.File(label="Target Mesh (.ply, .off, .obj)", file_types=[".ply", ".off", ".obj"])
251
+ mesh2_viewer = gr.Model3D(label="Preview Target")
252
+ mesh2.upload(fn=convert_and_show, inputs=mesh2, outputs=mesh2_viewer)
253
+
254
  yaml_file = gr.File(label="Optional YAML config", file_types=[".yaml", ".yml"], visible=True)
255
+
256
+ gr.Examples(
257
+ examples=[
258
+ ["examples/man.ply", "examples/woman.ply"],
259
+ ["examples/wolf.ply", "examples/horse.ply"],
260
+ ["examples/cactus.off", "examples/cactus_deformed.off"],
261
+ ],
262
+ fn=convert_and_show_twice,
263
+ inputs=[mesh1, mesh2],
264
+ outputs=[mesh1_viewer, mesh2_viewer],
265
+ label="Try some example pairs",
266
+ cache_examples=True
267
+ )
268
+
269
  # except Exception:
270
  with gr.Accordion("Settings", open=True):
271
  with gr.Row():
272
+ lambda_val = gr.Slider(minimum=FLOAT_SLIDERS["deepfeat_conf.fmap.lambda_"][0], maximum=FLOAT_SLIDERS["deepfeat_conf.fmap.lambda_"][1], step=FLOAT_SLIDERS["deepfeat_conf.fmap.lambda_"][2], value=DEFAULT_SETTINGS["deepfeat_conf.fmap.lambda_"], label="deepfeat_conf.fmap.lambda_")
273
+ zoomout_val = gr.Slider(minimum=INT_SLIDERS["sds_conf.zoomout"][0], maximum=INT_SLIDERS["sds_conf.zoomout"][1], step=INT_SLIDERS["sds_conf.zoomout"][2], value=DEFAULT_SETTINGS["sds_conf.zoomout"], label="sds_conf.zoomout")
274
+ time_val = gr.Slider(minimum=FLOAT_SLIDERS["diffusion.time"][0], maximum=FLOAT_SLIDERS["diffusion.time"][1], step=FLOAT_SLIDERS["diffusion.time"][2], value=DEFAULT_SETTINGS["diffusion.time"], label="diffusion.time")
275
  with gr.Row():
276
+ nloop_val = gr.Slider(minimum=INT_SLIDERS["opt.n_loop"][0], maximum=INT_SLIDERS["opt.n_loop"][1], step=INT_SLIDERS["opt.n_loop"][2], value=DEFAULT_SETTINGS["opt.n_loop"], label="opt.n_loop")
277
  sds_val = gr.Slider(minimum=FLOAT_SLIDERS["loss.sds"][0], maximum=FLOAT_SLIDERS["loss.sds"][1], step=FLOAT_SLIDERS["loss.sds"][2], value=1, label="loss.sds")
278
  proper_val = gr.Slider(minimum=FLOAT_SLIDERS["loss.proper"][0], maximum=FLOAT_SLIDERS["loss.proper"][1], step=FLOAT_SLIDERS["loss.proper"][2], value=1, label="loss.proper")
279