megalado committed on
Commit
003fe98
·
1 Parent(s): e8a6a9b

fix app to use demo.py and update requirements

Browse files
Files changed (2) hide show
  1. app.py +17 -31
  2. requirements.txt +2 -1
app.py CHANGED
@@ -1,57 +1,43 @@
1
- import subprocess
2
- import uuid
3
- import os
4
  from pathlib import Path
5
  import gradio as gr
6
 
 
 
 
 
 
7
  # ──────────────────────────────────────────────────────────────
8
- # CONFIG
9
- # ──────────────────────────────────────────────────────────────
10
- CKPT = "checkpoints/mld_humanml.pt" # <— make sure this file exists
11
- DEVICE = "cpu" # free HF Spaces = CPU only
12
- STEPS = "50" # diffusion steps (≈60 s on CPU)
13
- # ──────────────────────────────────────────────────────────────
14
-
15
 
16
  def generate_motion(prompt: str) -> str:
17
- """
18
- Call the MLD text-to-motion sampler and return a BVH path for Gradio.
19
- """
20
- # unique temp-file to hold the BVH
21
  out_bvh = Path("/tmp") / f"{uuid.uuid4().hex}.bvh"
22
 
23
  cmd = [
24
  "python",
25
- "-m", "motion_latent_diffusion.mld.sample.text2motion", # ← sampler module
 
26
  "--checkpoint", CKPT,
27
  "--prompt", prompt,
28
  "--device", DEVICE,
29
- "--output", str(out_bvh),
30
- "--steps", STEPS
31
  ]
32
 
33
- # make sure Python can import motion_latent_diffusion.*
34
  env = os.environ.copy()
35
- repo_root = Path(__file__).parent
36
- env["PYTHONPATH"] = f"{env.get('PYTHONPATH', '')}:{repo_root}"
37
 
38
- # run the sampler
39
  subprocess.run(cmd, env=env, check=True)
40
-
41
  return str(out_bvh)
42
 
43
-
44
- # ────────────────── Gradio UI ───────────────────────────────
45
  iface = gr.Interface(
46
  fn=generate_motion,
47
- inputs=gr.Textbox(
48
- lines=2,
49
- placeholder="e.g. a person walks forward and waves",
50
- label="Text Prompt"
51
- ),
52
  outputs=gr.File(label="Download BVH"),
53
- title="Motion-Latent-Diffusion – Text → 3D Motion",
54
- description="Enter a natural-language prompt and get a 3-second BVH animation (CPU demo)."
55
  )
56
 
57
  if __name__ == "__main__":
 
1
+ import subprocess, uuid, os
 
 
2
  from pathlib import Path
3
  import gradio as gr
4
 
5
+ # ── paths ─────────────────────────────────────────────────────
6
+ CKPT = "checkpoints/mld_humanml.pt" # your 90 MB weight
7
+ DEVICE = "cpu" # free HF Spaces
8
+ STEPS = "50" # diffusion steps
9
+ CFG = "configs/config_mld_humanml3d.yaml" # default config file
10
  # ──────────────────────────────────────────────────────────────
 
 
 
 
 
 
 
11
 
12
  def generate_motion(prompt: str) -> str:
 
 
 
 
13
  out_bvh = Path("/tmp") / f"{uuid.uuid4().hex}.bvh"
14
 
15
  cmd = [
16
  "python",
17
+ "-m", "motion_latent_diffusion.demo", # ← new: run demo.py
18
+ "--cfg", CFG,
19
  "--checkpoint", CKPT,
20
  "--prompt", prompt,
21
  "--device", DEVICE,
22
+ "--steps", STEPS,
23
+ "--output", str(out_bvh)
24
  ]
25
 
26
+ # make repo importable
27
  env = os.environ.copy()
28
+ root = Path(__file__).parent
29
+ env["PYTHONPATH"] = f"{env.get('PYTHONPATH','')}:{root}"
30
 
 
31
  subprocess.run(cmd, env=env, check=True)
 
32
  return str(out_bvh)
33
 
34
+ # ── Gradio UI ─────────────────────────────────────────────────
 
35
  iface = gr.Interface(
36
  fn=generate_motion,
37
+ inputs=gr.Textbox(lines=2, placeholder="e.g. a person walks and waves"),
 
 
 
 
38
  outputs=gr.File(label="Download BVH"),
39
+ title="Motion-Latent-Diffusion – Text → BVH (CPU demo)",
40
+ description="Enter a prompt to generate a 3-second motion clip."
41
  )
42
 
43
  if __name__ == "__main__":
requirements.txt CHANGED
@@ -13,4 +13,5 @@ scipy>=1.10.1
13
  tensorboard>=2.12.3
14
  torch>=2.0.0
15
  tqdm>=4.65.0
16
- transformers>=4.28.1
 
 
13
  tensorboard>=2.12.3
14
  torch>=2.0.0
15
  tqdm>=4.65.0
16
+ transformers>=4.28.1
17
+ git+https://github.com/openai/CLIP.git