megalado committed on
Commit
f14e8c5
Β·
1 Parent(s): 9e5eaf1

run demo.py via file path, not -m

Browse files
Files changed (1) hide show
  1. app.py +22 -173
app.py CHANGED
@@ -1,195 +1,44 @@
1
- import subprocess, uuid, os, sys
2
  from pathlib import Path
3
  import gradio as gr
4
 
5
  # ── paths ─────────────────────────────────────────────────────
6
- CKPT = "checkpoints/mld_humanml.pt" # your 90 MB weight
7
- DEVICE = "cpu" # free HF Spaces
8
- STEPS = "50" # diffusion steps
9
- CFG = "motion_latent_diffusion/configs/config_mld_humanml3d.yaml" # default config file
10
  # ──────────────────────────────────────────────────────────────
11
 
12
- def ensure_dependencies():
13
- """Ensure all required packages are installed"""
14
- required_packages = [
15
- "omegaconf",
16
- "torch",
17
- "numpy",
18
- "tqdm",
19
- "einops"
20
- ]
21
- for package in required_packages:
22
- try:
23
- __import__(package)
24
- except ImportError:
25
- print(f"Installing {package}...")
26
- subprocess.check_call([sys.executable, "-m", "pip", "install", package])
27
-
28
  def generate_motion(prompt: str) -> str:
29
- """Generate motion from text prompt and return path to BVH file"""
30
- ensure_dependencies()
31
-
32
- # Create a unique temporary file for output
33
  out_bvh = Path("/tmp") / f"{uuid.uuid4().hex}.bvh"
34
-
35
- # Get absolute paths
36
- root = Path(__file__).parent.absolute()
37
- demo_script = root / "motion_latent_diffusion" / "demo.py"
38
- config_path = root / CFG
39
- ckpt_path = root / CKPT
40
-
41
- # Log paths for debugging
42
- print(f"Root directory: {root}")
43
- print(f"Demo script: {demo_script}")
44
- print(f"Config file: {config_path}")
45
- print(f"Checkpoint: {ckpt_path}")
46
-
47
- # Check that files exist
48
- if not demo_script.exists():
49
- return f"Error: Demo script not found at {demo_script}"
50
- if not config_path.exists():
51
- return f"Error: Config file not found at {config_path}"
52
- if not ckpt_path.exists():
53
- return f"Error: Checkpoint not found at {ckpt_path}"
54
-
55
- # Run the demo script directly
56
  cmd = [
57
- sys.executable, # Use the current Python interpreter
58
- str(demo_script),
59
- "--cfg", str(config_path),
60
- "--checkpoint", str(ckpt_path),
61
- "--prompt", prompt,
62
- "--device", DEVICE,
63
- "--steps", STEPS,
64
- "--output", str(out_bvh)
65
  ]
66
 
67
- # Set up environment
68
  env = os.environ.copy()
69
-
70
- # Add the repository root to PYTHONPATH so imports work
71
- mld_path = root / "motion_latent_diffusion"
72
- env["PYTHONPATH"] = f"{env.get('PYTHONPATH', '')}:{root}:{mld_path}"
73
-
74
- print(f"Running command: {' '.join(cmd)}")
75
- print(f"PYTHONPATH: {env['PYTHONPATH']}")
76
-
77
- try:
78
- # Run the command and capture output
79
- result = subprocess.run(
80
- cmd,
81
- env=env,
82
- check=True,
83
- stderr=subprocess.PIPE,
84
- stdout=subprocess.PIPE,
85
- text=True
86
- )
87
-
88
- print("Command output:", result.stdout)
89
-
90
- # Check if the BVH file was created
91
- if out_bvh.exists():
92
- print(f"BVH file created at {out_bvh}")
93
- return str(out_bvh)
94
- else:
95
- return f"Error: BVH file not created. Output: {result.stdout}\nError: {result.stderr}"
96
-
97
- except subprocess.CalledProcessError as e:
98
- error_message = f"Command failed with exit code {e.returncode}.\nStdout: {e.stdout}\nStderr: {e.stderr}"
99
- print(error_message)
100
- return f"Error generating motion: {error_message}"
101
 
102
- # Alternative direct approach if subprocess method fails
103
- def generate_motion_direct(prompt: str) -> str:
104
- """Generate motion using direct API calls instead of subprocess"""
105
- ensure_dependencies()
106
-
107
- out_bvh = Path("/tmp") / f"{uuid.uuid4().hex}.bvh"
108
-
109
- # Add the repository root and MLD to path
110
- root = Path(__file__).parent.absolute()
111
- mld_path = root / "motion_latent_diffusion"
112
-
113
- if str(root) not in sys.path:
114
- sys.path.insert(0, str(root))
115
- if str(mld_path) not in sys.path:
116
- sys.path.insert(0, str(mld_path))
117
-
118
- try:
119
- # Import necessary modules
120
- from omegaconf import OmegaConf
121
- import torch
122
- from motion_latent_diffusion.mld.config import get_cfg_defaults
123
-
124
- # Load config
125
- cfg = get_cfg_defaults()
126
- config_path = root / CFG
127
- print(f"Loading config from {config_path}")
128
- cfg.merge_from_file(str(config_path))
129
-
130
- # Override config values
131
- cfg.TEST.CHECKPOINT = str(root / CKPT)
132
- cfg.DEVICE = DEVICE
133
-
134
- # Import model
135
- from motion_latent_diffusion.mld.models import get_model
136
-
137
- # Load model
138
- print("Loading model...")
139
- model = get_model(cfg)
140
- state_dict = torch.load(cfg.TEST.CHECKPOINT, map_location='cpu')
141
- model.load_state_dict(state_dict)
142
- model.eval()
143
-
144
- # Generate motion
145
- print(f"Generating motion for prompt: {prompt}")
146
- with torch.no_grad():
147
- # Tokenize text
148
- tokens = model.tokenizer.encode([prompt])
149
- tokens = torch.LongTensor(tokens).to(model.device)
150
-
151
- # Generate motion
152
- motion_data = model.sample(tokens, steps=int(STEPS))
153
-
154
- # Save as BVH
155
- from motion_latent_diffusion.mld.data.humanml.scripts.motion_process import recover_from_ric
156
- from motion_latent_diffusion.mld.data.humanml.utils.visualization import plot_3d_motion
157
-
158
- motion = motion_data[0][0] # First sample
159
- motion_np = motion.cpu().numpy()
160
- motion_joints = recover_from_ric(motion_np, 22)
161
-
162
- # Save motion
163
- plot_3d_motion(motion_joints, str(out_bvh), save_bvh=True, title=f"Motion from: {prompt}")
164
-
165
- if out_bvh.exists():
166
- print(f"BVH file created at {out_bvh}")
167
- return str(out_bvh)
168
- else:
169
- return f"Error: BVH file not created."
170
-
171
- except Exception as e:
172
- import traceback
173
- error_details = traceback.format_exc()
174
- print(f"Error in direct generation: {str(e)}\n{error_details}")
175
- return f"Error generating motion: {str(e)}"
176
 
177
  # ── Gradio UI ─────────────────────────────────────────────────
178
  iface = gr.Interface(
179
- fn=generate_motion, # Try the subprocess version first
180
- # If subprocess version doesn't work, switch to direct:
181
- # fn=generate_motion_direct,
182
  inputs=gr.Textbox(lines=2, placeholder="e.g. a person walks and waves"),
183
  outputs=gr.File(label="Download BVH"),
184
  title="Motion-Latent-Diffusion – Text β†’ BVH (CPU demo)",
185
- description="Enter a prompt to generate a 3-second motion clip.",
186
- examples=[
187
- "a person walks forward and waves",
188
- "a person dances happily",
189
- "a person jumps up and down",
190
- "a person does a backflip"
191
- ]
192
  )
193
 
194
  if __name__ == "__main__":
195
- iface.launch()
 
1
import os
import subprocess
import sys
import uuid
from pathlib import Path

import gradio as gr
4
 
5
# ── paths & runtime settings ──────────────────────────────────
CKPT = "checkpoints/mld_humanml.pt"  # repo-relative path to the model weights (~90 MB)
DEVICE = "cpu"  # free Hugging Face Spaces tier — no GPU available
STEPS = "50"  # diffusion steps; kept as a string because it is passed straight to the CLI
CFG = "configs/config_mld_humanml3d.yaml"  # repo-relative default config file
# ──────────────────────────────────────────────────────────────
11
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
12
def generate_motion(prompt: str) -> str:
    """Generate a motion clip for *prompt* and return the BVH file path.

    Runs ``motion_latent_diffusion/demo.py`` in a subprocess, using the
    module-level ``CKPT`` / ``CFG`` / ``DEVICE`` / ``STEPS`` settings.

    Args:
        prompt: Free-text description of the desired motion.

    Returns:
        Path (as a string) of the generated ``.bvh`` file under ``/tmp``.

    Raises:
        subprocess.CalledProcessError: if the demo script exits non-zero.
    """
    # Unique output file so concurrent Gradio requests don't collide.
    out_bvh = Path("/tmp") / f"{uuid.uuid4().hex}.bvh"

    # Resolve repo-relative paths against this file's directory so the
    # app works regardless of the current working directory.
    root = Path(__file__).parent

    # Run demo.py by file path. ``python -m`` expects a dotted *module*
    # name, not a path — ``-m motion_latent_diffusion/demo.py`` would
    # fail with "No module named ..." (the commit message says "via file
    # path, not -m"; this makes the code match it).
    cmd = [
        sys.executable,  # same interpreter that runs this app
        str(root / "motion_latent_diffusion" / "demo.py"),
        "--cfg", str(root / CFG),
        "--checkpoint", str(root / CKPT),
        "--prompt", prompt,
        "--device", DEVICE,
        "--steps", STEPS,
        "--output", str(out_bvh),
    ]

    # Make the repository importable from the child process.
    env = os.environ.copy()
    env["PYTHONPATH"] = f"{env.get('PYTHONPATH', '')}:{root}"

    subprocess.run(cmd, env=env, check=True)
    return str(out_bvh)
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
33
 
34
# ── Gradio UI ─────────────────────────────────────────────────
# One text prompt in, one downloadable file out: generate_motion
# returns the path of the BVH it wrote, which gr.File serves.
# NOTE(review): "β†’" in the title looks like mojibake for "→" —
# confirm the file's encoding and fix the literal separately.
iface = gr.Interface(
    fn=generate_motion,
    inputs=gr.Textbox(lines=2, placeholder="e.g. a person walks and waves"),
    outputs=gr.File(label="Download BVH"),
    title="Motion-Latent-Diffusion – Text β†’ BVH (CPU demo)",
    description="Enter a prompt to generate a 3-second motion clip."
)

# Launch the web app only when run as a script (not when imported).
if __name__ == "__main__":
    iface.launch()