ChuxiJ committed on
Commit
e7130fe
·
2 Parent(s): 376c43e 90d4594

Merge branch 'main' of github.com:ace-step/ACE-Step-1.5 into main

Browse files
Files changed (2) hide show
  1. acestep/handler.py +8 -1
  2. test.py +1 -11
acestep/handler.py CHANGED
@@ -146,7 +146,14 @@ class AceStepHandler:
146
  """
147
  try:
148
  if device == "auto":
149
- device = "cuda" if torch.cuda.is_available() else "cpu"
 
 
 
 
 
 
 
150
 
151
  status_msg = ""
152
 
 
146
  """
147
  try:
148
  if device == "auto":
149
+ if hasattr(torch, 'xpu') and torch.xpu.is_available():
150
+ device = "xpu"
151
+ elif torch.cuda.is_available():
152
+ device = "cuda"
153
+ elif hasattr(torch.backends, "mps") and torch.backends.mps.is_available():
154
+ device = "mps"
155
+ else:
156
+ device = "cpu"
157
 
158
  status_msg = ""
159
 
test.py CHANGED
@@ -35,23 +35,13 @@ def main():
35
  print(f"Using model: {model_name}")
36
 
37
  # Initialize service
38
- if hasattr(torch, 'xpu') and torch.xpu.is_available():
39
- device = "xpu"
40
- elif torch.cuda.is_available():
41
- device = "cuda"
42
- elif hasattr(torch.backends, "mps") and torch.backends.mps.is_available():
43
- device = "mps"
44
- else:
45
- device = "cpu"
46
- print(f"Using device: {device}")
47
 
48
  use_llm = False
49
 
50
  status, enabled = handler.initialize_service(
51
  project_root=project_root,
52
  config_path=model_name,
53
- device=device,
54
- init_llm=use_llm,
55
  use_flash_attention=True, # Default in UI
56
  compile_model=True,
57
  offload_to_cpu=True,
 
35
  print(f"Using model: {model_name}")
36
 
37
  # Initialize service
 
 
 
 
 
 
 
 
 
38
 
39
  use_llm = False
40
 
41
  status, enabled = handler.initialize_service(
42
  project_root=project_root,
43
  config_path=model_name,
44
+ device='auto',
 
45
  use_flash_attention=True, # Default in UI
46
  compile_model=True,
47
  offload_to_cpu=True,