llaa33219 committed on
Commit
0fe415a
verified
1 Parent(s): 919bd60

Update requirements.txt

Browse files
Files changed (1) hide show
  1. requirements.txt +19 -4
requirements.txt CHANGED
@@ -5,6 +5,7 @@ spaces
5
  # PyTorch (ZeroGPU에서 제공)
6
  torch>=2.0.0
7
  torchvision
 
8
 
9
  # Hugging Face
10
  transformers>=4.35.0
@@ -12,6 +13,16 @@ diffusers>=0.25.0
12
  accelerate
13
  safetensors
14
  huggingface_hub
 
 
 
 
 
 
 
 
 
 
15
 
16
  # Pose detection
17
  controlnet_aux>=0.0.7
@@ -21,16 +32,20 @@ opencv-python
21
  opencv-python-headless
22
  Pillow>=9.0.0
23
  numpy
 
24
 
25
  # Video export
26
  imageio
27
  imageio-ffmpeg
 
 
28
 
29
  # Git for cloning repo
30
  GitPython
31
 
32
- # Flash attention (optional, for speed)
33
- # https://github.com/mjun0812/flash-attention-prebuild-wheels/releases/download/v0.0.8/flash_attn-2.7.4.post1+cu124torch2.5-cp310-cp310-linux_x86_64.whl
34
 
35
- # xformers (optional, for memory efficiency)
36
- # xformers>=0.0.25
 
 
5
  # PyTorch (ZeroGPU에서 제공)
6
  torch>=2.0.0
7
  torchvision
8
+ torchaudio
9
 
10
  # Hugging Face
11
  transformers>=4.35.0
 
13
  accelerate
14
  safetensors
15
  huggingface_hub
16
+ sentencepiece
17
+
18
+ # SteadyDancer / Wan 2.1 dependencies
19
+ easydict
20
+ einops
21
+ ftfy
22
+ regex
23
+ tqdm
24
+ pyyaml
25
+ omegaconf
26
 
27
  # Pose detection
28
  controlnet_aux>=0.0.7
 
32
  opencv-python-headless
33
  Pillow>=9.0.0
34
  numpy
35
+ scipy
36
 
37
  # Video export
38
  imageio
39
  imageio-ffmpeg
40
+ moviepy
41
+ decord
42
 
43
  # Git for cloning repo
44
  GitPython
45
 
46
+ # xfuser (optional, for distributed inference)
47
+ # xfuser
48
 
49
+ # Flash attention (prebuilt wheel for ZeroGPU)
50
+ # Uncomment if needed:
51
+ # https://github.com/mjun0812/flash-attention-prebuild-wheels/releases/download/v0.0.8/flash_attn-2.7.4.post1+cu124torch2.5-cp310-cp310-linux_x86_64.whl