limitedonly41 committed on
Commit
0b768dc
·
verified ·
1 Parent(s): 187e54f

Update requirements.txt

Browse files
Files changed (1) hide show
  1. requirements.txt +12 -4
requirements.txt CHANGED
@@ -1,13 +1,21 @@
1
 
2
  huggingface_hub
3
  gradio
4
- transformers
5
- torch
6
  spaces
7
  Pillow
8
- PyMuPDF
9
  numpy
10
  addict
11
  matplotlib
 
 
 
 
 
 
 
 
 
12
  torchvision
13
- einops
 
 
 
1
 
2
  huggingface_hub
3
  gradio
 
 
4
  spaces
5
  Pillow
 
6
  numpy
7
  addict
8
  matplotlib
9
+ einops
10
+
11
+ torch==2.6.0
12
+ transformers==4.46.3
13
+ tokenizers==0.20.3
14
+ accelerate
15
+ einops
16
+ addict
17
+ easydict
18
  torchvision
19
+ flash-attn @ https://github.com/Dao-AILab/flash-attention/releases/download/v2.7.3/flash_attn-2.7.3+cu12torch2.6cxx11abiFALSE-cp310-cp310-linux_x86_64.whl
20
+ PyMuPDF
21
+ hf_transfer