update requirements
requirements.txt CHANGED (+35 -36)

@@ -1,37 +1,36 @@
-flash-attn @ https://github.com/Dao-AILab/flash-attention/releases/download/v2.7.3/flash_attn-2.7.3+cu12torch2.6cxx11abiFALSE-cp310-cp310-linux_x86_64.whl
-transformers-stream-generator
-transformers==4.57.1
-huggingface_hub
-albumentations
-qwen-vl-utils
-pyvips-binary
-sentencepiece
-opencv-python
-docling-core
-torch==2.6.0
-python-docx
-torchvision
-supervision
-matplotlib
-accelerate
-pdf2image
-num2words
-reportlab
-html2text
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-timm
+flash-attn @ https://github.com/Dao-AILab/flash-attention/releases/download/v2.7.3/flash_attn-2.7.3+cu12torch2.6cxx11abiFALSE-cp310-cp310-linux_x86_64.whl
+transformers-stream-generator
+transformers==4.57.1
+huggingface_hub
+albumentations
+qwen-vl-utils
+pyvips-binary
+sentencepiece
+opencv-python
+docling-core
+torch==2.6.0
+python-docx
+torchvision
+supervision
+matplotlib
+accelerate
+pdf2image
+num2words
+reportlab
+html2text
+markdown
+requests
+pymupdf
+loguru
+hf_xet
+spaces
+pyvips
+pillow
+gradio
+einops
+httpx
+click
+peft
+fpdf
+timm
 av
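Note that the only fully pinned entry is the flash-attn wheel, and its filename ties it to a specific stack: CPython 3.10, a CUDA 12 build of torch, and torch 2.6 (which matches the torch==2.6.0 pin). As a minimal sketch, a pre-install sanity check along these lines can catch a mismatched environment before pip fails on the wheel; the expected values are read off the wheel filename above, and the check assumes torch is already installed:

    # Sketch only: verify the runtime matches the tags in the pinned
    # flash-attn wheel filename (cp310 / cu12 / torch2.6). Adjust the
    # expected values if the wheel URL is ever swapped out.
    import sys
    import torch

    assert sys.version_info[:2] == (3, 10), "wheel is built for CPython 3.10"
    assert torch.__version__.startswith("2.6"), "wheel is built against torch 2.6"
    assert torch.version.cuda is not None and torch.version.cuda.startswith("12"), \
        "wheel expects a CUDA 12 build of torch"
    print("environment matches the pinned flash-attn wheel tags")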