Update requirements.txt
requirements.txt (+5 -13)
@@ -1,12 +1,7 @@
-
-#
-
-
---upgrade
---force-reinstall
-
-torch==2.1.0+cu122
-torchvision==0.16.0+cu122
+--extra-index-url https://download.pytorch.org/whl/cu118
+# basic dependencies
+torch==2.4.0
+torchvision==0.19.0
 
 ###############################################################################
 # Transformers / training utils
@@ -41,7 +36,4 @@ huggingface_hub
 sentencepiece
 safetensors
 
-
-# Flash-Attention 2.5.8 — CUDA 12.2, Torch 2.1, Python 3.10
-###############################################################################
-https://github.com/Dao-AILab/flash-attention/releases/download/v2.5.8/flash_attn-2.5.8+cu122torch2.1cxx11abiFALSE-cp310-cp310-linux_x86_64.whl
+https://github.com/Dao-AILab/flash-attention/releases/download/v2.5.9.post1/flash_attn-2.5.9.post1+cu118torch1.12cxx11abiFALSE-cp310-cp310-linux_x86_64.whl
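As a quick sanity check after installing from the updated file, a short script can confirm that the new pins resolved as intended. This is an illustrative sketch, not part of the commit; it assumes the requirements were installed into the active environment with `pip install -r requirements.txt`, and uses only the standard version attributes exposed by torch, torchvision, and flash_attn.

# Post-install sanity check (illustrative; not part of this commit).
# Assumes `pip install -r requirements.txt` has already run in this environment.
import torch
import torchvision
import flash_attn

print("torch:", torch.__version__)              # expect 2.4.0+cu118 from the cu118 index
print("torchvision:", torchvision.__version__)  # expect 0.19.0+cu118
print("flash_attn:", flash_attn.__version__)    # expect 2.5.9.post1
print("built against CUDA:", torch.version.cuda)  # expect 11.8
print("CUDA available:", torch.cuda.is_available())

If `flash_attn` fails to import with an undefined-symbol error, the wheel's torch/CUDA tags typically do not match the installed torch build, so it is worth checking that the pinned wheel URL matches the torch version actually installed.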