packaging==23.2
peft==0.10.0
transformers==4.40.2
tokenizers==0.19.1
bitsandbytes==0.43.1
accelerate==0.30.1
pydantic==2.6.3
addict
fire
PyYAML>=6.0
requests
datasets==2.19.1
sentencepiece
wandb
einops
xformers==0.0.23.post1
optimum==1.16.2
hf_transfer
colorama
numba
numpy>=1.24.4
evaluate==0.4.1
scipy
scikit-learn==1.2.2
pynvml
art
fschat@ git+https://github.com/lm-sys/FastChat.git@27a05b04a35510afb1d767ae7e5990cbd278f8fe
gradio==3.50.2
tensorboard
s3fs
gcsfs
trl==0.8.5
zstandard==0.22.0
fastcore
torch==2.1.2+cu118

[auto-gptq]
auto-gptq==0.5.1

[deepspeed]
deepspeed==0.14.2
deepspeed-kernels

[flash-attn]
flash-attn==2.5.8

[fused-dense-lib]
fused-dense-lib@ git+https://github.com/Dao-AILab/flash-attention@v2.5.8#subdirectory=csrc/fused_dense_lib

[galore]
galore_torch

[lion-pytorch]
lion-pytorch==0.1.2

[mamba-ssm]
mamba-ssm==1.2.0.post1

[mlflow]
mlflow