#!/usr/bin/env bash
# Launch LoRA fine-tuning (src/loratune.py) for a given base model.
#
# Usage:
#   script/loratune.sh <base_model> <output_dir> [extra lora args...]
#
# Environment overrides (all optional, defaults shown below):
#   CUDA_VISIBLE_DEVICES  GPU selection (default: 2)
#   DTYPE, INSTRUCTION_DATASET, INSTRUCTION_SPLIT, MAX_SAMPLES, SEQ_LEN,
#   BATCH_SIZE, MICRO_BATCH_SIZE, EPOCHS, LEARNING_RATE, LOG_STEPS, LORA_RANK
set -euo pipefail

# Default to GPU 2 unless the caller pins devices explicitly.
export CUDA_VISIBLE_DEVICES="${CUDA_VISIBLE_DEVICES:-2}"

# Repository root: parent of the directory containing this script.
ROOT="$(cd "$(dirname "${BASH_SOURCE[0]}")/.." && pwd)"
readonly ROOT

if [[ $# -lt 2 ]]; then
  # Usage goes to stderr so an error path never pollutes captured stdout.
  cat >&2 <<'USAGE'
Usage:
  script/loratune.sh <base_model> <output_dir> [extra lora args...]


Example:
  script/loratune.sh /path/to/base_model /path/to/output_dir --epochs 2 --batch_size 32
USAGE
  exit 1
fi

BASE_MODEL="$1"
OUTPUT_DIR="$2"
shift 2
readonly BASE_MODEL OUTPUT_DIR

# Fail early with a clear message instead of an opaque "command not found".
command -v python >/dev/null 2>&1 || {
  printf 'error: python interpreter not found in PATH\n' >&2
  exit 1
}

# exec replaces this shell so signals (Ctrl-C, scheduler SIGTERM) reach the
# trainer process directly; remaining "$@" forwards any extra lora args.
exec python "$ROOT/src/loratune.py" \
  --base_model "$BASE_MODEL" \
  --output_dir "$OUTPUT_DIR" \
  --device cuda \
  --dtype "${DTYPE:-bfloat16}" \
  --instruction_dataset "${INSTRUCTION_DATASET:-tatsu-lab/alpaca}" \
  --instruction_split "${INSTRUCTION_SPLIT:-train}" \
  --max_samples "${MAX_SAMPLES:-0}" \
  --seq_len "${SEQ_LEN:-1024}" \
  --batch_size "${BATCH_SIZE:-64}" \
  --micro_batch_size "${MICRO_BATCH_SIZE:-8}" \
  --epochs "${EPOCHS:-1.0}" \
  --learning_rate "${LEARNING_RATE:-1e-4}" \
  --log_steps "${LOG_STEPS:-100}" \
  --lora_rank "${LORA_RANK:-8}" \
  "$@"
|
|