File size: 1,004 Bytes
3738140
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
#!/usr/bin/env bash
# Launcher for LoRA fine-tuning: wraps src/loratune.py with sane defaults.
# Every hyper-parameter can be overridden through environment variables
# (DTYPE, BATCH_SIZE, EPOCHS, ...) or appended as extra CLI flags.
set -euo pipefail

# Pin training to one GPU unless the caller already selected devices.
export CUDA_VISIBLE_DEVICES="${CUDA_VISIBLE_DEVICES:-2}"

# Repository root: the parent of the directory containing this script.
ROOT="$(cd "$(dirname "${BASH_SOURCE[0]}")/.." && pwd)"

# Print usage and exit with the same status the original check used.
usage() {
  cat <<'USAGE'
Usage:
  script/loratune.sh <base_model> <output_dir> [extra lora args...]

Example:
  script/loratune.sh /path/to/base_model /path/to/output_dir --epochs 2 --batch_size 32
USAGE
  exit 1
}

(( $# >= 2 )) || usage

BASE_MODEL="$1"
OUTPUT_DIR="$2"
shift 2

# Assemble the trainer's flags in an array so each value stays one word
# even if it contains spaces; env vars supply overridable defaults.
trainer_args=(
  --base_model "$BASE_MODEL"
  --output_dir "$OUTPUT_DIR"
  --device cuda
  --dtype "${DTYPE:-bfloat16}"
  --instruction_dataset "${INSTRUCTION_DATASET:-tatsu-lab/alpaca}"
  --instruction_split "${INSTRUCTION_SPLIT:-train}"
  --max_samples "${MAX_SAMPLES:-0}"
  --seq_len "${SEQ_LEN:-1024}"
  --batch_size "${BATCH_SIZE:-64}"
  --micro_batch_size "${MICRO_BATCH_SIZE:-8}"
  --epochs "${EPOCHS:-1.0}"
  --learning_rate "${LEARNING_RATE:-1e-4}"
  --log_steps "${LOG_STEPS:-100}"
  --lora_rank "${LORA_RANK:-8}"
)

# Caller-supplied extras come last so they can override the defaults above.
python "$ROOT/src/loratune.py" "${trainer_args[@]}" "$@"