#!/bin/bash
set -eux

# Tokenize a JSONL corpus into Megatron-LM binary format (.bin/.idx) with a
# Hugging Face tokenizer. Defaults target the llm-jp-corpus v1 ja_wiki
# train_0 shard and the Phi-2 tokenizer.
LLM_RECIPES_DIR=/project
INPUT_JSONL_FILE_PATH=${1:-/share/yans/datasets/jsonl/llm-jp-corpus-v1/ja/ja_wiki/train_0.jsonl}
OUTPUT_FILE_PREFIX=${2:-/work/llm_recipes/datasets/bin/baseline_phi2/llm_jp_corpus_v1_ja_wiki_train_0/data}
TOKENIZER_PATH=${3:-/share/pretrained_lm/Phi/Phi-2}

# Create the output directory if it does not exist.
mkdir -p "$(dirname "$OUTPUT_FILE_PREFIX")"

# Preprocess the corpus: tokenize with 32 workers, append an end-of-document
# token to each document, and log progress every 1000 documents.
python "$LLM_RECIPES_DIR"/megatron_lm/tools/preprocess_data.py \
    --input "$INPUT_JSONL_FILE_PATH" \
    --output-prefix "$OUTPUT_FILE_PREFIX" \
    --tokenizer-type HFPreTrainedTokenizer \
    --tokenizer-model "$TOKENIZER_PATH" \
    --workers 32 \
    --append-eod \
    --log-interval 1000
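
# Usage sketch (assumption: the script's filename is not shown here, so
# "tokenize_ja_wiki.sh" below is a hypothetical name). All three positional
# arguments are optional and fall back to the defaults above:
#
#   bash tokenize_ja_wiki.sh
#
#   bash tokenize_ja_wiki.sh \
#     /path/to/other_corpus.jsonl \
#     /path/to/output/prefix/data \
#     /path/to/hf_tokenizer_dir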