File size: 1,818 Bytes
a817e85
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
400b579
 
a817e85
400b579
 
 
a817e85
400b579
 
 
a817e85
400b579
a817e85
 
 
 
 
 
 
 
 
 
400b579
a817e85
 
400b579
a817e85
400b579
a817e85
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
#!/usr/bin/env bash
#
# Copyright 2025 PKU-Alignment Team. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================

# Fail fast: abort on command errors, unset variables, and pipeline failures.
set -euo pipefail

# Base model checkpoint to fine-tune (HuggingFace-format Chameleon 7B).
MODEL_NAME_OR_PATH="/aifs4su/yaodong/hantao/models/chameleon-7b-hf"

# Root directory containing one sub-directory per dataset variant.
TRAIN_DATASETS_ROOT="/aifs4su/yaodong/hantao/datasets/MMInstruct-GPT4V_mistral-7b_cosi_cut/merged/"

# Dataset variants to train on; one SFT run is launched per entry.
DATASETS_NAME=("top1-100_valid")

# For wandb online logging. Prefer a key supplied by the caller's environment;
# the inline value is only a fallback for existing behavior.
# FIXME(security): this API key is committed in plain text — revoke it and
# require WANDB_API_KEY to be provided via the environment instead.
export WANDB_API_KEY="${WANDB_API_KEY:-7e2dcc0c310ebcb7cdcafd5e9320d6be55cf1a33}"

# Shared environment setup; presumably defines MASTER_PORT — TODO confirm.
# Loop-invariant, so sourced once rather than per iteration.
source ./setup.sh

# Launch one SFT training run per dataset variant.
for DATASET_NAME in "${DATASETS_NAME[@]}"; do
    TRAIN_DATASETS="${TRAIN_DATASETS_ROOT}/${DATASET_NAME}"   # dataset directory for this run
    TRAIN_DATA_FILES="pre_tokenized/train_no_pixel_values.pt" # pre-tokenized training file
    OUTPUT_DIR="../outputs/chameleon_sft/${DATASET_NAME}"     # per-dataset checkpoint/output dir

    # All expansions quoted so paths containing spaces cannot word-split (SC2086).
    deepspeed \
        --master_port "${MASTER_PORT}" \
        --module align_anything.trainers.text_image_to_text_image.sft \
        --model_name_or_path "${MODEL_NAME_OR_PATH}" \
        --train_datasets "${TRAIN_DATASETS}" \
        --train_data_files "${TRAIN_DATA_FILES}" \
        --output_dir "${OUTPUT_DIR}" \
        --save_total_limit 12 \
        --train_batch_size 16 \
        --epochs 3
done