Thought-Aligner-7B / arguments.json
{
  "model_name_or_path": "/inspire/hdd/ws-f4d69b29-e0a5-44e6-bd92-acf4de9990f0/public-project/jiangchangyue-240114020171/workspace/jcy/reasoning_safety/Thought_Aligner/warmup_model/warmup-qwen-7b",
  "max_length": 2048,
  "trust_remote_code": true,
  "train_datasets": [
    [
      "correction-json",
      {
        "proportion": 1.0,
        "path": "/inspire/hdd/ws-f4d69b29-e0a5-44e6-bd92-acf4de9990f0/public-project/jiangchangyue-240114020171/workspace/jcy/reasoning_safety/data/train_data.json"
      }
    ]
  ],
  "eval_datasets": null,
  "epochs": 3,
  "per_device_train_batch_size": 4,
  "per_device_eval_batch_size": 4,
  "gradient_accumulation_steps": 8,
  "gradient_checkpointing": true,
  "lr": 2e-05,
  "lr_scheduler_type": "cosine",
  "lr_warmup_ratio": 0.03,
  "weight_decay": 0.0,
  "seed": 42,
  "fp16": false,
  "bf16": true,
  "tf32": true,
  "eval_strategy": "epoch",
  "eval_interval": 1000000,
  "need_eval": false,
  "eval_split_ratio": null,
  "output_dir": "/inspire/hdd/ws-f4d69b29-e0a5-44e6-bd92-acf4de9990f0/public-project/jiangchangyue-240114020171/workspace/jcy/reasoning_safety/Thought_Aligner/sft_model/thought-aligner-qwen-7b",
  "log_type": "wandb",
  "log_dir": "/inspire/hdd/ws-f4d69b29-e0a5-44e6-bd92-acf4de9990f0/public-project/jiangchangyue-240114020171/workspace/jcy/reasoning_safety/Thought_Aligner/sft_model/thought-aligner-qwen-7b",
  "log_project": "Aligner-SFT",
  "log_run_name": "sft-2025-03-10-22-03-10",
  "save_16bit": true,
  "save_interval": 1000000,
  "local_rank": 0,
  "zero_stage": 3,
  "offload": "none",
  "deepspeed": false,
  "deepspeed_config": null,
  "deepscale": false,
  "deepscale_config": null,
  "global_rank": 0,
  "device": {
    "type": "torch.device",
    "repr": "device(type='cuda', index=0)"
  },
  "num_update_steps_per_epoch": 86,
  "total_training_steps": 258
}
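
For reference, a minimal sketch of how this config could be loaded and sanity-checked. The local file name "arguments.json" is an assumption about where the file sits on disk; the arithmetic only uses fields present in the config (per_device_train_batch_size * gradient_accumulation_steps = 4 * 8 = 32 samples per device per optimizer step, and epochs * num_update_steps_per_epoch = 3 * 86 = 258 total steps, matching total_training_steps). The world size (number of GPUs) is not recorded here, so the global batch size is not derived.

import json

# A minimal sketch: load the training arguments and check the step arithmetic.
# "arguments.json" is an assumed local path, not something stated in the config itself.
with open("arguments.json") as f:
    args = json.load(f)

# Samples consumed per device per optimizer update (micro-batch * accumulation).
effective_per_device_batch = (
    args["per_device_train_batch_size"] * args["gradient_accumulation_steps"]
)  # 4 * 8 = 32

# Total optimizer steps should equal epochs * steps-per-epoch (3 * 86 = 258).
expected_total_steps = args["epochs"] * args["num_update_steps_per_epoch"]
assert expected_total_steps == args["total_training_steps"], (
    expected_total_steps,
    args["total_training_steps"],
)

print(f"effective per-device batch: {effective_per_device_batch}")
print(f"total training steps: {expected_total_steps}")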