{
"dataset_dir": "/root/persona/datasets/finance-instruct-iter2/sft_train",
"val_data_dir": "/root/persona/datasets/finance-instruct-iter2/sft_val",
"training_data_size": 57337,
"val_data_size": 6371,
"lora_r": 64,
"lora_alpha": 64,
"target_modules": ["up_proj", "k_proj", "down_proj", "gate_proj", "q_proj", "o_proj", "v_proj"],
"bias": "none",
"task_type": "CAUSAL_LM",
"output_dir": "/root/persona/training_results/finance-instruct-iter2/qwen3-4b-finance_instruct_iter2-20260313_231627",
"num_train_epochs": 3,
"per_device_train_batch_size": 2,
"learning_rate": 1e-06,
"lr_scheduler_type": "cosine",
"warmup_ratio": 0.03,
"bf16": true,
"gradient_checkpointing": true,
"gradient_checkpointing_kwargs": {
"use_reentrant": false
},
"max_length": 4096,
"logging_steps": 10,
"save_steps": 200,
"save_total_limit": 3,
"optim": "adamw_torch",
"report_to": "none",
"run_name": null,
"seed": 42,
"ddp_find_unused_parameters": false,
"dataloader_num_workers": 4,
"model_init_kwargs": {
"attn_implementation": "sdpa",
"torch_dtype": "torch.bfloat16",
"trust_remote_code": true
},
"eval_strategy": "steps",
"eval_steps": 200
}