AskBuddyX / adapter_config.json
{
    "adapter_path": "outputs/adapters/dev",
    "batch_size": 4,
    "config": null,
    "data": "data/training_ready",
    "fine_tune_type": "lora",
    "grad_accumulation_steps": 1,
    "grad_checkpoint": false,
    "iters": 100,
    "learning_rate": 2e-05,
    "lora_parameters": {
        "rank": 8,
        "dropout": 0.0,
        "scale": 20.0
    },
    "lr_schedule": null,
    "mask_prompt": false,
    "max_seq_length": 2048,
    "model": "mlx-community/Qwen2.5-Coder-0.5B-Instruct-4bit",
    "num_layers": 16,
    "optimizer": "adam",
    "optimizer_config": {
        "adam": {},
        "adamw": {},
        "muon": {},
        "sgd": {},
        "adafactor": {}
    },
    "project_name": null,
    "report_to": null,
    "resume_adapter_file": null,
    "save_every": 100,
    "seed": 0,
    "steps_per_eval": 200,
    "steps_per_report": 10,
    "test": false,
    "test_batches": 500,
    "train": true,
    "val_batches": 25
}
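
This is the training configuration that mlx-lm's LoRA fine-tuning script (mlx_lm.lora) records in the adapter directory: the base model (mlx-community/Qwen2.5-Coder-0.5B-Instruct-4bit), rank-8 LoRA adapters applied to 16 layers, and the run hyperparameters (100 iterations, batch size 4, learning rate 2e-05). Below is a minimal sketch of loading the resulting adapter for inference with mlx-lm's load()/generate() API, assuming the adapter weights live next to this file at outputs/adapters/dev; the prompt string is a hypothetical placeholder.

from mlx_lm import load, generate

# Load the 4-bit base model and apply the LoRA adapter.
# "outputs/adapters/dev" is the "adapter_path" from the config above; the
# directory is assumed to contain the trained adapter weights alongside
# this adapter_config.json.
model, tokenizer = load(
    "mlx-community/Qwen2.5-Coder-0.5B-Instruct-4bit",
    adapter_path="outputs/adapters/dev",
)

# Hypothetical prompt, just to exercise the fine-tuned model. The base
# model is instruction-tuned, so format the input with its chat template.
messages = [{"role": "user", "content": "Write a Python function that reverses a string."}]
prompt = tokenizer.apply_chat_template(
    messages, tokenize=False, add_generation_prompt=True
)

response = generate(model, tokenizer, prompt=prompt, max_tokens=256)
print(response)

The same run can presumably be reproduced by passing these values as flags to the mlx_lm.lora command line (e.g. --model, --data, --iters, --batch-size); the null fields simply record options left at their defaults.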