mlx_slm_ft / adapter_config.json
{
  "base_model_name_or_path": "wzebrowski/mlx_slm",
  "peft_type": "LORA",
  "r": 16,
  "lora_alpha": 32,
  "target_modules": [
    "encoder.layers.10",
    "encoder.layers.11",
    "encoder.layers.12",
    "encoder.layers.13",
    "encoder.layers.14",
    "encoder.layers.15",
    "encoder.layers.16",
    "encoder.layers.17",
    "encoder.layers.2",
    "encoder.layers.3",
    "encoder.layers.4",
    "encoder.layers.5",
    "encoder.layers.6",
    "encoder.layers.7",
    "encoder.layers.8",
    "encoder.layers.9"
  ],
  "bias": "none",
  "task_type": "CAUSAL_LM"
}
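
For reference, a configuration like this one can be reconstructed programmatically with peft's `LoraConfig`. The sketch below is illustrative only: it assumes the adapter weights are published under `wzebrowski/mlx_slm_ft` and that the base model `wzebrowski/mlx_slm` loads through transformers, neither of which the config file alone confirms. The sixteen `target_modules` entries above are simply `encoder.layers.2` through `encoder.layers.17` serialized in lexicographic order, so a range expression produces the same set.

```python
from transformers import AutoModelForCausalLM
from peft import LoraConfig, PeftModel

# Reconstruct the adapter configuration shown above.
config = LoraConfig(
    base_model_name_or_path="wzebrowski/mlx_slm",
    r=16,                   # LoRA rank
    lora_alpha=32,          # scaling factor (alpha / r = 2.0)
    target_modules=[f"encoder.layers.{i}" for i in range(2, 18)],
    bias="none",            # no bias parameters are trained
    task_type="CAUSAL_LM",
)

# Hedged usage sketch: attach the published adapter to the base model.
# Repo IDs are taken from this page; loadability is an assumption.
base = AutoModelForCausalLM.from_pretrained("wzebrowski/mlx_slm")
model = PeftModel.from_pretrained(base, "wzebrowski/mlx_slm_ft")
```

Note that `target_modules` names whole transformer blocks rather than individual projection matrices (e.g. `q_proj`, `v_proj`), which is an unusual choice for a LoRA config; how peft resolves these names depends on the base model's module layout.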