{
  "model_name": "CycleCore-Maaza-SLM-360M-JSON",
  "base_model": "HuggingFaceTB/SmolLM2-360M",
  "training_date": "2025-11-20 13:07:28",
  "num_epochs": 3,
  "learning_rate": 0.00015,
  "batch_size": 32,
  "train_examples": 629,
  "validation_examples": 0,
  "test_examples": 158,
  "lora_config": {
    "enabled": true,
    "r": 32,
    "lora_alpha": 64,
    "lora_dropout": 0.1,
    "target_modules": [
      "q_proj",
      "v_proj",
      "k_proj",
      "o_proj",
      "gate_proj",
      "up_proj",
      "down_proj"
    ],
    "bias": "none",
    "task_type": "CAUSAL_LM"
  },
  "validation_run": false
}
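
A minimal sketch of how the lora_config above could be reconstructed at load time. It assumes the Hugging Face peft and transformers libraries were the training toolchain, and the filename "training_config.json" is a hypothetical placeholder; neither is confirmed by this file.

# Hypothetical sketch: rebuild the LoRA setup recorded above using the
# Hugging Face peft library (assumed toolchain, not confirmed by the file).
import json

from peft import LoraConfig, get_peft_model
from transformers import AutoModelForCausalLM

# Load the recorded config (hypothetical filename for illustration).
with open("training_config.json") as f:
    cfg = json.load(f)

lora = cfg["lora_config"]

# Build the PEFT LoRA config from the recorded hyperparameters.
lora_config = LoraConfig(
    r=lora["r"],                            # rank 32
    lora_alpha=lora["lora_alpha"],          # 64, i.e. scaling alpha/r = 2.0
    lora_dropout=lora["lora_dropout"],      # 0.1
    target_modules=lora["target_modules"],  # all attention and MLP projections
    bias=lora["bias"],                      # "none": bias terms stay frozen
    task_type=lora["task_type"],            # "CAUSAL_LM"
)

# Wrap the base model (HuggingFaceTB/SmolLM2-360M) with the LoRA adapters.
model = AutoModelForCausalLM.from_pretrained(cfg["base_model"])
model = get_peft_model(model, lora_config)
model.print_trainable_parameters()

Targeting all seven projection modules, rather than only q_proj and v_proj, adapts both attention and MLP layers, which raises the trainable-parameter count but typically improves fine-tuning quality on small models like this 360M base.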