{
"base_model": "openai/gpt-oss-120b",
"method": "8GPU_Model_Parallelism_SFTTrainer",
"quantization": "Mxfp4",
"model_parallelism": true,
"gpu_count": 8,
"lora_config": {
"r": 16,
"alpha": 32,
"target_modules": [
"q_proj",
"k_proj",
"v_proj",
"o_proj",
"gate_proj",
"up_proj",
"down_proj",
"router"
],
"optimized_for": "Project_Euler_Mathematical_Reasoning"
},
"training_config": {
"batch_size": 2,
"gradient_accumulation": 16,
"effective_batch_size": 32,
"learning_rate": 0.0001,
"max_length": 1024,
"epochs": 1
},
"dataset_type": "Project_Euler_Mathematical_Problems",
"training_date": "2025-08-26T14:45:11.908481",
"layer_distribution": "36_layers_across_8_H200_GPUs"
}