{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 4.9375,
  "eval_steps": 500,
  "global_step": 45,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.10416666666666667,
      "grad_norm": 1.5762656927108765,
      "learning_rate": 5.0000000000000004e-08,
      "loss": 1.1189,
      "step": 1
    },
    {
      "epoch": 0.20833333333333334,
      "grad_norm": 1.5815638303756714,
      "learning_rate": 1.0000000000000001e-07,
      "loss": 1.1288,
      "step": 2
    },
    {
      "epoch": 0.3125,
      "grad_norm": 1.5717893838882446,
      "learning_rate": 1.5000000000000002e-07,
      "loss": 1.115,
      "step": 3
    },
    {
      "epoch": 0.4166666666666667,
      "grad_norm": 1.5886478424072266,
      "learning_rate": 2.0000000000000002e-07,
      "loss": 1.1272,
      "step": 4
    },
    {
      "epoch": 0.5208333333333334,
      "grad_norm": 1.6140114068984985,
      "learning_rate": 2.5000000000000004e-07,
      "loss": 1.1297,
      "step": 5
    },
    {
      "epoch": 0.625,
      "grad_norm": 1.531928300857544,
      "learning_rate": 3.0000000000000004e-07,
      "loss": 1.0938,
      "step": 6
    },
    {
      "epoch": 0.7291666666666666,
      "grad_norm": 1.651231288909912,
      "learning_rate": 3.5000000000000004e-07,
      "loss": 1.1527,
      "step": 7
    },
    {
      "epoch": 0.8333333333333334,
      "grad_norm": 1.581553339958191,
      "learning_rate": 4.0000000000000003e-07,
      "loss": 1.1459,
      "step": 8
    },
    {
      "epoch": 0.9375,
      "grad_norm": 1.597037434577942,
      "learning_rate": 4.5000000000000003e-07,
      "loss": 1.1213,
      "step": 9
    },
    {
      "epoch": 1.1041666666666667,
      "grad_norm": 2.725217819213867,
      "learning_rate": 5.000000000000001e-07,
      "loss": 2.2635,
      "step": 10
    },
    {
      "epoch": 1.2083333333333333,
      "grad_norm": 1.5833462476730347,
      "learning_rate": 5.5e-07,
      "loss": 1.1192,
      "step": 11
    },
    {
      "epoch": 1.3125,
      "grad_norm": 1.4980436563491821,
      "learning_rate": 6.000000000000001e-07,
      "loss": 1.1164,
      "step": 12
    },
    {
      "epoch": 1.4166666666666667,
      "grad_norm": 1.4632362127304077,
      "learning_rate": 6.5e-07,
      "loss": 1.1132,
      "step": 13
    },
    {
      "epoch": 1.5208333333333335,
      "grad_norm": 1.5239213705062866,
      "learning_rate": 7.000000000000001e-07,
      "loss": 1.1396,
      "step": 14
    },
    {
      "epoch": 1.625,
      "grad_norm": 1.3787108659744263,
      "learning_rate": 7.5e-07,
      "loss": 1.1137,
      "step": 15
    },
    {
      "epoch": 1.7291666666666665,
      "grad_norm": 1.3480465412139893,
      "learning_rate": 8.000000000000001e-07,
      "loss": 1.0939,
      "step": 16
    },
    {
      "epoch": 1.8333333333333335,
      "grad_norm": 1.2965192794799805,
      "learning_rate": 8.500000000000001e-07,
      "loss": 1.0985,
      "step": 17
    },
    {
      "epoch": 1.9375,
      "grad_norm": 1.2198084592819214,
      "learning_rate": 9.000000000000001e-07,
      "loss": 1.0928,
      "step": 18
    },
    {
      "epoch": 2.1041666666666665,
      "grad_norm": 2.0358712673187256,
      "learning_rate": 9.500000000000001e-07,
      "loss": 2.189,
      "step": 19
    },
    {
      "epoch": 2.2083333333333335,
      "grad_norm": 1.0546678304672241,
      "learning_rate": 1.0000000000000002e-06,
      "loss": 1.0608,
      "step": 20
    },
    {
      "epoch": 2.3125,
      "grad_norm": 1.016335129737854,
      "learning_rate": 1.0500000000000001e-06,
      "loss": 1.1053,
      "step": 21
    },
    {
      "epoch": 2.4166666666666665,
      "grad_norm": 1.0008971691131592,
      "learning_rate": 1.1e-06,
      "loss": 1.0673,
      "step": 22
    },
    {
      "epoch": 2.5208333333333335,
      "grad_norm": 0.9263089299201965,
      "learning_rate": 1.1500000000000002e-06,
      "loss": 1.0643,
      "step": 23
    },
    {
      "epoch": 2.625,
      "grad_norm": 0.8754690885543823,
      "learning_rate": 1.2000000000000002e-06,
      "loss": 1.0882,
      "step": 24
    },
    {
      "epoch": 2.7291666666666665,
      "grad_norm": 0.830348551273346,
      "learning_rate": 1.25e-06,
      "loss": 1.0666,
      "step": 25
    },
    {
      "epoch": 2.8333333333333335,
      "grad_norm": 0.7341996431350708,
      "learning_rate": 1.3e-06,
      "loss": 1.0602,
      "step": 26
    },
    {
      "epoch": 2.9375,
      "grad_norm": 0.6803218126296997,
      "learning_rate": 1.3500000000000002e-06,
      "loss": 1.0458,
      "step": 27
    },
    {
      "epoch": 3.1041666666666665,
      "grad_norm": 1.1280782222747803,
      "learning_rate": 1.4000000000000001e-06,
      "loss": 2.0944,
      "step": 28
    },
    {
      "epoch": 3.2083333333333335,
      "grad_norm": 0.5887872576713562,
      "learning_rate": 1.45e-06,
      "loss": 1.0524,
      "step": 29
    },
    {
      "epoch": 3.3125,
      "grad_norm": 0.5985190868377686,
      "learning_rate": 1.5e-06,
      "loss": 1.0297,
      "step": 30
    },
    {
      "epoch": 3.4166666666666665,
      "grad_norm": 0.5712792277336121,
      "learning_rate": 1.5500000000000002e-06,
      "loss": 1.027,
      "step": 31
    },
    {
      "epoch": 3.5208333333333335,
      "grad_norm": 0.5488481521606445,
      "learning_rate": 1.6000000000000001e-06,
      "loss": 1.0446,
      "step": 32
    },
    {
      "epoch": 3.625,
      "grad_norm": 0.5281394720077515,
      "learning_rate": 1.6500000000000003e-06,
      "loss": 1.0115,
      "step": 33
    },
    {
      "epoch": 3.7291666666666665,
      "grad_norm": 0.5257819890975952,
      "learning_rate": 1.7000000000000002e-06,
      "loss": 0.9916,
      "step": 34
    },
    {
      "epoch": 3.8333333333333335,
      "grad_norm": 0.5087182521820068,
      "learning_rate": 1.75e-06,
      "loss": 1.0156,
      "step": 35
    },
    {
      "epoch": 3.9375,
      "grad_norm": 0.47776201367378235,
      "learning_rate": 1.8000000000000001e-06,
      "loss": 0.996,
      "step": 36
    },
    {
      "epoch": 4.104166666666667,
      "grad_norm": 0.6453813314437866,
      "learning_rate": 1.85e-06,
      "loss": 2.0508,
      "step": 37
    },
    {
      "epoch": 4.208333333333333,
      "grad_norm": 0.43450039625167847,
      "learning_rate": 1.9000000000000002e-06,
      "loss": 0.9939,
      "step": 38
    },
    {
      "epoch": 4.3125,
      "grad_norm": 0.4001302123069763,
      "learning_rate": 1.9500000000000004e-06,
      "loss": 1.0002,
      "step": 39
    },
    {
      "epoch": 4.416666666666667,
      "grad_norm": 0.38437095284461975,
      "learning_rate": 2.0000000000000003e-06,
      "loss": 0.9686,
      "step": 40
    },
    {
      "epoch": 4.520833333333333,
      "grad_norm": 0.3577100336551666,
      "learning_rate": 2.05e-06,
      "loss": 1.0269,
      "step": 41
    },
    {
      "epoch": 4.625,
      "grad_norm": 0.3496115803718567,
      "learning_rate": 2.1000000000000002e-06,
      "loss": 1.0011,
      "step": 42
    },
    {
      "epoch": 4.729166666666667,
      "grad_norm": 0.3460167646408081,
      "learning_rate": 2.15e-06,
      "loss": 0.9796,
      "step": 43
    },
    {
      "epoch": 4.833333333333333,
      "grad_norm": 0.3256818354129791,
      "learning_rate": 2.2e-06,
      "loss": 0.9732,
      "step": 44
    },
    {
      "epoch": 4.9375,
      "grad_norm": 0.3234882950782776,
      "learning_rate": 2.25e-06,
      "loss": 0.964,
      "step": 45
    }
  ],
  "logging_steps": 1,
  "max_steps": 54,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 6,
  "save_steps": 9,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 3.4498201563797914e+17,
  "train_batch_size": 4,
  "trial_name": null,
  "trial_params": null
}