| { |
| "best_metric": null, |
| "best_model_checkpoint": null, |
| "epoch": 0.99609375, |
| "eval_steps": 500, |
| "global_step": 85, |
| "is_hyper_param_search": false, |
| "is_local_process_zero": true, |
| "is_world_process_zero": true, |
| "log_history": [ |
| { |
| "epoch": 0.01171875, |
| "grad_norm": 36.23282241821289, |
| "learning_rate": 5.0000000000000004e-08, |
| "loss": 2.3839, |
| "step": 1 |
| }, |
| { |
| "epoch": 0.0234375, |
| "grad_norm": 35.918636322021484, |
| "learning_rate": 1.0000000000000001e-07, |
| "loss": 2.3798, |
| "step": 2 |
| }, |
| { |
| "epoch": 0.03515625, |
| "grad_norm": 35.62618637084961, |
| "learning_rate": 1.5000000000000002e-07, |
| "loss": 2.386, |
| "step": 3 |
| }, |
| { |
| "epoch": 0.046875, |
| "grad_norm": 35.966087341308594, |
| "learning_rate": 2.0000000000000002e-07, |
| "loss": 2.3803, |
| "step": 4 |
| }, |
| { |
| "epoch": 0.05859375, |
| "grad_norm": 35.38177490234375, |
| "learning_rate": 2.5000000000000004e-07, |
| "loss": 2.3937, |
| "step": 5 |
| }, |
| { |
| "epoch": 0.0703125, |
| "grad_norm": 35.99677658081055, |
| "learning_rate": 3.0000000000000004e-07, |
| "loss": 2.3906, |
| "step": 6 |
| }, |
| { |
| "epoch": 0.08203125, |
| "grad_norm": 35.44341278076172, |
| "learning_rate": 3.5000000000000004e-07, |
| "loss": 2.3539, |
| "step": 7 |
| }, |
| { |
| "epoch": 0.09375, |
| "grad_norm": 35.300697326660156, |
| "learning_rate": 4.0000000000000003e-07, |
| "loss": 2.3459, |
| "step": 8 |
| }, |
| { |
| "epoch": 0.10546875, |
| "grad_norm": 34.092952728271484, |
| "learning_rate": 4.5000000000000003e-07, |
| "loss": 2.2959, |
| "step": 9 |
| }, |
| { |
| "epoch": 0.1171875, |
| "grad_norm": 34.46371841430664, |
| "learning_rate": 5.000000000000001e-07, |
| "loss": 2.2661, |
| "step": 10 |
| }, |
| { |
| "epoch": 0.12890625, |
| "grad_norm": 34.62260818481445, |
| "learning_rate": 5.5e-07, |
| "loss": 2.2918, |
| "step": 11 |
| }, |
| { |
| "epoch": 0.140625, |
| "grad_norm": 33.790374755859375, |
| "learning_rate": 6.000000000000001e-07, |
| "loss": 2.223, |
| "step": 12 |
| }, |
| { |
| "epoch": 0.15234375, |
| "grad_norm": 33.766536712646484, |
| "learning_rate": 6.5e-07, |
| "loss": 2.2267, |
| "step": 13 |
| }, |
| { |
| "epoch": 0.1640625, |
| "grad_norm": 33.894081115722656, |
| "learning_rate": 7.000000000000001e-07, |
| "loss": 2.1465, |
| "step": 14 |
| }, |
| { |
| "epoch": 0.17578125, |
| "grad_norm": 33.162452697753906, |
| "learning_rate": 7.5e-07, |
| "loss": 2.0495, |
| "step": 15 |
| }, |
| { |
| "epoch": 0.1875, |
| "grad_norm": 32.954341888427734, |
| "learning_rate": 8.000000000000001e-07, |
| "loss": 1.9627, |
| "step": 16 |
| }, |
| { |
| "epoch": 0.19921875, |
| "grad_norm": 33.96324157714844, |
| "learning_rate": 8.500000000000001e-07, |
| "loss": 1.8867, |
| "step": 17 |
| }, |
| { |
| "epoch": 0.2109375, |
| "grad_norm": 33.81139373779297, |
| "learning_rate": 9.000000000000001e-07, |
| "loss": 1.7752, |
| "step": 18 |
| }, |
| { |
| "epoch": 0.22265625, |
| "grad_norm": 34.87086868286133, |
| "learning_rate": 9.500000000000001e-07, |
| "loss": 1.6944, |
| "step": 19 |
| }, |
| { |
| "epoch": 0.234375, |
| "grad_norm": 34.84965133666992, |
| "learning_rate": 1.0000000000000002e-06, |
| "loss": 1.5707, |
| "step": 20 |
| }, |
| { |
| "epoch": 0.24609375, |
| "grad_norm": 35.227317810058594, |
| "learning_rate": 1.0500000000000001e-06, |
| "loss": 1.4369, |
| "step": 21 |
| }, |
| { |
| "epoch": 0.2578125, |
| "grad_norm": 34.91344451904297, |
| "learning_rate": 1.1e-06, |
| "loss": 1.3202, |
| "step": 22 |
| }, |
| { |
| "epoch": 0.26953125, |
| "grad_norm": 31.7376766204834, |
| "learning_rate": 1.1500000000000002e-06, |
| "loss": 1.1398, |
| "step": 23 |
| }, |
| { |
| "epoch": 0.28125, |
| "grad_norm": 30.24741554260254, |
| "learning_rate": 1.2000000000000002e-06, |
| "loss": 1.0421, |
| "step": 24 |
| }, |
| { |
| "epoch": 0.29296875, |
| "grad_norm": 28.292400360107422, |
| "learning_rate": 1.25e-06, |
| "loss": 0.8817, |
| "step": 25 |
| }, |
| { |
| "epoch": 0.3046875, |
| "grad_norm": 30.44672393798828, |
| "learning_rate": 1.3e-06, |
| "loss": 0.7073, |
| "step": 26 |
| }, |
| { |
| "epoch": 0.31640625, |
| "grad_norm": 29.416427612304688, |
| "learning_rate": 1.3500000000000002e-06, |
| "loss": 0.5444, |
| "step": 27 |
| }, |
| { |
| "epoch": 0.328125, |
| "grad_norm": 24.820096969604492, |
| "learning_rate": 1.4000000000000001e-06, |
| "loss": 0.4025, |
| "step": 28 |
| }, |
| { |
| "epoch": 0.33984375, |
| "grad_norm": 21.023277282714844, |
| "learning_rate": 1.45e-06, |
| "loss": 0.307, |
| "step": 29 |
| }, |
| { |
| "epoch": 0.3515625, |
| "grad_norm": 19.656967163085938, |
| "learning_rate": 1.5e-06, |
| "loss": 0.2151, |
| "step": 30 |
| }, |
| { |
| "epoch": 0.36328125, |
| "grad_norm": 14.91929817199707, |
| "learning_rate": 1.5500000000000002e-06, |
| "loss": 0.1448, |
| "step": 31 |
| }, |
| { |
| "epoch": 0.375, |
| "grad_norm": 5.083199977874756, |
| "learning_rate": 1.6000000000000001e-06, |
| "loss": 0.09, |
| "step": 32 |
| }, |
| { |
| "epoch": 0.38671875, |
| "grad_norm": 2.320681571960449, |
| "learning_rate": 1.6500000000000003e-06, |
| "loss": 0.0641, |
| "step": 33 |
| }, |
| { |
| "epoch": 0.3984375, |
| "grad_norm": 1.6233159303665161, |
| "learning_rate": 1.7000000000000002e-06, |
| "loss": 0.0584, |
| "step": 34 |
| }, |
| { |
| "epoch": 0.41015625, |
| "grad_norm": 1.6057201623916626, |
| "learning_rate": 1.75e-06, |
| "loss": 0.0626, |
| "step": 35 |
| }, |
| { |
| "epoch": 0.421875, |
| "grad_norm": 1.8360320329666138, |
| "learning_rate": 1.8000000000000001e-06, |
| "loss": 0.0563, |
| "step": 36 |
| }, |
| { |
| "epoch": 0.43359375, |
| "grad_norm": 1.736350178718567, |
| "learning_rate": 1.85e-06, |
| "loss": 0.0609, |
| "step": 37 |
| }, |
| { |
| "epoch": 0.4453125, |
| "grad_norm": 1.1473922729492188, |
| "learning_rate": 1.9000000000000002e-06, |
| "loss": 0.0541, |
| "step": 38 |
| }, |
| { |
| "epoch": 0.45703125, |
| "grad_norm": 1.1722168922424316, |
| "learning_rate": 1.9500000000000004e-06, |
| "loss": 0.0534, |
| "step": 39 |
| }, |
| { |
| "epoch": 0.46875, |
| "grad_norm": 1.356987714767456, |
| "learning_rate": 2.0000000000000003e-06, |
| "loss": 0.0496, |
| "step": 40 |
| }, |
| { |
| "epoch": 0.48046875, |
| "grad_norm": 0.8023216724395752, |
| "learning_rate": 2.05e-06, |
| "loss": 0.0527, |
| "step": 41 |
| }, |
| { |
| "epoch": 0.4921875, |
| "grad_norm": 0.9803515672683716, |
| "learning_rate": 2.1000000000000002e-06, |
| "loss": 0.0478, |
| "step": 42 |
| }, |
| { |
| "epoch": 0.50390625, |
| "grad_norm": 0.8733468651771545, |
| "learning_rate": 2.15e-06, |
| "loss": 0.052, |
| "step": 43 |
| }, |
| { |
| "epoch": 0.515625, |
| "grad_norm": 0.8213743567466736, |
| "learning_rate": 2.2e-06, |
| "loss": 0.0448, |
| "step": 44 |
| }, |
| { |
| "epoch": 0.52734375, |
| "grad_norm": 0.843189537525177, |
| "learning_rate": 2.25e-06, |
| "loss": 0.0498, |
| "step": 45 |
| }, |
| { |
| "epoch": 0.5390625, |
| "grad_norm": 0.8801079392433167, |
| "learning_rate": 2.3000000000000004e-06, |
| "loss": 0.0408, |
| "step": 46 |
| }, |
| { |
| "epoch": 0.55078125, |
| "grad_norm": 0.7131401300430298, |
| "learning_rate": 2.35e-06, |
| "loss": 0.0405, |
| "step": 47 |
| }, |
| { |
| "epoch": 0.5625, |
| "grad_norm": 0.8996126651763916, |
| "learning_rate": 2.4000000000000003e-06, |
| "loss": 0.0525, |
| "step": 48 |
| }, |
| { |
| "epoch": 0.57421875, |
| "grad_norm": 0.8606986403465271, |
| "learning_rate": 2.4500000000000003e-06, |
| "loss": 0.0438, |
| "step": 49 |
| }, |
| { |
| "epoch": 0.5859375, |
| "grad_norm": 0.6918051838874817, |
| "learning_rate": 2.5e-06, |
| "loss": 0.0394, |
| "step": 50 |
| }, |
| { |
| "epoch": 0.59765625, |
| "grad_norm": 0.6177802085876465, |
| "learning_rate": 2.55e-06, |
| "loss": 0.0387, |
| "step": 51 |
| }, |
| { |
| "epoch": 0.609375, |
| "grad_norm": 0.7042555809020996, |
| "learning_rate": 2.6e-06, |
| "loss": 0.0434, |
| "step": 52 |
| }, |
| { |
| "epoch": 0.62109375, |
| "grad_norm": 0.6537717580795288, |
| "learning_rate": 2.6500000000000005e-06, |
| "loss": 0.0396, |
| "step": 53 |
| }, |
| { |
| "epoch": 0.6328125, |
| "grad_norm": 0.7834082841873169, |
| "learning_rate": 2.7000000000000004e-06, |
| "loss": 0.0411, |
| "step": 54 |
| }, |
| { |
| "epoch": 0.64453125, |
| "grad_norm": 0.7287272810935974, |
| "learning_rate": 2.7500000000000004e-06, |
| "loss": 0.0408, |
| "step": 55 |
| }, |
| { |
| "epoch": 0.65625, |
| "grad_norm": 0.7186263203620911, |
| "learning_rate": 2.8000000000000003e-06, |
| "loss": 0.0394, |
| "step": 56 |
| }, |
| { |
| "epoch": 0.66796875, |
| "grad_norm": 0.7264899611473083, |
| "learning_rate": 2.85e-06, |
| "loss": 0.0427, |
| "step": 57 |
| }, |
| { |
| "epoch": 0.6796875, |
| "grad_norm": 0.7665618062019348, |
| "learning_rate": 2.9e-06, |
| "loss": 0.0368, |
| "step": 58 |
| }, |
| { |
| "epoch": 0.69140625, |
| "grad_norm": 0.7222962379455566, |
| "learning_rate": 2.95e-06, |
| "loss": 0.0412, |
| "step": 59 |
| }, |
| { |
| "epoch": 0.703125, |
| "grad_norm": 0.7061101794242859, |
| "learning_rate": 3e-06, |
| "loss": 0.0377, |
| "step": 60 |
| }, |
| { |
| "epoch": 0.71484375, |
| "grad_norm": 0.5724324584007263, |
| "learning_rate": 3.05e-06, |
| "loss": 0.0387, |
| "step": 61 |
| }, |
| { |
| "epoch": 0.7265625, |
| "grad_norm": 0.5535506010055542, |
| "learning_rate": 3.1000000000000004e-06, |
| "loss": 0.0403, |
| "step": 62 |
| }, |
| { |
| "epoch": 0.73828125, |
| "grad_norm": 0.6553678512573242, |
| "learning_rate": 3.1500000000000003e-06, |
| "loss": 0.0415, |
| "step": 63 |
| }, |
| { |
| "epoch": 0.75, |
| "grad_norm": 0.6137285828590393, |
| "learning_rate": 3.2000000000000003e-06, |
| "loss": 0.0383, |
| "step": 64 |
| }, |
| { |
| "epoch": 0.76171875, |
| "grad_norm": 0.5985754132270813, |
| "learning_rate": 3.2500000000000002e-06, |
| "loss": 0.0355, |
| "step": 65 |
| }, |
| { |
| "epoch": 0.7734375, |
| "grad_norm": 0.5903909802436829, |
| "learning_rate": 3.3000000000000006e-06, |
| "loss": 0.0374, |
| "step": 66 |
| }, |
| { |
| "epoch": 0.78515625, |
| "grad_norm": 0.5718765258789062, |
| "learning_rate": 3.3500000000000005e-06, |
| "loss": 0.0339, |
| "step": 67 |
| }, |
| { |
| "epoch": 0.796875, |
| "grad_norm": 0.6844965815544128, |
| "learning_rate": 3.4000000000000005e-06, |
| "loss": 0.0405, |
| "step": 68 |
| }, |
| { |
| "epoch": 0.80859375, |
| "grad_norm": 0.5959618091583252, |
| "learning_rate": 3.45e-06, |
| "loss": 0.0338, |
| "step": 69 |
| }, |
| { |
| "epoch": 0.8203125, |
| "grad_norm": 0.6095123291015625, |
| "learning_rate": 3.5e-06, |
| "loss": 0.0362, |
| "step": 70 |
| }, |
| { |
| "epoch": 0.83203125, |
| "grad_norm": 0.543708086013794, |
| "learning_rate": 3.5500000000000003e-06, |
| "loss": 0.0355, |
| "step": 71 |
| }, |
| { |
| "epoch": 0.84375, |
| "grad_norm": 0.6969983577728271, |
| "learning_rate": 3.6000000000000003e-06, |
| "loss": 0.0325, |
| "step": 72 |
| }, |
| { |
| "epoch": 0.85546875, |
| "grad_norm": 0.6022969484329224, |
| "learning_rate": 3.65e-06, |
| "loss": 0.0342, |
| "step": 73 |
| }, |
| { |
| "epoch": 0.8671875, |
| "grad_norm": 0.6262147426605225, |
| "learning_rate": 3.7e-06, |
| "loss": 0.0348, |
| "step": 74 |
| }, |
| { |
| "epoch": 0.87890625, |
| "grad_norm": 0.5729933381080627, |
| "learning_rate": 3.7500000000000005e-06, |
| "loss": 0.0318, |
| "step": 75 |
| }, |
| { |
| "epoch": 0.890625, |
| "grad_norm": 0.5846775770187378, |
| "learning_rate": 3.8000000000000005e-06, |
| "loss": 0.0309, |
| "step": 76 |
| }, |
| { |
| "epoch": 0.90234375, |
| "grad_norm": 0.6469219923019409, |
| "learning_rate": 3.85e-06, |
| "loss": 0.0324, |
| "step": 77 |
| }, |
| { |
| "epoch": 0.9140625, |
| "grad_norm": 0.6574859023094177, |
| "learning_rate": 3.900000000000001e-06, |
| "loss": 0.0325, |
| "step": 78 |
| }, |
| { |
| "epoch": 0.92578125, |
| "grad_norm": 0.5833832025527954, |
| "learning_rate": 3.95e-06, |
| "loss": 0.0232, |
| "step": 79 |
| }, |
| { |
| "epoch": 0.9375, |
| "grad_norm": 0.7503570318222046, |
| "learning_rate": 4.000000000000001e-06, |
| "loss": 0.0267, |
| "step": 80 |
| }, |
| { |
| "epoch": 0.94921875, |
| "grad_norm": 0.7181633114814758, |
| "learning_rate": 4.05e-06, |
| "loss": 0.0304, |
| "step": 81 |
| }, |
| { |
| "epoch": 0.9609375, |
| "grad_norm": 0.6477274298667908, |
| "learning_rate": 4.1e-06, |
| "loss": 0.0297, |
| "step": 82 |
| }, |
| { |
| "epoch": 0.97265625, |
| "grad_norm": 0.6768563389778137, |
| "learning_rate": 4.15e-06, |
| "loss": 0.0279, |
| "step": 83 |
| }, |
| { |
| "epoch": 0.984375, |
| "grad_norm": 0.7905837297439575, |
| "learning_rate": 4.2000000000000004e-06, |
| "loss": 0.0301, |
| "step": 84 |
| }, |
| { |
| "epoch": 0.99609375, |
| "grad_norm": 0.5576608777046204, |
| "learning_rate": 4.25e-06, |
| "loss": 0.0322, |
| "step": 85 |
| } |
| ], |
| "logging_steps": 1, |
| "max_steps": 510, |
| "num_input_tokens_seen": 0, |
| "num_train_epochs": 6, |
| "save_steps": 85, |
| "stateful_callbacks": { |
| "TrainerControl": { |
| "args": { |
| "should_epoch_stop": false, |
| "should_evaluate": false, |
| "should_log": false, |
| "should_save": true, |
| "should_training_stop": false |
| }, |
| "attributes": {} |
| } |
| }, |
| "total_flos": 2.147131192246272e+17, |
| "train_batch_size": 4, |
| "trial_name": null, |
| "trial_params": null |
| } |