{
  "best_global_step": null,
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 2.0,
  "eval_steps": 500,
  "global_step": 504,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.003968253968253968,
      "grad_norm": 633.4865112304688,
      "learning_rate": 3.846153846153847e-07,
      "loss": 2.502,
      "step": 1
    },
    {
      "epoch": 0.007936507936507936,
      "grad_norm": 785.3289794921875,
      "learning_rate": 7.692307692307694e-07,
      "loss": 2.6318,
      "step": 2
    },
    {
      "epoch": 0.011904761904761904,
      "grad_norm": 672.2508544921875,
      "learning_rate": 1.153846153846154e-06,
      "loss": 2.632,
      "step": 3
    },
    {
      "epoch": 0.015873015873015872,
      "grad_norm": 721.1585693359375,
      "learning_rate": 1.5384615384615387e-06,
      "loss": 2.7043,
      "step": 4
    },
    {
      "epoch": 0.01984126984126984,
      "grad_norm": 616.75634765625,
      "learning_rate": 1.9230769230769234e-06,
      "loss": 2.8056,
      "step": 5
    },
    {
      "epoch": 0.023809523809523808,
      "grad_norm": 924.5324096679688,
      "learning_rate": 2.307692307692308e-06,
      "loss": 2.4236,
      "step": 6
    },
    {
      "epoch": 0.027777777777777776,
      "grad_norm": 710.423828125,
      "learning_rate": 2.6923076923076923e-06,
      "loss": 2.2459,
      "step": 7
    },
    {
      "epoch": 0.031746031746031744,
      "grad_norm": 436.8348083496094,
      "learning_rate": 3.0769230769230774e-06,
      "loss": 1.6703,
      "step": 8
    },
    {
      "epoch": 0.03571428571428571,
      "grad_norm": 352.2169494628906,
      "learning_rate": 3.4615384615384617e-06,
      "loss": 1.5919,
      "step": 9
    },
    {
      "epoch": 0.03968253968253968,
      "grad_norm": 201.2940216064453,
      "learning_rate": 3.846153846153847e-06,
      "loss": 1.5429,
      "step": 10
    },
    {
      "epoch": 0.04365079365079365,
      "grad_norm": 150.095947265625,
      "learning_rate": 4.230769230769231e-06,
      "loss": 1.3502,
      "step": 11
    },
    {
      "epoch": 0.047619047619047616,
      "grad_norm": 263.4969787597656,
      "learning_rate": 4.615384615384616e-06,
      "loss": 1.293,
      "step": 12
    },
    {
      "epoch": 0.051587301587301584,
      "grad_norm": 91.6253662109375,
      "learning_rate": 5e-06,
      "loss": 1.2868,
      "step": 13
    },
    {
      "epoch": 0.05555555555555555,
      "grad_norm": 38.65793991088867,
      "learning_rate": 5.384615384615385e-06,
      "loss": 1.3058,
      "step": 14
    },
    {
      "epoch": 0.05952380952380952,
      "grad_norm": 9.5725736618042,
      "learning_rate": 5.769230769230769e-06,
      "loss": 1.1845,
      "step": 15
    },
    {
      "epoch": 0.06349206349206349,
      "grad_norm": 7.073225975036621,
      "learning_rate": 6.153846153846155e-06,
      "loss": 1.1134,
      "step": 16
    },
    {
      "epoch": 0.06746031746031746,
      "grad_norm": 5.2752685546875,
      "learning_rate": 6.538461538461539e-06,
      "loss": 1.0195,
      "step": 17
    },
    {
      "epoch": 0.07142857142857142,
      "grad_norm": 5.149896144866943,
      "learning_rate": 6.923076923076923e-06,
      "loss": 0.9872,
      "step": 18
    },
    {
      "epoch": 0.07539682539682539,
      "grad_norm": 4.404977798461914,
      "learning_rate": 7.307692307692308e-06,
      "loss": 0.9158,
      "step": 19
    },
    {
      "epoch": 0.07936507936507936,
      "grad_norm": 3.5225303173065186,
      "learning_rate": 7.692307692307694e-06,
      "loss": 0.8901,
      "step": 20
    },
    {
      "epoch": 0.08333333333333333,
      "grad_norm": 3.5606799125671387,
      "learning_rate": 8.076923076923077e-06,
      "loss": 0.8609,
      "step": 21
    },
    {
      "epoch": 0.0873015873015873,
      "grad_norm": 4.136419296264648,
      "learning_rate": 8.461538461538462e-06,
      "loss": 1.0483,
      "step": 22
    },
    {
      "epoch": 0.09126984126984126,
      "grad_norm": 4.8621907234191895,
      "learning_rate": 8.846153846153847e-06,
      "loss": 1.1016,
      "step": 23
    },
    {
      "epoch": 0.09523809523809523,
      "grad_norm": 4.0967607498168945,
      "learning_rate": 9.230769230769232e-06,
      "loss": 1.0202,
      "step": 24
    },
    {
      "epoch": 0.0992063492063492,
      "grad_norm": 3.8439812660217285,
      "learning_rate": 9.615384615384616e-06,
      "loss": 0.9234,
      "step": 25
    },
    {
      "epoch": 0.10317460317460317,
      "grad_norm": 2.8038058280944824,
      "learning_rate": 1e-05,
      "loss": 0.6207,
      "step": 26
    },
    {
      "epoch": 0.10714285714285714,
      "grad_norm": 2.593336582183838,
      "learning_rate": 9.999892010284378e-06,
      "loss": 0.6212,
      "step": 27
    },
    {
      "epoch": 0.1111111111111111,
      "grad_norm": 2.506002187728882,
      "learning_rate": 9.999568045802216e-06,
      "loss": 0.6497,
      "step": 28
    },
    {
      "epoch": 0.11507936507936507,
      "grad_norm": 1.821508526802063,
      "learning_rate": 9.999028120547456e-06,
      "loss": 0.5594,
      "step": 29
    },
    {
      "epoch": 0.11904761904761904,
      "grad_norm": 1.7968438863754272,
      "learning_rate": 9.99827225784264e-06,
      "loss": 0.6066,
      "step": 30
    },
    {
      "epoch": 0.12301587301587301,
      "grad_norm": 1.6199862957000732,
      "learning_rate": 9.99730049033793e-06,
      "loss": 0.5615,
      "step": 31
    },
    {
      "epoch": 0.12698412698412698,
      "grad_norm": 1.6812413930892944,
      "learning_rate": 9.996112860009689e-06,
      "loss": 0.5753,
      "step": 32
    },
    {
      "epoch": 0.13095238095238096,
      "grad_norm": 1.7352900505065918,
      "learning_rate": 9.994709418158652e-06,
      "loss": 0.5316,
      "step": 33
    },
    {
      "epoch": 0.1349206349206349,
      "grad_norm": 1.8648426532745361,
      "learning_rate": 9.993090225407743e-06,
      "loss": 0.6222,
      "step": 34
    },
    {
      "epoch": 0.1388888888888889,
      "grad_norm": 1.6306684017181396,
      "learning_rate": 9.991255351699422e-06,
      "loss": 0.5495,
      "step": 35
    },
    {
      "epoch": 0.14285714285714285,
      "grad_norm": 1.7700245380401611,
      "learning_rate": 9.98920487629269e-06,
      "loss": 0.6285,
      "step": 36
    },
    {
      "epoch": 0.14682539682539683,
      "grad_norm": 1.8033316135406494,
      "learning_rate": 9.986938887759643e-06,
      "loss": 0.6265,
      "step": 37
    },
    {
      "epoch": 0.15079365079365079,
      "grad_norm": 1.8331193923950195,
      "learning_rate": 9.98445748398167e-06,
      "loss": 0.6142,
      "step": 38
    },
    {
      "epoch": 0.15476190476190477,
      "grad_norm": 1.8912376165390015,
      "learning_rate": 9.981760772145201e-06,
      "loss": 0.6441,
      "step": 39
    },
    {
      "epoch": 0.15873015873015872,
      "grad_norm": 1.9558063745498657,
      "learning_rate": 9.978848868737099e-06,
      "loss": 0.6511,
      "step": 40
    },
    {
      "epoch": 0.1626984126984127,
      "grad_norm": 1.9705612659454346,
      "learning_rate": 9.975721899539607e-06,
      "loss": 0.6478,
      "step": 41
    },
    {
      "epoch": 0.16666666666666666,
      "grad_norm": 2.212285041809082,
      "learning_rate": 9.972379999624935e-06,
      "loss": 0.6926,
      "step": 42
    },
    {
      "epoch": 0.17063492063492064,
      "grad_norm": 2.1685657501220703,
      "learning_rate": 9.968823313349412e-06,
      "loss": 0.6087,
      "step": 43
    },
    {
      "epoch": 0.1746031746031746,
      "grad_norm": 2.1909096240997314,
      "learning_rate": 9.96505199434725e-06,
      "loss": 0.6221,
      "step": 44
    },
    {
      "epoch": 0.17857142857142858,
      "grad_norm": 2.326509952545166,
      "learning_rate": 9.961066205523917e-06,
      "loss": 0.6594,
      "step": 45
    },
    {
      "epoch": 0.18253968253968253,
      "grad_norm": 2.343661308288574,
      "learning_rate": 9.956866119049095e-06,
      "loss": 0.669,
      "step": 46
    },
    {
      "epoch": 0.1865079365079365,
      "grad_norm": 2.516611099243164,
      "learning_rate": 9.952451916349242e-06,
      "loss": 0.6106,
      "step": 47
    },
    {
      "epoch": 0.19047619047619047,
      "grad_norm": 2.2367687225341797,
      "learning_rate": 9.947823788099754e-06,
      "loss": 0.5637,
      "step": 48
    },
    {
      "epoch": 0.19444444444444445,
      "grad_norm": 2.2247214317321777,
      "learning_rate": 9.942981934216731e-06,
      "loss": 0.5675,
      "step": 49
    },
    {
      "epoch": 0.1984126984126984,
      "grad_norm": 2.7175707817077637,
      "learning_rate": 9.937926563848345e-06,
      "loss": 0.6192,
      "step": 50
    },
    {
      "epoch": 0.20238095238095238,
      "grad_norm": 2.350743532180786,
      "learning_rate": 9.9326578953658e-06,
      "loss": 0.5041,
      "step": 51
    },
    {
      "epoch": 0.20634920634920634,
      "grad_norm": 2.33209228515625,
      "learning_rate": 9.9271761563539e-06,
      "loss": 0.4875,
      "step": 52
    },
    {
      "epoch": 0.21031746031746032,
      "grad_norm": 2.323399066925049,
      "learning_rate": 9.921481583601218e-06,
      "loss": 0.5545,
      "step": 53
    },
    {
      "epoch": 0.21428571428571427,
      "grad_norm": 2.38028621673584,
      "learning_rate": 9.915574423089872e-06,
      "loss": 0.4818,
      "step": 54
    },
    {
      "epoch": 0.21825396825396826,
      "grad_norm": 2.376560926437378,
      "learning_rate": 9.909454929984894e-06,
      "loss": 0.4909,
      "step": 55
    },
    {
      "epoch": 0.2222222222222222,
      "grad_norm": 2.4907236099243164,
      "learning_rate": 9.903123368623216e-06,
      "loss": 0.4707,
      "step": 56
    },
    {
      "epoch": 0.2261904761904762,
      "grad_norm": 2.5726191997528076,
      "learning_rate": 9.896580012502238e-06,
      "loss": 0.56,
      "step": 57
    },
    {
      "epoch": 0.23015873015873015,
      "grad_norm": 2.649724245071411,
      "learning_rate": 9.889825144268029e-06,
      "loss": 0.4891,
      "step": 58
    },
    {
      "epoch": 0.23412698412698413,
      "grad_norm": 2.672144889831543,
      "learning_rate": 9.882859055703109e-06,
      "loss": 0.4723,
      "step": 59
    },
    {
      "epoch": 0.23809523809523808,
      "grad_norm": 2.6838862895965576,
      "learning_rate": 9.875682047713847e-06,
      "loss": 0.4901,
      "step": 60
    },
    {
      "epoch": 0.24206349206349206,
      "grad_norm": 3.0208964347839355,
      "learning_rate": 9.868294430317464e-06,
      "loss": 0.532,
      "step": 61
    },
    {
      "epoch": 0.24603174603174602,
      "grad_norm": 2.9846720695495605,
      "learning_rate": 9.860696522628638e-06,
      "loss": 0.5285,
      "step": 62
    },
    {
      "epoch": 0.25,
      "grad_norm": 3.075972080230713,
      "learning_rate": 9.852888652845729e-06,
      "loss": 0.5347,
      "step": 63
    },
    {
      "epoch": 0.25396825396825395,
      "grad_norm": 3.26065993309021,
      "learning_rate": 9.84487115823659e-06,
      "loss": 0.5147,
      "step": 64
    },
    {
      "epoch": 0.25793650793650796,
      "grad_norm": 3.3229222297668457,
      "learning_rate": 9.836644385124006e-06,
      "loss": 0.505,
      "step": 65
    },
    {
      "epoch": 0.2619047619047619,
      "grad_norm": 3.4491543769836426,
      "learning_rate": 9.828208688870736e-06,
      "loss": 0.5394,
      "step": 66
    },
    {
      "epoch": 0.26587301587301587,
      "grad_norm": 3.734246253967285,
      "learning_rate": 9.81956443386415e-06,
      "loss": 0.5753,
      "step": 67
    },
    {
      "epoch": 0.2698412698412698,
      "grad_norm": 3.7881813049316406,
      "learning_rate": 9.810711993500506e-06,
      "loss": 0.4866,
      "step": 68
    },
    {
      "epoch": 0.27380952380952384,
      "grad_norm": 4.006939888000488,
      "learning_rate": 9.801651750168815e-06,
      "loss": 0.4963,
      "step": 69
    },
    {
      "epoch": 0.2777777777777778,
      "grad_norm": 4.0254058837890625,
      "learning_rate": 9.792384095234312e-06,
      "loss": 0.4843,
      "step": 70
    },
    {
      "epoch": 0.28174603174603174,
      "grad_norm": 4.199477672576904,
      "learning_rate": 9.782909429021568e-06,
      "loss": 0.4745,
      "step": 71
    },
    {
      "epoch": 0.2857142857142857,
      "grad_norm": 3.7433111667633057,
      "learning_rate": 9.773228160797187e-06,
      "loss": 0.4808,
      "step": 72
    },
    {
      "epoch": 0.2896825396825397,
      "grad_norm": 3.47202205657959,
      "learning_rate": 9.76334070875213e-06,
      "loss": 0.4751,
      "step": 73
    },
    {
      "epoch": 0.29365079365079366,
      "grad_norm": 3.6417016983032227,
      "learning_rate": 9.753247499983649e-06,
      "loss": 0.4286,
      "step": 74
    },
    {
      "epoch": 0.2976190476190476,
      "grad_norm": 3.988434076309204,
      "learning_rate": 9.742948970476845e-06,
      "loss": 0.4307,
      "step": 75
    },
    {
      "epoch": 0.30158730158730157,
      "grad_norm": 3.0164480209350586,
      "learning_rate": 9.732445565085823e-06,
      "loss": 0.3589,
      "step": 76
    },
    {
      "epoch": 0.3055555555555556,
      "grad_norm": 3.287938356399536,
      "learning_rate": 9.721737737514492e-06,
      "loss": 0.333,
      "step": 77
    },
    {
      "epoch": 0.30952380952380953,
      "grad_norm": 4.00580358505249,
      "learning_rate": 9.71082595029695e-06,
      "loss": 0.38,
      "step": 78
    },
    {
      "epoch": 0.3134920634920635,
      "grad_norm": 3.4738285541534424,
      "learning_rate": 9.699710674777519e-06,
      "loss": 0.3405,
      "step": 79
    },
    {
      "epoch": 0.31746031746031744,
      "grad_norm": 3.5850601196289062,
      "learning_rate": 9.688392391090374e-06,
      "loss": 0.3996,
      "step": 80
    },
    {
      "epoch": 0.32142857142857145,
      "grad_norm": 3.5362892150878906,
      "learning_rate": 9.676871588138812e-06,
      "loss": 0.3523,
      "step": 81
    },
    {
      "epoch": 0.3253968253968254,
      "grad_norm": 3.584883451461792,
      "learning_rate": 9.665148763574123e-06,
      "loss": 0.3574,
      "step": 82
    },
    {
      "epoch": 0.32936507936507936,
      "grad_norm": 3.612873077392578,
      "learning_rate": 9.653224423774107e-06,
      "loss": 0.3392,
      "step": 83
    },
    {
      "epoch": 0.3333333333333333,
      "grad_norm": 3.6358349323272705,
      "learning_rate": 9.64109908382119e-06,
      "loss": 0.3219,
      "step": 84
    },
    {
      "epoch": 0.3373015873015873,
      "grad_norm": 3.6871836185455322,
      "learning_rate": 9.628773267480177e-06,
      "loss": 0.3899,
      "step": 85
    },
    {
      "epoch": 0.3412698412698413,
      "grad_norm": 3.712912082672119,
      "learning_rate": 9.616247507175624e-06,
      "loss": 0.3664,
      "step": 86
    },
    {
      "epoch": 0.34523809523809523,
      "grad_norm": 3.7706570625305176,
      "learning_rate": 9.603522343968852e-06,
      "loss": 0.383,
      "step": 87
    },
    {
      "epoch": 0.3492063492063492,
      "grad_norm": 3.76802921295166,
      "learning_rate": 9.590598327534563e-06,
      "loss": 0.3578,
      "step": 88
    },
    {
      "epoch": 0.3531746031746032,
      "grad_norm": 3.82389497756958,
      "learning_rate": 9.577476016137105e-06,
      "loss": 0.3338,
      "step": 89
    },
    {
      "epoch": 0.35714285714285715,
      "grad_norm": 3.8095247745513916,
      "learning_rate": 9.56415597660634e-06,
      "loss": 0.3635,
      "step": 90
    },
    {
      "epoch": 0.3611111111111111,
      "grad_norm": 3.9774465560913086,
      "learning_rate": 9.550638784313187e-06,
      "loss": 0.355,
      "step": 91
    },
    {
      "epoch": 0.36507936507936506,
      "grad_norm": 4.000519752502441,
      "learning_rate": 9.536925023144742e-06,
      "loss": 0.3222,
      "step": 92
    },
    {
      "epoch": 0.36904761904761907,
      "grad_norm": 4.113778591156006,
      "learning_rate": 9.523015285479076e-06,
      "loss": 0.3696,
      "step": 93
    },
    {
      "epoch": 0.373015873015873,
      "grad_norm": 4.078273296356201,
      "learning_rate": 9.508910172159635e-06,
      "loss": 0.3314,
      "step": 94
    },
    {
      "epoch": 0.376984126984127,
      "grad_norm": 3.9479820728302,
      "learning_rate": 9.494610292469287e-06,
      "loss": 0.312,
      "step": 95
    },
    {
      "epoch": 0.38095238095238093,
      "grad_norm": 4.088863849639893,
      "learning_rate": 9.48011626410401e-06,
      "loss": 0.3466,
      "step": 96
    },
    {
      "epoch": 0.38492063492063494,
      "grad_norm": 3.4150962829589844,
      "learning_rate": 9.465428713146206e-06,
      "loss": 0.3269,
      "step": 97
    },
    {
      "epoch": 0.3888888888888889,
      "grad_norm": 3.144921064376831,
      "learning_rate": 9.450548274037652e-06,
      "loss": 0.3169,
      "step": 98
    },
    {
      "epoch": 0.39285714285714285,
      "grad_norm": 3.206676959991455,
      "learning_rate": 9.435475589552107e-06,
      "loss": 0.3008,
      "step": 99
    },
    {
      "epoch": 0.3968253968253968,
      "grad_norm": 3.576977491378784,
      "learning_rate": 9.420211310767534e-06,
      "loss": 0.3241,
      "step": 100
    },
    {
      "epoch": 0.4007936507936508,
      "grad_norm": 2.5074713230133057,
      "learning_rate": 9.40475609703798e-06,
      "loss": 0.2553,
      "step": 101
    },
    {
      "epoch": 0.40476190476190477,
      "grad_norm": 2.8422958850860596,
      "learning_rate": 9.389110615965102e-06,
      "loss": 0.2696,
      "step": 102
    },
    {
      "epoch": 0.4087301587301587,
      "grad_norm": 2.709233045578003,
      "learning_rate": 9.37327554336932e-06,
      "loss": 0.2104,
      "step": 103
    },
    {
      "epoch": 0.4126984126984127,
      "grad_norm": 2.8244667053222656,
      "learning_rate": 9.35725156326063e-06,
      "loss": 0.2484,
      "step": 104
    },
    {
      "epoch": 0.4166666666666667,
      "grad_norm": 2.7401509284973145,
      "learning_rate": 9.341039367809056e-06,
      "loss": 0.2704,
      "step": 105
    },
    {
      "epoch": 0.42063492063492064,
      "grad_norm": 2.7700276374816895,
      "learning_rate": 9.324639657314742e-06,
      "loss": 0.2757,
      "step": 106
    },
    {
      "epoch": 0.4246031746031746,
      "grad_norm": 2.6525895595550537,
      "learning_rate": 9.308053140177722e-06,
      "loss": 0.2334,
      "step": 107
    },
    {
      "epoch": 0.42857142857142855,
      "grad_norm": 2.696650743484497,
      "learning_rate": 9.291280532867301e-06,
      "loss": 0.2321,
      "step": 108
    },
    {
      "epoch": 0.43253968253968256,
      "grad_norm": 2.552873134613037,
      "learning_rate": 9.27432255989112e-06,
      "loss": 0.2591,
      "step": 109
    },
    {
      "epoch": 0.4365079365079365,
      "grad_norm": 2.5129666328430176,
      "learning_rate": 9.257179953763846e-06,
      "loss": 0.2466,
      "step": 110
    },
    {
      "epoch": 0.44047619047619047,
      "grad_norm": 2.4460017681121826,
      "learning_rate": 9.239853454975548e-06,
      "loss": 0.2433,
      "step": 111
    },
    {
      "epoch": 0.4444444444444444,
      "grad_norm": 2.297093629837036,
      "learning_rate": 9.222343811959694e-06,
      "loss": 0.2558,
      "step": 112
    },
    {
      "epoch": 0.44841269841269843,
      "grad_norm": 2.3328933715820312,
      "learning_rate": 9.204651781060832e-06,
      "loss": 0.211,
      "step": 113
    },
    {
      "epoch": 0.4523809523809524,
      "grad_norm": 2.244962453842163,
      "learning_rate": 9.186778126501916e-06,
      "loss": 0.2255,
      "step": 114
    },
    {
      "epoch": 0.45634920634920634,
      "grad_norm": 2.136331796646118,
      "learning_rate": 9.168723620351298e-06,
      "loss": 0.2348,
      "step": 115
    },
    {
      "epoch": 0.4603174603174603,
      "grad_norm": 2.0183467864990234,
      "learning_rate": 9.150489042489368e-06,
      "loss": 0.2064,
      "step": 116
    },
    {
      "epoch": 0.4642857142857143,
      "grad_norm": 1.9812227487564087,
      "learning_rate": 9.13207518057488e-06,
      "loss": 0.2331,
      "step": 117
    },
    {
      "epoch": 0.46825396825396826,
      "grad_norm": 1.797218918800354,
      "learning_rate": 9.113482830010918e-06,
      "loss": 0.1955,
      "step": 118
    },
    {
      "epoch": 0.4722222222222222,
      "grad_norm": 1.8273296356201172,
      "learning_rate": 9.094712793910541e-06,
      "loss": 0.2163,
      "step": 119
    },
    {
      "epoch": 0.47619047619047616,
      "grad_norm": 1.7825205326080322,
      "learning_rate": 9.075765883062093e-06,
      "loss": 0.2155,
      "step": 120
    },
    {
      "epoch": 0.4801587301587302,
      "grad_norm": 1.6554731130599976,
      "learning_rate": 9.056642915894182e-06,
      "loss": 0.2091,
      "step": 121
    },
    {
      "epoch": 0.48412698412698413,
      "grad_norm": 1.5502135753631592,
      "learning_rate": 9.037344718440321e-06,
      "loss": 0.2302,
      "step": 122
    },
    {
      "epoch": 0.4880952380952381,
      "grad_norm": 1.3913822174072266,
      "learning_rate": 9.017872124303255e-06,
      "loss": 0.2309,
      "step": 123
    },
    {
      "epoch": 0.49206349206349204,
      "grad_norm": 1.4674021005630493,
      "learning_rate": 8.99822597461894e-06,
      "loss": 0.1834,
      "step": 124
    },
    {
      "epoch": 0.49603174603174605,
      "grad_norm": 1.5195338726043701,
      "learning_rate": 8.978407118020226e-06,
      "loss": 0.1963,
      "step": 125
    },
    {
      "epoch": 0.5,
      "grad_norm": 1.2685987949371338,
      "learning_rate": 8.958416410600188e-06,
      "loss": 0.2093,
      "step": 126
    },
    {
      "epoch": 0.503968253968254,
      "grad_norm": 1.0815094709396362,
      "learning_rate": 8.938254715875152e-06,
      "loss": 0.1725,
      "step": 127
    },
    {
      "epoch": 0.5079365079365079,
      "grad_norm": 1.0909056663513184,
      "learning_rate": 8.917922904747385e-06,
      "loss": 0.1647,
      "step": 128
    },
    {
      "epoch": 0.5119047619047619,
      "grad_norm": 1.3164684772491455,
      "learning_rate": 8.897421855467491e-06,
      "loss": 0.2161,
      "step": 129
    },
    {
      "epoch": 0.5158730158730159,
      "grad_norm": 1.3709324598312378,
      "learning_rate": 8.876752453596462e-06,
      "loss": 0.2525,
      "step": 130
    },
    {
      "epoch": 0.5198412698412699,
      "grad_norm": 1.2522313594818115,
      "learning_rate": 8.85591559196743e-06,
      "loss": 0.2302,
      "step": 131
    },
    {
      "epoch": 0.5238095238095238,
      "grad_norm": 1.1471476554870605,
      "learning_rate": 8.834912170647102e-06,
      "loss": 0.2056,
      "step": 132
    },
    {
      "epoch": 0.5277777777777778,
      "grad_norm": 1.0447298288345337,
      "learning_rate": 8.813743096896872e-06,
      "loss": 0.187,
      "step": 133
    },
    {
      "epoch": 0.5317460317460317,
      "grad_norm": 1.077537178993225,
      "learning_rate": 8.792409285133644e-06,
      "loss": 0.1912,
      "step": 134
    },
    {
      "epoch": 0.5357142857142857,
      "grad_norm": 1.0684843063354492,
      "learning_rate": 8.770911656890325e-06,
      "loss": 0.2147,
      "step": 135
    },
    {
      "epoch": 0.5396825396825397,
      "grad_norm": 1.1531140804290771,
      "learning_rate": 8.749251140776016e-06,
      "loss": 0.202,
      "step": 136
    },
    {
      "epoch": 0.5436507936507936,
      "grad_norm": 1.050750732421875,
      "learning_rate": 8.727428672435911e-06,
      "loss": 0.2023,
      "step": 137
    },
    {
      "epoch": 0.5476190476190477,
      "grad_norm": 1.1867473125457764,
      "learning_rate": 8.705445194510868e-06,
      "loss": 0.1952,
      "step": 138
    },
    {
      "epoch": 0.5515873015873016,
      "grad_norm": 1.2326518297195435,
      "learning_rate": 8.6833016565967e-06,
      "loss": 0.2274,
      "step": 139
    },
    {
      "epoch": 0.5555555555555556,
      "grad_norm": 1.1424859762191772,
      "learning_rate": 8.660999015203152e-06,
      "loss": 0.1861,
      "step": 140
    },
    {
      "epoch": 0.5595238095238095,
      "grad_norm": 1.2383249998092651,
      "learning_rate": 8.638538233712581e-06,
      "loss": 0.202,
      "step": 141
    },
    {
      "epoch": 0.5634920634920635,
      "grad_norm": 1.1419405937194824,
      "learning_rate": 8.615920282338355e-06,
      "loss": 0.2067,
      "step": 142
    },
    {
      "epoch": 0.5674603174603174,
      "grad_norm": 1.3676294088363647,
      "learning_rate": 8.593146138082925e-06,
      "loss": 0.2578,
      "step": 143
    },
    {
      "epoch": 0.5714285714285714,
      "grad_norm": 1.1975699663162231,
      "learning_rate": 8.570216784695637e-06,
      "loss": 0.1973,
      "step": 144
    },
    {
      "epoch": 0.5753968253968254,
      "grad_norm": 1.2448006868362427,
      "learning_rate": 8.54713321263023e-06,
      "loss": 0.1908,
      "step": 145
    },
    {
      "epoch": 0.5793650793650794,
      "grad_norm": 1.2053043842315674,
      "learning_rate": 8.52389641900206e-06,
      "loss": 0.1904,
      "step": 146
    },
    {
      "epoch": 0.5833333333333334,
      "grad_norm": 1.4186623096466064,
      "learning_rate": 8.50050740754502e-06,
      "loss": 0.2368,
      "step": 147
    },
    {
      "epoch": 0.5873015873015873,
      "grad_norm": 1.3027533292770386,
      "learning_rate": 8.476967188568187e-06,
      "loss": 0.1953,
      "step": 148
    },
    {
      "epoch": 0.5912698412698413,
      "grad_norm": 1.2131279706954956,
      "learning_rate": 8.453276778912186e-06,
      "loss": 0.2061,
      "step": 149
    },
    {
      "epoch": 0.5952380952380952,
      "grad_norm": 1.123726487159729,
      "learning_rate": 8.429437201905254e-06,
      "loss": 0.1692,
      "step": 150
    },
    {
      "epoch": 0.5992063492063492,
      "grad_norm": 1.117845892906189,
      "learning_rate": 8.405449487319049e-06,
      "loss": 0.1735,
      "step": 151
    },
    {
      "epoch": 0.6031746031746031,
      "grad_norm": 1.2137281894683838,
      "learning_rate": 8.38131467132416e-06,
      "loss": 0.1605,
      "step": 152
    },
    {
      "epoch": 0.6071428571428571,
      "grad_norm": 1.270015001296997,
      "learning_rate": 8.357033796445356e-06,
      "loss": 0.1807,
      "step": 153
    },
    {
      "epoch": 0.6111111111111112,
      "grad_norm": 1.173836588859558,
      "learning_rate": 8.332607911516545e-06,
      "loss": 0.1536,
      "step": 154
    },
    {
      "epoch": 0.6150793650793651,
      "grad_norm": 1.3719104528427124,
      "learning_rate": 8.308038071635475e-06,
      "loss": 0.2692,
      "step": 155
    },
    {
      "epoch": 0.6190476190476191,
      "grad_norm": 1.2437547445297241,
      "learning_rate": 8.283325338118154e-06,
      "loss": 0.1748,
      "step": 156
    },
    {
      "epoch": 0.623015873015873,
      "grad_norm": 1.3231357336044312,
      "learning_rate": 8.258470778453005e-06,
      "loss": 0.2046,
      "step": 157
    },
    {
      "epoch": 0.626984126984127,
      "grad_norm": 1.3754706382751465,
      "learning_rate": 8.233475466254766e-06,
      "loss": 0.1728,
      "step": 158
    },
    {
      "epoch": 0.6309523809523809,
      "grad_norm": 1.4062494039535522,
      "learning_rate": 8.208340481218094e-06,
      "loss": 0.1806,
      "step": 159
    },
    {
      "epoch": 0.6349206349206349,
      "grad_norm": 1.3490930795669556,
      "learning_rate": 8.183066909070946e-06,
      "loss": 0.1637,
      "step": 160
    },
    {
      "epoch": 0.6388888888888888,
      "grad_norm": 1.3720214366912842,
      "learning_rate": 8.15765584152767e-06,
      "loss": 0.1745,
      "step": 161
    },
    {
      "epoch": 0.6428571428571429,
      "grad_norm": 1.3661401271820068,
      "learning_rate": 8.132108376241849e-06,
      "loss": 0.1515,
      "step": 162
    },
    {
      "epoch": 0.6468253968253969,
      "grad_norm": 1.402787446975708,
      "learning_rate": 8.106425616758886e-06,
      "loss": 0.2036,
      "step": 163
    },
    {
      "epoch": 0.6507936507936508,
      "grad_norm": 1.5093472003936768,
      "learning_rate": 8.08060867246834e-06,
      "loss": 0.2195,
      "step": 164
    },
    {
      "epoch": 0.6547619047619048,
      "grad_norm": 1.5254817008972168,
      "learning_rate": 8.054658658555998e-06,
      "loss": 0.2016,
      "step": 165
    },
    {
      "epoch": 0.6587301587301587,
      "grad_norm": 1.4287675619125366,
      "learning_rate": 8.028576695955711e-06,
      "loss": 0.2022,
      "step": 166
    },
    {
      "epoch": 0.6626984126984127,
      "grad_norm": 1.4379841089248657,
      "learning_rate": 8.002363911300966e-06,
      "loss": 0.176,
      "step": 167
    },
    {
      "epoch": 0.6666666666666666,
      "grad_norm": 1.4868255853652954,
      "learning_rate": 7.976021436876232e-06,
      "loss": 0.1625,
      "step": 168
    },
    {
      "epoch": 0.6706349206349206,
      "grad_norm": 1.5931072235107422,
      "learning_rate": 7.949550410568033e-06,
      "loss": 0.1766,
      "step": 169
    },
    {
      "epoch": 0.6746031746031746,
      "grad_norm": 1.5761399269104004,
      "learning_rate": 7.92295197581581e-06,
      "loss": 0.1537,
      "step": 170
    },
    {
      "epoch": 0.6785714285714286,
      "grad_norm": 1.6826977729797363,
      "learning_rate": 7.89622728156253e-06,
      "loss": 0.169,
      "step": 171
    },
    {
      "epoch": 0.6825396825396826,
      "grad_norm": 1.6579521894454956,
      "learning_rate": 7.869377482205042e-06,
      "loss": 0.2257,
      "step": 172
    },
    {
      "epoch": 0.6865079365079365,
      "grad_norm": 1.3506243228912354,
      "learning_rate": 7.842403737544226e-06,
      "loss": 0.1705,
      "step": 173
    },
    {
      "epoch": 0.6904761904761905,
      "grad_norm": 1.3154139518737793,
      "learning_rate": 7.815307212734888e-06,
      "loss": 0.1368,
      "step": 174
    },
    {
      "epoch": 0.6944444444444444,
      "grad_norm": 1.386035680770874,
      "learning_rate": 7.788089078235432e-06,
      "loss": 0.1778,
      "step": 175
    },
    {
      "epoch": 0.6984126984126984,
      "grad_norm": 1.3075768947601318,
      "learning_rate": 7.7607505097573e-06,
      "loss": 0.1803,
      "step": 176
    },
    {
      "epoch": 0.7023809523809523,
      "grad_norm": 1.1526188850402832,
      "learning_rate": 7.733292688214182e-06,
      "loss": 0.1409,
      "step": 177
    },
    {
      "epoch": 0.7063492063492064,
      "grad_norm": 1.2490124702453613,
      "learning_rate": 7.705716799671019e-06,
      "loss": 0.1736,
      "step": 178
    },
    {
      "epoch": 0.7103174603174603,
      "grad_norm": 1.2936805486679077,
      "learning_rate": 7.678024035292757e-06,
      "loss": 0.1453,
      "step": 179
    },
    {
      "epoch": 0.7142857142857143,
      "grad_norm": 1.1860262155532837,
      "learning_rate": 7.650215591292888e-06,
      "loss": 0.1349,
      "step": 180
    },
    {
      "epoch": 0.7182539682539683,
      "grad_norm": 1.2360485792160034,
      "learning_rate": 7.622292668881805e-06,
      "loss": 0.1694,
      "step": 181
    },
    {
      "epoch": 0.7222222222222222,
      "grad_norm": 1.190096139907837,
      "learning_rate": 7.594256474214883e-06,
      "loss": 0.1442,
      "step": 182
    },
    {
      "epoch": 0.7261904761904762,
      "grad_norm": 1.2557333707809448,
      "learning_rate": 7.566108218340399e-06,
      "loss": 0.1278,
      "step": 183
    },
    {
      "epoch": 0.7301587301587301,
      "grad_norm": 1.2672311067581177,
      "learning_rate": 7.537849117147212e-06,
      "loss": 0.1691,
      "step": 184
    },
    {
      "epoch": 0.7341269841269841,
      "grad_norm": 1.2038776874542236,
      "learning_rate": 7.509480391312243e-06,
      "loss": 0.1149,
      "step": 185
    },
    {
      "epoch": 0.7380952380952381,
      "grad_norm": 1.3333827257156372,
      "learning_rate": 7.481003266247745e-06,
      "loss": 0.1827,
      "step": 186
    },
    {
      "epoch": 0.7420634920634921,
      "grad_norm": 1.3675445318222046,
      "learning_rate": 7.452418972048372e-06,
      "loss": 0.1697,
      "step": 187
    },
    {
      "epoch": 0.746031746031746,
      "grad_norm": 1.1908029317855835,
      "learning_rate": 7.4237287434380485e-06,
      "loss": 0.1705,
      "step": 188
    },
    {
      "epoch": 0.75,
      "grad_norm": 1.3662751913070679,
      "learning_rate": 7.394933819716625e-06,
      "loss": 0.1796,
      "step": 189
    },
    {
      "epoch": 0.753968253968254,
      "grad_norm": 1.10394287109375,
      "learning_rate": 7.366035444706346e-06,
      "loss": 0.1332,
      "step": 190
    },
    {
      "epoch": 0.7579365079365079,
      "grad_norm": 1.2990213632583618,
      "learning_rate": 7.337034866698138e-06,
      "loss": 0.1511,
      "step": 191
    },
    {
      "epoch": 0.7619047619047619,
      "grad_norm": 1.3027970790863037,
      "learning_rate": 7.307933338397667e-06,
      "loss": 0.2062,
      "step": 192
    },
    {
      "epoch": 0.7658730158730159,
      "grad_norm": 1.2948023080825806,
      "learning_rate": 7.278732116871239e-06,
      "loss": 0.1611,
      "step": 193
    },
    {
      "epoch": 0.7698412698412699,
      "grad_norm": 1.194381833076477,
      "learning_rate": 7.249432463491498e-06,
      "loss": 0.1298,
      "step": 194
    },
    {
      "epoch": 0.7738095238095238,
      "grad_norm": 1.3206703662872314,
      "learning_rate": 7.220035643882938e-06,
      "loss": 0.146,
      "step": 195
    },
    {
      "epoch": 0.7777777777777778,
      "grad_norm": 1.1986666917800903,
      "learning_rate": 7.190542927867234e-06,
      "loss": 0.1273,
      "step": 196
    },
    {
      "epoch": 0.7817460317460317,
      "grad_norm": 1.4849882125854492,
      "learning_rate": 7.160955589408395e-06,
      "loss": 0.2231,
      "step": 197
    },
    {
      "epoch": 0.7857142857142857,
      "grad_norm": 1.2542904615402222,
      "learning_rate": 7.131274906557725e-06,
      "loss": 0.1791,
      "step": 198
    },
    {
      "epoch": 0.7896825396825397,
      "grad_norm": 1.1533300876617432,
      "learning_rate": 7.101502161398626e-06,
      "loss": 0.1427,
      "step": 199
    },
    {
      "epoch": 0.7936507936507936,
      "grad_norm": 1.0504995584487915,
      "learning_rate": 7.0716386399912075e-06,
      "loss": 0.1326,
      "step": 200
    },
    {
      "epoch": 0.7976190476190477,
      "grad_norm": 0.9723128080368042,
      "learning_rate": 7.041685632316748e-06,
      "loss": 0.1426,
      "step": 201
    },
    {
      "epoch": 0.8015873015873016,
      "grad_norm": 0.9460880756378174,
      "learning_rate": 7.0116444322219575e-06,
      "loss": 0.1146,
      "step": 202
    },
    {
      "epoch": 0.8055555555555556,
      "grad_norm": 0.9208608269691467,
      "learning_rate": 6.981516337363099e-06,
      "loss": 0.1069,
      "step": 203
    },
    {
      "epoch": 0.8095238095238095,
      "grad_norm": 0.9688347578048706,
      "learning_rate": 6.95130264914993e-06,
      "loss": 0.1235,
      "step": 204
    },
    {
      "epoch": 0.8134920634920635,
      "grad_norm": 0.9681044816970825,
      "learning_rate": 6.9210046726894885e-06,
      "loss": 0.1116,
      "step": 205
    },
    {
      "epoch": 0.8174603174603174,
      "grad_norm": 1.0371317863464355,
      "learning_rate": 6.890623716729724e-06,
      "loss": 0.1467,
      "step": 206
    },
    {
      "epoch": 0.8214285714285714,
      "grad_norm": 1.0888309478759766,
      "learning_rate": 6.860161093602949e-06,
      "loss": 0.1446,
      "step": 207
    },
    {
      "epoch": 0.8253968253968254,
      "grad_norm": 0.9896472096443176,
      "learning_rate": 6.829618119169169e-06,
      "loss": 0.1449,
      "step": 208
    },
    {
      "epoch": 0.8293650793650794,
      "grad_norm": 1.078661322593689,
      "learning_rate": 6.798996112759233e-06,
      "loss": 0.1521,
      "step": 209
    },
    {
      "epoch": 0.8333333333333334,
      "grad_norm": 0.9813553094863892,
      "learning_rate": 6.768296397117848e-06,
      "loss": 0.1699,
      "step": 210
    },
    {
      "epoch": 0.8373015873015873,
      "grad_norm": 1.0994206666946411,
      "learning_rate": 6.737520298346438e-06,
      "loss": 0.1243,
      "step": 211
    },
    {
      "epoch": 0.8412698412698413,
      "grad_norm": 0.9839799404144287,
      "learning_rate": 6.706669145845863e-06,
      "loss": 0.1114,
      "step": 212
    },
    {
      "epoch": 0.8452380952380952,
      "grad_norm": 1.1935815811157227,
      "learning_rate": 6.6757442722590015e-06,
      "loss": 0.1774,
      "step": 213
    },
    {
      "epoch": 0.8492063492063492,
      "grad_norm": 1.0476075410842896,
      "learning_rate": 6.6447470134131685e-06,
      "loss": 0.1234,
      "step": 214
    },
    {
      "epoch": 0.8531746031746031,
      "grad_norm": 0.9438747763633728,
      "learning_rate": 6.613678708262439e-06,
      "loss": 0.1029,
      "step": 215
    },
    {
      "epoch": 0.8571428571428571,
      "grad_norm": 1.168129324913025,
      "learning_rate": 6.5825406988297815e-06,
      "loss": 0.1448,
      "step": 216
    },
    {
      "epoch": 0.8611111111111112,
      "grad_norm": 1.10924232006073,
      "learning_rate": 6.551334330149114e-06,
      "loss": 0.1473,
      "step": 217
    },
    {
      "epoch": 0.8650793650793651,
      "grad_norm": 1.0005338191986084,
      "learning_rate": 6.520060950207186e-06,
      "loss": 0.144,
      "step": 218
    },
    {
      "epoch": 0.8690476190476191,
      "grad_norm": 1.0865845680236816,
      "learning_rate": 6.488721909885359e-06,
      "loss": 0.1785,
      "step": 219
    },
    {
      "epoch": 0.873015873015873,
      "grad_norm": 0.8459860682487488,
      "learning_rate": 6.457318562901257e-06,
      "loss": 0.0961,
      "step": 220
    },
    {
      "epoch": 0.876984126984127,
      "grad_norm": 1.114251732826233,
      "learning_rate": 6.425852265750282e-06,
      "loss": 0.1548,
      "step": 221
    },
    {
      "epoch": 0.8809523809523809,
      "grad_norm": 1.1973581314086914,
      "learning_rate": 6.394324377647028e-06,
      "loss": 0.1635,
      "step": 222
    },
    {
      "epoch": 0.8849206349206349,
      "grad_norm": 1.2623138427734375,
      "learning_rate": 6.362736260466561e-06,
      "loss": 0.1376,
      "step": 223
    },
    {
      "epoch": 0.8888888888888888,
      "grad_norm": 1.1490073204040527,
      "learning_rate": 6.331089278685599e-06,
      "loss": 0.1876,
      "step": 224
    },
    {
      "epoch": 0.8928571428571429,
      "grad_norm": 1.2201417684555054,
      "learning_rate": 6.299384799323568e-06,
      "loss": 0.1191,
      "step": 225
    },
    {
      "epoch": 0.8968253968253969,
      "grad_norm": 1.0290501117706299,
      "learning_rate": 6.267624191883551e-06,
      "loss": 0.1355,
      "step": 226
    },
    {
      "epoch": 0.9007936507936508,
      "grad_norm": 0.9492494463920593,
      "learning_rate": 6.235808828293135e-06,
      "loss": 0.1267,
      "step": 227
    },
    {
      "epoch": 0.9047619047619048,
      "grad_norm": 1.0143413543701172,
      "learning_rate": 6.203940082845144e-06,
      "loss": 0.164,
      "step": 228
    },
    {
      "epoch": 0.9087301587301587,
      "grad_norm": 0.7956936359405518,
      "learning_rate": 6.172019332138285e-06,
      "loss": 0.1064,
      "step": 229
    },
    {
      "epoch": 0.9126984126984127,
      "grad_norm": 0.9389650225639343,
      "learning_rate": 6.140047955017672e-06,
      "loss": 0.155,
      "step": 230
    },
    {
      "epoch": 0.9166666666666666,
      "grad_norm": 0.8040848970413208,
      "learning_rate": 6.108027332515276e-06,
      "loss": 0.0941,
      "step": 231
    },
    {
      "epoch": 0.9206349206349206,
      "grad_norm": 1.0012954473495483,
      "learning_rate": 6.075958847790262e-06,
      "loss": 0.1329,
      "step": 232
    },
    {
      "epoch": 0.9246031746031746,
      "grad_norm": 0.8182567358016968,
      "learning_rate": 6.043843886069251e-06,
      "loss": 0.1169,
      "step": 233
    },
    {
      "epoch": 0.9285714285714286,
      "grad_norm": 0.9240618348121643,
      "learning_rate": 6.011683834586474e-06,
      "loss": 0.1266,
      "step": 234
    },
    {
      "epoch": 0.9325396825396826,
      "grad_norm": 1.002416968345642,
      "learning_rate": 5.979480082523858e-06,
      "loss": 0.1615,
      "step": 235
    },
    {
      "epoch": 0.9365079365079365,
      "grad_norm": 0.9440792202949524,
      "learning_rate": 5.947234020951015e-06,
      "loss": 0.1335,
      "step": 236
    },
    {
      "epoch": 0.9404761904761905,
      "grad_norm": 1.0118427276611328,
      "learning_rate": 5.914947042765149e-06,
      "loss": 0.1543,
      "step": 237
    },
    {
      "epoch": 0.9444444444444444,
      "grad_norm": 0.9193865060806274,
      "learning_rate": 5.882620542630901e-06,
      "loss": 0.1418,
      "step": 238
    },
    {
      "epoch": 0.9484126984126984,
      "grad_norm": 0.9237385392189026,
      "learning_rate": 5.850255916920093e-06,
      "loss": 0.1318,
      "step": 239
    },
    {
      "epoch": 0.9523809523809523,
      "grad_norm": 0.9004793167114258,
      "learning_rate": 5.817854563651415e-06,
      "loss": 0.1405,
      "step": 240
    },
    {
      "epoch": 0.9563492063492064,
      "grad_norm": 0.933459997177124,
      "learning_rate": 5.785417882430035e-06,
      "loss": 0.1161,
      "step": 241
    },
    {
      "epoch": 0.9603174603174603,
      "grad_norm": 0.9780202507972717,
      "learning_rate": 5.752947274387147e-06,
      "loss": 0.1296,
      "step": 242
    },
    {
      "epoch": 0.9642857142857143,
      "grad_norm": 1.3364917039871216,
      "learning_rate": 5.720444142119445e-06,
      "loss": 0.1521,
      "step": 243
    },
    {
      "epoch": 0.9682539682539683,
      "grad_norm": 0.864460825920105,
      "learning_rate": 5.687909889628529e-06,
      "loss": 0.1169,
      "step": 244
    },
    {
      "epoch": 0.9722222222222222,
      "grad_norm": 0.9481056332588196,
      "learning_rate": 5.6553459222602714e-06,
      "loss": 0.1522,
      "step": 245
    },
    {
      "epoch": 0.9761904761904762,
      "grad_norm": 0.8183348774909973,
      "learning_rate": 5.622753646644102e-06,
      "loss": 0.099,
      "step": 246
    },
    {
      "epoch": 0.9801587301587301,
      "grad_norm": 0.9387556314468384,
      "learning_rate": 5.59013447063225e-06,
      "loss": 0.1622,
      "step": 247
    },
    {
      "epoch": 0.9841269841269841,
      "grad_norm": 0.9889479875564575,
      "learning_rate": 5.557489803238934e-06,
      "loss": 0.1212,
      "step": 248
    },
    {
      "epoch": 0.9880952380952381,
      "grad_norm": 0.8334296345710754,
      "learning_rate": 5.524821054579491e-06,
      "loss": 0.1315,
      "step": 249
    },
    {
      "epoch": 0.9920634920634921,
      "grad_norm": 0.9019352197647095,
      "learning_rate": 5.492129635809473e-06,
      "loss": 0.1144,
      "step": 250
    },
    {
      "epoch": 0.996031746031746,
      "grad_norm": 0.9499675035476685,
      "learning_rate": 5.459416959063688e-06,
      "loss": 0.1335,
      "step": 251
    },
    {
      "epoch": 1.0,
      "grad_norm": 0.9597108364105225,
      "learning_rate": 5.426684437395196e-06,
      "loss": 0.1735,
      "step": 252
    },
    {
      "epoch": 1.003968253968254,
      "grad_norm": 0.8483631610870361,
      "learning_rate": 5.393933484714284e-06,
      "loss": 0.1089,
      "step": 253
    },
    {
      "epoch": 1.007936507936508,
      "grad_norm": 0.8303000926971436,
      "learning_rate": 5.361165515727374e-06,
      "loss": 0.114,
      "step": 254
    },
    {
      "epoch": 1.0119047619047619,
      "grad_norm": 0.8750895261764526,
      "learning_rate": 5.328381945875933e-06,
      "loss": 0.1269,
      "step": 255
    },
    {
      "epoch": 1.0158730158730158,
      "grad_norm": 0.9397597312927246,
      "learning_rate": 5.295584191275308e-06,
      "loss": 0.124,
      "step": 256
    },
    {
      "epoch": 1.0198412698412698,
      "grad_norm": 1.002083659172058,
      "learning_rate": 5.26277366865358e-06,
      "loss": 0.1523,
      "step": 257
    },
    {
      "epoch": 1.0238095238095237,
      "grad_norm": 0.882999062538147,
      "learning_rate": 5.229951795290353e-06,
      "loss": 0.1199,
      "step": 258
    },
    {
      "epoch": 1.0277777777777777,
      "grad_norm": 0.7423396110534668,
      "learning_rate": 5.197119988955534e-06,
      "loss": 0.0781,
      "step": 259
    },
    {
      "epoch": 1.0317460317460316,
      "grad_norm": 0.7801339626312256,
      "learning_rate": 5.164279667848094e-06,
      "loss": 0.0966,
      "step": 260
    },
    {
      "epoch": 1.0357142857142858,
      "grad_norm": 0.7711037397384644,
      "learning_rate": 5.131432250534809e-06,
      "loss": 0.0748,
      "step": 261
    },
    {
      "epoch": 1.0396825396825398,
      "grad_norm": 0.953778862953186,
      "learning_rate": 5.0985791558889785e-06,
      "loss": 0.1179,
      "step": 262
    },
    {
      "epoch": 1.0436507936507937,
      "grad_norm": 1.0090770721435547,
      "learning_rate": 5.065721803029146e-06,
      "loss": 0.1336,
      "step": 263
    },
    {
      "epoch": 1.0476190476190477,
      "grad_norm": 0.8958060145378113,
      "learning_rate": 5.032861611257783e-06,
      "loss": 0.0954,
      "step": 264
    },
    {
      "epoch": 1.0515873015873016,
      "grad_norm": 0.9291710257530212,
      "learning_rate": 5e-06,
      "loss": 0.1007,
      "step": 265
    },
    {
      "epoch": 1.0555555555555556,
      "grad_norm": 0.9543421864509583,
      "learning_rate": 4.967138388742218e-06,
      "loss": 0.1149,
      "step": 266
    },
    {
      "epoch": 1.0595238095238095,
      "grad_norm": 0.8300063014030457,
      "learning_rate": 4.934278196970857e-06,
      "loss": 0.1022,
      "step": 267
    },
    {
      "epoch": 1.0634920634920635,
      "grad_norm": 1.0179977416992188,
      "learning_rate": 4.9014208441110215e-06,
      "loss": 0.1314,
      "step": 268
    },
    {
      "epoch": 1.0674603174603174,
      "grad_norm": 0.7856260538101196,
      "learning_rate": 4.868567749465192e-06,
      "loss": 0.0858,
      "step": 269
    },
    {
      "epoch": 1.0714285714285714,
      "grad_norm": 0.8286792039871216,
      "learning_rate": 4.835720332151907e-06,
      "loss": 0.0921,
      "step": 270
    },
    {
      "epoch": 1.0753968253968254,
      "grad_norm": 0.7985828518867493,
      "learning_rate": 4.802880011044467e-06,
      "loss": 0.0913,
      "step": 271
    },
    {
      "epoch": 1.0793650793650793,
      "grad_norm": 0.8170755505561829,
      "learning_rate": 4.770048204709648e-06,
      "loss": 0.085,
      "step": 272
    },
    {
      "epoch": 1.0833333333333333,
      "grad_norm": 0.8687835931777954,
      "learning_rate": 4.73722633134642e-06,
      "loss": 0.0869,
      "step": 273
    },
    {
      "epoch": 1.0873015873015872,
      "grad_norm": 0.9110769629478455,
      "learning_rate": 4.7044158087246926e-06,
      "loss": 0.1079,
      "step": 274
    },
    {
      "epoch": 1.0912698412698412,
      "grad_norm": 0.9366803169250488,
      "learning_rate": 4.67161805412407e-06,
      "loss": 0.1034,
      "step": 275
    },
    {
      "epoch": 1.0952380952380953,
      "grad_norm": 0.8663371801376343,
      "learning_rate": 4.6388344842726266e-06,
      "loss": 0.0892,
      "step": 276
    },
    {
      "epoch": 1.0992063492063493,
      "grad_norm": 0.8069078922271729,
      "learning_rate": 4.606066515285719e-06,
      "loss": 0.1053,
      "step": 277
    },
    {
      "epoch": 1.1031746031746033,
      "grad_norm": 0.7186819314956665,
      "learning_rate": 4.573315562604804e-06,
      "loss": 0.0869,
      "step": 278
    },
    {
      "epoch": 1.1071428571428572,
      "grad_norm": 0.8153153657913208,
      "learning_rate": 4.540583040936313e-06,
      "loss": 0.0948,
      "step": 279
    },
    {
      "epoch": 1.1111111111111112,
      "grad_norm": 0.7923120260238647,
      "learning_rate": 4.5078703641905275e-06,
      "loss": 0.0821,
      "step": 280
    },
    {
      "epoch": 1.1150793650793651,
      "grad_norm": 0.7948088645935059,
      "learning_rate": 4.4751789454205105e-06,
      "loss": 0.0886,
      "step": 281
    },
    {
      "epoch": 1.119047619047619,
      "grad_norm": 0.9115673303604126,
      "learning_rate": 4.442510196761068e-06,
      "loss": 0.0945,
      "step": 282
    },
    {
      "epoch": 1.123015873015873,
      "grad_norm": 0.8398977518081665,
      "learning_rate": 4.409865529367751e-06,
      "loss": 0.0871,
      "step": 283
    },
    {
      "epoch": 1.126984126984127,
      "grad_norm": 0.8136438131332397,
      "learning_rate": 4.377246353355899e-06,
      "loss": 0.0853,
      "step": 284
    },
    {
      "epoch": 1.130952380952381,
      "grad_norm": 0.8385344743728638,
      "learning_rate": 4.34465407773973e-06,
      "loss": 0.085,
      "step": 285
    },
    {
      "epoch": 1.1349206349206349,
      "grad_norm": 0.8965757489204407,
      "learning_rate": 4.312090110371473e-06,
      "loss": 0.1147,
      "step": 286
    },
    {
      "epoch": 1.1388888888888888,
      "grad_norm": 0.8546063899993896,
      "learning_rate": 4.279555857880558e-06,
      "loss": 0.0822,
      "step": 287
    },
    {
      "epoch": 1.1428571428571428,
      "grad_norm": 0.8850679397583008,
      "learning_rate": 4.247052725612853e-06,
      "loss": 0.0906,
      "step": 288
    },
    {
      "epoch": 1.1468253968253967,
      "grad_norm": 0.8934538960456848,
      "learning_rate": 4.214582117569966e-06,
      "loss": 0.0999,
      "step": 289
    },
    {
      "epoch": 1.1507936507936507,
      "grad_norm": 0.8979964852333069,
      "learning_rate": 4.182145436348587e-06,
      "loss": 0.1035,
      "step": 290
    },
    {
      "epoch": 1.1547619047619047,
      "grad_norm": 0.8589382171630859,
      "learning_rate": 4.1497440830799084e-06,
      "loss": 0.0892,
      "step": 291
    },
    {
      "epoch": 1.1587301587301586,
      "grad_norm": 0.8852213025093079,
      "learning_rate": 4.1173794573691e-06,
      "loss": 0.1102,
      "step": 292
    },
    {
      "epoch": 1.1626984126984128,
      "grad_norm": 0.8531112670898438,
      "learning_rate": 4.0850529572348505e-06,
      "loss": 0.0867,
      "step": 293
    },
    {
      "epoch": 1.1666666666666667,
      "grad_norm": 0.8462398052215576,
      "learning_rate": 4.052765979048986e-06,
      "loss": 0.0922,
      "step": 294
    },
    {
      "epoch": 1.1706349206349207,
      "grad_norm": 0.8210939764976501,
      "learning_rate": 4.0205199174761435e-06,
      "loss": 0.0856,
      "step": 295
    },
    {
      "epoch": 1.1746031746031746,
      "grad_norm": 0.8710091710090637,
      "learning_rate": 3.988316165413528e-06,
      "loss": 0.0884,
      "step": 296
    },
    {
      "epoch": 1.1785714285714286,
      "grad_norm": 0.8986042141914368,
      "learning_rate": 3.956156113930752e-06,
      "loss": 0.0837,
      "step": 297
    },
    {
      "epoch": 1.1825396825396826,
      "grad_norm": 0.8113792538642883,
      "learning_rate": 3.924041152209739e-06,
      "loss": 0.0989,
      "step": 298
    },
    {
      "epoch": 1.1865079365079365,
      "grad_norm": 0.8271967768669128,
      "learning_rate": 3.891972667484726e-06,
      "loss": 0.0942,
      "step": 299
    },
    {
      "epoch": 1.1904761904761905,
      "grad_norm": 0.808840274810791,
      "learning_rate": 3.859952044982329e-06,
      "loss": 0.1004,
      "step": 300
    },
    {
      "epoch": 1.1944444444444444,
      "grad_norm": 0.7779503464698792,
      "learning_rate": 3.827980667861716e-06,
      "loss": 0.1018,
      "step": 301
    },
    {
      "epoch": 1.1984126984126984,
      "grad_norm": 0.7859336733818054,
      "learning_rate": 3.7960599171548572e-06,
      "loss": 0.0828,
      "step": 302
    },
    {
      "epoch": 1.2023809523809523,
      "grad_norm": 0.8485936522483826,
      "learning_rate": 3.764191171706867e-06,
      "loss": 0.1024,
      "step": 303
    },
    {
      "epoch": 1.2063492063492063,
      "grad_norm": 0.808078944683075,
      "learning_rate": 3.732375808116451e-06,
      "loss": 0.0815,
      "step": 304
    },
    {
      "epoch": 1.2103174603174602,
      "grad_norm": 0.9072874188423157,
      "learning_rate": 3.7006152006764336e-06,
      "loss": 0.1024,
      "step": 305
    },
    {
      "epoch": 1.2142857142857142,
      "grad_norm": 0.9134707450866699,
      "learning_rate": 3.6689107213144025e-06,
      "loss": 0.0863,
      "step": 306
    },
    {
      "epoch": 1.2182539682539684,
      "grad_norm": 0.924827516078949,
      "learning_rate": 3.6372637395334416e-06,
      "loss": 0.1225,
      "step": 307
    },
    {
      "epoch": 1.2222222222222223,
      "grad_norm": 0.8721131086349487,
      "learning_rate": 3.6056756223529734e-06,
      "loss": 0.0924,
      "step": 308
    },
    {
      "epoch": 1.2261904761904763,
      "grad_norm": 0.7114807963371277,
      "learning_rate": 3.574147734249719e-06,
      "loss": 0.0765,
      "step": 309
    },
    {
      "epoch": 1.2301587301587302,
      "grad_norm": 0.9052945971488953,
      "learning_rate": 3.542681437098745e-06,
      "loss": 0.1027,
      "step": 310
    },
    {
      "epoch": 1.2341269841269842,
      "grad_norm": 0.9625210762023926,
      "learning_rate": 3.5112780901146426e-06,
      "loss": 0.1059,
      "step": 311
    },
    {
      "epoch": 1.2380952380952381,
      "grad_norm": 0.8923853039741516,
      "learning_rate": 3.479939049792817e-06,
      "loss": 0.0991,
      "step": 312
    },
    {
      "epoch": 1.242063492063492,
      "grad_norm": 0.8069635033607483,
      "learning_rate": 3.448665669850888e-06,
      "loss": 0.0879,
      "step": 313
    },
    {
      "epoch": 1.246031746031746,
      "grad_norm": 0.9612098932266235,
      "learning_rate": 3.4174593011702197e-06,
      "loss": 0.121,
      "step": 314
    },
    {
      "epoch": 1.25,
      "grad_norm": 0.7984933257102966,
      "learning_rate": 3.386321291737563e-06,
      "loss": 0.0773,
      "step": 315
    },
    {
      "epoch": 1.253968253968254,
      "grad_norm": 1.0030648708343506,
      "learning_rate": 3.3552529865868323e-06,
      "loss": 0.1035,
      "step": 316
    },
    {
      "epoch": 1.257936507936508,
      "grad_norm": 0.7630086541175842,
      "learning_rate": 3.3242557277410015e-06,
      "loss": 0.0749,
      "step": 317
    },
    {
      "epoch": 1.2619047619047619,
      "grad_norm": 0.8207107782363892,
      "learning_rate": 3.2933308541541365e-06,
      "loss": 0.0999,
      "step": 318
    },
    {
      "epoch": 1.2658730158730158,
      "grad_norm": 0.7766290307044983,
      "learning_rate": 3.2624797016535626e-06,
      "loss": 0.0817,
      "step": 319
    },
    {
      "epoch": 1.2698412698412698,
      "grad_norm": 0.9152076840400696,
      "learning_rate": 3.2317036028821523e-06,
      "loss": 0.1218,
      "step": 320
    },
    {
      "epoch": 1.2738095238095237,
      "grad_norm": 0.8119193315505981,
      "learning_rate": 3.201003887240768e-06,
      "loss": 0.0897,
      "step": 321
    },
    {
      "epoch": 1.2777777777777777,
      "grad_norm": 0.8598250150680542,
      "learning_rate": 3.1703818808308327e-06,
      "loss": 0.0751,
      "step": 322
    },
    {
      "epoch": 1.2817460317460316,
      "grad_norm": 0.9571657776832581,
      "learning_rate": 3.1398389063970512e-06,
      "loss": 0.0966,
      "step": 323
    },
    {
      "epoch": 1.2857142857142856,
      "grad_norm": 0.8368863463401794,
      "learning_rate": 3.1093762832702775e-06,
      "loss": 0.1032,
      "step": 324
    },
    {
      "epoch": 1.2896825396825398,
      "grad_norm": 0.8748520612716675,
      "learning_rate": 3.0789953273105123e-06,
      "loss": 0.1,
      "step": 325
    },
    {
      "epoch": 1.2936507936507937,
      "grad_norm": 1.0862089395523071,
      "learning_rate": 3.048697350850073e-06,
      "loss": 0.1282,
      "step": 326
    },
    {
      "epoch": 1.2976190476190477,
      "grad_norm": 0.9345917701721191,
      "learning_rate": 3.0184836626369034e-06,
      "loss": 0.1038,
      "step": 327
    },
    {
      "epoch": 1.3015873015873016,
      "grad_norm": 0.7406806349754333,
      "learning_rate": 2.988355567778043e-06,
      "loss": 0.0851,
      "step": 328
    },
    {
      "epoch": 1.3055555555555556,
      "grad_norm": 0.8222859501838684,
      "learning_rate": 2.9583143676832526e-06,
      "loss": 0.0985,
      "step": 329
    },
    {
      "epoch": 1.3095238095238095,
      "grad_norm": 0.7789116501808167,
      "learning_rate": 2.9283613600087933e-06,
      "loss": 0.0685,
      "step": 330
    },
    {
      "epoch": 1.3134920634920635,
      "grad_norm": 0.7244381904602051,
      "learning_rate": 2.8984978386013767e-06,
      "loss": 0.0896,
      "step": 331
    },
    {
      "epoch": 1.3174603174603174,
      "grad_norm": 0.7870732545852661,
      "learning_rate": 2.8687250934422774e-06,
      "loss": 0.084,
      "step": 332
    },
    {
      "epoch": 1.3214285714285714,
      "grad_norm": 0.9733986854553223,
      "learning_rate": 2.839044410591606e-06,
      "loss": 0.1347,
      "step": 333
    },
    {
      "epoch": 1.3253968253968254,
      "grad_norm": 0.8321599364280701,
      "learning_rate": 2.809457072132766e-06,
      "loss": 0.1036,
      "step": 334
    },
    {
      "epoch": 1.3293650793650793,
      "grad_norm": 0.7535873651504517,
      "learning_rate": 2.779964356117063e-06,
      "loss": 0.0822,
      "step": 335
    },
    {
      "epoch": 1.3333333333333333,
      "grad_norm": 0.8251418471336365,
      "learning_rate": 2.750567536508504e-06,
      "loss": 0.0918,
      "step": 336
    },
    {
      "epoch": 1.3373015873015874,
      "grad_norm": 0.8015106916427612,
      "learning_rate": 2.7212678831287627e-06,
      "loss": 0.0929,
      "step": 337
    },
    {
      "epoch": 1.3412698412698414,
      "grad_norm": 0.8322468400001526,
      "learning_rate": 2.692066661602333e-06,
      "loss": 0.0879,
      "step": 338
    },
    {
      "epoch": 1.3452380952380953,
      "grad_norm": 0.8991337418556213,
      "learning_rate": 2.662965133301862e-06,
      "loss": 0.0985,
      "step": 339
    },
    {
      "epoch": 1.3492063492063493,
      "grad_norm": 0.8582090139389038,
      "learning_rate": 2.633964555293654e-06,
      "loss": 0.0939,
      "step": 340
    },
    {
      "epoch": 1.3531746031746033,
      "grad_norm": 0.7890231013298035,
      "learning_rate": 2.605066180283378e-06,
      "loss": 0.0775,
      "step": 341
    },
| { | |
| "epoch": 1.3571428571428572, | |
| "grad_norm": 0.8904759287834167, | |
| "learning_rate": 2.576271256561953e-06, | |
| "loss": 0.0905, | |
| "step": 342 | |
| }, | |
| { | |
| "epoch": 1.3611111111111112, | |
| "grad_norm": 0.8841497302055359, | |
| "learning_rate": 2.5475810279516287e-06, | |
| "loss": 0.0971, | |
| "step": 343 | |
| }, | |
| { | |
| "epoch": 1.3650793650793651, | |
| "grad_norm": 0.8513339757919312, | |
| "learning_rate": 2.5189967337522574e-06, | |
| "loss": 0.0879, | |
| "step": 344 | |
| }, | |
| { | |
| "epoch": 1.369047619047619, | |
| "grad_norm": 1.0061885118484497, | |
| "learning_rate": 2.49051960868776e-06, | |
| "loss": 0.1101, | |
| "step": 345 | |
| }, | |
| { | |
| "epoch": 1.373015873015873, | |
| "grad_norm": 0.814134418964386, | |
| "learning_rate": 2.46215088285279e-06, | |
| "loss": 0.0872, | |
| "step": 346 | |
| }, | |
| { | |
| "epoch": 1.376984126984127, | |
| "grad_norm": 0.8118036389350891, | |
| "learning_rate": 2.433891781659603e-06, | |
| "loss": 0.0732, | |
| "step": 347 | |
| }, | |
| { | |
| "epoch": 1.380952380952381, | |
| "grad_norm": 0.890362560749054, | |
| "learning_rate": 2.4057435257851173e-06, | |
| "loss": 0.1115, | |
| "step": 348 | |
| }, | |
| { | |
| "epoch": 1.3849206349206349, | |
| "grad_norm": 0.7876933813095093, | |
| "learning_rate": 2.377707331118196e-06, | |
| "loss": 0.0966, | |
| "step": 349 | |
| }, | |
| { | |
| "epoch": 1.3888888888888888, | |
| "grad_norm": 0.6942503452301025, | |
| "learning_rate": 2.349784408707112e-06, | |
| "loss": 0.0849, | |
| "step": 350 | |
| }, | |
| { | |
| "epoch": 1.3928571428571428, | |
| "grad_norm": 0.7224787473678589, | |
| "learning_rate": 2.3219759647072467e-06, | |
| "loss": 0.078, | |
| "step": 351 | |
| }, | |
| { | |
| "epoch": 1.3968253968253967, | |
| "grad_norm": 0.8572763204574585, | |
| "learning_rate": 2.2942832003289823e-06, | |
| "loss": 0.0885, | |
| "step": 352 | |
| }, | |
| { | |
| "epoch": 1.4007936507936507, | |
| "grad_norm": 0.9127199649810791, | |
| "learning_rate": 2.2667073117858185e-06, | |
| "loss": 0.1113, | |
| "step": 353 | |
| }, | |
| { | |
| "epoch": 1.4047619047619047, | |
| "grad_norm": 0.6862319111824036, | |
| "learning_rate": 2.2392494902427027e-06, | |
| "loss": 0.0732, | |
| "step": 354 | |
| }, | |
| { | |
| "epoch": 1.4087301587301586, | |
| "grad_norm": 0.7402455806732178, | |
| "learning_rate": 2.2119109217645697e-06, | |
| "loss": 0.0882, | |
| "step": 355 | |
| }, | |
| { | |
| "epoch": 1.4126984126984126, | |
| "grad_norm": 0.7556574940681458, | |
| "learning_rate": 2.1846927872651135e-06, | |
| "loss": 0.0827, | |
| "step": 356 | |
| }, | |
| { | |
| "epoch": 1.4166666666666667, | |
| "grad_norm": 0.791344404220581, | |
| "learning_rate": 2.1575962624557754e-06, | |
| "loss": 0.0948, | |
| "step": 357 | |
| }, | |
| { | |
| "epoch": 1.4206349206349207, | |
| "grad_norm": 0.8610579967498779, | |
| "learning_rate": 2.1306225177949584e-06, | |
| "loss": 0.1086, | |
| "step": 358 | |
| }, | |
| { | |
| "epoch": 1.4246031746031746, | |
| "grad_norm": 0.7268580794334412, | |
| "learning_rate": 2.1037727184374705e-06, | |
| "loss": 0.073, | |
| "step": 359 | |
| }, | |
| { | |
| "epoch": 1.4285714285714286, | |
| "grad_norm": 0.8346232175827026, | |
| "learning_rate": 2.07704802418419e-06, | |
| "loss": 0.0966, | |
| "step": 360 | |
| }, | |
| { | |
| "epoch": 1.4325396825396826, | |
| "grad_norm": 0.962031900882721, | |
| "learning_rate": 2.050449589431969e-06, | |
| "loss": 0.1086, | |
| "step": 361 | |
| }, | |
| { | |
| "epoch": 1.4365079365079365, | |
| "grad_norm": 0.8065364956855774, | |
| "learning_rate": 2.023978563123771e-06, | |
| "loss": 0.0831, | |
| "step": 362 | |
| }, | |
| { | |
| "epoch": 1.4404761904761905, | |
| "grad_norm": 0.8483532071113586, | |
| "learning_rate": 1.997636088699035e-06, | |
| "loss": 0.092, | |
| "step": 363 | |
| }, | |
| { | |
| "epoch": 1.4444444444444444, | |
| "grad_norm": 0.8966907858848572, | |
| "learning_rate": 1.9714233040442915e-06, | |
| "loss": 0.0955, | |
| "step": 364 | |
| }, | |
| { | |
| "epoch": 1.4484126984126984, | |
| "grad_norm": 0.8100058436393738, | |
| "learning_rate": 1.9453413414440043e-06, | |
| "loss": 0.091, | |
| "step": 365 | |
| }, | |
| { | |
| "epoch": 1.4523809523809523, | |
| "grad_norm": 0.9515202045440674, | |
| "learning_rate": 1.919391327531663e-06, | |
| "loss": 0.1208, | |
| "step": 366 | |
| }, | |
| { | |
| "epoch": 1.4563492063492063, | |
| "grad_norm": 0.7544227242469788, | |
| "learning_rate": 1.8935743832411163e-06, | |
| "loss": 0.0784, | |
| "step": 367 | |
| }, | |
| { | |
| "epoch": 1.4603174603174602, | |
| "grad_norm": 0.953455924987793, | |
| "learning_rate": 1.8678916237581524e-06, | |
| "loss": 0.1001, | |
| "step": 368 | |
| }, | |
| { | |
| "epoch": 1.4642857142857144, | |
| "grad_norm": 0.9809265732765198, | |
| "learning_rate": 1.8423441584723312e-06, | |
| "loss": 0.0983, | |
| "step": 369 | |
| }, | |
| { | |
| "epoch": 1.4682539682539684, | |
| "grad_norm": 0.9195934534072876, | |
| "learning_rate": 1.816933090929055e-06, | |
| "loss": 0.0957, | |
| "step": 370 | |
| }, | |
| { | |
| "epoch": 1.4722222222222223, | |
| "grad_norm": 0.8482993245124817, | |
| "learning_rate": 1.791659518781908e-06, | |
| "loss": 0.0807, | |
| "step": 371 | |
| }, | |
| { | |
| "epoch": 1.4761904761904763, | |
| "grad_norm": 1.0404033660888672, | |
| "learning_rate": 1.7665245337452368e-06, | |
| "loss": 0.1036, | |
| "step": 372 | |
| }, | |
| { | |
| "epoch": 1.4801587301587302, | |
| "grad_norm": 0.8592943549156189, | |
| "learning_rate": 1.7415292215469948e-06, | |
| "loss": 0.0834, | |
| "step": 373 | |
| }, | |
| { | |
| "epoch": 1.4841269841269842, | |
| "grad_norm": 0.8227203488349915, | |
| "learning_rate": 1.716674661881848e-06, | |
| "loss": 0.0969, | |
| "step": 374 | |
| }, | |
| { | |
| "epoch": 1.4880952380952381, | |
| "grad_norm": 1.006204605102539, | |
| "learning_rate": 1.6919619283645262e-06, | |
| "loss": 0.1053, | |
| "step": 375 | |
| }, | |
| { | |
| "epoch": 1.492063492063492, | |
| "grad_norm": 0.7945590019226074, | |
| "learning_rate": 1.667392088483456e-06, | |
| "loss": 0.0879, | |
| "step": 376 | |
| }, | |
| { | |
| "epoch": 1.496031746031746, | |
| "grad_norm": 0.8825644254684448, | |
| "learning_rate": 1.6429662035546451e-06, | |
| "loss": 0.0848, | |
| "step": 377 | |
| }, | |
| { | |
| "epoch": 1.5, | |
| "grad_norm": 0.6469714641571045, | |
| "learning_rate": 1.6186853286758397e-06, | |
| "loss": 0.0714, | |
| "step": 378 | |
| }, | |
| { | |
| "epoch": 1.503968253968254, | |
| "grad_norm": 0.638662576675415, | |
| "learning_rate": 1.5945505126809524e-06, | |
| "loss": 0.0586, | |
| "step": 379 | |
| }, | |
| { | |
| "epoch": 1.507936507936508, | |
| "grad_norm": 0.648895263671875, | |
| "learning_rate": 1.570562798094747e-06, | |
| "loss": 0.0648, | |
| "step": 380 | |
| }, | |
| { | |
| "epoch": 1.5119047619047619, | |
| "grad_norm": 0.8174787759780884, | |
| "learning_rate": 1.5467232210878153e-06, | |
| "loss": 0.1115, | |
| "step": 381 | |
| }, | |
| { | |
| "epoch": 1.5158730158730158, | |
| "grad_norm": 0.8738933801651001, | |
| "learning_rate": 1.5230328114318127e-06, | |
| "loss": 0.0946, | |
| "step": 382 | |
| }, | |
| { | |
| "epoch": 1.5198412698412698, | |
| "grad_norm": 0.681943416595459, | |
| "learning_rate": 1.4994925924549797e-06, | |
| "loss": 0.0672, | |
| "step": 383 | |
| }, | |
| { | |
| "epoch": 1.5238095238095237, | |
| "grad_norm": 0.7789627313613892, | |
| "learning_rate": 1.4761035809979395e-06, | |
| "loss": 0.0692, | |
| "step": 384 | |
| }, | |
| { | |
| "epoch": 1.5277777777777777, | |
| "grad_norm": 0.7515274882316589, | |
| "learning_rate": 1.452866787369771e-06, | |
| "loss": 0.0922, | |
| "step": 385 | |
| }, | |
| { | |
| "epoch": 1.5317460317460316, | |
| "grad_norm": 0.7735458612442017, | |
| "learning_rate": 1.4297832153043657e-06, | |
| "loss": 0.0826, | |
| "step": 386 | |
| }, | |
| { | |
| "epoch": 1.5357142857142856, | |
| "grad_norm": 0.8033286333084106, | |
| "learning_rate": 1.4068538619170763e-06, | |
| "loss": 0.074, | |
| "step": 387 | |
| }, | |
| { | |
| "epoch": 1.5396825396825395, | |
| "grad_norm": 0.7917870879173279, | |
| "learning_rate": 1.3840797176616467e-06, | |
| "loss": 0.0768, | |
| "step": 388 | |
| }, | |
| { | |
| "epoch": 1.5436507936507935, | |
| "grad_norm": 0.854568362236023, | |
| "learning_rate": 1.3614617662874197e-06, | |
| "loss": 0.0994, | |
| "step": 389 | |
| }, | |
| { | |
| "epoch": 1.5476190476190477, | |
| "grad_norm": 0.8531866669654846, | |
| "learning_rate": 1.3390009847968505e-06, | |
| "loss": 0.0869, | |
| "step": 390 | |
| }, | |
| { | |
| "epoch": 1.5515873015873016, | |
| "grad_norm": 0.8268758654594421, | |
| "learning_rate": 1.316698343403302e-06, | |
| "loss": 0.1045, | |
| "step": 391 | |
| }, | |
| { | |
| "epoch": 1.5555555555555556, | |
| "grad_norm": 1.00911283493042, | |
| "learning_rate": 1.2945548054891322e-06, | |
| "loss": 0.1214, | |
| "step": 392 | |
| }, | |
| { | |
| "epoch": 1.5595238095238095, | |
| "grad_norm": 0.8218592405319214, | |
| "learning_rate": 1.27257132756409e-06, | |
| "loss": 0.0811, | |
| "step": 393 | |
| }, | |
| { | |
| "epoch": 1.5634920634920635, | |
| "grad_norm": 0.900049090385437, | |
| "learning_rate": 1.2507488592239848e-06, | |
| "loss": 0.0926, | |
| "step": 394 | |
| }, | |
| { | |
| "epoch": 1.5674603174603174, | |
| "grad_norm": 0.9404058456420898, | |
| "learning_rate": 1.2290883431096778e-06, | |
| "loss": 0.0859, | |
| "step": 395 | |
| }, | |
| { | |
| "epoch": 1.5714285714285714, | |
| "grad_norm": 0.8277155160903931, | |
| "learning_rate": 1.2075907148663579e-06, | |
| "loss": 0.0813, | |
| "step": 396 | |
| }, | |
| { | |
| "epoch": 1.5753968253968254, | |
| "grad_norm": 0.83008873462677, | |
| "learning_rate": 1.186256903103129e-06, | |
| "loss": 0.0748, | |
| "step": 397 | |
| }, | |
| { | |
| "epoch": 1.5793650793650795, | |
| "grad_norm": 0.9396790266036987, | |
| "learning_rate": 1.1650878293528994e-06, | |
| "loss": 0.0999, | |
| "step": 398 | |
| }, | |
| { | |
| "epoch": 1.5833333333333335, | |
| "grad_norm": 0.8161706328392029, | |
| "learning_rate": 1.1440844080325703e-06, | |
| "loss": 0.0897, | |
| "step": 399 | |
| }, | |
| { | |
| "epoch": 1.5873015873015874, | |
| "grad_norm": 0.8118431568145752, | |
| "learning_rate": 1.1232475464035386e-06, | |
| "loss": 0.0894, | |
| "step": 400 | |
| }, | |
| { | |
| "epoch": 1.5912698412698414, | |
| "grad_norm": 0.6506233215332031, | |
| "learning_rate": 1.10257814453251e-06, | |
| "loss": 0.062, | |
| "step": 401 | |
| }, | |
| { | |
| "epoch": 1.5952380952380953, | |
| "grad_norm": 0.8417549729347229, | |
| "learning_rate": 1.0820770952526155e-06, | |
| "loss": 0.0899, | |
| "step": 402 | |
| }, | |
| { | |
| "epoch": 1.5992063492063493, | |
| "grad_norm": 0.6556459665298462, | |
| "learning_rate": 1.0617452841248494e-06, | |
| "loss": 0.0865, | |
| "step": 403 | |
| }, | |
| { | |
| "epoch": 1.6031746031746033, | |
| "grad_norm": 0.7962585687637329, | |
| "learning_rate": 1.0415835893998116e-06, | |
| "loss": 0.0924, | |
| "step": 404 | |
| }, | |
| { | |
| "epoch": 1.6071428571428572, | |
| "grad_norm": 0.8392485976219177, | |
| "learning_rate": 1.0215928819797744e-06, | |
| "loss": 0.0998, | |
| "step": 405 | |
| }, | |
| { | |
| "epoch": 1.6111111111111112, | |
| "grad_norm": 0.6995525360107422, | |
| "learning_rate": 1.0017740253810608e-06, | |
| "loss": 0.0597, | |
| "step": 406 | |
| }, | |
| { | |
| "epoch": 1.6150793650793651, | |
| "grad_norm": 0.7181254625320435, | |
| "learning_rate": 9.821278756967467e-07, | |
| "loss": 0.0771, | |
| "step": 407 | |
| }, | |
| { | |
| "epoch": 1.619047619047619, | |
| "grad_norm": 0.9979564547538757, | |
| "learning_rate": 9.62655281559679e-07, | |
| "loss": 0.1159, | |
| "step": 408 | |
| }, | |
| { | |
| "epoch": 1.623015873015873, | |
| "grad_norm": 0.7781193852424622, | |
| "learning_rate": 9.433570841058187e-07, | |
| "loss": 0.079, | |
| "step": 409 | |
| }, | |
| { | |
| "epoch": 1.626984126984127, | |
| "grad_norm": 0.8185853362083435, | |
| "learning_rate": 9.242341169379077e-07, | |
| "loss": 0.0921, | |
| "step": 410 | |
| }, | |
| { | |
| "epoch": 1.630952380952381, | |
| "grad_norm": 0.8298288583755493, | |
| "learning_rate": 9.052872060894613e-07, | |
| "loss": 0.0854, | |
| "step": 411 | |
| }, | |
| { | |
| "epoch": 1.6349206349206349, | |
| "grad_norm": 0.8486355543136597, | |
| "learning_rate": 8.865171699890835e-07, | |
| "loss": 0.0778, | |
| "step": 412 | |
| }, | |
| { | |
| "epoch": 1.6388888888888888, | |
| "grad_norm": 0.838982880115509, | |
| "learning_rate": 8.679248194251211e-07, | |
| "loss": 0.0973, | |
| "step": 413 | |
| }, | |
| { | |
| "epoch": 1.6428571428571428, | |
| "grad_norm": 0.753867506980896, | |
| "learning_rate": 8.495109575106331e-07, | |
| "loss": 0.0812, | |
| "step": 414 | |
| }, | |
| { | |
| "epoch": 1.6468253968253967, | |
| "grad_norm": 0.775990903377533, | |
| "learning_rate": 8.312763796487038e-07, | |
| "loss": 0.0725, | |
| "step": 415 | |
| }, | |
| { | |
| "epoch": 1.6507936507936507, | |
| "grad_norm": 0.7727320194244385, | |
| "learning_rate": 8.132218734980852e-07, | |
| "loss": 0.083, | |
| "step": 416 | |
| }, | |
| { | |
| "epoch": 1.6547619047619047, | |
| "grad_norm": 0.8327669501304626, | |
| "learning_rate": 7.953482189391687e-07, | |
| "loss": 0.0903, | |
| "step": 417 | |
| }, | |
| { | |
| "epoch": 1.6587301587301586, | |
| "grad_norm": 0.8732079863548279, | |
| "learning_rate": 7.776561880403072e-07, | |
| "loss": 0.0879, | |
| "step": 418 | |
| }, | |
| { | |
| "epoch": 1.6626984126984126, | |
| "grad_norm": 1.070827603340149, | |
| "learning_rate": 7.601465450244528e-07, | |
| "loss": 0.1549, | |
| "step": 419 | |
| }, | |
| { | |
| "epoch": 1.6666666666666665, | |
| "grad_norm": 0.801916778087616, | |
| "learning_rate": 7.42820046236154e-07, | |
| "loss": 0.0774, | |
| "step": 420 | |
| }, | |
| { | |
| "epoch": 1.6706349206349205, | |
| "grad_norm": 0.8970922231674194, | |
| "learning_rate": 7.256774401088817e-07, | |
| "loss": 0.096, | |
| "step": 421 | |
| }, | |
| { | |
| "epoch": 1.6746031746031746, | |
| "grad_norm": 0.8890314698219299, | |
| "learning_rate": 7.087194671326986e-07, | |
| "loss": 0.0803, | |
| "step": 422 | |
| }, | |
| { | |
| "epoch": 1.6785714285714286, | |
| "grad_norm": 0.9875659942626953, | |
| "learning_rate": 6.91946859822279e-07, | |
| "loss": 0.1082, | |
| "step": 423 | |
| }, | |
| { | |
| "epoch": 1.6825396825396826, | |
| "grad_norm": 0.8933368921279907, | |
| "learning_rate": 6.753603426852589e-07, | |
| "loss": 0.1128, | |
| "step": 424 | |
| }, | |
| { | |
| "epoch": 1.6865079365079365, | |
| "grad_norm": 0.6899388432502747, | |
| "learning_rate": 6.589606321909464e-07, | |
| "loss": 0.0816, | |
| "step": 425 | |
| }, | |
| { | |
| "epoch": 1.6904761904761905, | |
| "grad_norm": 0.8186425566673279, | |
| "learning_rate": 6.427484367393699e-07, | |
| "loss": 0.1017, | |
| "step": 426 | |
| }, | |
| { | |
| "epoch": 1.6944444444444444, | |
| "grad_norm": 0.7349991798400879, | |
| "learning_rate": 6.267244566306801e-07, | |
| "loss": 0.0791, | |
| "step": 427 | |
| }, | |
| { | |
| "epoch": 1.6984126984126984, | |
| "grad_norm": 0.7643943428993225, | |
| "learning_rate": 6.108893840348995e-07, | |
| "loss": 0.1029, | |
| "step": 428 | |
| }, | |
| { | |
| "epoch": 1.7023809523809523, | |
| "grad_norm": 0.7339365482330322, | |
| "learning_rate": 5.952439029620222e-07, | |
| "loss": 0.0724, | |
| "step": 429 | |
| }, | |
| { | |
| "epoch": 1.7063492063492065, | |
| "grad_norm": 0.6962631344795227, | |
| "learning_rate": 5.797886892324695e-07, | |
| "loss": 0.0677, | |
| "step": 430 | |
| }, | |
| { | |
| "epoch": 1.7103174603174605, | |
| "grad_norm": 0.7242050766944885, | |
| "learning_rate": 5.645244104478947e-07, | |
| "loss": 0.0793, | |
| "step": 431 | |
| }, | |
| { | |
| "epoch": 1.7142857142857144, | |
| "grad_norm": 0.8381595611572266, | |
| "learning_rate": 5.494517259623478e-07, | |
| "loss": 0.1134, | |
| "step": 432 | |
| }, | |
| { | |
| "epoch": 1.7182539682539684, | |
| "grad_norm": 0.8156391978263855, | |
| "learning_rate": 5.34571286853795e-07, | |
| "loss": 0.1071, | |
| "step": 433 | |
| }, | |
| { | |
| "epoch": 1.7222222222222223, | |
| "grad_norm": 0.859851062297821, | |
| "learning_rate": 5.198837358959901e-07, | |
| "loss": 0.1053, | |
| "step": 434 | |
| }, | |
| { | |
| "epoch": 1.7261904761904763, | |
| "grad_norm": 0.7489827871322632, | |
| "learning_rate": 5.05389707530714e-07, | |
| "loss": 0.0647, | |
| "step": 435 | |
| }, | |
| { | |
| "epoch": 1.7301587301587302, | |
| "grad_norm": 0.6945165395736694, | |
| "learning_rate": 4.91089827840367e-07, | |
| "loss": 0.0787, | |
| "step": 436 | |
| }, | |
| { | |
| "epoch": 1.7341269841269842, | |
| "grad_norm": 0.8907659649848938, | |
| "learning_rate": 4.769847145209244e-07, | |
| "loss": 0.1104, | |
| "step": 437 | |
| }, | |
| { | |
| "epoch": 1.7380952380952381, | |
| "grad_norm": 0.8202477097511292, | |
| "learning_rate": 4.6307497685525894e-07, | |
| "loss": 0.0894, | |
| "step": 438 | |
| }, | |
| { | |
| "epoch": 1.742063492063492, | |
| "grad_norm": 0.8185092806816101, | |
| "learning_rate": 4.4936121568681546e-07, | |
| "loss": 0.0851, | |
| "step": 439 | |
| }, | |
| { | |
| "epoch": 1.746031746031746, | |
| "grad_norm": 0.7551487684249878, | |
| "learning_rate": 4.3584402339366174e-07, | |
| "loss": 0.0748, | |
| "step": 440 | |
| }, | |
| { | |
| "epoch": 1.75, | |
| "grad_norm": 0.7413176894187927, | |
| "learning_rate": 4.225239838628981e-07, | |
| "loss": 0.0734, | |
| "step": 441 | |
| }, | |
| { | |
| "epoch": 1.753968253968254, | |
| "grad_norm": 0.8327194452285767, | |
| "learning_rate": 4.0940167246543595e-07, | |
| "loss": 0.0886, | |
| "step": 442 | |
| }, | |
| { | |
| "epoch": 1.757936507936508, | |
| "grad_norm": 0.8573850989341736, | |
| "learning_rate": 3.964776560311484e-07, | |
| "loss": 0.0936, | |
| "step": 443 | |
| }, | |
| { | |
| "epoch": 1.7619047619047619, | |
| "grad_norm": 0.809066653251648, | |
| "learning_rate": 3.8375249282437743e-07, | |
| "loss": 0.0889, | |
| "step": 444 | |
| }, | |
| { | |
| "epoch": 1.7658730158730158, | |
| "grad_norm": 0.8904972076416016, | |
| "learning_rate": 3.71226732519826e-07, | |
| "loss": 0.0987, | |
| "step": 445 | |
| }, | |
| { | |
| "epoch": 1.7698412698412698, | |
| "grad_norm": 0.8638467788696289, | |
| "learning_rate": 3.589009161788104e-07, | |
| "loss": 0.0842, | |
| "step": 446 | |
| }, | |
| { | |
| "epoch": 1.7738095238095237, | |
| "grad_norm": 0.7743827700614929, | |
| "learning_rate": 3.4677557622589175e-07, | |
| "loss": 0.0758, | |
| "step": 447 | |
| }, | |
| { | |
| "epoch": 1.7777777777777777, | |
| "grad_norm": 0.8045199513435364, | |
| "learning_rate": 3.3485123642587657e-07, | |
| "loss": 0.0796, | |
| "step": 448 | |
| }, | |
| { | |
| "epoch": 1.7817460317460316, | |
| "grad_norm": 0.7937319874763489, | |
| "learning_rate": 3.2312841186118937e-07, | |
| "loss": 0.0799, | |
| "step": 449 | |
| }, | |
| { | |
| "epoch": 1.7857142857142856, | |
| "grad_norm": 0.8425573706626892, | |
| "learning_rate": 3.116076089096265e-07, | |
| "loss": 0.0849, | |
| "step": 450 | |
| }, | |
| { | |
| "epoch": 1.7896825396825395, | |
| "grad_norm": 0.7336445450782776, | |
| "learning_rate": 3.0028932522248256e-07, | |
| "loss": 0.0788, | |
| "step": 451 | |
| }, | |
| { | |
| "epoch": 1.7936507936507935, | |
| "grad_norm": 0.7380035519599915, | |
| "learning_rate": 2.8917404970305096e-07, | |
| "loss": 0.0725, | |
| "step": 452 | |
| }, | |
| { | |
| "epoch": 1.7976190476190477, | |
| "grad_norm": 0.7742536067962646, | |
| "learning_rate": 2.782622624855097e-07, | |
| "loss": 0.0868, | |
| "step": 453 | |
| }, | |
| { | |
| "epoch": 1.8015873015873016, | |
| "grad_norm": 0.7211416363716125, | |
| "learning_rate": 2.6755443491417786e-07, | |
| "loss": 0.0831, | |
| "step": 454 | |
| }, | |
| { | |
| "epoch": 1.8055555555555556, | |
| "grad_norm": 0.6837066411972046, | |
| "learning_rate": 2.570510295231571e-07, | |
| "loss": 0.0657, | |
| "step": 455 | |
| }, | |
| { | |
| "epoch": 1.8095238095238095, | |
| "grad_norm": 0.8093054294586182, | |
| "learning_rate": 2.467525000163523e-07, | |
| "loss": 0.0862, | |
| "step": 456 | |
| }, | |
| { | |
| "epoch": 1.8134920634920635, | |
| "grad_norm": 0.8175082206726074, | |
| "learning_rate": 2.36659291247871e-07, | |
| "loss": 0.0955, | |
| "step": 457 | |
| }, | |
| { | |
| "epoch": 1.8174603174603174, | |
| "grad_norm": 0.6955847144126892, | |
| "learning_rate": 2.2677183920281342e-07, | |
| "loss": 0.0571, | |
| "step": 458 | |
| }, | |
| { | |
| "epoch": 1.8214285714285714, | |
| "grad_norm": 0.7741391062736511, | |
| "learning_rate": 2.1709057097843266e-07, | |
| "loss": 0.0729, | |
| "step": 459 | |
| }, | |
| { | |
| "epoch": 1.8253968253968254, | |
| "grad_norm": 0.7679141759872437, | |
| "learning_rate": 2.0761590476568893e-07, | |
| "loss": 0.0687, | |
| "step": 460 | |
| }, | |
| { | |
| "epoch": 1.8293650793650795, | |
| "grad_norm": 0.9118426442146301, | |
| "learning_rate": 1.9834824983118673e-07, | |
| "loss": 0.1124, | |
| "step": 461 | |
| }, | |
| { | |
| "epoch": 1.8333333333333335, | |
| "grad_norm": 0.6824527978897095, | |
| "learning_rate": 1.892880064994934e-07, | |
| "loss": 0.0624, | |
| "step": 462 | |
| }, | |
| { | |
| "epoch": 1.8373015873015874, | |
| "grad_norm": 0.8877853155136108, | |
| "learning_rate": 1.8043556613585143e-07, | |
| "loss": 0.1013, | |
| "step": 463 | |
| }, | |
| { | |
| "epoch": 1.8412698412698414, | |
| "grad_norm": 0.9003763794898987, | |
| "learning_rate": 1.7179131112926628e-07, | |
| "loss": 0.096, | |
| "step": 464 | |
| }, | |
| { | |
| "epoch": 1.8452380952380953, | |
| "grad_norm": 0.7879639267921448, | |
| "learning_rate": 1.6335561487599406e-07, | |
| "loss": 0.0773, | |
| "step": 465 | |
| }, | |
| { | |
| "epoch": 1.8492063492063493, | |
| "grad_norm": 1.05550217628479, | |
| "learning_rate": 1.551288417634106e-07, | |
| "loss": 0.1423, | |
| "step": 466 | |
| }, | |
| { | |
| "epoch": 1.8531746031746033, | |
| "grad_norm": 0.8541834354400635, | |
| "learning_rate": 1.471113471542712e-07, | |
| "loss": 0.0891, | |
| "step": 467 | |
| }, | |
| { | |
| "epoch": 1.8571428571428572, | |
| "grad_norm": 0.8560542464256287, | |
| "learning_rate": 1.3930347737136195e-07, | |
| "loss": 0.1024, | |
| "step": 468 | |
| }, | |
| { | |
| "epoch": 1.8611111111111112, | |
| "grad_norm": 1.0217139720916748, | |
| "learning_rate": 1.3170556968253756e-07, | |
| "loss": 0.1216, | |
| "step": 469 | |
| }, | |
| { | |
| "epoch": 1.8650793650793651, | |
| "grad_norm": 0.8782179355621338, | |
| "learning_rate": 1.2431795228615372e-07, | |
| "loss": 0.086, | |
| "step": 470 | |
| }, | |
| { | |
| "epoch": 1.869047619047619, | |
| "grad_norm": 0.8972673416137695, | |
| "learning_rate": 1.1714094429689127e-07, | |
| "loss": 0.0831, | |
| "step": 471 | |
| }, | |
| { | |
| "epoch": 1.873015873015873, | |
| "grad_norm": 0.8749038577079773, | |
| "learning_rate": 1.1017485573197151e-07, | |
| "loss": 0.0931, | |
| "step": 472 | |
| }, | |
| { | |
| "epoch": 1.876984126984127, | |
| "grad_norm": 0.7099663019180298, | |
| "learning_rate": 1.0341998749776316e-07, | |
| "loss": 0.0666, | |
| "step": 473 | |
| }, | |
| { | |
| "epoch": 1.880952380952381, | |
| "grad_norm": 0.8432213664054871, | |
| "learning_rate": 9.687663137678605e-08, | |
| "loss": 0.0966, | |
| "step": 474 | |
| }, | |
| { | |
| "epoch": 1.8849206349206349, | |
| "grad_norm": 0.9321752786636353, | |
| "learning_rate": 9.054507001510727e-08, | |
| "loss": 0.106, | |
| "step": 475 | |
| }, | |
| { | |
| "epoch": 1.8888888888888888, | |
| "grad_norm": 0.689958393573761, | |
| "learning_rate": 8.442557691013042e-08, | |
| "loss": 0.0799, | |
| "step": 476 | |
| }, | |
| { | |
| "epoch": 1.8928571428571428, | |
| "grad_norm": 0.8482077717781067, | |
| "learning_rate": 7.851841639878399e-08, | |
| "loss": 0.0816, | |
| "step": 477 | |
| }, | |
| { | |
| "epoch": 1.8968253968253967, | |
| "grad_norm": 0.8182678818702698, | |
| "learning_rate": 7.282384364610207e-08, | |
| "loss": 0.103, | |
| "step": 478 | |
| }, | |
| { | |
| "epoch": 1.9007936507936507, | |
| "grad_norm": 0.639568030834198, | |
| "learning_rate": 6.734210463420099e-08, | |
| "loss": 0.0622, | |
| "step": 479 | |
| }, | |
| { | |
| "epoch": 1.9047619047619047, | |
| "grad_norm": 0.6604679226875305, | |
| "learning_rate": 6.207343615165562e-08, | |
| "loss": 0.0676, | |
| "step": 480 | |
| }, | |
| { | |
| "epoch": 1.9087301587301586, | |
| "grad_norm": 0.7304494976997375, | |
| "learning_rate": 5.701806578327029e-08, | |
| "loss": 0.0676, | |
| "step": 481 | |
| }, | |
| { | |
| "epoch": 1.9126984126984126, | |
| "grad_norm": 0.72718745470047, | |
| "learning_rate": 5.21762119002478e-08, | |
| "loss": 0.0748, | |
| "step": 482 | |
| }, | |
| { | |
| "epoch": 1.9166666666666665, | |
| "grad_norm": 0.6965732574462891, | |
| "learning_rate": 4.7548083650759134e-08, | |
| "loss": 0.0666, | |
| "step": 483 | |
| }, | |
| { | |
| "epoch": 1.9206349206349205, | |
| "grad_norm": 0.6254513263702393, | |
| "learning_rate": 4.31338809509052e-08, | |
| "loss": 0.056, | |
| "step": 484 | |
| }, | |
| { | |
| "epoch": 1.9246031746031746, | |
| "grad_norm": 0.7826492786407471, | |
| "learning_rate": 3.8933794476083143e-08, | |
| "loss": 0.0728, | |
| "step": 485 | |
| }, | |
| { | |
| "epoch": 1.9285714285714286, | |
| "grad_norm": 0.7751713395118713, | |
| "learning_rate": 3.494800565275125e-08, | |
| "loss": 0.0723, | |
| "step": 486 | |
| }, | |
| { | |
| "epoch": 1.9325396825396826, | |
| "grad_norm": 0.7251322865486145, | |
| "learning_rate": 3.1176686650589147e-08, | |
| "loss": 0.0759, | |
| "step": 487 | |
| }, | |
| { | |
| "epoch": 1.9365079365079365, | |
| "grad_norm": 0.8554272055625916, | |
| "learning_rate": 2.7620000375064848e-08, | |
| "loss": 0.0983, | |
| "step": 488 | |
| }, | |
| { | |
| "epoch": 1.9404761904761905, | |
| "grad_norm": 0.8263299465179443, | |
| "learning_rate": 2.4278100460393138e-08, | |
| "loss": 0.0716, | |
| "step": 489 | |
| }, | |
| { | |
| "epoch": 1.9444444444444444, | |
| "grad_norm": 0.909213125705719, | |
| "learning_rate": 2.115113126290258e-08, | |
| "loss": 0.1007, | |
| "step": 490 | |
| }, | |
| { | |
| "epoch": 1.9484126984126984, | |
| "grad_norm": 0.8255181312561035, | |
| "learning_rate": 1.8239227854799368e-08, | |
| "loss": 0.0736, | |
| "step": 491 | |
| }, | |
| { | |
| "epoch": 1.9523809523809523, | |
| "grad_norm": 0.852088987827301, | |
| "learning_rate": 1.554251601833201e-08, | |
| "loss": 0.091, | |
| "step": 492 | |
| }, | |
| { | |
| "epoch": 1.9563492063492065, | |
| "grad_norm": 0.8087424635887146, | |
| "learning_rate": 1.3061112240357887e-08, | |
| "loss": 0.07, | |
| "step": 493 | |
| }, | |
| { | |
| "epoch": 1.9603174603174605, | |
| "grad_norm": 0.7878557443618774, | |
| "learning_rate": 1.0795123707312283e-08, | |
| "loss": 0.0727, | |
| "step": 494 | |
| }, | |
| { | |
| "epoch": 1.9642857142857144, | |
| "grad_norm": 0.8727295398712158, | |
| "learning_rate": 8.744648300578196e-09, | |
| "loss": 0.0909, | |
| "step": 495 | |
| }, | |
| { | |
| "epoch": 1.9682539682539684, | |
| "grad_norm": 0.8698612451553345, | |
| "learning_rate": 6.9097745922580564e-09, | |
| "loss": 0.0807, | |
| "step": 496 | |
| }, | |
| { | |
| "epoch": 1.9722222222222223, | |
| "grad_norm": 0.795814573764801, | |
| "learning_rate": 5.2905818413478975e-09, | |
| "loss": 0.0669, | |
| "step": 497 | |
| }, | |
| { | |
| "epoch": 1.9761904761904763, | |
| "grad_norm": 0.8243165612220764, | |
| "learning_rate": 3.887139990313427e-09, | |
| "loss": 0.0866, | |
| "step": 498 | |
| }, | |
| { | |
| "epoch": 1.9801587301587302, | |
| "grad_norm": 0.820289671421051, | |
| "learning_rate": 2.699509662069666e-09, | |
| "loss": 0.1, | |
| "step": 499 | |
| }, | |
| { | |
| "epoch": 1.9841269841269842, | |
| "grad_norm": 0.6659083962440491, | |
| "learning_rate": 1.7277421573608234e-09, | |
| "loss": 0.0714, | |
| "step": 500 | |
| }, | |
| { | |
| "epoch": 1.9880952380952381, | |
| "grad_norm": 0.7066503763198853, | |
| "learning_rate": 9.71879452545399e-10, | |
| "loss": 0.0789, | |
| "step": 501 | |
| }, | |
| { | |
| "epoch": 1.992063492063492, | |
| "grad_norm": 0.9347036480903625, | |
| "learning_rate": 4.3195419778319095e-10, | |
| "loss": 0.1064, | |
| "step": 502 | |
| }, | |
| { | |
| "epoch": 1.996031746031746, | |
| "grad_norm": 0.8103813529014587, | |
| "learning_rate": 1.0798971562364647e-10, | |
| "loss": 0.0803, | |
| "step": 503 | |
| }, | |
| { | |
| "epoch": 2.0, | |
| "grad_norm": 0.6373736262321472, | |
| "learning_rate": 0.0, | |
| "loss": 0.0533, | |
| "step": 504 | |
| }, | |
| { | |
| "epoch": 2.0, | |
| "step": 504, | |
| "total_flos": 54880760659968.0, | |
| "train_loss": 0.24779036479987324, | |
| "train_runtime": 4013.6412, | |
| "train_samples_per_second": 2.005, | |
| "train_steps_per_second": 0.126 | |
| } | |
| ], | |
| "logging_steps": 1.0, | |
| "max_steps": 504, | |
| "num_input_tokens_seen": 0, | |
| "num_train_epochs": 2, | |
| "save_steps": 500, | |
| "stateful_callbacks": { | |
| "TrainerControl": { | |
| "args": { | |
| "should_epoch_stop": false, | |
| "should_evaluate": false, | |
| "should_log": false, | |
| "should_save": false, | |
| "should_training_stop": false | |
| }, | |
| "attributes": {} | |
| } | |
| }, | |
| "total_flos": 54880760659968.0, | |
| "train_batch_size": 8, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |