{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 3.0,
  "eval_steps": 500,
  "global_step": 231,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.012987012987012988,
      "grad_norm": 30.513944625854492,
      "learning_rate": 5.0000000000000004e-08,
      "loss": 4.1013,
      "step": 1
    },
    {
      "epoch": 0.025974025974025976,
      "grad_norm": 30.965682983398438,
      "learning_rate": 1.0000000000000001e-07,
      "loss": 4.1757,
      "step": 2
    },
    {
      "epoch": 0.03896103896103896,
      "grad_norm": 30.834516525268555,
      "learning_rate": 1.5000000000000002e-07,
      "loss": 4.1371,
      "step": 3
    },
    {
      "epoch": 0.05194805194805195,
      "grad_norm": 30.98870849609375,
      "learning_rate": 2.0000000000000002e-07,
      "loss": 4.0543,
      "step": 4
    },
    {
      "epoch": 0.06493506493506493,
      "grad_norm": 30.404874801635742,
      "learning_rate": 2.5000000000000004e-07,
      "loss": 4.09,
      "step": 5
    },
    {
      "epoch": 0.07792207792207792,
      "grad_norm": 31.433774948120117,
      "learning_rate": 3.0000000000000004e-07,
      "loss": 4.1777,
      "step": 6
    },
    {
      "epoch": 0.09090909090909091,
      "grad_norm": 30.850692749023438,
      "learning_rate": 3.5000000000000004e-07,
      "loss": 4.142,
      "step": 7
    },
    {
      "epoch": 0.1038961038961039,
      "grad_norm": 31.00360870361328,
      "learning_rate": 4.0000000000000003e-07,
      "loss": 4.1115,
      "step": 8
    },
    {
      "epoch": 0.11688311688311688,
      "grad_norm": 31.221616744995117,
      "learning_rate": 4.5000000000000003e-07,
      "loss": 4.1585,
      "step": 9
    },
    {
      "epoch": 0.12987012987012986,
      "grad_norm": 30.560623168945312,
      "learning_rate": 5.000000000000001e-07,
      "loss": 4.1155,
      "step": 10
    },
    {
      "epoch": 0.14285714285714285,
      "grad_norm": 30.089527130126953,
      "learning_rate": 5.5e-07,
      "loss": 4.0638,
      "step": 11
    },
    {
      "epoch": 0.15584415584415584,
      "grad_norm": 30.662569046020508,
      "learning_rate": 6.000000000000001e-07,
      "loss": 4.0152,
      "step": 12
    },
    {
      "epoch": 0.16883116883116883,
      "grad_norm": 29.455503463745117,
      "learning_rate": 6.5e-07,
      "loss": 3.9501,
      "step": 13
    },
    {
      "epoch": 0.18181818181818182,
      "grad_norm": 28.851863861083984,
      "learning_rate": 7.000000000000001e-07,
      "loss": 3.9066,
      "step": 14
    },
    {
      "epoch": 0.19480519480519481,
      "grad_norm": 27.41941261291504,
      "learning_rate": 7.5e-07,
      "loss": 3.816,
      "step": 15
    },
    {
      "epoch": 0.2077922077922078,
      "grad_norm": 25.754905700683594,
      "learning_rate": 8.000000000000001e-07,
      "loss": 3.6715,
      "step": 16
    },
    {
      "epoch": 0.22077922077922077,
      "grad_norm": 24.467802047729492,
      "learning_rate": 8.500000000000001e-07,
      "loss": 3.6226,
      "step": 17
    },
    {
      "epoch": 0.23376623376623376,
      "grad_norm": 23.671884536743164,
      "learning_rate": 9.000000000000001e-07,
      "loss": 3.5756,
      "step": 18
    },
    {
      "epoch": 0.24675324675324675,
      "grad_norm": 21.28356170654297,
      "learning_rate": 9.500000000000001e-07,
      "loss": 3.3822,
      "step": 19
    },
    {
      "epoch": 0.2597402597402597,
      "grad_norm": 20.54037857055664,
      "learning_rate": 1.0000000000000002e-06,
      "loss": 3.3799,
      "step": 20
    },
    {
      "epoch": 0.2727272727272727,
      "grad_norm": 18.18222427368164,
      "learning_rate": 1.0500000000000001e-06,
      "loss": 3.1964,
      "step": 21
    },
    {
      "epoch": 0.2857142857142857,
      "grad_norm": 17.782703399658203,
      "learning_rate": 1.1e-06,
      "loss": 3.1745,
      "step": 22
    },
    {
      "epoch": 0.2987012987012987,
      "grad_norm": 15.965900421142578,
      "learning_rate": 1.1500000000000002e-06,
      "loss": 2.9002,
      "step": 23
    },
    {
      "epoch": 0.3116883116883117,
      "grad_norm": 16.00725746154785,
      "learning_rate": 1.2000000000000002e-06,
      "loss": 2.9123,
      "step": 24
    },
    {
      "epoch": 0.3246753246753247,
      "grad_norm": 15.301984786987305,
      "learning_rate": 1.25e-06,
      "loss": 2.6531,
      "step": 25
    },
    {
      "epoch": 0.33766233766233766,
      "grad_norm": 16.359207153320312,
      "learning_rate": 1.3e-06,
      "loss": 2.6089,
      "step": 26
    },
    {
      "epoch": 0.35064935064935066,
      "grad_norm": 17.74176025390625,
      "learning_rate": 1.3500000000000002e-06,
      "loss": 2.4805,
      "step": 27
    },
    {
      "epoch": 0.36363636363636365,
      "grad_norm": 18.702547073364258,
      "learning_rate": 1.4000000000000001e-06,
      "loss": 2.3312,
      "step": 28
    },
    {
      "epoch": 0.37662337662337664,
      "grad_norm": 18.44041633605957,
      "learning_rate": 1.45e-06,
      "loss": 2.2273,
      "step": 29
    },
    {
      "epoch": 0.38961038961038963,
      "grad_norm": 15.867247581481934,
      "learning_rate": 1.5e-06,
      "loss": 2.0514,
      "step": 30
    },
    {
      "epoch": 0.4025974025974026,
      "grad_norm": 13.97642707824707,
      "learning_rate": 1.5500000000000002e-06,
      "loss": 1.8689,
      "step": 31
    },
    {
      "epoch": 0.4155844155844156,
      "grad_norm": 13.599323272705078,
      "learning_rate": 1.6000000000000001e-06,
      "loss": 1.7907,
      "step": 32
    },
    {
      "epoch": 0.42857142857142855,
      "grad_norm": 13.204086303710938,
      "learning_rate": 1.6500000000000003e-06,
      "loss": 1.6537,
      "step": 33
    },
    {
      "epoch": 0.44155844155844154,
      "grad_norm": 13.255966186523438,
      "learning_rate": 1.7000000000000002e-06,
      "loss": 1.5693,
      "step": 34
    },
    {
      "epoch": 0.45454545454545453,
      "grad_norm": 13.00086784362793,
      "learning_rate": 1.75e-06,
      "loss": 1.3916,
      "step": 35
    },
    {
      "epoch": 0.4675324675324675,
      "grad_norm": 12.996549606323242,
      "learning_rate": 1.8000000000000001e-06,
      "loss": 1.2734,
      "step": 36
    },
    {
      "epoch": 0.4805194805194805,
      "grad_norm": 12.506110191345215,
      "learning_rate": 1.85e-06,
      "loss": 1.1444,
      "step": 37
    },
    {
      "epoch": 0.4935064935064935,
      "grad_norm": 12.379636764526367,
      "learning_rate": 1.9000000000000002e-06,
      "loss": 1.033,
      "step": 38
    },
    {
      "epoch": 0.5064935064935064,
      "grad_norm": 12.38772201538086,
      "learning_rate": 1.9500000000000004e-06,
      "loss": 0.8932,
      "step": 39
    },
    {
      "epoch": 0.5194805194805194,
      "grad_norm": 12.370598793029785,
      "learning_rate": 2.0000000000000003e-06,
      "loss": 0.7914,
      "step": 40
    },
    {
      "epoch": 0.5324675324675324,
      "grad_norm": 11.510255813598633,
      "learning_rate": 2.05e-06,
      "loss": 0.6336,
      "step": 41
    },
    {
      "epoch": 0.5454545454545454,
      "grad_norm": 10.088979721069336,
      "learning_rate": 2.1000000000000002e-06,
      "loss": 0.5115,
      "step": 42
    },
    {
      "epoch": 0.5584415584415584,
      "grad_norm": 9.241965293884277,
      "learning_rate": 2.15e-06,
      "loss": 0.416,
      "step": 43
    },
    {
      "epoch": 0.5714285714285714,
      "grad_norm": 7.457204341888428,
      "learning_rate": 2.2e-06,
      "loss": 0.2947,
      "step": 44
    },
    {
      "epoch": 0.5844155844155844,
      "grad_norm": 6.164954662322998,
      "learning_rate": 2.25e-06,
      "loss": 0.2202,
      "step": 45
    },
    {
      "epoch": 0.5974025974025974,
      "grad_norm": 4.587556838989258,
      "learning_rate": 2.3000000000000004e-06,
      "loss": 0.1721,
      "step": 46
    },
    {
      "epoch": 0.6103896103896104,
      "grad_norm": 3.2366952896118164,
      "learning_rate": 2.35e-06,
      "loss": 0.1451,
      "step": 47
    },
    {
      "epoch": 0.6233766233766234,
      "grad_norm": 2.397469997406006,
      "learning_rate": 2.4000000000000003e-06,
      "loss": 0.1116,
      "step": 48
    },
    {
      "epoch": 0.6363636363636364,
      "grad_norm": 1.872839093208313,
      "learning_rate": 2.4500000000000003e-06,
      "loss": 0.1,
      "step": 49
    },
    {
      "epoch": 0.6493506493506493,
      "grad_norm": 1.2464176416397095,
      "learning_rate": 2.5e-06,
      "loss": 0.1024,
      "step": 50
    },
    {
      "epoch": 0.6623376623376623,
      "grad_norm": 1.0786665678024292,
      "learning_rate": 2.55e-06,
      "loss": 0.087,
      "step": 51
    },
    {
      "epoch": 0.6753246753246753,
      "grad_norm": 1.10042142868042,
      "learning_rate": 2.6e-06,
      "loss": 0.0838,
      "step": 52
    },
    {
      "epoch": 0.6883116883116883,
      "grad_norm": 0.9031299948692322,
      "learning_rate": 2.6500000000000005e-06,
      "loss": 0.0804,
      "step": 53
    },
    {
      "epoch": 0.7012987012987013,
      "grad_norm": 0.8072167634963989,
      "learning_rate": 2.7000000000000004e-06,
      "loss": 0.0786,
      "step": 54
    },
    {
      "epoch": 0.7142857142857143,
      "grad_norm": 0.6948329210281372,
      "learning_rate": 2.7500000000000004e-06,
      "loss": 0.0773,
      "step": 55
    },
    {
      "epoch": 0.7272727272727273,
      "grad_norm": 0.6062957644462585,
      "learning_rate": 2.8000000000000003e-06,
      "loss": 0.0756,
      "step": 56
    },
    {
      "epoch": 0.7402597402597403,
      "grad_norm": 0.4804394841194153,
      "learning_rate": 2.85e-06,
      "loss": 0.0697,
      "step": 57
    },
    {
      "epoch": 0.7532467532467533,
      "grad_norm": 0.5353919863700867,
      "learning_rate": 2.9e-06,
      "loss": 0.0697,
      "step": 58
    },
    {
      "epoch": 0.7662337662337663,
      "grad_norm": 0.4344547390937805,
      "learning_rate": 2.95e-06,
      "loss": 0.0704,
      "step": 59
    },
    {
      "epoch": 0.7792207792207793,
      "grad_norm": 0.574044942855835,
      "learning_rate": 3e-06,
      "loss": 0.0778,
      "step": 60
    },
    {
      "epoch": 0.7922077922077922,
      "grad_norm": 0.5392284393310547,
      "learning_rate": 3.05e-06,
      "loss": 0.0658,
      "step": 61
    },
    {
      "epoch": 0.8051948051948052,
      "grad_norm": 0.5098194479942322,
      "learning_rate": 3.1000000000000004e-06,
      "loss": 0.0661,
      "step": 62
    },
    {
      "epoch": 0.8181818181818182,
      "grad_norm": 0.3733481168746948,
      "learning_rate": 3.1500000000000003e-06,
      "loss": 0.0667,
      "step": 63
    },
    {
      "epoch": 0.8311688311688312,
      "grad_norm": 0.3714415431022644,
      "learning_rate": 3.2000000000000003e-06,
      "loss": 0.0635,
      "step": 64
    },
    {
      "epoch": 0.8441558441558441,
      "grad_norm": 0.37473437190055847,
      "learning_rate": 3.2500000000000002e-06,
      "loss": 0.0661,
      "step": 65
    },
    {
      "epoch": 0.8571428571428571,
      "grad_norm": 0.35993579030036926,
      "learning_rate": 3.3000000000000006e-06,
      "loss": 0.0637,
      "step": 66
    },
    {
      "epoch": 0.8701298701298701,
      "grad_norm": 0.41175705194473267,
      "learning_rate": 3.3500000000000005e-06,
      "loss": 0.0646,
      "step": 67
    },
    {
      "epoch": 0.8831168831168831,
      "grad_norm": 0.29564622044563293,
      "learning_rate": 3.4000000000000005e-06,
      "loss": 0.0627,
      "step": 68
    },
    {
      "epoch": 0.8961038961038961,
      "grad_norm": 0.3018590211868286,
      "learning_rate": 3.45e-06,
      "loss": 0.0613,
      "step": 69
    },
    {
      "epoch": 0.9090909090909091,
      "grad_norm": 0.499183714389801,
      "learning_rate": 3.5e-06,
      "loss": 0.0664,
      "step": 70
    },
    {
      "epoch": 0.922077922077922,
      "grad_norm": 0.3450847864151001,
      "learning_rate": 3.5500000000000003e-06,
      "loss": 0.0589,
      "step": 71
    },
    {
      "epoch": 0.935064935064935,
      "grad_norm": 0.26693543791770935,
      "learning_rate": 3.6000000000000003e-06,
      "loss": 0.0633,
      "step": 72
    },
    {
      "epoch": 0.948051948051948,
      "grad_norm": 0.2867635190486908,
      "learning_rate": 3.65e-06,
      "loss": 0.062,
      "step": 73
    },
    {
      "epoch": 0.961038961038961,
      "grad_norm": 0.3328873813152313,
      "learning_rate": 3.7e-06,
      "loss": 0.0587,
      "step": 74
    },
    {
      "epoch": 0.974025974025974,
      "grad_norm": 0.33611243963241577,
      "learning_rate": 3.7500000000000005e-06,
      "loss": 0.058,
      "step": 75
    },
    {
      "epoch": 0.987012987012987,
      "grad_norm": 0.2983446419239044,
      "learning_rate": 3.8000000000000005e-06,
      "loss": 0.0632,
      "step": 76
    },
    {
      "epoch": 1.0,
      "grad_norm": 0.3246895372867584,
      "learning_rate": 3.85e-06,
      "loss": 0.0583,
      "step": 77
    },
    {
      "epoch": 1.0129870129870129,
      "grad_norm": 0.2912401556968689,
      "learning_rate": 3.900000000000001e-06,
      "loss": 0.0609,
      "step": 78
    },
    {
      "epoch": 1.025974025974026,
      "grad_norm": 0.37205109000205994,
      "learning_rate": 3.95e-06,
      "loss": 0.0646,
      "step": 79
    },
    {
      "epoch": 1.0389610389610389,
      "grad_norm": 0.2430678755044937,
      "learning_rate": 4.000000000000001e-06,
      "loss": 0.0567,
      "step": 80
    },
    {
      "epoch": 1.051948051948052,
      "grad_norm": 0.32996001839637756,
      "learning_rate": 4.05e-06,
      "loss": 0.0517,
      "step": 81
    },
    {
      "epoch": 1.0649350649350648,
      "grad_norm": 0.274720698595047,
      "learning_rate": 4.1e-06,
      "loss": 0.0579,
      "step": 82
    },
    {
      "epoch": 1.077922077922078,
      "grad_norm": 0.3047677278518677,
      "learning_rate": 4.15e-06,
      "loss": 0.0575,
      "step": 83
    },
    {
      "epoch": 1.0909090909090908,
      "grad_norm": 0.2987304925918579,
      "learning_rate": 4.2000000000000004e-06,
      "loss": 0.0576,
      "step": 84
    },
    {
      "epoch": 1.103896103896104,
      "grad_norm": 0.32784077525138855,
      "learning_rate": 4.25e-06,
      "loss": 0.0607,
      "step": 85
    },
    {
      "epoch": 1.1168831168831168,
      "grad_norm": 0.254140704870224,
      "learning_rate": 4.3e-06,
      "loss": 0.0582,
      "step": 86
    },
    {
      "epoch": 1.12987012987013,
      "grad_norm": 0.2915174663066864,
      "learning_rate": 4.350000000000001e-06,
      "loss": 0.0614,
      "step": 87
    },
    {
      "epoch": 1.1428571428571428,
      "grad_norm": 0.2874818444252014,
      "learning_rate": 4.4e-06,
      "loss": 0.0541,
      "step": 88
    },
    {
      "epoch": 1.155844155844156,
      "grad_norm": 0.27614182233810425,
      "learning_rate": 4.450000000000001e-06,
      "loss": 0.0556,
      "step": 89
    },
    {
      "epoch": 1.1688311688311688,
      "grad_norm": 0.3335096836090088,
      "learning_rate": 4.5e-06,
      "loss": 0.0601,
      "step": 90
    },
    {
      "epoch": 1.1818181818181819,
      "grad_norm": 0.2302844524383545,
      "learning_rate": 4.5500000000000005e-06,
      "loss": 0.056,
      "step": 91
    },
    {
      "epoch": 1.1948051948051948,
      "grad_norm": 0.22703984379768372,
      "learning_rate": 4.600000000000001e-06,
      "loss": 0.0577,
      "step": 92
    },
    {
      "epoch": 1.2077922077922079,
      "grad_norm": 0.2960239350795746,
      "learning_rate": 4.65e-06,
      "loss": 0.0597,
      "step": 93
    },
    {
      "epoch": 1.2207792207792207,
      "grad_norm": 0.20548498630523682,
      "learning_rate": 4.7e-06,
      "loss": 0.0531,
      "step": 94
    },
    {
      "epoch": 1.2337662337662338,
      "grad_norm": 0.2792559564113617,
      "learning_rate": 4.75e-06,
      "loss": 0.0552,
      "step": 95
    },
    {
      "epoch": 1.2467532467532467,
      "grad_norm": 0.25889474153518677,
      "learning_rate": 4.800000000000001e-06,
      "loss": 0.053,
      "step": 96
    },
    {
      "epoch": 1.2597402597402598,
      "grad_norm": 0.2224734127521515,
      "learning_rate": 4.85e-06,
      "loss": 0.0528,
      "step": 97
    },
    {
      "epoch": 1.2727272727272727,
      "grad_norm": 0.2264651507139206,
      "learning_rate": 4.9000000000000005e-06,
      "loss": 0.0545,
      "step": 98
    },
    {
      "epoch": 1.2857142857142856,
      "grad_norm": 0.29424387216567993,
      "learning_rate": 4.95e-06,
      "loss": 0.0571,
      "step": 99
    },
    {
      "epoch": 1.2987012987012987,
      "grad_norm": 0.3200651705265045,
      "learning_rate": 5e-06,
      "loss": 0.0605,
      "step": 100
    },
    {
      "epoch": 1.3116883116883118,
      "grad_norm": 0.284705251455307,
      "learning_rate": 4.999905856597241e-06,
      "loss": 0.0512,
      "step": 101
    },
    {
      "epoch": 1.3246753246753247,
      "grad_norm": 0.25781720876693726,
      "learning_rate": 4.999623433479346e-06,
      "loss": 0.053,
      "step": 102
    },
    {
      "epoch": 1.3376623376623376,
      "grad_norm": 0.2585068643093109,
      "learning_rate": 4.999152751916936e-06,
      "loss": 0.0557,
      "step": 103
    },
    {
      "epoch": 1.3506493506493507,
      "grad_norm": 0.24417804181575775,
      "learning_rate": 4.99849384735926e-06,
      "loss": 0.0599,
      "step": 104
    },
    {
      "epoch": 1.3636363636363638,
      "grad_norm": 0.2860206067562103,
      "learning_rate": 4.997646769431532e-06,
      "loss": 0.053,
      "step": 105
    },
    {
      "epoch": 1.3766233766233766,
      "grad_norm": 0.22996841371059418,
      "learning_rate": 4.9966115819311926e-06,
      "loss": 0.0544,
      "step": 106
    },
    {
      "epoch": 1.3896103896103895,
      "grad_norm": 0.2630089819431305,
      "learning_rate": 4.9953883628231e-06,
      "loss": 0.0509,
      "step": 107
    },
    {
      "epoch": 1.4025974025974026,
      "grad_norm": 0.2588723599910736,
      "learning_rate": 4.99397720423366e-06,
      "loss": 0.0521,
      "step": 108
    },
    {
      "epoch": 1.4155844155844157,
      "grad_norm": 0.3795599937438965,
      "learning_rate": 4.992378212443891e-06,
      "loss": 0.0527,
      "step": 109
    },
    {
      "epoch": 1.4285714285714286,
      "grad_norm": 0.3013223111629486,
      "learning_rate": 4.990591507881416e-06,
      "loss": 0.0523,
      "step": 110
    },
    {
      "epoch": 1.4415584415584415,
      "grad_norm": 0.24418188631534576,
      "learning_rate": 4.988617225111392e-06,
      "loss": 0.0494,
      "step": 111
    },
    {
      "epoch": 1.4545454545454546,
      "grad_norm": 0.3064674735069275,
      "learning_rate": 4.986455512826377e-06,
      "loss": 0.0438,
      "step": 112
    },
    {
      "epoch": 1.4675324675324675,
      "grad_norm": 0.2957422137260437,
      "learning_rate": 4.984106533835132e-06,
      "loss": 0.0496,
      "step": 113
    },
    {
      "epoch": 1.4805194805194806,
      "grad_norm": 0.21359635889530182,
      "learning_rate": 4.981570465050357e-06,
      "loss": 0.0508,
      "step": 114
    },
    {
      "epoch": 1.4935064935064934,
      "grad_norm": 0.20495343208312988,
      "learning_rate": 4.978847497475369e-06,
      "loss": 0.0535,
      "step": 115
    },
    {
      "epoch": 1.5064935064935066,
      "grad_norm": 0.26068198680877686,
      "learning_rate": 4.975937836189712e-06,
      "loss": 0.0485,
      "step": 116
    },
    {
      "epoch": 1.5194805194805194,
      "grad_norm": 0.26806536316871643,
      "learning_rate": 4.97284170033372e-06,
      "loss": 0.0505,
      "step": 117
    },
    {
      "epoch": 1.5324675324675323,
      "grad_norm": 0.24916547536849976,
      "learning_rate": 4.969559323092004e-06,
      "loss": 0.0507,
      "step": 118
    },
    {
      "epoch": 1.5454545454545454,
      "grad_norm": 0.26345163583755493,
      "learning_rate": 4.966090951675893e-06,
      "loss": 0.049,
      "step": 119
    },
    {
      "epoch": 1.5584415584415585,
      "grad_norm": 0.2529750168323517,
      "learning_rate": 4.962436847304818e-06,
      "loss": 0.0471,
      "step": 120
    },
    {
      "epoch": 1.5714285714285714,
      "grad_norm": 0.2879778742790222,
      "learning_rate": 4.958597285186635e-06,
      "loss": 0.047,
      "step": 121
    },
    {
      "epoch": 1.5844155844155843,
      "grad_norm": 0.24268512427806854,
      "learning_rate": 4.954572554496897e-06,
      "loss": 0.0448,
      "step": 122
    },
    {
      "epoch": 1.5974025974025974,
      "grad_norm": 0.3030378520488739,
      "learning_rate": 4.950362958357078e-06,
      "loss": 0.0483,
      "step": 123
    },
    {
      "epoch": 1.6103896103896105,
      "grad_norm": 0.3552945852279663,
      "learning_rate": 4.945968813811743e-06,
      "loss": 0.0518,
      "step": 124
    },
    {
      "epoch": 1.6233766233766234,
      "grad_norm": 0.32233837246894836,
      "learning_rate": 4.941390451804668e-06,
      "loss": 0.0488,
      "step": 125
    },
    {
      "epoch": 1.6363636363636362,
      "grad_norm": 0.24810399115085602,
      "learning_rate": 4.936628217153914e-06,
      "loss": 0.0487,
      "step": 126
    },
    {
      "epoch": 1.6493506493506493,
      "grad_norm": 0.2843495011329651,
      "learning_rate": 4.931682468525863e-06,
      "loss": 0.0485,
      "step": 127
    },
    {
      "epoch": 1.6623376623376624,
      "grad_norm": 0.4453549087047577,
      "learning_rate": 4.9265535784081965e-06,
      "loss": 0.0441,
      "step": 128
    },
    {
      "epoch": 1.6753246753246753,
      "grad_norm": 0.2711978852748871,
      "learning_rate": 4.92124193308185e-06,
      "loss": 0.0456,
      "step": 129
    },
    {
      "epoch": 1.6883116883116882,
      "grad_norm": 0.2913718521595001,
      "learning_rate": 4.915747932591916e-06,
      "loss": 0.0478,
      "step": 130
    },
    {
      "epoch": 1.7012987012987013,
      "grad_norm": 0.26739779114723206,
      "learning_rate": 4.910071990717516e-06,
      "loss": 0.0418,
      "step": 131
    },
    {
      "epoch": 1.7142857142857144,
      "grad_norm": 0.33316293358802795,
      "learning_rate": 4.9042145349406335e-06,
      "loss": 0.0466,
      "step": 132
    },
    {
      "epoch": 1.7272727272727273,
      "grad_norm": 0.3216155469417572,
      "learning_rate": 4.898176006413925e-06,
      "loss": 0.043,
      "step": 133
    },
    {
      "epoch": 1.7402597402597402,
      "grad_norm": 0.3745043873786926,
      "learning_rate": 4.891956859927489e-06,
      "loss": 0.0471,
      "step": 134
    },
    {
      "epoch": 1.7532467532467533,
      "grad_norm": 0.2518353760242462,
      "learning_rate": 4.885557563874614e-06,
      "loss": 0.045,
      "step": 135
    },
    {
      "epoch": 1.7662337662337664,
      "grad_norm": 0.29494860768318176,
      "learning_rate": 4.8789786002165055e-06,
      "loss": 0.0437,
      "step": 136
    },
    {
      "epoch": 1.7792207792207793,
      "grad_norm": 0.2658509612083435,
      "learning_rate": 4.872220464445983e-06,
      "loss": 0.0523,
      "step": 137
    },
    {
      "epoch": 1.7922077922077921,
      "grad_norm": 0.3264024257659912,
      "learning_rate": 4.865283665550167e-06,
      "loss": 0.047,
      "step": 138
    },
    {
      "epoch": 1.8051948051948052,
      "grad_norm": 0.29048386216163635,
      "learning_rate": 4.8581687259721375e-06,
      "loss": 0.0513,
      "step": 139
    },
    {
      "epoch": 1.8181818181818183,
      "grad_norm": 0.21431702375411987,
      "learning_rate": 4.850876181571592e-06,
      "loss": 0.0426,
      "step": 140
    },
    {
      "epoch": 1.8311688311688312,
      "grad_norm": 0.29871106147766113,
      "learning_rate": 4.843406581584487e-06,
      "loss": 0.0498,
      "step": 141
    },
    {
      "epoch": 1.844155844155844,
      "grad_norm": 0.2374885231256485,
      "learning_rate": 4.83576048858167e-06,
      "loss": 0.0423,
      "step": 142
    },
    {
      "epoch": 1.8571428571428572,
      "grad_norm": 0.32642969489097595,
      "learning_rate": 4.8279384784265124e-06,
      "loss": 0.043,
      "step": 143
    },
    {
      "epoch": 1.87012987012987,
      "grad_norm": 0.3235689103603363,
      "learning_rate": 4.8199411402315356e-06,
      "loss": 0.0431,
      "step": 144
    },
    {
      "epoch": 1.883116883116883,
      "grad_norm": 0.25325465202331543,
      "learning_rate": 4.811769076314044e-06,
      "loss": 0.0405,
      "step": 145
    },
    {
      "epoch": 1.896103896103896,
      "grad_norm": 0.25112220644950867,
      "learning_rate": 4.803422902150762e-06,
      "loss": 0.0399,
      "step": 146
    },
    {
      "epoch": 1.9090909090909092,
      "grad_norm": 0.2481043040752411,
      "learning_rate": 4.794903246331477e-06,
      "loss": 0.0405,
      "step": 147
    },
    {
      "epoch": 1.922077922077922,
      "grad_norm": 0.28779706358909607,
      "learning_rate": 4.786210750511701e-06,
      "loss": 0.0407,
      "step": 148
    },
    {
      "epoch": 1.935064935064935,
      "grad_norm": 0.2663511335849762,
      "learning_rate": 4.777346069364343e-06,
      "loss": 0.0404,
      "step": 149
    },
    {
      "epoch": 1.948051948051948,
      "grad_norm": 0.27789220213890076,
      "learning_rate": 4.7683098705303995e-06,
      "loss": 0.0384,
      "step": 150
    },
    {
      "epoch": 1.9610389610389611,
      "grad_norm": 0.34537413716316223,
      "learning_rate": 4.7591028345686765e-06,
      "loss": 0.0466,
      "step": 151
    },
    {
      "epoch": 1.974025974025974,
      "grad_norm": 0.3259763717651367,
      "learning_rate": 4.749725654904529e-06,
      "loss": 0.0398,
      "step": 152
    },
    {
      "epoch": 1.987012987012987,
      "grad_norm": 0.3018707036972046,
      "learning_rate": 4.740179037777639e-06,
      "loss": 0.0415,
      "step": 153
    },
    {
      "epoch": 2.0,
      "grad_norm": 0.27019912004470825,
      "learning_rate": 4.730463702188824e-06,
      "loss": 0.0428,
      "step": 154
    },
    {
      "epoch": 2.012987012987013,
      "grad_norm": 0.34202930331230164,
      "learning_rate": 4.720580379845884e-06,
      "loss": 0.04,
      "step": 155
    },
    {
      "epoch": 2.0259740259740258,
      "grad_norm": 0.23893073201179504,
      "learning_rate": 4.710529815108496e-06,
      "loss": 0.0352,
      "step": 156
    },
    {
      "epoch": 2.038961038961039,
      "grad_norm": 0.3122200071811676,
      "learning_rate": 4.700312764932151e-06,
      "loss": 0.0369,
      "step": 157
    },
    {
      "epoch": 2.051948051948052,
      "grad_norm": 0.37174174189567566,
      "learning_rate": 4.689929998811145e-06,
      "loss": 0.035,
      "step": 158
    },
    {
      "epoch": 2.064935064935065,
      "grad_norm": 0.30702632665634155,
      "learning_rate": 4.679382298720625e-06,
      "loss": 0.0335,
      "step": 159
    },
    {
      "epoch": 2.0779220779220777,
      "grad_norm": 0.2560974955558777,
      "learning_rate": 4.668670459057693e-06,
      "loss": 0.0307,
      "step": 160
    },
    {
      "epoch": 2.090909090909091,
      "grad_norm": 0.25084158778190613,
      "learning_rate": 4.657795286581576e-06,
      "loss": 0.033,
      "step": 161
    },
    {
      "epoch": 2.103896103896104,
      "grad_norm": 0.25736576318740845,
      "learning_rate": 4.64675760035287e-06,
      "loss": 0.0336,
      "step": 162
    },
    {
      "epoch": 2.116883116883117,
      "grad_norm": 0.29564720392227173,
      "learning_rate": 4.635558231671846e-06,
      "loss": 0.0325,
      "step": 163
    },
    {
      "epoch": 2.1298701298701297,
      "grad_norm": 0.32640376687049866,
      "learning_rate": 4.624198024015845e-06,
      "loss": 0.0336,
      "step": 164
    },
    {
      "epoch": 2.142857142857143,
      "grad_norm": 0.27138862013816833,
      "learning_rate": 4.612677832975751e-06,
      "loss": 0.0324,
      "step": 165
    },
    {
      "epoch": 2.155844155844156,
      "grad_norm": 0.28909239172935486,
      "learning_rate": 4.6009985261915536e-06,
      "loss": 0.0351,
      "step": 166
    },
    {
      "epoch": 2.168831168831169,
      "grad_norm": 0.26905709505081177,
      "learning_rate": 4.5891609832869964e-06,
      "loss": 0.0348,
      "step": 167
    },
    {
      "epoch": 2.1818181818181817,
      "grad_norm": 0.3421840965747833,
      "learning_rate": 4.577166095803336e-06,
      "loss": 0.0362,
      "step": 168
    },
    {
      "epoch": 2.1948051948051948,
      "grad_norm": 0.37504997849464417,
      "learning_rate": 4.565014767132191e-06,
      "loss": 0.038,
      "step": 169
    },
    {
      "epoch": 2.207792207792208,
      "grad_norm": 0.3792576789855957,
      "learning_rate": 4.552707912447504e-06,
      "loss": 0.0345,
      "step": 170
    },
    {
      "epoch": 2.220779220779221,
      "grad_norm": 0.38099661469459534,
      "learning_rate": 4.540246458636619e-06,
      "loss": 0.0265,
      "step": 171
    },
    {
      "epoch": 2.2337662337662336,
      "grad_norm": 0.40823256969451904,
      "learning_rate": 4.527631344230466e-06,
      "loss": 0.0368,
      "step": 172
    },
    {
      "epoch": 2.2467532467532467,
      "grad_norm": 0.28929030895233154,
      "learning_rate": 4.514863519332882e-06,
      "loss": 0.0282,
      "step": 173
    },
    {
      "epoch": 2.25974025974026,
      "grad_norm": 0.32313477993011475,
      "learning_rate": 4.501943945549054e-06,
      "loss": 0.0292,
      "step": 174
    },
    {
      "epoch": 2.2727272727272725,
      "grad_norm": 0.4433124363422394,
      "learning_rate": 4.488873595913092e-06,
      "loss": 0.0298,
      "step": 175
    },
    {
      "epoch": 2.2857142857142856,
      "grad_norm": 0.32401469349861145,
      "learning_rate": 4.475653454814746e-06,
      "loss": 0.0296,
      "step": 176
    },
    {
      "epoch": 2.2987012987012987,
      "grad_norm": 0.35543468594551086,
      "learning_rate": 4.4622845179252735e-06,
      "loss": 0.0313,
      "step": 177
    },
    {
      "epoch": 2.311688311688312,
      "grad_norm": 0.37388646602630615,
      "learning_rate": 4.44876779212244e-06,
      "loss": 0.0305,
      "step": 178
    },
    {
      "epoch": 2.324675324675325,
      "grad_norm": 0.33474496006965637,
      "learning_rate": 4.435104295414697e-06,
      "loss": 0.0289,
      "step": 179
    },
    {
      "epoch": 2.3376623376623376,
      "grad_norm": 0.33111104369163513,
      "learning_rate": 4.421295056864501e-06,
      "loss": 0.0312,
      "step": 180
    },
    {
      "epoch": 2.3506493506493507,
      "grad_norm": 0.3842385709285736,
      "learning_rate": 4.407341116510818e-06,
      "loss": 0.0278,
      "step": 181
    },
    {
      "epoch": 2.3636363636363638,
      "grad_norm": 0.4244345724582672,
      "learning_rate": 4.3932435252907914e-06,
      "loss": 0.0275,
      "step": 182
    },
    {
      "epoch": 2.3766233766233764,
      "grad_norm": 0.5842180252075195,
      "learning_rate": 4.379003344960585e-06,
      "loss": 0.035,
      "step": 183
    },
    {
      "epoch": 2.3896103896103895,
      "grad_norm": 0.35604268312454224,
      "learning_rate": 4.364621648015426e-06,
      "loss": 0.0275,
      "step": 184
    },
    {
      "epoch": 2.4025974025974026,
      "grad_norm": 0.41529580950737,
      "learning_rate": 4.3500995176088235e-06,
      "loss": 0.0315,
      "step": 185
    },
    {
      "epoch": 2.4155844155844157,
      "grad_norm": 0.4051007926464081,
      "learning_rate": 4.335438047470996e-06,
      "loss": 0.0271,
      "step": 186
    },
    {
      "epoch": 2.4285714285714284,
      "grad_norm": 0.33859074115753174,
      "learning_rate": 4.320638341826494e-06,
      "loss": 0.0271,
      "step": 187
    },
    {
      "epoch": 2.4415584415584415,
      "grad_norm": 0.33747461438179016,
      "learning_rate": 4.305701515311037e-06,
      "loss": 0.0235,
      "step": 188
    },
    {
      "epoch": 2.4545454545454546,
      "grad_norm": 0.3473111093044281,
      "learning_rate": 4.290628692887564e-06,
      "loss": 0.0284,
      "step": 189
    },
    {
      "epoch": 2.4675324675324677,
      "grad_norm": 0.3321944773197174,
      "learning_rate": 4.27542100976151e-06,
      "loss": 0.024,
      "step": 190
    },
    {
      "epoch": 2.4805194805194803,
      "grad_norm": 0.3281101584434509,
      "learning_rate": 4.260079611295303e-06,
      "loss": 0.0262,
      "step": 191
    },
    {
      "epoch": 2.4935064935064934,
      "grad_norm": 0.4142085611820221,
      "learning_rate": 4.244605652922108e-06,
      "loss": 0.0218,
      "step": 192
    },
    {
      "epoch": 2.5064935064935066,
      "grad_norm": 0.3764965832233429,
      "learning_rate": 4.229000300058802e-06,
      "loss": 0.0238,
      "step": 193
    },
    {
      "epoch": 2.5194805194805197,
      "grad_norm": 0.4414701461791992,
      "learning_rate": 4.2132647280182e-06,
      "loss": 0.0267,
      "step": 194
    },
    {
      "epoch": 2.5324675324675323,
      "grad_norm": 0.4074808657169342,
      "learning_rate": 4.197400121920539e-06,
      "loss": 0.022,
      "step": 195
    },
    {
      "epoch": 2.5454545454545454,
      "grad_norm": 0.5768939852714539,
      "learning_rate": 4.1814076766042206e-06,
      "loss": 0.0311,
      "step": 196
    },
    {
      "epoch": 2.5584415584415585,
      "grad_norm": 0.35587024688720703,
      "learning_rate": 4.165288596535821e-06,
      "loss": 0.0246,
      "step": 197
    },
    {
      "epoch": 2.571428571428571,
      "grad_norm": 0.36500850319862366,
      "learning_rate": 4.149044095719377e-06,
      "loss": 0.0195,
      "step": 198
    },
    {
      "epoch": 2.5844155844155843,
      "grad_norm": 0.3946477472782135,
      "learning_rate": 4.132675397604956e-06,
      "loss": 0.0208,
      "step": 199
    },
    {
      "epoch": 2.5974025974025974,
      "grad_norm": 0.3371339440345764,
      "learning_rate": 4.11618373499651e-06,
      "loss": 0.0204,
      "step": 200
    },
    {
      "epoch": 2.6103896103896105,
      "grad_norm": 0.41154706478118896,
      "learning_rate": 4.099570349959025e-06,
      "loss": 0.0223,
      "step": 201
    },
    {
      "epoch": 2.6233766233766236,
      "grad_norm": 0.39301300048828125,
      "learning_rate": 4.082836493724981e-06,
      "loss": 0.0204,
      "step": 202
    },
    {
      "epoch": 2.6363636363636362,
      "grad_norm": 0.3674089014530182,
      "learning_rate": 4.065983426600113e-06,
      "loss": 0.0195,
      "step": 203
    },
    {
      "epoch": 2.6493506493506493,
      "grad_norm": 0.40296265482902527,
      "learning_rate": 4.0490124178684884e-06,
      "loss": 0.021,
      "step": 204
    },
    {
      "epoch": 2.6623376623376624,
      "grad_norm": 0.4691706895828247,
      "learning_rate": 4.031924745696916e-06,
      "loss": 0.0179,
      "step": 205
    },
    {
      "epoch": 2.675324675324675,
      "grad_norm": 0.44443100690841675,
      "learning_rate": 4.014721697038678e-06,
      "loss": 0.0242,
      "step": 206
    },
    {
      "epoch": 2.688311688311688,
      "grad_norm": 0.31682541966438293,
      "learning_rate": 3.997404567536606e-06,
      "loss": 0.0193,
      "step": 207
    },
    {
      "epoch": 2.7012987012987013,
      "grad_norm": 0.33720874786376953,
      "learning_rate": 3.979974661425497e-06,
      "loss": 0.0212,
      "step": 208
    },
    {
      "epoch": 2.7142857142857144,
      "grad_norm": 0.41582977771759033,
      "learning_rate": 3.962433291433889e-06,
      "loss": 0.0242,
      "step": 209
    },
    {
      "epoch": 2.7272727272727275,
      "grad_norm": 0.4688783884048462,
      "learning_rate": 3.944781778685189e-06,
      "loss": 0.0218,
      "step": 210
    },
    {
      "epoch": 2.74025974025974,
      "grad_norm": 0.3696669638156891,
      "learning_rate": 3.927021452598177e-06,
      "loss": 0.0209,
      "step": 211
    },
    {
      "epoch": 2.7532467532467533,
      "grad_norm": 0.36734238266944885,
      "learning_rate": 3.909153650786878e-06,
      "loss": 0.0165,
      "step": 212
    },
    {
      "epoch": 2.7662337662337664,
      "grad_norm": 0.33928048610687256,
      "learning_rate": 3.891179718959822e-06,
      "loss": 0.0137,
      "step": 213
    },
    {
      "epoch": 2.779220779220779,
      "grad_norm": 0.656510055065155,
      "learning_rate": 3.873101010818692e-06,
      "loss": 0.0258,
      "step": 214
    },
    {
      "epoch": 2.792207792207792,
      "grad_norm": 0.5539777278900146,
      "learning_rate": 3.8549188879563685e-06,
      "loss": 0.0208,
      "step": 215
    },
    {
      "epoch": 2.8051948051948052,
      "grad_norm": 0.4914684593677521,
      "learning_rate": 3.836634719754385e-06,
      "loss": 0.0192,
      "step": 216
    },
    {
      "epoch": 2.8181818181818183,
      "grad_norm": 0.38955259323120117,
      "learning_rate": 3.818249883279791e-06,
      "loss": 0.0185,
      "step": 217
    },
    {
      "epoch": 2.8311688311688314,
      "grad_norm": 0.45634275674819946,
      "learning_rate": 3.7997657631814366e-06,
      "loss": 0.0202,
      "step": 218
    },
    {
      "epoch": 2.844155844155844,
      "grad_norm": 0.41473978757858276,
      "learning_rate": 3.781183751585693e-06,
      "loss": 0.0133,
      "step": 219
    },
    {
      "epoch": 2.857142857142857,
      "grad_norm": 0.6281463503837585,
      "learning_rate": 3.762505247991601e-06,
      "loss": 0.0188,
      "step": 220
    },
    {
      "epoch": 2.87012987012987,
      "grad_norm": 0.34762486815452576,
      "learning_rate": 3.7437316591654726e-06,
      "loss": 0.0199,
      "step": 221
    },
    {
      "epoch": 2.883116883116883,
      "grad_norm": 0.43128135800361633,
      "learning_rate": 3.724864399034932e-06,
      "loss": 0.0175,
      "step": 222
    },
    {
      "epoch": 2.896103896103896,
      "grad_norm": 0.4040665328502655,
      "learning_rate": 3.7059048885824367e-06,
      "loss": 0.0119,
      "step": 223
    },
    {
      "epoch": 2.909090909090909,
      "grad_norm": 0.4182071089744568,
      "learning_rate": 3.686854555738249e-06,
      "loss": 0.0171,
      "step": 224
    },
    {
      "epoch": 2.9220779220779223,
      "grad_norm": 0.38667839765548706,
      "learning_rate": 3.6677148352728947e-06,
      "loss": 0.0155,
      "step": 225
    },
    {
      "epoch": 2.935064935064935,
      "grad_norm": 0.3983423709869385,
      "learning_rate": 3.6484871686891044e-06,
      "loss": 0.013,
      "step": 226
    },
    {
      "epoch": 2.948051948051948,
      "grad_norm": 0.48213276267051697,
      "learning_rate": 3.629173004113245e-06,
      "loss": 0.0131,
      "step": 227
    },
    {
      "epoch": 2.961038961038961,
      "grad_norm": 0.4538249969482422,
      "learning_rate": 3.609773796186256e-06,
      "loss": 0.0137,
      "step": 228
    },
    {
      "epoch": 2.974025974025974,
      "grad_norm": 0.4226680397987366,
      "learning_rate": 3.590291005954094e-06,
      "loss": 0.0144,
      "step": 229
    },
    {
      "epoch": 2.987012987012987,
      "grad_norm": 0.603485107421875,
      "learning_rate": 3.570726100757693e-06,
      "loss": 0.0179,
      "step": 230
    },
    {
      "epoch": 3.0,
      "grad_norm": 0.5114914774894714,
      "learning_rate": 3.5510805541224536e-06,
      "loss": 0.0134,
      "step": 231
    }
  ],
  "logging_steps": 1,
  "max_steps": 462,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 6,
  "save_steps": 77,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 5.1635296228972954e+17,
  "train_batch_size": 4,
  "trial_name": null,
  "trial_params": null
}