{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 2.985148514851485,
  "eval_steps": 500,
  "global_step": 201,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.01485148514851485,
      "grad_norm": 6.48456557234758,
      "learning_rate": 4.7619047619047623e-07,
      "loss": 1.1703,
      "step": 1
    },
    {
      "epoch": 0.0297029702970297,
      "grad_norm": 6.594689369570434,
      "learning_rate": 9.523809523809525e-07,
      "loss": 1.1918,
      "step": 2
    },
    {
      "epoch": 0.04455445544554455,
      "grad_norm": 6.23293374565609,
      "learning_rate": 1.4285714285714286e-06,
      "loss": 1.1576,
      "step": 3
    },
    {
      "epoch": 0.0594059405940594,
      "grad_norm": 6.401466218218343,
      "learning_rate": 1.904761904761905e-06,
      "loss": 1.2099,
      "step": 4
    },
    {
      "epoch": 0.07425742574257425,
      "grad_norm": 5.7694144179716,
      "learning_rate": 2.380952380952381e-06,
      "loss": 1.1447,
      "step": 5
    },
    {
      "epoch": 0.0891089108910891,
      "grad_norm": 4.584773158856208,
      "learning_rate": 2.8571428571428573e-06,
      "loss": 1.1128,
      "step": 6
    },
    {
      "epoch": 0.10396039603960396,
      "grad_norm": 4.408264566567633,
      "learning_rate": 3.3333333333333333e-06,
      "loss": 1.1444,
      "step": 7
    },
    {
      "epoch": 0.1188118811881188,
      "grad_norm": 2.6961190293519213,
      "learning_rate": 3.80952380952381e-06,
      "loss": 1.0333,
      "step": 8
    },
    {
      "epoch": 0.13366336633663367,
      "grad_norm": 2.366910263126932,
      "learning_rate": 4.2857142857142855e-06,
      "loss": 1.039,
      "step": 9
    },
    {
      "epoch": 0.1485148514851485,
      "grad_norm": 3.6999770055634555,
      "learning_rate": 4.761904761904762e-06,
      "loss": 1.0171,
      "step": 10
    },
    {
      "epoch": 0.16336633663366337,
      "grad_norm": 3.894316339063172,
      "learning_rate": 5.2380952380952384e-06,
      "loss": 1.0232,
      "step": 11
    },
    {
      "epoch": 0.1782178217821782,
      "grad_norm": 3.8249399482892703,
      "learning_rate": 5.7142857142857145e-06,
      "loss": 1.0875,
      "step": 12
    },
    {
      "epoch": 0.19306930693069307,
      "grad_norm": 3.0670108504304334,
      "learning_rate": 6.1904761904761914e-06,
      "loss": 1.0108,
      "step": 13
    },
    {
      "epoch": 0.2079207920792079,
      "grad_norm": 2.6030053112293676,
      "learning_rate": 6.666666666666667e-06,
      "loss": 0.9857,
      "step": 14
    },
    {
      "epoch": 0.22277227722772278,
      "grad_norm": 2.370576635506437,
      "learning_rate": 7.1428571428571436e-06,
      "loss": 0.9728,
      "step": 15
    },
    {
      "epoch": 0.2376237623762376,
      "grad_norm": 2.3399338429829615,
      "learning_rate": 7.61904761904762e-06,
      "loss": 0.9694,
      "step": 16
    },
    {
      "epoch": 0.2524752475247525,
      "grad_norm": 1.8311702968068935,
      "learning_rate": 8.095238095238097e-06,
      "loss": 0.934,
      "step": 17
    },
    {
      "epoch": 0.26732673267326734,
      "grad_norm": 1.4308728197686567,
      "learning_rate": 8.571428571428571e-06,
      "loss": 0.9175,
      "step": 18
    },
    {
      "epoch": 0.28217821782178215,
      "grad_norm": 1.5620018816986423,
      "learning_rate": 9.047619047619049e-06,
      "loss": 0.9595,
      "step": 19
    },
    {
      "epoch": 0.297029702970297,
      "grad_norm": 1.4122378912732825,
      "learning_rate": 9.523809523809525e-06,
      "loss": 0.9082,
      "step": 20
    },
    {
      "epoch": 0.3118811881188119,
      "grad_norm": 1.2544223318659375,
      "learning_rate": 1e-05,
      "loss": 0.8293,
      "step": 21
    },
    {
      "epoch": 0.32673267326732675,
      "grad_norm": 1.0621864931131515,
      "learning_rate": 9.999238475781957e-06,
      "loss": 0.8615,
      "step": 22
    },
    {
      "epoch": 0.3415841584158416,
      "grad_norm": 1.067756678297183,
      "learning_rate": 9.99695413509548e-06,
      "loss": 0.8963,
      "step": 23
    },
    {
      "epoch": 0.3564356435643564,
      "grad_norm": 1.1625635155258327,
      "learning_rate": 9.993147673772869e-06,
      "loss": 0.875,
      "step": 24
    },
    {
      "epoch": 0.3712871287128713,
      "grad_norm": 0.9411109178322155,
      "learning_rate": 9.987820251299121e-06,
      "loss": 0.8488,
      "step": 25
    },
    {
      "epoch": 0.38613861386138615,
      "grad_norm": 0.8920662981646181,
      "learning_rate": 9.980973490458728e-06,
      "loss": 0.8838,
      "step": 26
    },
    {
      "epoch": 0.400990099009901,
      "grad_norm": 0.9110888685882579,
      "learning_rate": 9.972609476841368e-06,
      "loss": 0.8299,
      "step": 27
    },
    {
      "epoch": 0.4158415841584158,
      "grad_norm": 0.8423480290666405,
      "learning_rate": 9.962730758206612e-06,
      "loss": 0.866,
      "step": 28
    },
    {
      "epoch": 0.4306930693069307,
      "grad_norm": 0.8825677961798845,
      "learning_rate": 9.951340343707852e-06,
      "loss": 0.8509,
      "step": 29
    },
    {
      "epoch": 0.44554455445544555,
      "grad_norm": 0.7581834656729458,
      "learning_rate": 9.938441702975689e-06,
      "loss": 0.8288,
      "step": 30
    },
    {
      "epoch": 0.4603960396039604,
      "grad_norm": 1.055473120057636,
      "learning_rate": 9.924038765061042e-06,
      "loss": 0.838,
      "step": 31
    },
    {
      "epoch": 0.4752475247524752,
      "grad_norm": 0.7922082592771019,
      "learning_rate": 9.908135917238321e-06,
      "loss": 0.8609,
      "step": 32
    },
    {
      "epoch": 0.4900990099009901,
      "grad_norm": 0.754302150819188,
      "learning_rate": 9.890738003669029e-06,
      "loss": 0.8583,
      "step": 33
    },
    {
      "epoch": 0.504950495049505,
      "grad_norm": 1.0626447047968044,
      "learning_rate": 9.871850323926178e-06,
      "loss": 0.7944,
      "step": 34
    },
    {
      "epoch": 0.5198019801980198,
      "grad_norm": 0.8923345386621004,
      "learning_rate": 9.851478631379982e-06,
      "loss": 0.8975,
      "step": 35
    },
    {
      "epoch": 0.5346534653465347,
      "grad_norm": 0.7366372348142536,
      "learning_rate": 9.829629131445342e-06,
      "loss": 0.8676,
      "step": 36
    },
    {
      "epoch": 0.5495049504950495,
      "grad_norm": 1.2738146882781316,
      "learning_rate": 9.806308479691595e-06,
      "loss": 0.8975,
      "step": 37
    },
    {
      "epoch": 0.5643564356435643,
      "grad_norm": 0.8592154350621992,
      "learning_rate": 9.781523779815178e-06,
      "loss": 0.8207,
      "step": 38
    },
    {
      "epoch": 0.5792079207920792,
      "grad_norm": 0.7049611640819059,
      "learning_rate": 9.755282581475769e-06,
      "loss": 0.8416,
      "step": 39
    },
    {
      "epoch": 0.594059405940594,
      "grad_norm": 0.8172609258295951,
      "learning_rate": 9.727592877996585e-06,
      "loss": 0.9003,
      "step": 40
    },
    {
      "epoch": 0.6089108910891089,
      "grad_norm": 0.943383839148404,
      "learning_rate": 9.698463103929542e-06,
      "loss": 0.8279,
      "step": 41
    },
    {
      "epoch": 0.6237623762376238,
      "grad_norm": 0.8200827897891733,
      "learning_rate": 9.667902132486009e-06,
      "loss": 0.8252,
      "step": 42
    },
    {
      "epoch": 0.6386138613861386,
      "grad_norm": 0.750459606762128,
      "learning_rate": 9.635919272833938e-06,
      "loss": 0.8417,
      "step": 43
    },
    {
      "epoch": 0.6534653465346535,
      "grad_norm": 0.8256180076390092,
      "learning_rate": 9.602524267262202e-06,
      "loss": 0.8068,
      "step": 44
    },
    {
      "epoch": 0.6683168316831684,
      "grad_norm": 0.7362971479000804,
      "learning_rate": 9.567727288213005e-06,
      "loss": 0.8116,
      "step": 45
    },
    {
      "epoch": 0.6831683168316832,
      "grad_norm": 0.6611484879378139,
      "learning_rate": 9.531538935183252e-06,
      "loss": 0.8049,
      "step": 46
    },
    {
      "epoch": 0.698019801980198,
      "grad_norm": 0.6483590920807921,
      "learning_rate": 9.493970231495836e-06,
      "loss": 0.8158,
      "step": 47
    },
    {
      "epoch": 0.7128712871287128,
      "grad_norm": 0.6499715006108466,
      "learning_rate": 9.45503262094184e-06,
      "loss": 0.7963,
      "step": 48
    },
    {
      "epoch": 0.7277227722772277,
      "grad_norm": 0.6445318374705665,
      "learning_rate": 9.414737964294636e-06,
      "loss": 0.8372,
      "step": 49
    },
    {
      "epoch": 0.7425742574257426,
      "grad_norm": 0.6749851299246947,
      "learning_rate": 9.37309853569698e-06,
      "loss": 0.7974,
      "step": 50
    },
    {
      "epoch": 0.7574257425742574,
      "grad_norm": 0.6430920805349686,
      "learning_rate": 9.330127018922195e-06,
      "loss": 0.797,
      "step": 51
    },
    {
      "epoch": 0.7722772277227723,
      "grad_norm": 0.7122774157069339,
      "learning_rate": 9.285836503510562e-06,
      "loss": 0.8178,
      "step": 52
    },
    {
      "epoch": 0.7871287128712872,
      "grad_norm": 0.6222782441453497,
      "learning_rate": 9.24024048078213e-06,
      "loss": 0.8286,
      "step": 53
    },
    {
      "epoch": 0.801980198019802,
      "grad_norm": 0.6774898848973883,
      "learning_rate": 9.193352839727122e-06,
      "loss": 0.8199,
      "step": 54
    },
    {
      "epoch": 0.8168316831683168,
      "grad_norm": 0.6685577033399003,
      "learning_rate": 9.145187862775208e-06,
      "loss": 0.8325,
      "step": 55
    },
    {
      "epoch": 0.8316831683168316,
      "grad_norm": 0.6954610466193606,
      "learning_rate": 9.09576022144496e-06,
      "loss": 0.7876,
      "step": 56
    },
    {
      "epoch": 0.8465346534653465,
      "grad_norm": 0.6717917465533401,
      "learning_rate": 9.045084971874738e-06,
      "loss": 0.7889,
      "step": 57
    },
    {
      "epoch": 0.8613861386138614,
      "grad_norm": 0.6710948817812145,
      "learning_rate": 8.993177550236464e-06,
      "loss": 0.819,
      "step": 58
    },
    {
      "epoch": 0.8762376237623762,
      "grad_norm": 0.6677437938362905,
      "learning_rate": 8.94005376803361e-06,
      "loss": 0.815,
      "step": 59
    },
    {
      "epoch": 0.8910891089108911,
      "grad_norm": 0.6333145369648289,
      "learning_rate": 8.885729807284855e-06,
      "loss": 0.8033,
      "step": 60
    },
    {
      "epoch": 0.905940594059406,
      "grad_norm": 0.810414558269581,
      "learning_rate": 8.83022221559489e-06,
      "loss": 0.8495,
      "step": 61
    },
    {
      "epoch": 0.9207920792079208,
      "grad_norm": 0.8092632573192287,
      "learning_rate": 8.773547901113862e-06,
      "loss": 0.7934,
      "step": 62
    },
    {
      "epoch": 0.9356435643564357,
      "grad_norm": 0.6595329129270622,
      "learning_rate": 8.715724127386971e-06,
      "loss": 0.8037,
      "step": 63
    },
    {
      "epoch": 0.9504950495049505,
      "grad_norm": 0.8763924940559503,
      "learning_rate": 8.656768508095853e-06,
      "loss": 0.8148,
      "step": 64
    },
    {
      "epoch": 0.9653465346534653,
      "grad_norm": 0.7170557641262976,
      "learning_rate": 8.596699001693257e-06,
      "loss": 0.8634,
      "step": 65
    },
    {
      "epoch": 0.9801980198019802,
      "grad_norm": 0.7120219047413958,
      "learning_rate": 8.535533905932739e-06,
      "loss": 0.8066,
      "step": 66
    },
    {
      "epoch": 0.995049504950495,
      "grad_norm": 0.7743156368261691,
      "learning_rate": 8.473291852294986e-06,
      "loss": 0.7911,
      "step": 67
    },
    {
      "epoch": 1.00990099009901,
      "grad_norm": 1.3350443160879135,
      "learning_rate": 8.409991800312493e-06,
      "loss": 1.252,
      "step": 68
    },
    {
      "epoch": 1.0247524752475248,
      "grad_norm": 0.7990302178349659,
      "learning_rate": 8.345653031794292e-06,
      "loss": 0.8425,
      "step": 69
    },
    {
      "epoch": 1.0396039603960396,
      "grad_norm": 0.6239804634649714,
      "learning_rate": 8.280295144952537e-06,
      "loss": 0.6491,
      "step": 70
    },
    {
      "epoch": 1.0544554455445545,
      "grad_norm": 0.6694927788559727,
      "learning_rate": 8.213938048432697e-06,
      "loss": 0.7202,
      "step": 71
    },
    {
      "epoch": 1.0693069306930694,
      "grad_norm": 0.7432686527943043,
      "learning_rate": 8.146601955249187e-06,
      "loss": 0.741,
      "step": 72
    },
    {
      "epoch": 1.0841584158415842,
      "grad_norm": 0.6416160685114952,
      "learning_rate": 8.078307376628292e-06,
      "loss": 0.8075,
      "step": 73
    },
    {
      "epoch": 1.099009900990099,
      "grad_norm": 0.698781285729984,
      "learning_rate": 8.009075115760243e-06,
      "loss": 0.6919,
      "step": 74
    },
    {
      "epoch": 1.113861386138614,
      "grad_norm": 0.6958006649391136,
      "learning_rate": 7.938926261462366e-06,
      "loss": 0.6921,
      "step": 75
    },
    {
      "epoch": 1.1287128712871288,
      "grad_norm": 0.6237956171364151,
      "learning_rate": 7.86788218175523e-06,
      "loss": 0.7287,
      "step": 76
    },
    {
      "epoch": 1.1435643564356435,
      "grad_norm": 0.7252061300181446,
      "learning_rate": 7.795964517353734e-06,
      "loss": 0.8264,
      "step": 77
    },
    {
      "epoch": 1.1584158415841583,
      "grad_norm": 0.674352802334064,
      "learning_rate": 7.723195175075136e-06,
      "loss": 0.6763,
      "step": 78
    },
    {
      "epoch": 1.1732673267326732,
      "grad_norm": 0.6546358450685043,
      "learning_rate": 7.649596321166024e-06,
      "loss": 0.7588,
      "step": 79
    },
    {
      "epoch": 1.188118811881188,
      "grad_norm": 0.6479565965693698,
      "learning_rate": 7.575190374550272e-06,
      "loss": 0.7277,
      "step": 80
    },
    {
      "epoch": 1.202970297029703,
      "grad_norm": 0.6626458405633724,
      "learning_rate": 7.500000000000001e-06,
      "loss": 0.7174,
      "step": 81
    },
    {
      "epoch": 1.2178217821782178,
      "grad_norm": 0.6756782160293404,
      "learning_rate": 7.424048101231687e-06,
      "loss": 0.7548,
      "step": 82
    },
    {
      "epoch": 1.2326732673267327,
      "grad_norm": 0.808652221633845,
      "learning_rate": 7.347357813929455e-06,
      "loss": 0.7933,
      "step": 83
    },
    {
      "epoch": 1.2475247524752475,
      "grad_norm": 0.573811498550998,
      "learning_rate": 7.269952498697734e-06,
      "loss": 0.6931,
      "step": 84
    },
    {
      "epoch": 1.2623762376237624,
      "grad_norm": 0.646918318906572,
      "learning_rate": 7.191855733945388e-06,
      "loss": 0.7785,
      "step": 85
    },
    {
      "epoch": 1.2772277227722773,
      "grad_norm": 0.5958671013573899,
      "learning_rate": 7.113091308703498e-06,
      "loss": 0.709,
      "step": 86
    },
    {
      "epoch": 1.2920792079207921,
      "grad_norm": 0.7165583961725527,
      "learning_rate": 7.033683215379002e-06,
      "loss": 0.6866,
      "step": 87
    },
    {
      "epoch": 1.306930693069307,
      "grad_norm": 0.8848970604430635,
      "learning_rate": 6.953655642446368e-06,
      "loss": 0.7576,
      "step": 88
    },
    {
      "epoch": 1.3217821782178218,
      "grad_norm": 0.6315927520470037,
      "learning_rate": 6.873032967079562e-06,
      "loss": 0.7615,
      "step": 89
    },
    {
      "epoch": 1.3366336633663367,
      "grad_norm": 0.6472208368826241,
      "learning_rate": 6.7918397477265e-06,
      "loss": 0.7158,
      "step": 90
    },
    {
      "epoch": 1.3514851485148514,
      "grad_norm": 0.8136088626747461,
      "learning_rate": 6.710100716628345e-06,
      "loss": 0.8109,
      "step": 91
    },
    {
      "epoch": 1.3663366336633662,
      "grad_norm": 0.6789106969356933,
      "learning_rate": 6.627840772285784e-06,
      "loss": 0.6553,
      "step": 92
    },
    {
      "epoch": 1.381188118811881,
      "grad_norm": 0.6684192399196682,
      "learning_rate": 6.545084971874738e-06,
      "loss": 0.7604,
      "step": 93
    },
    {
      "epoch": 1.396039603960396,
      "grad_norm": 0.6470872320361255,
      "learning_rate": 6.461858523613684e-06,
      "loss": 0.7548,
      "step": 94
    },
    {
      "epoch": 1.4108910891089108,
      "grad_norm": 0.6486567165728302,
      "learning_rate": 6.378186779084996e-06,
      "loss": 0.7484,
      "step": 95
    },
    {
      "epoch": 1.4257425742574257,
      "grad_norm": 0.8461471561359525,
      "learning_rate": 6.294095225512604e-06,
      "loss": 0.7523,
      "step": 96
    },
    {
      "epoch": 1.4405940594059405,
      "grad_norm": 0.6814439448544345,
      "learning_rate": 6.209609477998339e-06,
      "loss": 0.7334,
      "step": 97
    },
    {
      "epoch": 1.4554455445544554,
      "grad_norm": 0.5328496145736351,
      "learning_rate": 6.124755271719326e-06,
      "loss": 0.6584,
      "step": 98
    },
    {
      "epoch": 1.4702970297029703,
      "grad_norm": 0.680813767822209,
      "learning_rate": 6.039558454088796e-06,
      "loss": 0.8677,
      "step": 99
    },
    {
      "epoch": 1.4851485148514851,
      "grad_norm": 0.7359886080900357,
      "learning_rate": 5.954044976882725e-06,
      "loss": 0.7591,
      "step": 100
    },
    {
      "epoch": 1.5,
      "grad_norm": 0.6483808901598864,
      "learning_rate": 5.8682408883346535e-06,
      "loss": 0.7024,
      "step": 101
    },
    {
      "epoch": 1.5148514851485149,
      "grad_norm": 0.573259352808237,
      "learning_rate": 5.782172325201155e-06,
      "loss": 0.7647,
      "step": 102
    },
    {
      "epoch": 1.5297029702970297,
      "grad_norm": 0.620232903115091,
      "learning_rate": 5.695865504800328e-06,
      "loss": 0.746,
      "step": 103
    },
    {
      "epoch": 1.5445544554455446,
      "grad_norm": 0.6008704350145148,
      "learning_rate": 5.609346717025738e-06,
      "loss": 0.7653,
      "step": 104
    },
    {
      "epoch": 1.5594059405940595,
      "grad_norm": 0.6651231967169247,
      "learning_rate": 5.522642316338268e-06,
      "loss": 0.7284,
      "step": 105
    },
    {
      "epoch": 1.5742574257425743,
      "grad_norm": 0.6508987525775917,
      "learning_rate": 5.435778713738292e-06,
      "loss": 0.7081,
      "step": 106
    },
    {
      "epoch": 1.5891089108910892,
      "grad_norm": 0.6668715575162661,
      "learning_rate": 5.348782368720627e-06,
      "loss": 0.7653,
      "step": 107
    },
    {
      "epoch": 1.603960396039604,
      "grad_norm": 0.5993279825702773,
      "learning_rate": 5.2616797812147205e-06,
      "loss": 0.6936,
      "step": 108
    },
    {
      "epoch": 1.618811881188119,
      "grad_norm": 0.6642916869660335,
      "learning_rate": 5.174497483512506e-06,
      "loss": 0.6781,
      "step": 109
    },
    {
      "epoch": 1.6336633663366338,
      "grad_norm": 0.666238358663729,
      "learning_rate": 5.087262032186418e-06,
      "loss": 0.7442,
      "step": 110
    },
    {
      "epoch": 1.6485148514851486,
      "grad_norm": 0.5597421651434128,
      "learning_rate": 5e-06,
      "loss": 0.781,
      "step": 111
    },
    {
      "epoch": 1.6633663366336635,
      "grad_norm": 0.5707641758576157,
      "learning_rate": 4.9127379678135825e-06,
      "loss": 0.6567,
      "step": 112
    },
    {
      "epoch": 1.6782178217821784,
      "grad_norm": 0.7209030283118137,
      "learning_rate": 4.825502516487497e-06,
      "loss": 0.8752,
      "step": 113
    },
    {
      "epoch": 1.693069306930693,
      "grad_norm": 0.6674679363110967,
      "learning_rate": 4.738320218785281e-06,
      "loss": 0.6735,
      "step": 114
    },
    {
      "epoch": 1.7079207920792079,
      "grad_norm": 0.5604986801350424,
      "learning_rate": 4.651217631279374e-06,
      "loss": 0.6056,
      "step": 115
    },
    {
      "epoch": 1.7227722772277227,
      "grad_norm": 0.6669131750643947,
      "learning_rate": 4.564221286261709e-06,
      "loss": 0.7485,
      "step": 116
    },
    {
      "epoch": 1.7376237623762376,
      "grad_norm": 0.64731110152955,
      "learning_rate": 4.477357683661734e-06,
      "loss": 0.7505,
      "step": 117
    },
    {
      "epoch": 1.7524752475247525,
      "grad_norm": 0.6087298446862004,
      "learning_rate": 4.390653282974264e-06,
      "loss": 0.7236,
      "step": 118
    },
    {
      "epoch": 1.7673267326732673,
      "grad_norm": 0.605354758035472,
      "learning_rate": 4.304134495199675e-06,
      "loss": 0.7576,
      "step": 119
    },
    {
      "epoch": 1.7821782178217822,
      "grad_norm": 0.592121550404726,
      "learning_rate": 4.217827674798845e-06,
      "loss": 0.6612,
      "step": 120
    },
    {
      "epoch": 1.797029702970297,
      "grad_norm": 0.583998747608208,
      "learning_rate": 4.131759111665349e-06,
      "loss": 0.8031,
      "step": 121
    },
    {
      "epoch": 1.811881188118812,
      "grad_norm": 0.5401381825177095,
      "learning_rate": 4.045955023117276e-06,
      "loss": 0.7083,
      "step": 122
    },
    {
      "epoch": 1.8267326732673266,
      "grad_norm": 0.5505024887448378,
      "learning_rate": 3.960441545911205e-06,
      "loss": 0.6603,
      "step": 123
    },
    {
      "epoch": 1.8415841584158414,
      "grad_norm": 0.5807490002016577,
      "learning_rate": 3.875244728280676e-06,
      "loss": 0.7428,
      "step": 124
    },
    {
      "epoch": 1.8564356435643563,
      "grad_norm": 0.5775296918472558,
      "learning_rate": 3.790390522001662e-06,
      "loss": 0.7071,
      "step": 125
    },
    {
      "epoch": 1.8712871287128712,
      "grad_norm": 0.5618797996455542,
      "learning_rate": 3.705904774487396e-06,
      "loss": 0.7738,
      "step": 126
    },
    {
      "epoch": 1.886138613861386,
      "grad_norm": 0.5612544221604727,
      "learning_rate": 3.6218132209150047e-06,
      "loss": 0.7353,
      "step": 127
    },
    {
      "epoch": 1.900990099009901,
      "grad_norm": 0.5269006078917019,
      "learning_rate": 3.538141476386317e-06,
      "loss": 0.7185,
      "step": 128
    },
    {
      "epoch": 1.9158415841584158,
      "grad_norm": 0.5442468804329766,
      "learning_rate": 3.4549150281252635e-06,
      "loss": 0.696,
      "step": 129
    },
    {
      "epoch": 1.9306930693069306,
      "grad_norm": 0.5960660751056697,
      "learning_rate": 3.372159227714218e-06,
      "loss": 0.7649,
      "step": 130
    },
    {
      "epoch": 1.9455445544554455,
      "grad_norm": 0.4969333687228696,
      "learning_rate": 3.289899283371657e-06,
      "loss": 0.7024,
      "step": 131
    },
    {
      "epoch": 1.9603960396039604,
      "grad_norm": 0.5553043936209708,
      "learning_rate": 3.2081602522734987e-06,
      "loss": 0.823,
      "step": 132
    },
    {
      "epoch": 1.9752475247524752,
      "grad_norm": 0.5681040781939833,
      "learning_rate": 3.12696703292044e-06,
      "loss": 0.7705,
      "step": 133
    },
    {
      "epoch": 1.99009900990099,
      "grad_norm": 0.6009985220517085,
      "learning_rate": 3.0463443575536324e-06,
      "loss": 0.7319,
      "step": 134
    },
    {
      "epoch": 2.004950495049505,
      "grad_norm": 1.2209353424526557,
      "learning_rate": 2.966316784621e-06,
      "loss": 0.9991,
      "step": 135
    },
    {
      "epoch": 2.01980198019802,
      "grad_norm": 0.6663865953806991,
      "learning_rate": 2.886908691296504e-06,
      "loss": 0.7344,
      "step": 136
    },
    {
      "epoch": 2.0346534653465347,
      "grad_norm": 0.6515234599013568,
      "learning_rate": 2.8081442660546126e-06,
      "loss": 0.6747,
      "step": 137
    },
    {
      "epoch": 2.0495049504950495,
      "grad_norm": 0.5730091194025813,
      "learning_rate": 2.7300475013022666e-06,
      "loss": 0.603,
      "step": 138
    },
    {
      "epoch": 2.0643564356435644,
      "grad_norm": 0.5406638850651563,
      "learning_rate": 2.6526421860705474e-06,
      "loss": 0.7451,
      "step": 139
    },
    {
      "epoch": 2.0792079207920793,
      "grad_norm": 0.5841557122529807,
      "learning_rate": 2.5759518987683154e-06,
      "loss": 0.7037,
      "step": 140
    },
    {
      "epoch": 2.094059405940594,
      "grad_norm": 0.5894024920902818,
      "learning_rate": 2.5000000000000015e-06,
      "loss": 0.6763,
      "step": 141
    },
    {
      "epoch": 2.108910891089109,
      "grad_norm": 0.6174804381821258,
      "learning_rate": 2.424809625449729e-06,
      "loss": 0.6384,
      "step": 142
    },
    {
      "epoch": 2.123762376237624,
      "grad_norm": 0.6119987306390974,
      "learning_rate": 2.3504036788339763e-06,
      "loss": 0.7545,
      "step": 143
    },
    {
      "epoch": 2.1386138613861387,
      "grad_norm": 0.5006999182134887,
      "learning_rate": 2.2768048249248648e-06,
      "loss": 0.5474,
      "step": 144
    },
    {
      "epoch": 2.1534653465346536,
      "grad_norm": 0.5375559180884052,
      "learning_rate": 2.204035482646267e-06,
      "loss": 0.6522,
      "step": 145
    },
    {
      "epoch": 2.1683168316831685,
      "grad_norm": 0.5708691567653545,
      "learning_rate": 2.132117818244771e-06,
      "loss": 0.6746,
      "step": 146
    },
    {
      "epoch": 2.1831683168316833,
      "grad_norm": 0.5360011345721072,
      "learning_rate": 2.061073738537635e-06,
      "loss": 0.6705,
      "step": 147
    },
    {
      "epoch": 2.198019801980198,
      "grad_norm": 0.6053709432824774,
      "learning_rate": 1.990924884239758e-06,
      "loss": 0.7101,
      "step": 148
    },
    {
      "epoch": 2.212871287128713,
      "grad_norm": 0.5531801803062121,
      "learning_rate": 1.9216926233717087e-06,
      "loss": 0.6503,
      "step": 149
    },
    {
      "epoch": 2.227722772277228,
      "grad_norm": 0.5761743172160733,
      "learning_rate": 1.8533980447508138e-06,
      "loss": 0.6597,
      "step": 150
    },
    {
      "epoch": 2.2425742574257423,
      "grad_norm": 0.5920189276988723,
      "learning_rate": 1.7860619515673034e-06,
      "loss": 0.7937,
      "step": 151
    },
    {
      "epoch": 2.2574257425742577,
      "grad_norm": 0.4711557273037673,
      "learning_rate": 1.7197048550474643e-06,
      "loss": 0.574,
      "step": 152
    },
    {
      "epoch": 2.272277227722772,
      "grad_norm": 0.5323981360953441,
      "learning_rate": 1.6543469682057105e-06,
      "loss": 0.6766,
      "step": 153
    },
    {
      "epoch": 2.287128712871287,
      "grad_norm": 0.5677075588486846,
      "learning_rate": 1.5900081996875083e-06,
      "loss": 0.7836,
      "step": 154
    },
    {
      "epoch": 2.301980198019802,
      "grad_norm": 0.495711192365763,
      "learning_rate": 1.5267081477050132e-06,
      "loss": 0.5805,
      "step": 155
    },
    {
      "epoch": 2.3168316831683167,
      "grad_norm": 0.5019129834363211,
      "learning_rate": 1.4644660940672628e-06,
      "loss": 0.6805,
      "step": 156
    },
    {
      "epoch": 2.3316831683168315,
      "grad_norm": 0.5924600811693187,
      "learning_rate": 1.4033009983067454e-06,
      "loss": 0.6958,
      "step": 157
    },
    {
      "epoch": 2.3465346534653464,
      "grad_norm": 0.5679498872247285,
      "learning_rate": 1.3432314919041478e-06,
      "loss": 0.7157,
      "step": 158
    },
    {
      "epoch": 2.3613861386138613,
      "grad_norm": 0.5378653771399984,
      "learning_rate": 1.2842758726130283e-06,
      "loss": 0.6421,
      "step": 159
    },
    {
      "epoch": 2.376237623762376,
      "grad_norm": 0.5716411245552344,
      "learning_rate": 1.22645209888614e-06,
      "loss": 0.7546,
      "step": 160
    },
    {
      "epoch": 2.391089108910891,
      "grad_norm": 0.5031816053120705,
      "learning_rate": 1.1697777844051105e-06,
      "loss": 0.7018,
      "step": 161
    },
    {
      "epoch": 2.405940594059406,
      "grad_norm": 0.46660397121098096,
      "learning_rate": 1.1142701927151456e-06,
      "loss": 0.639,
      "step": 162
    },
    {
      "epoch": 2.4207920792079207,
      "grad_norm": 0.5135926843254873,
      "learning_rate": 1.0599462319663906e-06,
      "loss": 0.7345,
      "step": 163
    },
    {
      "epoch": 2.4356435643564356,
      "grad_norm": 0.5145225601719905,
      "learning_rate": 1.006822449763537e-06,
      "loss": 0.6413,
      "step": 164
    },
    {
      "epoch": 2.4504950495049505,
      "grad_norm": 0.4781710026536871,
      "learning_rate": 9.549150281252633e-07,
      "loss": 0.6647,
      "step": 165
    },
    {
      "epoch": 2.4653465346534653,
      "grad_norm": 0.48799619696609925,
      "learning_rate": 9.042397785550405e-07,
      "loss": 0.7179,
      "step": 166
    },
    {
      "epoch": 2.48019801980198,
      "grad_norm": 0.488756423169197,
      "learning_rate": 8.54812137224792e-07,
      "loss": 0.6727,
      "step": 167
    },
    {
      "epoch": 2.495049504950495,
      "grad_norm": 0.47225504186982886,
      "learning_rate": 8.066471602728804e-07,
      "loss": 0.7016,
      "step": 168
    },
    {
      "epoch": 2.50990099009901,
      "grad_norm": 0.4884739098762309,
      "learning_rate": 7.597595192178702e-07,
      "loss": 0.7249,
      "step": 169
    },
    {
      "epoch": 2.5247524752475248,
      "grad_norm": 0.470588495873427,
      "learning_rate": 7.141634964894389e-07,
      "loss": 0.6672,
      "step": 170
    },
    {
      "epoch": 2.5396039603960396,
      "grad_norm": 0.4673487289438152,
      "learning_rate": 6.698729810778065e-07,
      "loss": 0.6774,
      "step": 171
    },
    {
      "epoch": 2.5544554455445545,
      "grad_norm": 0.4635764685120432,
      "learning_rate": 6.269014643030214e-07,
      "loss": 0.679,
      "step": 172
    },
    {
      "epoch": 2.5693069306930694,
      "grad_norm": 0.4956166674866954,
      "learning_rate": 5.852620357053651e-07,
      "loss": 0.6947,
      "step": 173
    },
    {
      "epoch": 2.5841584158415842,
      "grad_norm": 0.4817833751685946,
      "learning_rate": 5.449673790581611e-07,
      "loss": 0.6292,
      "step": 174
    },
    {
      "epoch": 2.599009900990099,
      "grad_norm": 0.5033414960012568,
      "learning_rate": 5.06029768504166e-07,
      "loss": 0.6521,
      "step": 175
    },
    {
      "epoch": 2.613861386138614,
      "grad_norm": 0.4826152038387741,
      "learning_rate": 4.6846106481675035e-07,
      "loss": 0.6345,
      "step": 176
    },
    {
      "epoch": 2.628712871287129,
      "grad_norm": 0.5148110561313826,
      "learning_rate": 4.322727117869951e-07,
      "loss": 0.7187,
      "step": 177
    },
    {
      "epoch": 2.6435643564356437,
      "grad_norm": 0.520468554829645,
      "learning_rate": 3.9747573273779816e-07,
      "loss": 0.6922,
      "step": 178
    },
    {
      "epoch": 2.6584158415841586,
      "grad_norm": 0.48040006877742775,
      "learning_rate": 3.6408072716606346e-07,
      "loss": 0.6394,
      "step": 179
    },
    {
      "epoch": 2.6732673267326734,
      "grad_norm": 0.49771916703934643,
      "learning_rate": 3.320978675139919e-07,
      "loss": 0.6555,
      "step": 180
    },
    {
      "epoch": 2.6881188118811883,
      "grad_norm": 0.4985224870159162,
      "learning_rate": 3.015368960704584e-07,
      "loss": 0.6621,
      "step": 181
    },
    {
      "epoch": 2.7029702970297027,
      "grad_norm": 0.5032558199370275,
      "learning_rate": 2.724071220034158e-07,
      "loss": 0.7285,
      "step": 182
    },
    {
      "epoch": 2.717821782178218,
      "grad_norm": 0.4881972665265835,
      "learning_rate": 2.447174185242324e-07,
      "loss": 0.6426,
      "step": 183
    },
    {
      "epoch": 2.7326732673267324,
      "grad_norm": 0.4828031422366926,
      "learning_rate": 2.1847622018482283e-07,
      "loss": 0.6782,
      "step": 184
    },
    {
      "epoch": 2.7475247524752477,
      "grad_norm": 0.47484851901453023,
      "learning_rate": 1.9369152030840553e-07,
      "loss": 0.6326,
      "step": 185
    },
    {
      "epoch": 2.762376237623762,
      "grad_norm": 0.5311388807144789,
      "learning_rate": 1.7037086855465902e-07,
      "loss": 0.7419,
      "step": 186
    },
    {
      "epoch": 2.7772277227722775,
      "grad_norm": 0.4614470612824122,
      "learning_rate": 1.4852136862001766e-07,
      "loss": 0.6123,
      "step": 187
    },
    {
      "epoch": 2.792079207920792,
      "grad_norm": 0.48962299396889825,
      "learning_rate": 1.2814967607382433e-07,
      "loss": 0.7775,
      "step": 188
    },
    {
      "epoch": 2.806930693069307,
      "grad_norm": 0.5121698180749854,
      "learning_rate": 1.0926199633097156e-07,
      "loss": 0.6253,
      "step": 189
    },
    {
      "epoch": 2.8217821782178216,
      "grad_norm": 0.5454806153588347,
      "learning_rate": 9.186408276168012e-08,
      "loss": 0.6817,
      "step": 190
    },
    {
      "epoch": 2.8366336633663365,
      "grad_norm": 0.47926294627215255,
      "learning_rate": 7.59612349389599e-08,
      "loss": 0.6702,
      "step": 191
    },
    {
      "epoch": 2.8514851485148514,
      "grad_norm": 0.488962969794833,
      "learning_rate": 6.15582970243117e-08,
      "loss": 0.6687,
      "step": 192
    },
    {
      "epoch": 2.866336633663366,
      "grad_norm": 0.5042360939591695,
      "learning_rate": 4.865965629214819e-08,
      "loss": 0.6828,
      "step": 193
    },
    {
      "epoch": 2.881188118811881,
      "grad_norm": 0.4737639717650882,
      "learning_rate": 3.726924179339009e-08,
      "loss": 0.6109,
      "step": 194
    },
    {
      "epoch": 2.896039603960396,
      "grad_norm": 0.5367048842660397,
      "learning_rate": 2.7390523158633552e-08,
      "loss": 0.6467,
      "step": 195
    },
    {
      "epoch": 2.910891089108911,
      "grad_norm": 0.5043896642226987,
      "learning_rate": 1.9026509541272276e-08,
      "loss": 0.7169,
      "step": 196
    },
    {
      "epoch": 2.9257425742574257,
      "grad_norm": 0.4791625292476249,
      "learning_rate": 1.2179748700879013e-08,
      "loss": 0.7351,
      "step": 197
    },
    {
      "epoch": 2.9405940594059405,
      "grad_norm": 0.5102082840215327,
      "learning_rate": 6.852326227130835e-09,
      "loss": 0.6391,
      "step": 198
    },
    {
      "epoch": 2.9554455445544554,
      "grad_norm": 0.4865097004583583,
      "learning_rate": 3.0458649045211897e-09,
      "loss": 0.6674,
      "step": 199
    },
    {
      "epoch": 2.9702970297029703,
      "grad_norm": 0.5133741423875121,
      "learning_rate": 7.615242180436521e-10,
      "loss": 0.7427,
      "step": 200
    },
    {
      "epoch": 2.985148514851485,
      "grad_norm": 0.4923182359179608,
      "learning_rate": 0.0,
      "loss": 0.595,
      "step": 201
    },
    {
      "epoch": 2.985148514851485,
      "step": 201,
      "total_flos": 138721009057792.0,
      "train_loss": 0.7744821606583856,
      "train_runtime": 2765.7022,
      "train_samples_per_second": 6.994,
      "train_steps_per_second": 0.073
    }
  ],
  "logging_steps": 1,
  "max_steps": 201,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 3,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 138721009057792.0,
  "train_batch_size": 1,
  "trial_name": null,
  "trial_params": null
}