{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 7.0,
  "eval_steps": 500,
  "global_step": 231,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.030303030303030304,
      "grad_norm": 5.792642134780677,
      "learning_rate": 8.333333333333333e-07,
      "loss": 0.9515,
      "step": 1
    },
    {
      "epoch": 0.06060606060606061,
      "grad_norm": 6.382571164842765,
      "learning_rate": 1.6666666666666667e-06,
      "loss": 1.0119,
      "step": 2
    },
    {
      "epoch": 0.09090909090909091,
      "grad_norm": 6.02036402240374,
      "learning_rate": 2.5e-06,
      "loss": 0.9728,
      "step": 3
    },
    {
      "epoch": 0.12121212121212122,
      "grad_norm": 5.818167379345609,
      "learning_rate": 3.3333333333333333e-06,
      "loss": 1.0002,
      "step": 4
    },
    {
      "epoch": 0.15151515151515152,
      "grad_norm": 4.482978716814255,
      "learning_rate": 4.166666666666667e-06,
      "loss": 0.9005,
      "step": 5
    },
    {
      "epoch": 0.18181818181818182,
      "grad_norm": 2.2752195885468782,
      "learning_rate": 5e-06,
      "loss": 0.8823,
      "step": 6
    },
    {
      "epoch": 0.21212121212121213,
      "grad_norm": 2.007755284237358,
      "learning_rate": 5.833333333333334e-06,
      "loss": 0.8801,
      "step": 7
    },
    {
      "epoch": 0.24242424242424243,
      "grad_norm": 3.9023239936341967,
      "learning_rate": 6.666666666666667e-06,
      "loss": 0.8614,
      "step": 8
    },
    {
      "epoch": 0.2727272727272727,
      "grad_norm": 4.581155266714506,
      "learning_rate": 7.500000000000001e-06,
      "loss": 0.8856,
      "step": 9
    },
    {
      "epoch": 0.30303030303030304,
      "grad_norm": 4.4563639683308,
      "learning_rate": 8.333333333333334e-06,
      "loss": 0.8762,
      "step": 10
    },
    {
      "epoch": 0.3333333333333333,
      "grad_norm": 4.036284479049859,
      "learning_rate": 9.166666666666666e-06,
      "loss": 0.8418,
      "step": 11
    },
    {
      "epoch": 0.36363636363636365,
      "grad_norm": 3.5386084914228837,
      "learning_rate": 1e-05,
      "loss": 0.838,
      "step": 12
    },
    {
      "epoch": 0.3939393939393939,
      "grad_norm": 2.8027022946203317,
      "learning_rate": 1.0833333333333334e-05,
      "loss": 0.8285,
      "step": 13
    },
    {
      "epoch": 0.42424242424242425,
      "grad_norm": 1.9204210335828813,
      "learning_rate": 1.1666666666666668e-05,
      "loss": 0.7153,
      "step": 14
    },
    {
      "epoch": 0.45454545454545453,
      "grad_norm": 2.3107282485007232,
      "learning_rate": 1.25e-05,
      "loss": 0.7976,
      "step": 15
    },
    {
      "epoch": 0.48484848484848486,
      "grad_norm": 2.320207582587203,
      "learning_rate": 1.3333333333333333e-05,
      "loss": 0.7645,
      "step": 16
    },
    {
      "epoch": 0.5151515151515151,
      "grad_norm": 1.990131101081182,
      "learning_rate": 1.416666666666667e-05,
      "loss": 0.752,
      "step": 17
    },
    {
      "epoch": 0.5454545454545454,
      "grad_norm": 1.522500998675586,
      "learning_rate": 1.5000000000000002e-05,
      "loss": 0.7225,
      "step": 18
    },
    {
      "epoch": 0.5757575757575758,
      "grad_norm": 1.3644809685735186,
      "learning_rate": 1.5833333333333333e-05,
      "loss": 0.6978,
      "step": 19
    },
    {
      "epoch": 0.6060606060606061,
      "grad_norm": 1.5223710023668546,
      "learning_rate": 1.6666666666666667e-05,
      "loss": 0.7086,
      "step": 20
    },
    {
      "epoch": 0.6363636363636364,
      "grad_norm": 1.3179520412189247,
      "learning_rate": 1.7500000000000002e-05,
      "loss": 0.7108,
      "step": 21
    },
    {
      "epoch": 0.6666666666666666,
      "grad_norm": 1.032339489893466,
      "learning_rate": 1.8333333333333333e-05,
      "loss": 0.7159,
      "step": 22
    },
    {
      "epoch": 0.696969696969697,
      "grad_norm": 1.2588368994602428,
      "learning_rate": 1.916666666666667e-05,
      "loss": 0.7061,
      "step": 23
    },
    {
      "epoch": 0.7272727272727273,
      "grad_norm": 0.901121939524265,
      "learning_rate": 2e-05,
      "loss": 0.6796,
      "step": 24
    },
    {
      "epoch": 0.7575757575757576,
      "grad_norm": 0.7528128278120151,
      "learning_rate": 1.999884834944106e-05,
      "loss": 0.6993,
      "step": 25
    },
    {
      "epoch": 0.7878787878787878,
      "grad_norm": 0.9518503285882077,
      "learning_rate": 1.9995393663024054e-05,
      "loss": 0.6822,
      "step": 26
    },
    {
      "epoch": 0.8181818181818182,
      "grad_norm": 0.7573600517508814,
      "learning_rate": 1.9989636736467278e-05,
      "loss": 0.6453,
      "step": 27
    },
    {
      "epoch": 0.8484848484848485,
      "grad_norm": 0.9702144938750634,
      "learning_rate": 1.9981578895764272e-05,
      "loss": 0.6884,
      "step": 28
    },
    {
      "epoch": 0.8787878787878788,
      "grad_norm": 0.6893913942952646,
      "learning_rate": 1.9971221996878395e-05,
      "loss": 0.6516,
      "step": 29
    },
    {
      "epoch": 0.9090909090909091,
      "grad_norm": 0.8481568719894347,
      "learning_rate": 1.9958568425315316e-05,
      "loss": 0.6851,
      "step": 30
    },
    {
      "epoch": 0.9393939393939394,
      "grad_norm": 0.6860057833899889,
      "learning_rate": 1.9943621095573588e-05,
      "loss": 0.6661,
      "step": 31
    },
    {
      "epoch": 0.9696969696969697,
      "grad_norm": 0.6698001122649556,
      "learning_rate": 1.9926383450473344e-05,
      "loss": 0.6725,
      "step": 32
    },
    {
      "epoch": 1.0,
      "grad_norm": 0.6344887887916567,
      "learning_rate": 1.9906859460363307e-05,
      "loss": 0.6535,
      "step": 33
    },
    {
      "epoch": 1.0303030303030303,
      "grad_norm": 0.6820742340475162,
      "learning_rate": 1.9885053622206305e-05,
      "loss": 0.5658,
      "step": 34
    },
    {
      "epoch": 1.0606060606060606,
      "grad_norm": 0.6198289923893283,
      "learning_rate": 1.986097095854347e-05,
      "loss": 0.5686,
      "step": 35
    },
    {
      "epoch": 1.0909090909090908,
      "grad_norm": 0.6329798983022055,
      "learning_rate": 1.9834617016337424e-05,
      "loss": 0.5851,
      "step": 36
    },
    {
      "epoch": 1.121212121212121,
      "grad_norm": 0.7775710564857078,
      "learning_rate": 1.9805997865694616e-05,
      "loss": 0.5805,
      "step": 37
    },
    {
      "epoch": 1.1515151515151516,
      "grad_norm": 0.5892214749020198,
      "learning_rate": 1.9775120098467212e-05,
      "loss": 0.5727,
      "step": 38
    },
    {
      "epoch": 1.1818181818181819,
      "grad_norm": 0.7142145316951859,
      "learning_rate": 1.9741990826734793e-05,
      "loss": 0.5988,
      "step": 39
    },
    {
      "epoch": 1.2121212121212122,
      "grad_norm": 0.7240893818720315,
      "learning_rate": 1.970661768116622e-05,
      "loss": 0.6047,
      "step": 40
    },
    {
      "epoch": 1.2424242424242424,
      "grad_norm": 1.3045892213155967,
      "learning_rate": 1.9669008809262064e-05,
      "loss": 0.5963,
      "step": 41
    },
    {
      "epoch": 1.2727272727272727,
      "grad_norm": 1.570873477257169,
      "learning_rate": 1.9629172873477995e-05,
      "loss": 0.5686,
      "step": 42
    },
    {
      "epoch": 1.303030303030303,
      "grad_norm": 0.8582320942548944,
      "learning_rate": 1.9587119049229558e-05,
      "loss": 0.5732,
      "step": 43
    },
    {
      "epoch": 1.3333333333333333,
      "grad_norm": 0.6072012475804945,
      "learning_rate": 1.954285702277879e-05,
      "loss": 0.5893,
      "step": 44
    },
    {
      "epoch": 1.3636363636363638,
      "grad_norm": 0.746084738873446,
      "learning_rate": 1.9496396989003195e-05,
      "loss": 0.5564,
      "step": 45
    },
    {
      "epoch": 1.393939393939394,
      "grad_norm": 0.7519606714903169,
      "learning_rate": 1.944774964904754e-05,
      "loss": 0.5808,
      "step": 46
    },
    {
      "epoch": 1.4242424242424243,
      "grad_norm": 2.5218024682528775,
      "learning_rate": 1.9396926207859085e-05,
      "loss": 0.6085,
      "step": 47
    },
    {
      "epoch": 1.4545454545454546,
      "grad_norm": 0.7953052370583591,
      "learning_rate": 1.9343938371606714e-05,
      "loss": 0.5248,
      "step": 48
    },
    {
      "epoch": 1.4848484848484849,
      "grad_norm": 0.6817458558899578,
      "learning_rate": 1.9288798344984673e-05,
      "loss": 0.5508,
      "step": 49
    },
    {
      "epoch": 1.5151515151515151,
      "grad_norm": 0.7926918339374145,
      "learning_rate": 1.9231518828401458e-05,
      "loss": 0.577,
      "step": 50
    },
    {
      "epoch": 1.5454545454545454,
      "grad_norm": 0.8365570273338034,
      "learning_rate": 1.917211301505453e-05,
      "loss": 0.5833,
      "step": 51
    },
    {
      "epoch": 1.5757575757575757,
      "grad_norm": 0.6280132538822605,
      "learning_rate": 1.911059458789152e-05,
      "loss": 0.5598,
      "step": 52
    },
    {
      "epoch": 1.606060606060606,
      "grad_norm": 0.7504572682561476,
      "learning_rate": 1.9046977716458627e-05,
      "loss": 0.5543,
      "step": 53
    },
    {
      "epoch": 1.6363636363636362,
      "grad_norm": 0.6649032045086283,
      "learning_rate": 1.8981277053636963e-05,
      "loss": 0.5485,
      "step": 54
    },
    {
      "epoch": 1.6666666666666665,
      "grad_norm": 0.6158934656649441,
      "learning_rate": 1.891350773226754e-05,
      "loss": 0.552,
      "step": 55
    },
    {
      "epoch": 1.696969696969697,
      "grad_norm": 0.5578253751825053,
      "learning_rate": 1.8843685361665724e-05,
      "loss": 0.537,
      "step": 56
    },
    {
      "epoch": 1.7272727272727273,
      "grad_norm": 0.6329112649004469,
      "learning_rate": 1.8771826024025944e-05,
      "loss": 0.537,
      "step": 57
    },
    {
      "epoch": 1.7575757575757576,
      "grad_norm": 0.6850791050938211,
      "learning_rate": 1.8697946270717468e-05,
      "loss": 0.5955,
      "step": 58
    },
    {
      "epoch": 1.7878787878787878,
      "grad_norm": 0.6033750071365439,
      "learning_rate": 1.8622063118472135e-05,
      "loss": 0.5586,
      "step": 59
    },
    {
      "epoch": 1.8181818181818183,
      "grad_norm": 0.673718575643345,
      "learning_rate": 1.8544194045464888e-05,
      "loss": 0.5666,
      "step": 60
    },
    {
      "epoch": 1.8484848484848486,
      "grad_norm": 0.5624746929294268,
      "learning_rate": 1.8464356987288012e-05,
      "loss": 0.5598,
      "step": 61
    },
    {
      "epoch": 1.878787878787879,
      "grad_norm": 0.6211999431525035,
      "learning_rate": 1.8382570332820045e-05,
      "loss": 0.5824,
      "step": 62
    },
    {
      "epoch": 1.9090909090909092,
      "grad_norm": 0.4958772347998872,
      "learning_rate": 1.8298852919990254e-05,
      "loss": 0.5464,
      "step": 63
    },
    {
      "epoch": 1.9393939393939394,
      "grad_norm": 0.534752866759772,
      "learning_rate": 1.821322403143969e-05,
      "loss": 0.5312,
      "step": 64
    },
    {
      "epoch": 1.9696969696969697,
      "grad_norm": 0.5621388930667433,
      "learning_rate": 1.812570339007983e-05,
      "loss": 0.5498,
      "step": 65
    },
    {
      "epoch": 2.0,
      "grad_norm": 0.5381122674752019,
      "learning_rate": 1.8036311154549783e-05,
      "loss": 0.5427,
      "step": 66
    },
    {
      "epoch": 2.0303030303030303,
      "grad_norm": 0.7578168001157617,
      "learning_rate": 1.7945067914573147e-05,
      "loss": 0.5067,
      "step": 67
    },
    {
      "epoch": 2.0606060606060606,
      "grad_norm": 0.6446804403966812,
      "learning_rate": 1.7851994686215592e-05,
      "loss": 0.4755,
      "step": 68
    },
    {
      "epoch": 2.090909090909091,
      "grad_norm": 0.6754071384034545,
      "learning_rate": 1.77571129070442e-05,
      "loss": 0.449,
      "step": 69
    },
    {
      "epoch": 2.121212121212121,
      "grad_norm": 0.7433159460035733,
      "learning_rate": 1.766044443118978e-05,
      "loss": 0.4546,
      "step": 70
    },
    {
      "epoch": 2.1515151515151514,
      "grad_norm": 0.6286732134939782,
      "learning_rate": 1.7562011524313187e-05,
      "loss": 0.4522,
      "step": 71
    },
    {
      "epoch": 2.1818181818181817,
      "grad_norm": 0.6931242528492747,
      "learning_rate": 1.7461836858476858e-05,
      "loss": 0.4363,
      "step": 72
    },
    {
      "epoch": 2.212121212121212,
      "grad_norm": 0.6228303093581359,
      "learning_rate": 1.7359943506922775e-05,
      "loss": 0.4394,
      "step": 73
    },
    {
      "epoch": 2.242424242424242,
      "grad_norm": 0.6566264031206333,
      "learning_rate": 1.725635493875799e-05,
      "loss": 0.4371,
      "step": 74
    },
    {
      "epoch": 2.2727272727272725,
      "grad_norm": 0.5927282396336451,
      "learning_rate": 1.7151095013548996e-05,
      "loss": 0.4303,
      "step": 75
    },
    {
      "epoch": 2.303030303030303,
      "grad_norm": 0.7264098258400549,
      "learning_rate": 1.7044187975826126e-05,
      "loss": 0.4378,
      "step": 76
    },
    {
      "epoch": 2.3333333333333335,
      "grad_norm": 0.5494574539456141,
      "learning_rate": 1.693565844949933e-05,
      "loss": 0.4364,
      "step": 77
    },
    {
      "epoch": 2.3636363636363638,
      "grad_norm": 0.586478823644888,
      "learning_rate": 1.6825531432186545e-05,
      "loss": 0.4187,
      "step": 78
    },
    {
      "epoch": 2.393939393939394,
      "grad_norm": 0.5934153390296679,
      "learning_rate": 1.671383228945597e-05,
      "loss": 0.4262,
      "step": 79
    },
    {
      "epoch": 2.4242424242424243,
      "grad_norm": 0.502863696614169,
      "learning_rate": 1.6600586748983642e-05,
      "loss": 0.4201,
      "step": 80
    },
    {
      "epoch": 2.4545454545454546,
      "grad_norm": 0.567123847809039,
      "learning_rate": 1.648582089462756e-05,
      "loss": 0.4397,
      "step": 81
    },
    {
      "epoch": 2.484848484848485,
      "grad_norm": 0.5616400197858715,
      "learning_rate": 1.6369561160419783e-05,
      "loss": 0.4339,
      "step": 82
    },
    {
      "epoch": 2.515151515151515,
      "grad_norm": 0.5155130770072307,
      "learning_rate": 1.625183432447789e-05,
      "loss": 0.4391,
      "step": 83
    },
    {
      "epoch": 2.5454545454545454,
      "grad_norm": 0.5529983074318092,
      "learning_rate": 1.6132667502837164e-05,
      "loss": 0.4264,
      "step": 84
    },
    {
      "epoch": 2.5757575757575757,
      "grad_norm": 0.5181135590344216,
      "learning_rate": 1.6012088143204953e-05,
      "loss": 0.4381,
      "step": 85
    },
    {
      "epoch": 2.606060606060606,
      "grad_norm": 0.500195563035196,
      "learning_rate": 1.589012401863864e-05,
      "loss": 0.411,
      "step": 86
    },
    {
      "epoch": 2.6363636363636362,
      "grad_norm": 0.6387564048552598,
      "learning_rate": 1.5766803221148676e-05,
      "loss": 0.406,
      "step": 87
    },
    {
      "epoch": 2.6666666666666665,
      "grad_norm": 0.5102855942894612,
      "learning_rate": 1.5642154155228124e-05,
      "loss": 0.4489,
      "step": 88
    },
    {
      "epoch": 2.6969696969696972,
      "grad_norm": 0.626186159413465,
      "learning_rate": 1.5516205531310272e-05,
      "loss": 0.4373,
      "step": 89
    },
    {
      "epoch": 2.7272727272727275,
      "grad_norm": 0.533918204813247,
      "learning_rate": 1.538898635915576e-05,
      "loss": 0.4364,
      "step": 90
    },
    {
      "epoch": 2.757575757575758,
      "grad_norm": 0.5826020156535429,
      "learning_rate": 1.526052594117071e-05,
      "loss": 0.4633,
      "step": 91
    },
    {
      "epoch": 2.787878787878788,
      "grad_norm": 0.5268233366892546,
      "learning_rate": 1.513085386565758e-05,
      "loss": 0.4288,
      "step": 92
    },
    {
      "epoch": 2.8181818181818183,
      "grad_norm": 0.4989207490329867,
      "learning_rate": 1.5000000000000002e-05,
      "loss": 0.4158,
      "step": 93
    },
    {
      "epoch": 2.8484848484848486,
      "grad_norm": 0.5392513240813872,
      "learning_rate": 1.4867994483783485e-05,
      "loss": 0.4493,
      "step": 94
    },
    {
      "epoch": 2.878787878787879,
      "grad_norm": 0.5412547971942594,
      "learning_rate": 1.4734867721853341e-05,
      "loss": 0.4333,
      "step": 95
    },
    {
      "epoch": 2.909090909090909,
      "grad_norm": 0.47787701781989034,
      "learning_rate": 1.4600650377311523e-05,
      "loss": 0.436,
      "step": 96
    },
    {
      "epoch": 2.9393939393939394,
      "grad_norm": 0.4738010394162418,
      "learning_rate": 1.4465373364454001e-05,
      "loss": 0.4434,
      "step": 97
    },
    {
      "epoch": 2.9696969696969697,
      "grad_norm": 0.4759665355689762,
      "learning_rate": 1.4329067841650274e-05,
      "loss": 0.4292,
      "step": 98
    },
    {
      "epoch": 3.0,
      "grad_norm": 0.5347888280085701,
      "learning_rate": 1.4191765204166643e-05,
      "loss": 0.4458,
      "step": 99
    },
    {
      "epoch": 3.0303030303030303,
      "grad_norm": 0.7578730318403921,
      "learning_rate": 1.4053497076934948e-05,
      "loss": 0.3589,
      "step": 100
    },
    {
      "epoch": 3.0606060606060606,
      "grad_norm": 0.5590827351296772,
      "learning_rate": 1.3914295307268396e-05,
      "loss": 0.3522,
      "step": 101
    },
    {
      "epoch": 3.090909090909091,
      "grad_norm": 0.7663017214456678,
      "learning_rate": 1.3774191957526144e-05,
      "loss": 0.3412,
      "step": 102
    },
    {
      "epoch": 3.121212121212121,
      "grad_norm": 0.7255267139613365,
      "learning_rate": 1.3633219297728415e-05,
      "loss": 0.3138,
      "step": 103
    },
    {
      "epoch": 3.1515151515151514,
      "grad_norm": 0.5953528568597565,
      "learning_rate": 1.3491409798123687e-05,
      "loss": 0.3325,
      "step": 104
    },
    {
      "epoch": 3.1818181818181817,
      "grad_norm": 0.5958107979593327,
      "learning_rate": 1.3348796121709862e-05,
      "loss": 0.3488,
      "step": 105
    },
    {
      "epoch": 3.212121212121212,
      "grad_norm": 0.6097699089574262,
      "learning_rate": 1.3205411116710973e-05,
      "loss": 0.3366,
      "step": 106
    },
    {
      "epoch": 3.242424242424242,
      "grad_norm": 0.525552536797333,
      "learning_rate": 1.3061287809011243e-05,
      "loss": 0.3354,
      "step": 107
    },
    {
      "epoch": 3.2727272727272725,
      "grad_norm": 0.5107168278136667,
      "learning_rate": 1.291645939454825e-05,
      "loss": 0.3255,
      "step": 108
    },
    {
      "epoch": 3.303030303030303,
      "grad_norm": 0.49562286761158575,
      "learning_rate": 1.277095923166689e-05,
      "loss": 0.3226,
      "step": 109
    },
    {
      "epoch": 3.3333333333333335,
      "grad_norm": 0.552564963270535,
      "learning_rate": 1.2624820833435939e-05,
      "loss": 0.3262,
      "step": 110
    },
    {
      "epoch": 3.3636363636363638,
      "grad_norm": 0.529860518763179,
      "learning_rate": 1.2478077859929e-05,
      "loss": 0.3223,
      "step": 111
    },
    {
      "epoch": 3.393939393939394,
      "grad_norm": 0.46705898403530477,
      "learning_rate": 1.2330764110471567e-05,
      "loss": 0.3148,
      "step": 112
    },
    {
      "epoch": 3.4242424242424243,
      "grad_norm": 0.5282623236299218,
      "learning_rate": 1.2182913515856016e-05,
      "loss": 0.3136,
      "step": 113
    },
    {
      "epoch": 3.4545454545454546,
      "grad_norm": 0.4736109046736109,
      "learning_rate": 1.2034560130526341e-05,
      "loss": 0.3209,
      "step": 114
    },
    {
      "epoch": 3.484848484848485,
      "grad_norm": 0.49962304598092333,
      "learning_rate": 1.1885738124734359e-05,
      "loss": 0.3027,
      "step": 115
    },
    {
      "epoch": 3.515151515151515,
      "grad_norm": 0.4433582001575731,
      "learning_rate": 1.1736481776669307e-05,
      "loss": 0.3217,
      "step": 116
    },
    {
      "epoch": 3.5454545454545454,
      "grad_norm": 0.5171221122870995,
      "learning_rate": 1.1586825464562515e-05,
      "loss": 0.3321,
      "step": 117
    },
    {
      "epoch": 3.5757575757575757,
      "grad_norm": 0.456601993523051,
      "learning_rate": 1.1436803658769082e-05,
      "loss": 0.3274,
      "step": 118
    },
    {
      "epoch": 3.606060606060606,
      "grad_norm": 0.48920924636407565,
      "learning_rate": 1.1286450913828313e-05,
      "loss": 0.332,
      "step": 119
    },
    {
      "epoch": 3.6363636363636362,
      "grad_norm": 0.48147073171548743,
      "learning_rate": 1.113580186050475e-05,
      "loss": 0.3165,
      "step": 120
    },
    {
      "epoch": 3.6666666666666665,
      "grad_norm": 0.5082342266188418,
      "learning_rate": 1.0984891197811686e-05,
      "loss": 0.3147,
      "step": 121
    },
    {
      "epoch": 3.6969696969696972,
      "grad_norm": 0.49947475460015794,
      "learning_rate": 1.0833753685018935e-05,
      "loss": 0.3195,
      "step": 122
    },
    {
      "epoch": 3.7272727272727275,
      "grad_norm": 0.4902554646083638,
      "learning_rate": 1.0682424133646712e-05,
      "loss": 0.328,
      "step": 123
    },
    {
      "epoch": 3.757575757575758,
      "grad_norm": 0.509057304468142,
      "learning_rate": 1.0530937399447496e-05,
      "loss": 0.3366,
      "step": 124
    },
    {
      "epoch": 3.787878787878788,
      "grad_norm": 0.5628445523146441,
      "learning_rate": 1.0379328374377715e-05,
      "loss": 0.3351,
      "step": 125
    },
    {
      "epoch": 3.8181818181818183,
      "grad_norm": 0.4412802225534068,
      "learning_rate": 1.0227631978561057e-05,
      "loss": 0.3174,
      "step": 126
    },
    {
      "epoch": 3.8484848484848486,
      "grad_norm": 0.45045981739898905,
      "learning_rate": 1.0075883152245334e-05,
      "loss": 0.3253,
      "step": 127
    },
    {
      "epoch": 3.878787878787879,
      "grad_norm": 0.4952983553162171,
      "learning_rate": 9.92411684775467e-06,
      "loss": 0.317,
      "step": 128
    },
    {
      "epoch": 3.909090909090909,
      "grad_norm": 0.4670258222227986,
      "learning_rate": 9.772368021438943e-06,
      "loss": 0.3304,
      "step": 129
    },
    {
      "epoch": 3.9393939393939394,
      "grad_norm": 0.45909455329725174,
      "learning_rate": 9.620671625622287e-06,
      "loss": 0.3136,
      "step": 130
    },
    {
      "epoch": 3.9696969696969697,
      "grad_norm": 0.5183779437816718,
      "learning_rate": 9.469062600552509e-06,
      "loss": 0.3305,
      "step": 131
    },
    {
      "epoch": 4.0,
      "grad_norm": 0.43030496359434006,
      "learning_rate": 9.317575866353293e-06,
      "loss": 0.3233,
      "step": 132
    },
    {
      "epoch": 4.03030303030303,
      "grad_norm": 0.6551297453698638,
      "learning_rate": 9.166246314981066e-06,
      "loss": 0.245,
      "step": 133
    },
    {
      "epoch": 4.0606060606060606,
      "grad_norm": 0.5346447826258135,
      "learning_rate": 9.015108802188314e-06,
      "loss": 0.2484,
      "step": 134
    },
    {
      "epoch": 4.090909090909091,
      "grad_norm": 0.5129787144325263,
      "learning_rate": 8.86419813949525e-06,
      "loss": 0.2442,
      "step": 135
    },
    {
      "epoch": 4.121212121212121,
      "grad_norm": 0.8307262099481713,
      "learning_rate": 8.71354908617169e-06,
      "loss": 0.2453,
      "step": 136
    },
    {
      "epoch": 4.151515151515151,
      "grad_norm": 0.5648300076341694,
      "learning_rate": 8.56319634123092e-06,
      "loss": 0.2147,
      "step": 137
    },
    {
      "epoch": 4.181818181818182,
      "grad_norm": 0.5175008944201702,
      "learning_rate": 8.413174535437486e-06,
      "loss": 0.2384,
      "step": 138
    },
    {
      "epoch": 4.212121212121212,
      "grad_norm": 0.5434304177721216,
      "learning_rate": 8.263518223330698e-06,
      "loss": 0.2385,
      "step": 139
    },
    {
      "epoch": 4.242424242424242,
      "grad_norm": 0.5656126676103714,
      "learning_rate": 8.114261875265643e-06,
      "loss": 0.2382,
      "step": 140
    },
    {
      "epoch": 4.2727272727272725,
      "grad_norm": 0.47834029227182495,
      "learning_rate": 7.965439869473664e-06,
      "loss": 0.248,
      "step": 141
    },
    {
      "epoch": 4.303030303030303,
      "grad_norm": 0.45321812692904295,
      "learning_rate": 7.817086484143987e-06,
      "loss": 0.2108,
      "step": 142
    },
    {
      "epoch": 4.333333333333333,
      "grad_norm": 0.5017557961334287,
      "learning_rate": 7.669235889528436e-06,
      "loss": 0.2351,
      "step": 143
    },
    {
      "epoch": 4.363636363636363,
      "grad_norm": 0.5127175862774535,
      "learning_rate": 7.521922140071003e-06,
      "loss": 0.2381,
      "step": 144
    },
    {
      "epoch": 4.393939393939394,
      "grad_norm": 0.5064799180026548,
      "learning_rate": 7.375179166564062e-06,
      "loss": 0.2502,
      "step": 145
    },
    {
      "epoch": 4.424242424242424,
      "grad_norm": 0.4606421137621233,
      "learning_rate": 7.2290407683331154e-06,
      "loss": 0.2264,
      "step": 146
    },
    {
      "epoch": 4.454545454545454,
      "grad_norm": 0.42303923096285145,
      "learning_rate": 7.0835406054517505e-06,
      "loss": 0.2203,
      "step": 147
    },
    {
      "epoch": 4.484848484848484,
      "grad_norm": 0.4624770716910183,
      "learning_rate": 6.93871219098876e-06,
      "loss": 0.2414,
      "step": 148
    },
    {
      "epoch": 4.515151515151516,
      "grad_norm": 0.48359586920480535,
      "learning_rate": 6.79458888328903e-06,
      "loss": 0.2336,
      "step": 149
    },
    {
      "epoch": 4.545454545454545,
      "grad_norm": 0.48813375601319753,
      "learning_rate": 6.651203878290139e-06,
      "loss": 0.2345,
      "step": 150
    },
    {
      "epoch": 4.575757575757576,
      "grad_norm": 0.41950624414364907,
      "learning_rate": 6.508590201876317e-06,
      "loss": 0.2406,
      "step": 151
    },
    {
      "epoch": 4.606060606060606,
      "grad_norm": 0.4293523600653929,
      "learning_rate": 6.366780702271589e-06,
      "loss": 0.2327,
      "step": 152
    },
    {
      "epoch": 4.636363636363637,
      "grad_norm": 0.4705142148220202,
      "learning_rate": 6.225808042473857e-06,
      "loss": 0.2444,
      "step": 153
    },
    {
      "epoch": 4.666666666666667,
      "grad_norm": 0.4409469777504741,
      "learning_rate": 6.085704692731609e-06,
      "loss": 0.2397,
      "step": 154
    },
    {
      "epoch": 4.696969696969697,
      "grad_norm": 0.4612034712726125,
      "learning_rate": 5.946502923065054e-06,
      "loss": 0.2437,
      "step": 155
    },
    {
      "epoch": 4.7272727272727275,
      "grad_norm": 0.4276319004208902,
      "learning_rate": 5.8082347958333625e-06,
      "loss": 0.2404,
      "step": 156
    },
    {
      "epoch": 4.757575757575758,
      "grad_norm": 0.4035332466902113,
      "learning_rate": 5.670932158349732e-06,
      "loss": 0.2312,
      "step": 157
    },
    {
      "epoch": 4.787878787878788,
      "grad_norm": 0.4551405541985991,
      "learning_rate": 5.534626635546e-06,
      "loss": 0.231,
      "step": 158
    },
    {
      "epoch": 4.818181818181818,
      "grad_norm": 0.3978436746485399,
      "learning_rate": 5.399349622688479e-06,
      "loss": 0.2224,
      "step": 159
    },
    {
      "epoch": 4.848484848484849,
      "grad_norm": 0.4428461598197092,
      "learning_rate": 5.2651322781466606e-06,
      "loss": 0.2407,
      "step": 160
    },
    {
      "epoch": 4.878787878787879,
      "grad_norm": 0.42484770040407305,
      "learning_rate": 5.132005516216512e-06,
      "loss": 0.2271,
      "step": 161
    },
    {
      "epoch": 4.909090909090909,
      "grad_norm": 0.4221865203466915,
      "learning_rate": 5.000000000000003e-06,
      "loss": 0.23,
      "step": 162
    },
    {
      "epoch": 4.9393939393939394,
      "grad_norm": 0.405149774500086,
      "learning_rate": 4.869146134342426e-06,
      "loss": 0.2186,
      "step": 163
    },
    {
      "epoch": 4.96969696969697,
      "grad_norm": 0.40843472424969607,
      "learning_rate": 4.739474058829288e-06,
      "loss": 0.2252,
      "step": 164
    },
    {
      "epoch": 5.0,
      "grad_norm": 0.4142427866189157,
      "learning_rate": 4.611013640844245e-06,
      "loss": 0.228,
      "step": 165
    },
    {
      "epoch": 5.03030303030303,
      "grad_norm": 0.5742115594923108,
      "learning_rate": 4.483794468689728e-06,
      "loss": 0.1915,
      "step": 166
    },
    {
      "epoch": 5.0606060606060606,
      "grad_norm": 0.5166207814269285,
      "learning_rate": 4.357845844771881e-06,
      "loss": 0.1897,
      "step": 167
    },
    {
      "epoch": 5.090909090909091,
      "grad_norm": 0.4165284747216062,
      "learning_rate": 4.2331967788513295e-06,
      "loss": 0.1721,
      "step": 168
    },
    {
      "epoch": 5.121212121212121,
      "grad_norm": 0.44388278472804926,
      "learning_rate": 4.109875981361363e-06,
      "loss": 0.1687,
      "step": 169
    },
    {
      "epoch": 5.151515151515151,
      "grad_norm": 0.5783207903922752,
      "learning_rate": 3.987911856795047e-06,
      "loss": 0.1955,
      "step": 170
    },
    {
      "epoch": 5.181818181818182,
      "grad_norm": 0.60783392075912,
      "learning_rate": 3.867332497162836e-06,
      "loss": 0.1877,
      "step": 171
    },
    {
      "epoch": 5.212121212121212,
      "grad_norm": 0.4271940085585141,
      "learning_rate": 3.748165675522113e-06,
      "loss": 0.1708,
      "step": 172
    },
    {
      "epoch": 5.242424242424242,
      "grad_norm": 0.40343616710144975,
      "learning_rate": 3.630438839580217e-06,
      "loss": 0.1678,
      "step": 173
    },
    {
      "epoch": 5.2727272727272725,
      "grad_norm": 0.4297988621112416,
      "learning_rate": 3.5141791053724405e-06,
      "loss": 0.1632,
      "step": 174
    },
    {
      "epoch": 5.303030303030303,
      "grad_norm": 0.44367455864660416,
      "learning_rate": 3.399413251016359e-06,
      "loss": 0.1876,
      "step": 175
    },
    {
      "epoch": 5.333333333333333,
      "grad_norm": 0.46142274579507064,
      "learning_rate": 3.2861677105440335e-06,
      "loss": 0.174,
      "step": 176
    },
    {
      "epoch": 5.363636363636363,
      "grad_norm": 0.4162071086041392,
      "learning_rate": 3.174468567813461e-06,
      "loss": 0.17,
      "step": 177
    },
    {
      "epoch": 5.393939393939394,
      "grad_norm": 0.3894461320822898,
      "learning_rate": 3.0643415505006733e-06,
      "loss": 0.1676,
      "step": 178
    },
    {
      "epoch": 5.424242424242424,
      "grad_norm": 0.39903454988143183,
      "learning_rate": 2.9558120241738786e-06,
      "loss": 0.1678,
      "step": 179
    },
    {
      "epoch": 5.454545454545454,
      "grad_norm": 0.43314054163119214,
      "learning_rate": 2.8489049864510053e-06,
      "loss": 0.1676,
      "step": 180
    },
    {
      "epoch": 5.484848484848484,
      "grad_norm": 0.42598643749346615,
      "learning_rate": 2.7436450612420098e-06,
      "loss": 0.1732,
      "step": 181
    },
    {
      "epoch": 5.515151515151516,
      "grad_norm": 0.41292734228892786,
      "learning_rate": 2.640056493077231e-06,
      "loss": 0.1679,
      "step": 182
    },
    {
      "epoch": 5.545454545454545,
      "grad_norm": 0.4220043665147572,
      "learning_rate": 2.5381631415231455e-06,
      "loss": 0.1801,
      "step": 183
    },
    {
      "epoch": 5.575757575757576,
      "grad_norm": 0.3841188506361115,
      "learning_rate": 2.4379884756868167e-06,
      "loss": 0.1559,
      "step": 184
    },
    {
      "epoch": 5.606060606060606,
      "grad_norm": 0.3929211402827616,
      "learning_rate": 2.339555568810221e-06,
      "loss": 0.1643,
      "step": 185
    },
    {
      "epoch": 5.636363636363637,
      "grad_norm": 0.38247231160015316,
      "learning_rate": 2.2428870929558012e-06,
      "loss": 0.1684,
      "step": 186
    },
    {
      "epoch": 5.666666666666667,
      "grad_norm": 0.3783424280815931,
      "learning_rate": 2.1480053137844115e-06,
      "loss": 0.1767,
      "step": 187
    },
    {
      "epoch": 5.696969696969697,
      "grad_norm": 0.3963870273429583,
      "learning_rate": 2.054932085426856e-06,
      "loss": 0.1663,
      "step": 188
    },
    {
      "epoch": 5.7272727272727275,
      "grad_norm": 0.3752808343226345,
      "learning_rate": 1.963688845450218e-06,
      "loss": 0.167,
      "step": 189
    },
    {
      "epoch": 5.757575757575758,
      "grad_norm": 0.3633282258887765,
      "learning_rate": 1.8742966099201699e-06,
      "loss": 0.1717,
      "step": 190
    },
    {
      "epoch": 5.787878787878788,
      "grad_norm": 0.36221110762082537,
      "learning_rate": 1.7867759685603115e-06,
      "loss": 0.1642,
      "step": 191
    },
    {
      "epoch": 5.818181818181818,
      "grad_norm": 0.3934225315456543,
      "learning_rate": 1.7011470800097496e-06,
      "loss": 0.1627,
      "step": 192
    },
    {
      "epoch": 5.848484848484849,
      "grad_norm": 0.4353556265864982,
      "learning_rate": 1.6174296671799571e-06,
      "loss": 0.1599,
      "step": 193
    },
    {
      "epoch": 5.878787878787879,
      "grad_norm": 0.40032835131099176,
      "learning_rate": 1.5356430127119915e-06,
      "loss": 0.1769,
      "step": 194
    },
    {
      "epoch": 5.909090909090909,
      "grad_norm": 0.38135206747532385,
      "learning_rate": 1.4558059545351144e-06,
      "loss": 0.1739,
      "step": 195
    },
    {
      "epoch": 5.9393939393939394,
      "grad_norm": 0.36420081321251285,
      "learning_rate": 1.3779368815278648e-06,
      "loss": 0.186,
      "step": 196
    },
    {
      "epoch": 5.96969696969697,
      "grad_norm": 0.3808807238458524,
      "learning_rate": 1.302053729282533e-06,
      "loss": 0.1635,
      "step": 197
    },
    {
      "epoch": 6.0,
      "grad_norm": 0.3664803305584857,
      "learning_rate": 1.2281739759740575e-06,
      "loss": 0.1567,
      "step": 198
    },
    {
      "epoch": 6.03030303030303,
      "grad_norm": 0.49626825440614986,
      "learning_rate": 1.156314638334277e-06,
      "loss": 0.1431,
      "step": 199
    },
    {
      "epoch": 6.0606060606060606,
      "grad_norm": 0.4239002010356736,
      "learning_rate": 1.086492267732462e-06,
      "loss": 0.1336,
      "step": 200
    },
    {
      "epoch": 6.090909090909091,
      "grad_norm": 0.4416525859119522,
      "learning_rate": 1.01872294636304e-06,
      "loss": 0.1379,
      "step": 201
    },
    {
      "epoch": 6.121212121212121,
      "grad_norm": 0.3711931095261706,
      "learning_rate": 9.530222835413739e-07,
      "loss": 0.1457,
      "step": 202
    },
    {
      "epoch": 6.151515151515151,
      "grad_norm": 0.3447887072353519,
      "learning_rate": 8.894054121084839e-07,
      "loss": 0.1483,
      "step": 203
    },
    {
      "epoch": 6.181818181818182,
      "grad_norm": 0.3289966890609169,
      "learning_rate": 8.278869849454718e-07,
      "loss": 0.1364,
      "step": 204
    },
    {
      "epoch": 6.212121212121212,
      "grad_norm": 0.36254988244086095,
      "learning_rate": 7.684811715985429e-07,
      "loss": 0.1496,
      "step": 205
    },
    {
      "epoch": 6.242424242424242,
      "grad_norm": 0.3632904904272957,
      "learning_rate": 7.1120165501533e-07,
      "loss": 0.1436,
      "step": 206
    },
    {
      "epoch": 6.2727272727272725,
      "grad_norm": 0.37269285787256123,
      "learning_rate": 6.560616283932897e-07,
      "loss": 0.1387,
      "step": 207
    },
    {
      "epoch": 6.303030303030303,
      "grad_norm": 0.46235743024257353,
      "learning_rate": 6.030737921409169e-07,
      "loss": 0.1595,
      "step": 208
    },
    {
      "epoch": 6.333333333333333,
      "grad_norm": 0.3777015762464913,
      "learning_rate": 5.522503509524591e-07,
      "loss": 0.1383,
      "step": 209
    },
    {
      "epoch": 6.363636363636363,
      "grad_norm": 0.4213699560133867,
      "learning_rate": 5.036030109968082e-07,
      "loss": 0.1438,
      "step": 210
    },
    {
      "epoch": 6.393939393939394,
      "grad_norm": 0.36921531650339,
      "learning_rate": 4.5714297722121105e-07,
      "loss": 0.149,
      "step": 211
    },
    {
      "epoch": 6.424242424242424,
      "grad_norm": 0.4071554197055839,
      "learning_rate": 4.128809507704445e-07,
      "loss": 0.154,
      "step": 212
    },
    {
      "epoch": 6.454545454545454,
      "grad_norm": 0.3652257992581415,
      "learning_rate": 3.708271265220087e-07,
      "loss": 0.1491,
      "step": 213
    },
    {
      "epoch": 6.484848484848484,
      "grad_norm": 0.33809394138141063,
      "learning_rate": 3.309911907379393e-07,
      "loss": 0.1402,
      "step": 214
    },
    {
      "epoch": 6.515151515151516,
      "grad_norm": 0.3470365859188612,
      "learning_rate": 2.9338231883378365e-07,
      "loss": 0.1506,
      "step": 215
    },
    {
      "epoch": 6.545454545454545,
      "grad_norm": 0.34658124746044666,
      "learning_rate": 2.5800917326521013e-07,
      "loss": 0.1476,
      "step": 216
    },
    {
      "epoch": 6.575757575757576,
      "grad_norm": 0.3316918566486324,
      "learning_rate": 2.248799015327907e-07,
      "loss": 0.1376,
      "step": 217
    },
    {
      "epoch": 6.606060606060606,
      "grad_norm": 0.35672439409490786,
      "learning_rate": 1.9400213430538773e-07,
      "loss": 0.1429,
      "step": 218
    },
    {
      "epoch": 6.636363636363637,
      "grad_norm": 0.3631603267271598,
      "learning_rate": 1.6538298366257975e-07,
      "loss": 0.1533,
      "step": 219
    },
    {
      "epoch": 6.666666666666667,
      "grad_norm": 0.3567061213195218,
      "learning_rate": 1.3902904145653094e-07,
      "loss": 0.1351,
      "step": 220
    },
    {
      "epoch": 6.696969696969697,
      "grad_norm": 0.34827383931170836,
      "learning_rate": 1.1494637779369766e-07,
      "loss": 0.1368,
      "step": 221
    },
    {
      "epoch": 6.7272727272727275,
      "grad_norm": 0.34287094175806077,
      "learning_rate": 9.314053963669245e-08,
      "loss": 0.1576,
      "step": 222
    },
    {
      "epoch": 6.757575757575758,
      "grad_norm": 0.3482358604532515,
      "learning_rate": 7.361654952665608e-08,
      "loss": 0.1517,
      "step": 223
    },
    {
      "epoch": 6.787878787878788,
      "grad_norm": 0.3356226829450221,
      "learning_rate": 5.637890442641403e-08,
      "loss": 0.1391,
      "step": 224
    },
    {
      "epoch": 6.818181818181818,
      "grad_norm": 0.3456342111460825,
      "learning_rate": 4.143157468468717e-08,
      "loss": 0.1446,
      "step": 225
    },
    {
      "epoch": 6.848484848484849,
      "grad_norm": 0.3836659500224973,
      "learning_rate": 2.8778003121607834e-08,
      "loss": 0.1412,
      "step": 226
    },
    {
      "epoch": 6.878787878787879,
      "grad_norm": 0.3561494017126276,
      "learning_rate": 1.8421104235727406e-08,
      "loss": 0.1443,
      "step": 227
    },
    {
      "epoch": 6.909090909090909,
      "grad_norm": 0.3480245489945403,
      "learning_rate": 1.0363263532724433e-08,
      "loss": 0.1637,
      "step": 228
    },
    {
      "epoch": 6.9393939393939394,
      "grad_norm": 0.3311151848847929,
      "learning_rate": 4.606336975948589e-09,
      "loss": 0.1516,
      "step": 229
    },
    {
      "epoch": 6.96969696969697,
      "grad_norm": 0.33221424604486316,
      "learning_rate": 1.1516505589381777e-09,
      "loss": 0.1555,
      "step": 230
    },
    {
      "epoch": 7.0,
      "grad_norm": 0.33957128528435504,
      "learning_rate": 0.0,
      "loss": 0.1402,
      "step": 231
    },
    {
      "epoch": 7.0,
      "step": 231,
      "total_flos": 3.450799819629527e+17,
      "train_loss": 0.3807417738747287,
      "train_runtime": 6577.9886,
      "train_samples_per_second": 3.363,
      "train_steps_per_second": 0.035
    }
  ],
  "logging_steps": 1,
  "max_steps": 231,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 7,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 3.450799819629527e+17,
  "train_batch_size": 1,
  "trial_name": null,
  "trial_params": null
}