| { | |
| "best_metric": null, | |
| "best_model_checkpoint": null, | |
| "epoch": 2.9943851768669285, | |
| "eval_steps": 500, | |
| "global_step": 333, | |
| "is_hyper_param_search": false, | |
| "is_local_process_zero": true, | |
| "is_world_process_zero": true, | |
| "log_history": [ | |
| { | |
| "epoch": 0.008983717012914094, | |
| "grad_norm": 5.839048375887439, | |
| "learning_rate": 2.3529411764705885e-06, | |
| "loss": 0.851, | |
| "step": 1 | |
| }, | |
| { | |
| "epoch": 0.017967434025828188, | |
| "grad_norm": 5.796622055110998, | |
| "learning_rate": 4.705882352941177e-06, | |
| "loss": 0.8589, | |
| "step": 2 | |
| }, | |
| { | |
| "epoch": 0.02695115103874228, | |
| "grad_norm": 5.449611767460796, | |
| "learning_rate": 7.058823529411766e-06, | |
| "loss": 0.851, | |
| "step": 3 | |
| }, | |
| { | |
| "epoch": 0.035934868051656375, | |
| "grad_norm": 3.918769913446889, | |
| "learning_rate": 9.411764705882354e-06, | |
| "loss": 0.8013, | |
| "step": 4 | |
| }, | |
| { | |
| "epoch": 0.044918585064570464, | |
| "grad_norm": 2.1146491352495644, | |
| "learning_rate": 1.1764705882352942e-05, | |
| "loss": 0.7675, | |
| "step": 5 | |
| }, | |
| { | |
| "epoch": 0.05390230207748456, | |
| "grad_norm": 5.3866836077244535, | |
| "learning_rate": 1.4117647058823532e-05, | |
| "loss": 0.8021, | |
| "step": 6 | |
| }, | |
| { | |
| "epoch": 0.06288601909039865, | |
| "grad_norm": 7.1506220356372205, | |
| "learning_rate": 1.647058823529412e-05, | |
| "loss": 0.7729, | |
| "step": 7 | |
| }, | |
| { | |
| "epoch": 0.07186973610331275, | |
| "grad_norm": 7.3684198638012575, | |
| "learning_rate": 1.8823529411764708e-05, | |
| "loss": 0.7895, | |
| "step": 8 | |
| }, | |
| { | |
| "epoch": 0.08085345311622684, | |
| "grad_norm": 4.2008371048182624, | |
| "learning_rate": 2.1176470588235296e-05, | |
| "loss": 0.7502, | |
| "step": 9 | |
| }, | |
| { | |
| "epoch": 0.08983717012914093, | |
| "grad_norm": 3.018632704832958, | |
| "learning_rate": 2.3529411764705884e-05, | |
| "loss": 0.7085, | |
| "step": 10 | |
| }, | |
| { | |
| "epoch": 0.09882088714205503, | |
| "grad_norm": 2.4103057201747564, | |
| "learning_rate": 2.5882352941176475e-05, | |
| "loss": 0.68, | |
| "step": 11 | |
| }, | |
| { | |
| "epoch": 0.10780460415496912, | |
| "grad_norm": 1.6029821095889687, | |
| "learning_rate": 2.8235294117647063e-05, | |
| "loss": 0.6579, | |
| "step": 12 | |
| }, | |
| { | |
| "epoch": 0.11678832116788321, | |
| "grad_norm": 1.5359955676401171, | |
| "learning_rate": 3.0588235294117644e-05, | |
| "loss": 0.6348, | |
| "step": 13 | |
| }, | |
| { | |
| "epoch": 0.1257720381807973, | |
| "grad_norm": 1.2677879393997815, | |
| "learning_rate": 3.294117647058824e-05, | |
| "loss": 0.6259, | |
| "step": 14 | |
| }, | |
| { | |
| "epoch": 0.13475575519371139, | |
| "grad_norm": 1.1307239717924904, | |
| "learning_rate": 3.529411764705883e-05, | |
| "loss": 0.6217, | |
| "step": 15 | |
| }, | |
| { | |
| "epoch": 0.1437394722066255, | |
| "grad_norm": 1.1919595690235936, | |
| "learning_rate": 3.7647058823529415e-05, | |
| "loss": 0.6117, | |
| "step": 16 | |
| }, | |
| { | |
| "epoch": 0.1527231892195396, | |
| "grad_norm": 1.5137586529647031, | |
| "learning_rate": 4e-05, | |
| "loss": 0.5951, | |
| "step": 17 | |
| }, | |
| { | |
| "epoch": 0.16170690623245368, | |
| "grad_norm": 1.0839410029716163, | |
| "learning_rate": 4.235294117647059e-05, | |
| "loss": 0.5873, | |
| "step": 18 | |
| }, | |
| { | |
| "epoch": 0.17069062324536777, | |
| "grad_norm": 1.411013539783664, | |
| "learning_rate": 4.470588235294118e-05, | |
| "loss": 0.5855, | |
| "step": 19 | |
| }, | |
| { | |
| "epoch": 0.17967434025828186, | |
| "grad_norm": 0.9604494585660002, | |
| "learning_rate": 4.705882352941177e-05, | |
| "loss": 0.5855, | |
| "step": 20 | |
| }, | |
| { | |
| "epoch": 0.18865805727119594, | |
| "grad_norm": 1.7966858442462152, | |
| "learning_rate": 4.941176470588236e-05, | |
| "loss": 0.5834, | |
| "step": 21 | |
| }, | |
| { | |
| "epoch": 0.19764177428411006, | |
| "grad_norm": 1.2639231326008076, | |
| "learning_rate": 5.176470588235295e-05, | |
| "loss": 0.573, | |
| "step": 22 | |
| }, | |
| { | |
| "epoch": 0.20662549129702415, | |
| "grad_norm": 1.6189242064013842, | |
| "learning_rate": 5.411764705882354e-05, | |
| "loss": 0.5687, | |
| "step": 23 | |
| }, | |
| { | |
| "epoch": 0.21560920830993824, | |
| "grad_norm": 1.2918939880837212, | |
| "learning_rate": 5.6470588235294126e-05, | |
| "loss": 0.5629, | |
| "step": 24 | |
| }, | |
| { | |
| "epoch": 0.22459292532285233, | |
| "grad_norm": 1.8153484123304104, | |
| "learning_rate": 5.8823529411764714e-05, | |
| "loss": 0.5644, | |
| "step": 25 | |
| }, | |
| { | |
| "epoch": 0.23357664233576642, | |
| "grad_norm": 1.383080233694585, | |
| "learning_rate": 6.117647058823529e-05, | |
| "loss": 0.561, | |
| "step": 26 | |
| }, | |
| { | |
| "epoch": 0.2425603593486805, | |
| "grad_norm": 1.908843640323105, | |
| "learning_rate": 6.352941176470589e-05, | |
| "loss": 0.5581, | |
| "step": 27 | |
| }, | |
| { | |
| "epoch": 0.2515440763615946, | |
| "grad_norm": 1.5638595834141664, | |
| "learning_rate": 6.588235294117648e-05, | |
| "loss": 0.5497, | |
| "step": 28 | |
| }, | |
| { | |
| "epoch": 0.2605277933745087, | |
| "grad_norm": 1.0799402527915973, | |
| "learning_rate": 6.823529411764707e-05, | |
| "loss": 0.5405, | |
| "step": 29 | |
| }, | |
| { | |
| "epoch": 0.26951151038742277, | |
| "grad_norm": 1.2842268786606617, | |
| "learning_rate": 7.058823529411765e-05, | |
| "loss": 0.5489, | |
| "step": 30 | |
| }, | |
| { | |
| "epoch": 0.2784952274003369, | |
| "grad_norm": 2.1296312845067953, | |
| "learning_rate": 7.294117647058824e-05, | |
| "loss": 0.5429, | |
| "step": 31 | |
| }, | |
| { | |
| "epoch": 0.287478944413251, | |
| "grad_norm": 1.2081689918064082, | |
| "learning_rate": 7.529411764705883e-05, | |
| "loss": 0.5439, | |
| "step": 32 | |
| }, | |
| { | |
| "epoch": 0.29646266142616506, | |
| "grad_norm": 2.681914408101531, | |
| "learning_rate": 7.764705882352942e-05, | |
| "loss": 0.5409, | |
| "step": 33 | |
| }, | |
| { | |
| "epoch": 0.3054463784390792, | |
| "grad_norm": 2.088525203831682, | |
| "learning_rate": 8e-05, | |
| "loss": 0.5448, | |
| "step": 34 | |
| }, | |
| { | |
| "epoch": 0.31443009545199324, | |
| "grad_norm": 2.429087878227431, | |
| "learning_rate": 7.999779207981935e-05, | |
| "loss": 0.5376, | |
| "step": 35 | |
| }, | |
| { | |
| "epoch": 0.32341381246490736, | |
| "grad_norm": 2.2118789436187467, | |
| "learning_rate": 7.999116856302298e-05, | |
| "loss": 0.5429, | |
| "step": 36 | |
| }, | |
| { | |
| "epoch": 0.3323975294778215, | |
| "grad_norm": 1.998359697047038, | |
| "learning_rate": 7.998013018082072e-05, | |
| "loss": 0.5372, | |
| "step": 37 | |
| }, | |
| { | |
| "epoch": 0.34138124649073553, | |
| "grad_norm": 1.4739747352645118, | |
| "learning_rate": 7.996467815180588e-05, | |
| "loss": 0.5306, | |
| "step": 38 | |
| }, | |
| { | |
| "epoch": 0.35036496350364965, | |
| "grad_norm": 1.8627456810217522, | |
| "learning_rate": 7.994481418182082e-05, | |
| "loss": 0.5309, | |
| "step": 39 | |
| }, | |
| { | |
| "epoch": 0.3593486805165637, | |
| "grad_norm": 1.2832017910437872, | |
| "learning_rate": 7.992054046376854e-05, | |
| "loss": 0.5301, | |
| "step": 40 | |
| }, | |
| { | |
| "epoch": 0.36833239752947783, | |
| "grad_norm": 1.9368339421908154, | |
| "learning_rate": 7.989185967737066e-05, | |
| "loss": 0.534, | |
| "step": 41 | |
| }, | |
| { | |
| "epoch": 0.3773161145423919, | |
| "grad_norm": 1.4631687551659387, | |
| "learning_rate": 7.985877498887149e-05, | |
| "loss": 0.526, | |
| "step": 42 | |
| }, | |
| { | |
| "epoch": 0.386299831555306, | |
| "grad_norm": 1.4992485001174218, | |
| "learning_rate": 7.982129005068865e-05, | |
| "loss": 0.5169, | |
| "step": 43 | |
| }, | |
| { | |
| "epoch": 0.3952835485682201, | |
| "grad_norm": 1.2556436640866477, | |
| "learning_rate": 7.977940900100967e-05, | |
| "loss": 0.5086, | |
| "step": 44 | |
| }, | |
| { | |
| "epoch": 0.4042672655811342, | |
| "grad_norm": 1.4032135906881233, | |
| "learning_rate": 7.973313646333532e-05, | |
| "loss": 0.5214, | |
| "step": 45 | |
| }, | |
| { | |
| "epoch": 0.4132509825940483, | |
| "grad_norm": 1.3909591395416454, | |
| "learning_rate": 7.968247754596908e-05, | |
| "loss": 0.5143, | |
| "step": 46 | |
| }, | |
| { | |
| "epoch": 0.42223469960696236, | |
| "grad_norm": 1.9540476320319087, | |
| "learning_rate": 7.962743784145323e-05, | |
| "loss": 0.5207, | |
| "step": 47 | |
| }, | |
| { | |
| "epoch": 0.4312184166198765, | |
| "grad_norm": 1.0309999384978459, | |
| "learning_rate": 7.956802342595152e-05, | |
| "loss": 0.5069, | |
| "step": 48 | |
| }, | |
| { | |
| "epoch": 0.4402021336327906, | |
| "grad_norm": 1.3811159742393722, | |
| "learning_rate": 7.950424085857827e-05, | |
| "loss": 0.5145, | |
| "step": 49 | |
| }, | |
| { | |
| "epoch": 0.44918585064570465, | |
| "grad_norm": 1.4750899151595693, | |
| "learning_rate": 7.943609718067437e-05, | |
| "loss": 0.5034, | |
| "step": 50 | |
| }, | |
| { | |
| "epoch": 0.45816956765861877, | |
| "grad_norm": 1.586345005999154, | |
| "learning_rate": 7.936359991502993e-05, | |
| "loss": 0.5139, | |
| "step": 51 | |
| }, | |
| { | |
| "epoch": 0.46715328467153283, | |
| "grad_norm": 0.8294429144133623, | |
| "learning_rate": 7.92867570650537e-05, | |
| "loss": 0.5024, | |
| "step": 52 | |
| }, | |
| { | |
| "epoch": 0.47613700168444695, | |
| "grad_norm": 1.4768401149936898, | |
| "learning_rate": 7.920557711388967e-05, | |
| "loss": 0.5024, | |
| "step": 53 | |
| }, | |
| { | |
| "epoch": 0.485120718697361, | |
| "grad_norm": 1.4206688134227474, | |
| "learning_rate": 7.912006902348045e-05, | |
| "loss": 0.5087, | |
| "step": 54 | |
| }, | |
| { | |
| "epoch": 0.4941044357102751, | |
| "grad_norm": 0.9288407004391388, | |
| "learning_rate": 7.903024223357797e-05, | |
| "loss": 0.5059, | |
| "step": 55 | |
| }, | |
| { | |
| "epoch": 0.5030881527231892, | |
| "grad_norm": 1.0360922211438188, | |
| "learning_rate": 7.893610666070134e-05, | |
| "loss": 0.5025, | |
| "step": 56 | |
| }, | |
| { | |
| "epoch": 0.5120718697361033, | |
| "grad_norm": 1.5478822264915786, | |
| "learning_rate": 7.883767269704209e-05, | |
| "loss": 0.5166, | |
| "step": 57 | |
| }, | |
| { | |
| "epoch": 0.5210555867490174, | |
| "grad_norm": 1.2814412272627784, | |
| "learning_rate": 7.873495120931697e-05, | |
| "loss": 0.5053, | |
| "step": 58 | |
| }, | |
| { | |
| "epoch": 0.5300393037619315, | |
| "grad_norm": 1.2394259373221026, | |
| "learning_rate": 7.86279535375683e-05, | |
| "loss": 0.506, | |
| "step": 59 | |
| }, | |
| { | |
| "epoch": 0.5390230207748455, | |
| "grad_norm": 0.7402164979257034, | |
| "learning_rate": 7.851669149391198e-05, | |
| "loss": 0.4972, | |
| "step": 60 | |
| }, | |
| { | |
| "epoch": 0.5480067377877597, | |
| "grad_norm": 1.0415746091150728, | |
| "learning_rate": 7.84011773612336e-05, | |
| "loss": 0.503, | |
| "step": 61 | |
| }, | |
| { | |
| "epoch": 0.5569904548006738, | |
| "grad_norm": 1.287772964853304, | |
| "learning_rate": 7.828142389183239e-05, | |
| "loss": 0.5002, | |
| "step": 62 | |
| }, | |
| { | |
| "epoch": 0.5659741718135879, | |
| "grad_norm": 1.5812766703675953, | |
| "learning_rate": 7.815744430601344e-05, | |
| "loss": 0.5056, | |
| "step": 63 | |
| }, | |
| { | |
| "epoch": 0.574957888826502, | |
| "grad_norm": 0.8792970340323266, | |
| "learning_rate": 7.802925229062823e-05, | |
| "loss": 0.4918, | |
| "step": 64 | |
| }, | |
| { | |
| "epoch": 0.583941605839416, | |
| "grad_norm": 1.1398022409490918, | |
| "learning_rate": 7.789686199756365e-05, | |
| "loss": 0.4969, | |
| "step": 65 | |
| }, | |
| { | |
| "epoch": 0.5929253228523301, | |
| "grad_norm": 1.7704394806497858, | |
| "learning_rate": 7.776028804217968e-05, | |
| "loss": 0.5026, | |
| "step": 66 | |
| }, | |
| { | |
| "epoch": 0.6019090398652442, | |
| "grad_norm": 0.9434307642741575, | |
| "learning_rate": 7.761954550169593e-05, | |
| "loss": 0.4932, | |
| "step": 67 | |
| }, | |
| { | |
| "epoch": 0.6108927568781584, | |
| "grad_norm": 2.082446516632287, | |
| "learning_rate": 7.74746499135272e-05, | |
| "loss": 0.5172, | |
| "step": 68 | |
| }, | |
| { | |
| "epoch": 0.6198764738910725, | |
| "grad_norm": 1.8262658550358597, | |
| "learning_rate": 7.732561727356811e-05, | |
| "loss": 0.5039, | |
| "step": 69 | |
| }, | |
| { | |
| "epoch": 0.6288601909039865, | |
| "grad_norm": 1.2365083047972374, | |
| "learning_rate": 7.717246403442735e-05, | |
| "loss": 0.4973, | |
| "step": 70 | |
| }, | |
| { | |
| "epoch": 0.6378439079169006, | |
| "grad_norm": 1.4539572435843178, | |
| "learning_rate": 7.701520710361129e-05, | |
| "loss": 0.4988, | |
| "step": 71 | |
| }, | |
| { | |
| "epoch": 0.6468276249298147, | |
| "grad_norm": 1.0089172545802498, | |
| "learning_rate": 7.685386384165748e-05, | |
| "loss": 0.4874, | |
| "step": 72 | |
| }, | |
| { | |
| "epoch": 0.6558113419427288, | |
| "grad_norm": 1.3604668572334533, | |
| "learning_rate": 7.668845206021812e-05, | |
| "loss": 0.5035, | |
| "step": 73 | |
| }, | |
| { | |
| "epoch": 0.664795058955643, | |
| "grad_norm": 1.0804166408590006, | |
| "learning_rate": 7.651899002009375e-05, | |
| "loss": 0.4926, | |
| "step": 74 | |
| }, | |
| { | |
| "epoch": 0.673778775968557, | |
| "grad_norm": 1.4514655022285154, | |
| "learning_rate": 7.634549642921725e-05, | |
| "loss": 0.4939, | |
| "step": 75 | |
| }, | |
| { | |
| "epoch": 0.6827624929814711, | |
| "grad_norm": 0.991441799340063, | |
| "learning_rate": 7.616799044058867e-05, | |
| "loss": 0.5067, | |
| "step": 76 | |
| }, | |
| { | |
| "epoch": 0.6917462099943852, | |
| "grad_norm": 1.117806439503576, | |
| "learning_rate": 7.598649165016073e-05, | |
| "loss": 0.4915, | |
| "step": 77 | |
| }, | |
| { | |
| "epoch": 0.7007299270072993, | |
| "grad_norm": 0.8700173580337157, | |
| "learning_rate": 7.58010200946755e-05, | |
| "loss": 0.4934, | |
| "step": 78 | |
| }, | |
| { | |
| "epoch": 0.7097136440202133, | |
| "grad_norm": 0.9327016278300901, | |
| "learning_rate": 7.561159624945257e-05, | |
| "loss": 0.4823, | |
| "step": 79 | |
| }, | |
| { | |
| "epoch": 0.7186973610331274, | |
| "grad_norm": 1.357805985616199, | |
| "learning_rate": 7.541824102612839e-05, | |
| "loss": 0.5005, | |
| "step": 80 | |
| }, | |
| { | |
| "epoch": 0.7276810780460415, | |
| "grad_norm": 0.8155442555029125, | |
| "learning_rate": 7.5220975770348e-05, | |
| "loss": 0.4846, | |
| "step": 81 | |
| }, | |
| { | |
| "epoch": 0.7366647950589557, | |
| "grad_norm": 0.8879306236067503, | |
| "learning_rate": 7.501982225940833e-05, | |
| "loss": 0.4716, | |
| "step": 82 | |
| }, | |
| { | |
| "epoch": 0.7456485120718698, | |
| "grad_norm": 1.1674726644441007, | |
| "learning_rate": 7.48148026998542e-05, | |
| "loss": 0.4912, | |
| "step": 83 | |
| }, | |
| { | |
| "epoch": 0.7546322290847838, | |
| "grad_norm": 0.9150265900061878, | |
| "learning_rate": 7.460593972502674e-05, | |
| "loss": 0.4857, | |
| "step": 84 | |
| }, | |
| { | |
| "epoch": 0.7636159460976979, | |
| "grad_norm": 1.2818859952439805, | |
| "learning_rate": 7.439325639256483e-05, | |
| "loss": 0.4815, | |
| "step": 85 | |
| }, | |
| { | |
| "epoch": 0.772599663110612, | |
| "grad_norm": 0.7413198899597869, | |
| "learning_rate": 7.417677618185955e-05, | |
| "loss": 0.4837, | |
| "step": 86 | |
| }, | |
| { | |
| "epoch": 0.7815833801235261, | |
| "grad_norm": 0.9478143258160291, | |
| "learning_rate": 7.39565229914622e-05, | |
| "loss": 0.4765, | |
| "step": 87 | |
| }, | |
| { | |
| "epoch": 0.7905670971364402, | |
| "grad_norm": 0.963846436149164, | |
| "learning_rate": 7.373252113644596e-05, | |
| "loss": 0.4879, | |
| "step": 88 | |
| }, | |
| { | |
| "epoch": 0.7995508141493542, | |
| "grad_norm": 0.9081468443162725, | |
| "learning_rate": 7.350479534572166e-05, | |
| "loss": 0.4705, | |
| "step": 89 | |
| }, | |
| { | |
| "epoch": 0.8085345311622684, | |
| "grad_norm": 0.6474133013047098, | |
| "learning_rate": 7.327337075930775e-05, | |
| "loss": 0.4806, | |
| "step": 90 | |
| }, | |
| { | |
| "epoch": 0.8175182481751825, | |
| "grad_norm": 0.6809036448679276, | |
| "learning_rate": 7.303827292555495e-05, | |
| "loss": 0.481, | |
| "step": 91 | |
| }, | |
| { | |
| "epoch": 0.8265019651880966, | |
| "grad_norm": 1.0720011034315253, | |
| "learning_rate": 7.279952779832584e-05, | |
| "loss": 0.4759, | |
| "step": 92 | |
| }, | |
| { | |
| "epoch": 0.8354856822010107, | |
| "grad_norm": 1.0029995670688614, | |
| "learning_rate": 7.255716173412966e-05, | |
| "loss": 0.4736, | |
| "step": 93 | |
| }, | |
| { | |
| "epoch": 0.8444693992139247, | |
| "grad_norm": 0.8353160428003878, | |
| "learning_rate": 7.23112014892126e-05, | |
| "loss": 0.4754, | |
| "step": 94 | |
| }, | |
| { | |
| "epoch": 0.8534531162268388, | |
| "grad_norm": 0.9066101057502527, | |
| "learning_rate": 7.20616742166041e-05, | |
| "loss": 0.4801, | |
| "step": 95 | |
| }, | |
| { | |
| "epoch": 0.862436833239753, | |
| "grad_norm": 1.3700422899595712, | |
| "learning_rate": 7.180860746311917e-05, | |
| "loss": 0.4904, | |
| "step": 96 | |
| }, | |
| { | |
| "epoch": 0.8714205502526671, | |
| "grad_norm": 0.8940940289737819, | |
| "learning_rate": 7.155202916631743e-05, | |
| "loss": 0.4824, | |
| "step": 97 | |
| }, | |
| { | |
| "epoch": 0.8804042672655812, | |
| "grad_norm": 1.3226382972793307, | |
| "learning_rate": 7.129196765141886e-05, | |
| "loss": 0.4679, | |
| "step": 98 | |
| }, | |
| { | |
| "epoch": 0.8893879842784952, | |
| "grad_norm": 0.8696732708344425, | |
| "learning_rate": 7.10284516281768e-05, | |
| "loss": 0.4786, | |
| "step": 99 | |
| }, | |
| { | |
| "epoch": 0.8983717012914093, | |
| "grad_norm": 1.0583061403429452, | |
| "learning_rate": 7.076151018770854e-05, | |
| "loss": 0.4669, | |
| "step": 100 | |
| }, | |
| { | |
| "epoch": 0.9073554183043234, | |
| "grad_norm": 1.0722660670077375, | |
| "learning_rate": 7.049117279928374e-05, | |
| "loss": 0.4801, | |
| "step": 101 | |
| }, | |
| { | |
| "epoch": 0.9163391353172375, | |
| "grad_norm": 1.2855639166116766, | |
| "learning_rate": 7.021746930707117e-05, | |
| "loss": 0.4674, | |
| "step": 102 | |
| }, | |
| { | |
| "epoch": 0.9253228523301515, | |
| "grad_norm": 0.6812836053141724, | |
| "learning_rate": 6.994042992684406e-05, | |
| "loss": 0.4788, | |
| "step": 103 | |
| }, | |
| { | |
| "epoch": 0.9343065693430657, | |
| "grad_norm": 0.6913667167229325, | |
| "learning_rate": 6.966008524264429e-05, | |
| "loss": 0.4679, | |
| "step": 104 | |
| }, | |
| { | |
| "epoch": 0.9432902863559798, | |
| "grad_norm": 0.9914419206668401, | |
| "learning_rate": 6.937646620340618e-05, | |
| "loss": 0.4792, | |
| "step": 105 | |
| }, | |
| { | |
| "epoch": 0.9522740033688939, | |
| "grad_norm": 1.3797231509055095, | |
| "learning_rate": 6.908960411953973e-05, | |
| "loss": 0.4683, | |
| "step": 106 | |
| }, | |
| { | |
| "epoch": 0.961257720381808, | |
| "grad_norm": 0.5888339311760277, | |
| "learning_rate": 6.879953065947416e-05, | |
| "loss": 0.4678, | |
| "step": 107 | |
| }, | |
| { | |
| "epoch": 0.970241437394722, | |
| "grad_norm": 1.038793566408693, | |
| "learning_rate": 6.850627784616178e-05, | |
| "loss": 0.4601, | |
| "step": 108 | |
| }, | |
| { | |
| "epoch": 0.9792251544076361, | |
| "grad_norm": 1.0842893632221857, | |
| "learning_rate": 6.82098780535428e-05, | |
| "loss": 0.4681, | |
| "step": 109 | |
| }, | |
| { | |
| "epoch": 0.9882088714205502, | |
| "grad_norm": 0.8626180542597122, | |
| "learning_rate": 6.791036400297142e-05, | |
| "loss": 0.4761, | |
| "step": 110 | |
| }, | |
| { | |
| "epoch": 0.9971925884334644, | |
| "grad_norm": 0.7965083948145554, | |
| "learning_rate": 6.760776875960347e-05, | |
| "loss": 0.4689, | |
| "step": 111 | |
| }, | |
| { | |
| "epoch": 1.0075800112296462, | |
| "grad_norm": 1.786141860362385, | |
| "learning_rate": 6.730212572874618e-05, | |
| "loss": 0.8334, | |
| "step": 112 | |
| }, | |
| { | |
| "epoch": 1.0165637282425604, | |
| "grad_norm": 0.8882542778931486, | |
| "learning_rate": 6.699346865217031e-05, | |
| "loss": 0.4299, | |
| "step": 113 | |
| }, | |
| { | |
| "epoch": 1.0255474452554745, | |
| "grad_norm": 0.6042242319747863, | |
| "learning_rate": 6.668183160438531e-05, | |
| "loss": 0.4431, | |
| "step": 114 | |
| }, | |
| { | |
| "epoch": 1.0345311622683886, | |
| "grad_norm": 0.5382725444188637, | |
| "learning_rate": 6.636724898887751e-05, | |
| "loss": 0.4464, | |
| "step": 115 | |
| }, | |
| { | |
| "epoch": 1.0435148792813027, | |
| "grad_norm": 0.6092419399947249, | |
| "learning_rate": 6.604975553431219e-05, | |
| "loss": 0.45, | |
| "step": 116 | |
| }, | |
| { | |
| "epoch": 1.0524985962942168, | |
| "grad_norm": 0.952732534414775, | |
| "learning_rate": 6.572938629069959e-05, | |
| "loss": 0.4416, | |
| "step": 117 | |
| }, | |
| { | |
| "epoch": 1.0614823133071307, | |
| "grad_norm": 1.2586357901517062, | |
| "learning_rate": 6.540617662552565e-05, | |
| "loss": 0.4478, | |
| "step": 118 | |
| }, | |
| { | |
| "epoch": 1.0704660303200448, | |
| "grad_norm": 0.5882439478167614, | |
| "learning_rate": 6.508016221984747e-05, | |
| "loss": 0.4346, | |
| "step": 119 | |
| }, | |
| { | |
| "epoch": 1.079449747332959, | |
| "grad_norm": 0.8126561068118403, | |
| "learning_rate": 6.475137906435435e-05, | |
| "loss": 0.4373, | |
| "step": 120 | |
| }, | |
| { | |
| "epoch": 1.088433464345873, | |
| "grad_norm": 1.5656070861661977, | |
| "learning_rate": 6.441986345539446e-05, | |
| "loss": 0.4461, | |
| "step": 121 | |
| }, | |
| { | |
| "epoch": 1.0974171813587872, | |
| "grad_norm": 0.5066258143638921, | |
| "learning_rate": 6.408565199096798e-05, | |
| "loss": 0.4327, | |
| "step": 122 | |
| }, | |
| { | |
| "epoch": 1.1064008983717013, | |
| "grad_norm": 1.6682432153253044, | |
| "learning_rate": 6.374878156668676e-05, | |
| "loss": 0.4568, | |
| "step": 123 | |
| }, | |
| { | |
| "epoch": 1.1153846153846154, | |
| "grad_norm": 0.6299972385025496, | |
| "learning_rate": 6.340928937170118e-05, | |
| "loss": 0.4285, | |
| "step": 124 | |
| }, | |
| { | |
| "epoch": 1.1243683323975295, | |
| "grad_norm": 1.9783962130226307, | |
| "learning_rate": 6.30672128845947e-05, | |
| "loss": 0.4643, | |
| "step": 125 | |
| }, | |
| { | |
| "epoch": 1.1333520494104437, | |
| "grad_norm": 1.2699186089072336, | |
| "learning_rate": 6.272258986924624e-05, | |
| "loss": 0.4362, | |
| "step": 126 | |
| }, | |
| { | |
| "epoch": 1.1423357664233578, | |
| "grad_norm": 1.617840953040159, | |
| "learning_rate": 6.237545837066133e-05, | |
| "loss": 0.4476, | |
| "step": 127 | |
| }, | |
| { | |
| "epoch": 1.1513194834362717, | |
| "grad_norm": 1.5459662606407818, | |
| "learning_rate": 6.202585671077204e-05, | |
| "loss": 0.4452, | |
| "step": 128 | |
| }, | |
| { | |
| "epoch": 1.1603032004491858, | |
| "grad_norm": 0.8520351440042236, | |
| "learning_rate": 6.167382348420637e-05, | |
| "loss": 0.4485, | |
| "step": 129 | |
| }, | |
| { | |
| "epoch": 1.1692869174621, | |
| "grad_norm": 1.2109454920551714, | |
| "learning_rate": 6.131939755402755e-05, | |
| "loss": 0.4436, | |
| "step": 130 | |
| }, | |
| { | |
| "epoch": 1.178270634475014, | |
| "grad_norm": 0.7621427547756707, | |
| "learning_rate": 6.09626180474438e-05, | |
| "loss": 0.4436, | |
| "step": 131 | |
| }, | |
| { | |
| "epoch": 1.1872543514879281, | |
| "grad_norm": 0.9377335412029888, | |
| "learning_rate": 6.060352435148874e-05, | |
| "loss": 0.4404, | |
| "step": 132 | |
| }, | |
| { | |
| "epoch": 1.1962380685008422, | |
| "grad_norm": 0.742883271426497, | |
| "learning_rate": 6.024215610867327e-05, | |
| "loss": 0.4431, | |
| "step": 133 | |
| }, | |
| { | |
| "epoch": 1.2052217855137564, | |
| "grad_norm": 0.623519085897906, | |
| "learning_rate": 5.9878553212609184e-05, | |
| "loss": 0.4349, | |
| "step": 134 | |
| }, | |
| { | |
| "epoch": 1.2142055025266705, | |
| "grad_norm": 0.5738395916616672, | |
| "learning_rate": 5.95127558036051e-05, | |
| "loss": 0.4383, | |
| "step": 135 | |
| }, | |
| { | |
| "epoch": 1.2231892195395846, | |
| "grad_norm": 0.5745090863281419, | |
| "learning_rate": 5.9144804264235066e-05, | |
| "loss": 0.4353, | |
| "step": 136 | |
| }, | |
| { | |
| "epoch": 1.2321729365524985, | |
| "grad_norm": 0.6173172910448796, | |
| "learning_rate": 5.8774739214880554e-05, | |
| "loss": 0.4397, | |
| "step": 137 | |
| }, | |
| { | |
| "epoch": 1.2411566535654126, | |
| "grad_norm": 0.44131140572281446, | |
| "learning_rate": 5.840260150924609e-05, | |
| "loss": 0.4358, | |
| "step": 138 | |
| }, | |
| { | |
| "epoch": 1.2501403705783267, | |
| "grad_norm": 0.48929805430187906, | |
| "learning_rate": 5.802843222984919e-05, | |
| "loss": 0.4348, | |
| "step": 139 | |
| }, | |
| { | |
| "epoch": 1.2591240875912408, | |
| "grad_norm": 0.46891906471051287, | |
| "learning_rate": 5.765227268348501e-05, | |
| "loss": 0.433, | |
| "step": 140 | |
| }, | |
| { | |
| "epoch": 1.268107804604155, | |
| "grad_norm": 0.4197153657865162, | |
| "learning_rate": 5.727416439666622e-05, | |
| "loss": 0.4373, | |
| "step": 141 | |
| }, | |
| { | |
| "epoch": 1.277091521617069, | |
| "grad_norm": 0.49836695670940545, | |
| "learning_rate": 5.689414911103867e-05, | |
| "loss": 0.4339, | |
| "step": 142 | |
| }, | |
| { | |
| "epoch": 1.2860752386299832, | |
| "grad_norm": 0.4282436546394893, | |
| "learning_rate": 5.651226877877326e-05, | |
| "loss": 0.4371, | |
| "step": 143 | |
| }, | |
| { | |
| "epoch": 1.2950589556428973, | |
| "grad_norm": 0.39801460777108744, | |
| "learning_rate": 5.612856555793459e-05, | |
| "loss": 0.4422, | |
| "step": 144 | |
| }, | |
| { | |
| "epoch": 1.3040426726558114, | |
| "grad_norm": 0.384138892088657, | |
| "learning_rate": 5.574308180782693e-05, | |
| "loss": 0.4235, | |
| "step": 145 | |
| }, | |
| { | |
| "epoch": 1.3130263896687255, | |
| "grad_norm": 0.4344092290487113, | |
| "learning_rate": 5.5355860084317787e-05, | |
| "loss": 0.4356, | |
| "step": 146 | |
| }, | |
| { | |
| "epoch": 1.3220101066816397, | |
| "grad_norm": 0.37928071414714964, | |
| "learning_rate": 5.496694313514009e-05, | |
| "loss": 0.4362, | |
| "step": 147 | |
| }, | |
| { | |
| "epoch": 1.3309938236945535, | |
| "grad_norm": 0.34476337733896867, | |
| "learning_rate": 5.457637389517285e-05, | |
| "loss": 0.4398, | |
| "step": 148 | |
| }, | |
| { | |
| "epoch": 1.3399775407074677, | |
| "grad_norm": 0.4506193928055396, | |
| "learning_rate": 5.4184195481701425e-05, | |
| "loss": 0.428, | |
| "step": 149 | |
| }, | |
| { | |
| "epoch": 1.3489612577203818, | |
| "grad_norm": 0.32358078080164415, | |
| "learning_rate": 5.3790451189657486e-05, | |
| "loss": 0.4354, | |
| "step": 150 | |
| }, | |
| { | |
| "epoch": 1.357944974733296, | |
| "grad_norm": 0.3714258728709878, | |
| "learning_rate": 5.339518448683945e-05, | |
| "loss": 0.4336, | |
| "step": 151 | |
| }, | |
| { | |
| "epoch": 1.36692869174621, | |
| "grad_norm": 0.3698334511165891, | |
| "learning_rate": 5.2998439009113814e-05, | |
| "loss": 0.421, | |
| "step": 152 | |
| }, | |
| { | |
| "epoch": 1.3759124087591241, | |
| "grad_norm": 0.3039344886888513, | |
| "learning_rate": 5.260025855559792e-05, | |
| "loss": 0.4265, | |
| "step": 153 | |
| }, | |
| { | |
| "epoch": 1.3848961257720382, | |
| "grad_norm": 0.38707783731390033, | |
| "learning_rate": 5.2200687083824706e-05, | |
| "loss": 0.4285, | |
| "step": 154 | |
| }, | |
| { | |
| "epoch": 1.3938798427849521, | |
| "grad_norm": 0.3708434779281009, | |
| "learning_rate": 5.179976870488999e-05, | |
| "loss": 0.4242, | |
| "step": 155 | |
| }, | |
| { | |
| "epoch": 1.4028635597978663, | |
| "grad_norm": 0.2827560899827463, | |
| "learning_rate": 5.1397547678582745e-05, | |
| "loss": 0.4215, | |
| "step": 156 | |
| }, | |
| { | |
| "epoch": 1.4118472768107804, | |
| "grad_norm": 0.36357591101838377, | |
| "learning_rate": 5.099406840849902e-05, | |
| "loss": 0.4344, | |
| "step": 157 | |
| }, | |
| { | |
| "epoch": 1.4208309938236945, | |
| "grad_norm": 0.2815376075683387, | |
| "learning_rate": 5.058937543713999e-05, | |
| "loss": 0.4234, | |
| "step": 158 | |
| }, | |
| { | |
| "epoch": 1.4298147108366086, | |
| "grad_norm": 0.34011781572335237, | |
| "learning_rate": 5.018351344099453e-05, | |
| "loss": 0.438, | |
| "step": 159 | |
| }, | |
| { | |
| "epoch": 1.4387984278495227, | |
| "grad_norm": 0.32400444292287883, | |
| "learning_rate": 4.9776527225607274e-05, | |
| "loss": 0.4328, | |
| "step": 160 | |
| }, | |
| { | |
| "epoch": 1.4477821448624368, | |
| "grad_norm": 0.2529768229883554, | |
| "learning_rate": 4.93684617206321e-05, | |
| "loss": 0.4339, | |
| "step": 161 | |
| }, | |
| { | |
| "epoch": 1.456765861875351, | |
| "grad_norm": 0.2629261907748739, | |
| "learning_rate": 4.89593619748722e-05, | |
| "loss": 0.4394, | |
| "step": 162 | |
| }, | |
| { | |
| "epoch": 1.465749578888265, | |
| "grad_norm": 0.32833332654685193, | |
| "learning_rate": 4.8549273151306795e-05, | |
| "loss": 0.4331, | |
| "step": 163 | |
| }, | |
| { | |
| "epoch": 1.4747332959011792, | |
| "grad_norm": 0.37621666962913386, | |
| "learning_rate": 4.8138240522105365e-05, | |
| "loss": 0.4294, | |
| "step": 164 | |
| }, | |
| { | |
| "epoch": 1.4837170129140933, | |
| "grad_norm": 0.33680087947524656, | |
| "learning_rate": 4.7726309463629733e-05, | |
| "loss": 0.4317, | |
| "step": 165 | |
| }, | |
| { | |
| "epoch": 1.4927007299270074, | |
| "grad_norm": 0.22831905889281706, | |
| "learning_rate": 4.731352545142478e-05, | |
| "loss": 0.4265, | |
| "step": 166 | |
| }, | |
| { | |
| "epoch": 1.5016844469399215, | |
| "grad_norm": 0.23855204275905859, | |
| "learning_rate": 4.689993405519802e-05, | |
| "loss": 0.4343, | |
| "step": 167 | |
| }, | |
| { | |
| "epoch": 1.5106681639528357, | |
| "grad_norm": 0.3165956064567229, | |
| "learning_rate": 4.648558093378899e-05, | |
| "loss": 0.4331, | |
| "step": 168 | |
| }, | |
| { | |
| "epoch": 1.5196518809657495, | |
| "grad_norm": 0.3792988696086109, | |
| "learning_rate": 4.607051183012862e-05, | |
| "loss": 0.4214, | |
| "step": 169 | |
| }, | |
| { | |
| "epoch": 1.5286355979786637, | |
| "grad_norm": 0.29743099805783, | |
| "learning_rate": 4.5654772566189415e-05, | |
| "loss": 0.4322, | |
| "step": 170 | |
| }, | |
| { | |
| "epoch": 1.5376193149915778, | |
| "grad_norm": 0.22315650518088825, | |
| "learning_rate": 4.5238409037926905e-05, | |
| "loss": 0.4286, | |
| "step": 171 | |
| }, | |
| { | |
| "epoch": 1.546603032004492, | |
| "grad_norm": 0.3745132482990998, | |
| "learning_rate": 4.4821467210212924e-05, | |
| "loss": 0.4325, | |
| "step": 172 | |
| }, | |
| { | |
| "epoch": 1.5555867490174058, | |
| "grad_norm": 0.4349435850403825, | |
| "learning_rate": 4.4403993111761265e-05, | |
| "loss": 0.4278, | |
| "step": 173 | |
| }, | |
| { | |
| "epoch": 1.56457046603032, | |
| "grad_norm": 0.34417305619739286, | |
| "learning_rate": 4.398603283004626e-05, | |
| "loss": 0.421, | |
| "step": 174 | |
| }, | |
| { | |
| "epoch": 1.573554183043234, | |
| "grad_norm": 0.2697125284825644, | |
| "learning_rate": 4.356763250621496e-05, | |
| "loss": 0.4355, | |
| "step": 175 | |
| }, | |
| { | |
| "epoch": 1.5825379000561481, | |
| "grad_norm": 0.3759486979388303, | |
| "learning_rate": 4.314883832999326e-05, | |
| "loss": 0.4336, | |
| "step": 176 | |
| }, | |
| { | |
| "epoch": 1.5915216170690623, | |
| "grad_norm": 0.42378540392492775, | |
| "learning_rate": 4.272969653458685e-05, | |
| "loss": 0.4258, | |
| "step": 177 | |
| }, | |
| { | |
| "epoch": 1.6005053340819764, | |
| "grad_norm": 0.36295049407047797, | |
| "learning_rate": 4.231025339157714e-05, | |
| "loss": 0.4214, | |
| "step": 178 | |
| }, | |
| { | |
| "epoch": 1.6094890510948905, | |
| "grad_norm": 0.22472885668491016, | |
| "learning_rate": 4.189055520581315e-05, | |
| "loss": 0.4302, | |
| "step": 179 | |
| }, | |
| { | |
| "epoch": 1.6184727681078046, | |
| "grad_norm": 0.24842552091044895, | |
| "learning_rate": 4.147064831029959e-05, | |
| "loss": 0.4267, | |
| "step": 180 | |
| }, | |
| { | |
| "epoch": 1.6274564851207187, | |
| "grad_norm": 0.27444720145312784, | |
| "learning_rate": 4.105057906108189e-05, | |
| "loss": 0.4306, | |
| "step": 181 | |
| }, | |
| { | |
| "epoch": 1.6364402021336328, | |
| "grad_norm": 0.24170155717400021, | |
| "learning_rate": 4.063039383212866e-05, | |
| "loss": 0.4226, | |
| "step": 182 | |
| }, | |
| { | |
| "epoch": 1.645423919146547, | |
| "grad_norm": 0.2158635971279587, | |
| "learning_rate": 4.021013901021225e-05, | |
| "loss": 0.4182, | |
| "step": 183 | |
| }, | |
| { | |
| "epoch": 1.654407636159461, | |
| "grad_norm": 0.2392521978049934, | |
| "learning_rate": 3.978986098978777e-05, | |
| "loss": 0.4184, | |
| "step": 184 | |
| }, | |
| { | |
| "epoch": 1.6633913531723752, | |
| "grad_norm": 0.2333669024637109, | |
| "learning_rate": 3.936960616787135e-05, | |
| "loss": 0.4272, | |
| "step": 185 | |
| }, | |
| { | |
| "epoch": 1.6723750701852893, | |
| "grad_norm": 0.303971238157218, | |
| "learning_rate": 3.8949420938918124e-05, | |
| "loss": 0.4278, | |
| "step": 186 | |
| }, | |
| { | |
| "epoch": 1.6813587871982034, | |
| "grad_norm": 0.20260216895624977, | |
| "learning_rate": 3.852935168970042e-05, | |
| "loss": 0.4274, | |
| "step": 187 | |
| }, | |
| { | |
| "epoch": 1.6903425042111173, | |
| "grad_norm": 0.27940230833327445, | |
| "learning_rate": 3.810944479418686e-05, | |
| "loss": 0.4235, | |
| "step": 188 | |
| }, | |
| { | |
| "epoch": 1.6993262212240314, | |
| "grad_norm": 0.24151727930977668, | |
| "learning_rate": 3.768974660842287e-05, | |
| "loss": 0.43, | |
| "step": 189 | |
| }, | |
| { | |
| "epoch": 1.7083099382369455, | |
| "grad_norm": 0.28305027971796287, | |
| "learning_rate": 3.727030346541317e-05, | |
| "loss": 0.4204, | |
| "step": 190 | |
| }, | |
| { | |
| "epoch": 1.7172936552498597, | |
| "grad_norm": 0.23944450177743382, | |
| "learning_rate": 3.685116167000675e-05, | |
| "loss": 0.4303, | |
| "step": 191 | |
| }, | |
| { | |
| "epoch": 1.7262773722627736, | |
| "grad_norm": 0.2649036336219584, | |
| "learning_rate": 3.6432367493785056e-05, | |
| "loss": 0.4306, | |
| "step": 192 | |
| }, | |
| { | |
| "epoch": 1.7352610892756877, | |
| "grad_norm": 0.20536449694593853, | |
| "learning_rate": 3.601396716995375e-05, | |
| "loss": 0.4298, | |
| "step": 193 | |
| }, | |
| { | |
| "epoch": 1.7442448062886018, | |
| "grad_norm": 0.2752898250202777, | |
| "learning_rate": 3.559600688823875e-05, | |
| "loss": 0.4213, | |
| "step": 194 | |
| }, | |
| { | |
| "epoch": 1.753228523301516, | |
| "grad_norm": 0.2441828080744484, | |
| "learning_rate": 3.517853278978708e-05, | |
| "loss": 0.4317, | |
| "step": 195 | |
| }, | |
| { | |
| "epoch": 1.76221224031443, | |
| "grad_norm": 0.18921602583278035, | |
| "learning_rate": 3.4761590962073115e-05, | |
| "loss": 0.4352, | |
| "step": 196 | |
| }, | |
| { | |
| "epoch": 1.7711959573273441, | |
| "grad_norm": 0.25381845245915774, | |
| "learning_rate": 3.434522743381061e-05, | |
| "loss": 0.4227, | |
| "step": 197 | |
| }, | |
| { | |
| "epoch": 1.7801796743402583, | |
| "grad_norm": 0.22098025447635186, | |
| "learning_rate": 3.39294881698714e-05, | |
| "loss": 0.4145, | |
| "step": 198 | |
| }, | |
| { | |
| "epoch": 1.7891633913531724, | |
| "grad_norm": 0.21253762725360797, | |
| "learning_rate": 3.3514419066211025e-05, | |
| "loss": 0.4204, | |
| "step": 199 | |
| }, | |
| { | |
| "epoch": 1.7981471083660865, | |
| "grad_norm": 0.1993914725116837, | |
| "learning_rate": 3.310006594480199e-05, | |
| "loss": 0.4212, | |
| "step": 200 | |
| }, | |
| { | |
| "epoch": 1.8071308253790006, | |
| "grad_norm": 0.21949029430642605, | |
| "learning_rate": 3.268647454857524e-05, | |
| "loss": 0.4246, | |
| "step": 201 | |
| }, | |
| { | |
| "epoch": 1.8161145423919147, | |
| "grad_norm": 0.18093321704969498, | |
| "learning_rate": 3.227369053637028e-05, | |
| "loss": 0.4379, | |
| "step": 202 | |
| }, | |
| { | |
| "epoch": 1.8250982594048288, | |
| "grad_norm": 0.21469795266380035, | |
| "learning_rate": 3.1861759477894656e-05, | |
| "loss": 0.4227, | |
| "step": 203 | |
| }, | |
| { | |
| "epoch": 1.834081976417743, | |
| "grad_norm": 0.18433389487278787, | |
| "learning_rate": 3.145072684869322e-05, | |
| "loss": 0.4166, | |
| "step": 204 | |
| }, | |
| { | |
| "epoch": 1.843065693430657, | |
| "grad_norm": 0.19979893625833967, | |
| "learning_rate": 3.104063802512782e-05, | |
| "loss": 0.4281, | |
| "step": 205 | |
| }, | |
| { | |
| "epoch": 1.8520494104435712, | |
| "grad_norm": 0.19103290364400521, | |
| "learning_rate": 3.063153827936792e-05, | |
| "loss": 0.4161, | |
| "step": 206 | |
| }, | |
| { | |
| "epoch": 1.861033127456485, | |
| "grad_norm": 0.21106537662314218, | |
| "learning_rate": 3.0223472774392753e-05, | |
| "loss": 0.4242, | |
| "step": 207 | |
| }, | |
| { | |
| "epoch": 1.8700168444693992, | |
| "grad_norm": 0.20844328823177255, | |
| "learning_rate": 2.9816486559005482e-05, | |
| "loss": 0.4333, | |
| "step": 208 | |
| }, | |
| { | |
| "epoch": 1.8790005614823133, | |
| "grad_norm": 0.19732371134712767, | |
| "learning_rate": 2.9410624562860026e-05, | |
| "loss": 0.4211, | |
| "step": 209 | |
| }, | |
| { | |
| "epoch": 1.8879842784952274, | |
| "grad_norm": 0.23608540869194397, | |
| "learning_rate": 2.9005931591500974e-05, | |
| "loss": 0.422, | |
| "step": 210 | |
| }, | |
| { | |
| "epoch": 1.8969679955081415, | |
| "grad_norm": 0.18142970460558788, | |
| "learning_rate": 2.860245232141726e-05, | |
| "loss": 0.4215, | |
| "step": 211 | |
| }, | |
| { | |
| "epoch": 1.9059517125210554, | |
| "grad_norm": 0.21920791583501226, | |
| "learning_rate": 2.8200231295110012e-05, | |
| "loss": 0.4304, | |
| "step": 212 | |
| }, | |
| { | |
| "epoch": 1.9149354295339696, | |
| "grad_norm": 0.1575572213846906, | |
| "learning_rate": 2.7799312916175294e-05, | |
| "loss": 0.4115, | |
| "step": 213 | |
| }, | |
| { | |
| "epoch": 1.9239191465468837, | |
| "grad_norm": 0.21181976577415998, | |
| "learning_rate": 2.7399741444402087e-05, | |
| "loss": 0.4301, | |
| "step": 214 | |
| }, | |
| { | |
| "epoch": 1.9329028635597978, | |
| "grad_norm": 0.1817247821525619, | |
| "learning_rate": 2.7001560990886196e-05, | |
| "loss": 0.4202, | |
| "step": 215 | |
| }, | |
| { | |
| "epoch": 1.941886580572712, | |
| "grad_norm": 0.17883228911738394, | |
| "learning_rate": 2.6604815513160556e-05, | |
| "loss": 0.4241, | |
| "step": 216 | |
| }, | |
| { | |
| "epoch": 1.950870297585626, | |
| "grad_norm": 0.19947193861762386, | |
| "learning_rate": 2.6209548810342517e-05, | |
| "loss": 0.4137, | |
| "step": 217 | |
| }, | |
| { | |
| "epoch": 1.9598540145985401, | |
| "grad_norm": 0.19071364537376845, | |
| "learning_rate": 2.5815804518298575e-05, | |
| "loss": 0.4327, | |
| "step": 218 | |
| }, | |
| { | |
| "epoch": 1.9688377316114543, | |
| "grad_norm": 0.1712467330551581, | |
| "learning_rate": 2.542362610482715e-05, | |
| "loss": 0.4239, | |
| "step": 219 | |
| }, | |
| { | |
| "epoch": 1.9778214486243684, | |
| "grad_norm": 0.16971518326737378, | |
| "learning_rate": 2.503305686485991e-05, | |
| "loss": 0.4197, | |
| "step": 220 | |
| }, | |
| { | |
| "epoch": 1.9868051656372825, | |
| "grad_norm": 0.15655415639126638, | |
| "learning_rate": 2.464413991568222e-05, | |
| "loss": 0.4085, | |
| "step": 221 | |
| }, | |
| { | |
| "epoch": 1.9957888826501966, | |
| "grad_norm": 0.19809705687476845, | |
| "learning_rate": 2.4256918192173088e-05, | |
| "loss": 0.4325, | |
| "step": 222 | |
| }, | |
| { | |
| "epoch": 2.0061763054463784, | |
| "grad_norm": 0.32148951189801367, | |
| "learning_rate": 2.3871434442065414e-05, | |
| "loss": 0.7473, | |
| "step": 223 | |
| }, | |
| { | |
| "epoch": 2.0151600224592925, | |
| "grad_norm": 0.20734628721061774, | |
| "learning_rate": 2.3487731221226754e-05, | |
| "loss": 0.3817, | |
| "step": 224 | |
| }, | |
| { | |
| "epoch": 2.0241437394722066, | |
| "grad_norm": 0.2283335322281334, | |
| "learning_rate": 2.3105850888961348e-05, | |
| "loss": 0.4043, | |
| "step": 225 | |
| }, | |
| { | |
| "epoch": 2.0331274564851207, | |
| "grad_norm": 0.20409956046524397, | |
| "learning_rate": 2.272583560333379e-05, | |
| "loss": 0.3908, | |
| "step": 226 | |
| }, | |
| { | |
| "epoch": 2.042111173498035, | |
| "grad_norm": 0.21233165191280134, | |
| "learning_rate": 2.2347727316515e-05, | |
| "loss": 0.3963, | |
| "step": 227 | |
| }, | |
| { | |
| "epoch": 2.051094890510949, | |
| "grad_norm": 0.2058022752445305, | |
| "learning_rate": 2.1971567770150814e-05, | |
| "loss": 0.3875, | |
| "step": 228 | |
| }, | |
| { | |
| "epoch": 2.060078607523863, | |
| "grad_norm": 0.2202126073153398, | |
| "learning_rate": 2.1597398490753917e-05, | |
| "loss": 0.4031, | |
| "step": 229 | |
| }, | |
| { | |
| "epoch": 2.069062324536777, | |
| "grad_norm": 0.20478388239536333, | |
| "learning_rate": 2.1225260785119456e-05, | |
| "loss": 0.3891, | |
| "step": 230 | |
| }, | |
| { | |
| "epoch": 2.0780460415496913, | |
| "grad_norm": 0.1937488744583445, | |
| "learning_rate": 2.0855195735764947e-05, | |
| "loss": 0.3906, | |
| "step": 231 | |
| }, | |
| { | |
| "epoch": 2.0870297585626054, | |
| "grad_norm": 0.2163161528806818, | |
| "learning_rate": 2.0487244196394912e-05, | |
| "loss": 0.3941, | |
| "step": 232 | |
| }, | |
| { | |
| "epoch": 2.0960134755755195, | |
| "grad_norm": 0.18524457129950797, | |
| "learning_rate": 2.0121446787390822e-05, | |
| "loss": 0.3799, | |
| "step": 233 | |
| }, | |
| { | |
| "epoch": 2.1049971925884337, | |
| "grad_norm": 0.217114553144753, | |
| "learning_rate": 1.9757843891326736e-05, | |
| "loss": 0.3951, | |
| "step": 234 | |
| }, | |
| { | |
| "epoch": 2.1139809096013478, | |
| "grad_norm": 0.2017737198987517, | |
| "learning_rate": 1.939647564851127e-05, | |
| "loss": 0.3928, | |
| "step": 235 | |
| }, | |
| { | |
| "epoch": 2.1229646266142614, | |
| "grad_norm": 0.20060221109136825, | |
| "learning_rate": 1.9037381952556217e-05, | |
| "loss": 0.3891, | |
| "step": 236 | |
| }, | |
| { | |
| "epoch": 2.1319483436271756, | |
| "grad_norm": 0.19362260661168698, | |
| "learning_rate": 1.8680602445972463e-05, | |
| "loss": 0.3967, | |
| "step": 237 | |
| }, | |
| { | |
| "epoch": 2.1409320606400897, | |
| "grad_norm": 0.1898518071335132, | |
| "learning_rate": 1.832617651579365e-05, | |
| "loss": 0.3859, | |
| "step": 238 | |
| }, | |
| { | |
| "epoch": 2.149915777653004, | |
| "grad_norm": 0.1817097392847635, | |
| "learning_rate": 1.797414328922797e-05, | |
| "loss": 0.3878, | |
| "step": 239 | |
| }, | |
| { | |
| "epoch": 2.158899494665918, | |
| "grad_norm": 0.1438401587120875, | |
| "learning_rate": 1.7624541629338676e-05, | |
| "loss": 0.3902, | |
| "step": 240 | |
| }, | |
| { | |
| "epoch": 2.167883211678832, | |
| "grad_norm": 0.1785624789625333, | |
| "learning_rate": 1.7277410130753775e-05, | |
| "loss": 0.3803, | |
| "step": 241 | |
| }, | |
| { | |
| "epoch": 2.176866928691746, | |
| "grad_norm": 0.16410314706005405, | |
| "learning_rate": 1.6932787115405318e-05, | |
| "loss": 0.4038, | |
| "step": 242 | |
| }, | |
| { | |
| "epoch": 2.1858506457046603, | |
| "grad_norm": 0.17495931093245287, | |
| "learning_rate": 1.6590710628298826e-05, | |
| "loss": 0.4097, | |
| "step": 243 | |
| }, | |
| { | |
| "epoch": 2.1948343627175744, | |
| "grad_norm": 0.15732504490550855, | |
| "learning_rate": 1.6251218433313254e-05, | |
| "loss": 0.3841, | |
| "step": 244 | |
| }, | |
| { | |
| "epoch": 2.2038180797304885, | |
| "grad_norm": 0.15370170593613078, | |
| "learning_rate": 1.591434800903203e-05, | |
| "loss": 0.3879, | |
| "step": 245 | |
| }, | |
| { | |
| "epoch": 2.2128017967434026, | |
| "grad_norm": 0.14225993974716095, | |
| "learning_rate": 1.558013654460555e-05, | |
| "loss": 0.3831, | |
| "step": 246 | |
| }, | |
| { | |
| "epoch": 2.2217855137563167, | |
| "grad_norm": 0.16769227472528905, | |
| "learning_rate": 1.5248620935645666e-05, | |
| "loss": 0.3932, | |
| "step": 247 | |
| }, | |
| { | |
| "epoch": 2.230769230769231, | |
| "grad_norm": 0.15697609805604978, | |
| "learning_rate": 1.4919837780152544e-05, | |
| "loss": 0.3793, | |
| "step": 248 | |
| }, | |
| { | |
| "epoch": 2.239752947782145, | |
| "grad_norm": 0.14834997366132227, | |
| "learning_rate": 1.4593823374474374e-05, | |
| "loss": 0.3884, | |
| "step": 249 | |
| }, | |
| { | |
| "epoch": 2.248736664795059, | |
| "grad_norm": 0.1416591291545358, | |
| "learning_rate": 1.4270613709300429e-05, | |
| "loss": 0.3883, | |
| "step": 250 | |
| }, | |
| { | |
| "epoch": 2.257720381807973, | |
| "grad_norm": 0.1416137601943575, | |
| "learning_rate": 1.3950244465687833e-05, | |
| "loss": 0.3925, | |
| "step": 251 | |
| }, | |
| { | |
| "epoch": 2.2667040988208873, | |
| "grad_norm": 0.14486611430866134, | |
| "learning_rate": 1.3632751011122497e-05, | |
| "loss": 0.3988, | |
| "step": 252 | |
| }, | |
| { | |
| "epoch": 2.2756878158338014, | |
| "grad_norm": 0.1438931082489217, | |
| "learning_rate": 1.3318168395614697e-05, | |
| "loss": 0.3857, | |
| "step": 253 | |
| }, | |
| { | |
| "epoch": 2.2846715328467155, | |
| "grad_norm": 0.1376572002883416, | |
| "learning_rate": 1.3006531347829699e-05, | |
| "loss": 0.4054, | |
| "step": 254 | |
| }, | |
| { | |
| "epoch": 2.293655249859629, | |
| "grad_norm": 0.13374618220430773, | |
| "learning_rate": 1.2697874271253844e-05, | |
| "loss": 0.389, | |
| "step": 255 | |
| }, | |
| { | |
| "epoch": 2.3026389668725433, | |
| "grad_norm": 0.1344986927289295, | |
| "learning_rate": 1.2392231240396542e-05, | |
| "loss": 0.398, | |
| "step": 256 | |
| }, | |
| { | |
| "epoch": 2.3116226838854574, | |
| "grad_norm": 0.13248310416193929, | |
| "learning_rate": 1.2089635997028592e-05, | |
| "loss": 0.3964, | |
| "step": 257 | |
| }, | |
| { | |
| "epoch": 2.3206064008983716, | |
| "grad_norm": 0.1250202978575819, | |
| "learning_rate": 1.1790121946457212e-05, | |
| "loss": 0.3827, | |
| "step": 258 | |
| }, | |
| { | |
| "epoch": 2.3295901179112857, | |
| "grad_norm": 0.1267954036736396, | |
| "learning_rate": 1.1493722153838239e-05, | |
| "loss": 0.403, | |
| "step": 259 | |
| }, | |
| { | |
| "epoch": 2.3385738349242, | |
| "grad_norm": 0.11963916536373062, | |
| "learning_rate": 1.120046934052585e-05, | |
| "loss": 0.3774, | |
| "step": 260 | |
| }, | |
| { | |
| "epoch": 2.347557551937114, | |
| "grad_norm": 0.11989536513409248, | |
| "learning_rate": 1.0910395880460274e-05, | |
| "loss": 0.3994, | |
| "step": 261 | |
| }, | |
| { | |
| "epoch": 2.356541268950028, | |
| "grad_norm": 0.13086834428014207, | |
| "learning_rate": 1.062353379659383e-05, | |
| "loss": 0.3959, | |
| "step": 262 | |
| }, | |
| { | |
| "epoch": 2.365524985962942, | |
| "grad_norm": 0.12262091225654496, | |
| "learning_rate": 1.0339914757355718e-05, | |
| "loss": 0.3909, | |
| "step": 263 | |
| }, | |
| { | |
| "epoch": 2.3745087029758563, | |
| "grad_norm": 0.13332341565208733, | |
| "learning_rate": 1.0059570073155953e-05, | |
| "loss": 0.3964, | |
| "step": 264 | |
| }, | |
| { | |
| "epoch": 2.3834924199887704, | |
| "grad_norm": 0.1390720043482773, | |
| "learning_rate": 9.782530692928832e-06, | |
| "loss": 0.3962, | |
| "step": 265 | |
| }, | |
| { | |
| "epoch": 2.3924761370016845, | |
| "grad_norm": 0.12593592732440334, | |
| "learning_rate": 9.508827200716273e-06, | |
| "loss": 0.3987, | |
| "step": 266 | |
| }, | |
| { | |
| "epoch": 2.4014598540145986, | |
| "grad_norm": 0.14058113905698136, | |
| "learning_rate": 9.238489812291469e-06, | |
| "loss": 0.3851, | |
| "step": 267 | |
| }, | |
| { | |
| "epoch": 2.4104435710275127, | |
| "grad_norm": 0.1293391140497839, | |
| "learning_rate": 8.971548371823205e-06, | |
| "loss": 0.3972, | |
| "step": 268 | |
| }, | |
| { | |
| "epoch": 2.419427288040427, | |
| "grad_norm": 0.1312457950118106, | |
| "learning_rate": 8.708032348581144e-06, | |
| "loss": 0.401, | |
| "step": 269 | |
| }, | |
| { | |
| "epoch": 2.428411005053341, | |
| "grad_norm": 0.13054369787139117, | |
| "learning_rate": 8.447970833682584e-06, | |
| "loss": 0.3801, | |
| "step": 270 | |
| }, | |
| { | |
| "epoch": 2.437394722066255, | |
| "grad_norm": 0.12662575874177268, | |
| "learning_rate": 8.191392536880852e-06, | |
| "loss": 0.3835, | |
| "step": 271 | |
| }, | |
| { | |
| "epoch": 2.446378439079169, | |
| "grad_norm": 0.14035374781938695, | |
| "learning_rate": 7.938325783395924e-06, | |
| "loss": 0.3924, | |
| "step": 272 | |
| }, | |
| { | |
| "epoch": 2.4553621560920833, | |
| "grad_norm": 0.11900240575463085, | |
| "learning_rate": 7.68879851078741e-06, | |
| "loss": 0.3839, | |
| "step": 273 | |
| }, | |
| { | |
| "epoch": 2.464345873104997, | |
| "grad_norm": 0.13981444396533035, | |
| "learning_rate": 7.442838265870347e-06, | |
| "loss": 0.4029, | |
| "step": 274 | |
| }, | |
| { | |
| "epoch": 2.473329590117911, | |
| "grad_norm": 0.13223802646070204, | |
| "learning_rate": 7.2004722016741605e-06, | |
| "loss": 0.3933, | |
| "step": 275 | |
| }, | |
| { | |
| "epoch": 2.482313307130825, | |
| "grad_norm": 0.1225885147923774, | |
| "learning_rate": 6.961727074445055e-06, | |
| "loss": 0.3984, | |
| "step": 276 | |
| }, | |
| { | |
| "epoch": 2.4912970241437393, | |
| "grad_norm": 0.11467365860762357, | |
| "learning_rate": 6.726629240692255e-06, | |
| "loss": 0.378, | |
| "step": 277 | |
| }, | |
| { | |
| "epoch": 2.5002807411566534, | |
| "grad_norm": 0.11696089178213367, | |
| "learning_rate": 6.4952046542783395e-06, | |
| "loss": 0.3794, | |
| "step": 278 | |
| }, | |
| { | |
| "epoch": 2.5092644581695676, | |
| "grad_norm": 0.12498140576448633, | |
| "learning_rate": 6.2674788635540415e-06, | |
| "loss": 0.3905, | |
| "step": 279 | |
| }, | |
| { | |
| "epoch": 2.5182481751824817, | |
| "grad_norm": 0.11214127166395674, | |
| "learning_rate": 6.04347700853781e-06, | |
| "loss": 0.4005, | |
| "step": 280 | |
| }, | |
| { | |
| "epoch": 2.527231892195396, | |
| "grad_norm": 0.10515201118000413, | |
| "learning_rate": 5.823223818140458e-06, | |
| "loss": 0.3826, | |
| "step": 281 | |
| }, | |
| { | |
| "epoch": 2.53621560920831, | |
| "grad_norm": 0.10659878155108474, | |
| "learning_rate": 5.606743607435183e-06, | |
| "loss": 0.3862, | |
| "step": 282 | |
| }, | |
| { | |
| "epoch": 2.545199326221224, | |
| "grad_norm": 0.12414903554961848, | |
| "learning_rate": 5.394060274973267e-06, | |
| "loss": 0.4022, | |
| "step": 283 | |
| }, | |
| { | |
| "epoch": 2.554183043234138, | |
| "grad_norm": 0.1086137186577079, | |
| "learning_rate": 5.185197300145817e-06, | |
| "loss": 0.3772, | |
| "step": 284 | |
| }, | |
| { | |
| "epoch": 2.5631667602470523, | |
| "grad_norm": 0.11392438691496676, | |
| "learning_rate": 4.980177740591678e-06, | |
| "loss": 0.4074, | |
| "step": 285 | |
| }, | |
| { | |
| "epoch": 2.5721504772599664, | |
| "grad_norm": 0.10614011676956907, | |
| "learning_rate": 4.779024229652005e-06, | |
| "loss": 0.3946, | |
| "step": 286 | |
| }, | |
| { | |
| "epoch": 2.5811341942728805, | |
| "grad_norm": 0.10589648874830734, | |
| "learning_rate": 4.581758973871609e-06, | |
| "loss": 0.3948, | |
| "step": 287 | |
| }, | |
| { | |
| "epoch": 2.5901179112857946, | |
| "grad_norm": 0.10117190574073207, | |
| "learning_rate": 4.3884037505474455e-06, | |
| "loss": 0.3834, | |
| "step": 288 | |
| }, | |
| { | |
| "epoch": 2.5991016282987087, | |
| "grad_norm": 0.10088190358164044, | |
| "learning_rate": 4.198979905324496e-06, | |
| "loss": 0.3839, | |
| "step": 289 | |
| }, | |
| { | |
| "epoch": 2.608085345311623, | |
| "grad_norm": 0.10998415397637723, | |
| "learning_rate": 4.0135083498392905e-06, | |
| "loss": 0.4005, | |
| "step": 290 | |
| }, | |
| { | |
| "epoch": 2.6170690623245365, | |
| "grad_norm": 0.10291443940485356, | |
| "learning_rate": 3.832009559411338e-06, | |
| "loss": 0.3946, | |
| "step": 291 | |
| }, | |
| { | |
| "epoch": 2.626052779337451, | |
| "grad_norm": 0.0987497217572695, | |
| "learning_rate": 3.654503570782755e-06, | |
| "loss": 0.3797, | |
| "step": 292 | |
| }, | |
| { | |
| "epoch": 2.6350364963503647, | |
| "grad_norm": 0.10073485737297722, | |
| "learning_rate": 3.481009979906258e-06, | |
| "loss": 0.3977, | |
| "step": 293 | |
| }, | |
| { | |
| "epoch": 2.6440202133632793, | |
| "grad_norm": 0.10086816317959164, | |
| "learning_rate": 3.311547939781887e-06, | |
| "loss": 0.3944, | |
| "step": 294 | |
| }, | |
| { | |
| "epoch": 2.653003930376193, | |
| "grad_norm": 0.09803317553764752, | |
| "learning_rate": 3.14613615834253e-06, | |
| "loss": 0.3851, | |
| "step": 295 | |
| }, | |
| { | |
| "epoch": 2.661987647389107, | |
| "grad_norm": 0.10547156290892046, | |
| "learning_rate": 2.9847928963887198e-06, | |
| "loss": 0.3955, | |
| "step": 296 | |
| }, | |
| { | |
| "epoch": 2.670971364402021, | |
| "grad_norm": 0.09842701976978478, | |
| "learning_rate": 2.8275359655726586e-06, | |
| "loss": 0.3918, | |
| "step": 297 | |
| }, | |
| { | |
| "epoch": 2.6799550814149353, | |
| "grad_norm": 0.10279243698030634, | |
| "learning_rate": 2.6743827264319012e-06, | |
| "loss": 0.3916, | |
| "step": 298 | |
| }, | |
| { | |
| "epoch": 2.6889387984278494, | |
| "grad_norm": 0.09391104128159572, | |
| "learning_rate": 2.5253500864728155e-06, | |
| "loss": 0.3846, | |
| "step": 299 | |
| }, | |
| { | |
| "epoch": 2.6979225154407636, | |
| "grad_norm": 0.0914654057659311, | |
| "learning_rate": 2.3804544983040724e-06, | |
| "loss": 0.3824, | |
| "step": 300 | |
| }, | |
| { | |
| "epoch": 2.7069062324536777, | |
| "grad_norm": 0.09870686168033262, | |
| "learning_rate": 2.23971195782033e-06, | |
| "loss": 0.3927, | |
| "step": 301 | |
| }, | |
| { | |
| "epoch": 2.715889949466592, | |
| "grad_norm": 0.1061384997031586, | |
| "learning_rate": 2.1031380024363645e-06, | |
| "loss": 0.3993, | |
| "step": 302 | |
| }, | |
| { | |
| "epoch": 2.724873666479506, | |
| "grad_norm": 0.10190369293256683, | |
| "learning_rate": 1.9707477093717786e-06, | |
| "loss": 0.386, | |
| "step": 303 | |
| }, | |
| { | |
| "epoch": 2.73385738349242, | |
| "grad_norm": 0.09359682387036863, | |
| "learning_rate": 1.8425556939865696e-06, | |
| "loss": 0.3816, | |
| "step": 304 | |
| }, | |
| { | |
| "epoch": 2.742841100505334, | |
| "grad_norm": 0.0951294949828999, | |
| "learning_rate": 1.7185761081676222e-06, | |
| "loss": 0.3914, | |
| "step": 305 | |
| }, | |
| { | |
| "epoch": 2.7518248175182483, | |
| "grad_norm": 0.09488726435556502, | |
| "learning_rate": 1.5988226387664151e-06, | |
| "loss": 0.3826, | |
| "step": 306 | |
| }, | |
| { | |
| "epoch": 2.7608085345311624, | |
| "grad_norm": 0.08946031352013306, | |
| "learning_rate": 1.4833085060880349e-06, | |
| "loss": 0.3933, | |
| "step": 307 | |
| }, | |
| { | |
| "epoch": 2.7697922515440765, | |
| "grad_norm": 0.09936760457984616, | |
| "learning_rate": 1.3720464624317108e-06, | |
| "loss": 0.3754, | |
| "step": 308 | |
| }, | |
| { | |
| "epoch": 2.7787759685569906, | |
| "grad_norm": 0.09605033066883667, | |
| "learning_rate": 1.2650487906830234e-06, | |
| "loss": 0.4084, | |
| "step": 309 | |
| }, | |
| { | |
| "epoch": 2.7877596855699043, | |
| "grad_norm": 0.099913289552659, | |
| "learning_rate": 1.1623273029579195e-06, | |
| "loss": 0.3757, | |
| "step": 310 | |
| }, | |
| { | |
| "epoch": 2.796743402582819, | |
| "grad_norm": 0.09596634894133811, | |
| "learning_rate": 1.063893339298674e-06, | |
| "loss": 0.3957, | |
| "step": 311 | |
| }, | |
| { | |
| "epoch": 2.8057271195957325, | |
| "grad_norm": 0.09539713481428638, | |
| "learning_rate": 9.697577664220303e-07, | |
| "loss": 0.3943, | |
| "step": 312 | |
| }, | |
| { | |
| "epoch": 2.814710836608647, | |
| "grad_norm": 0.09238341661295082, | |
| "learning_rate": 8.799309765195452e-07, | |
| "loss": 0.3865, | |
| "step": 313 | |
| }, | |
| { | |
| "epoch": 2.8236945536215607, | |
| "grad_norm": 0.09215370347773162, | |
| "learning_rate": 7.944228861103264e-07, | |
| "loss": 0.393, | |
| "step": 314 | |
| }, | |
| { | |
| "epoch": 2.832678270634475, | |
| "grad_norm": 0.09454052595038595, | |
| "learning_rate": 7.132429349463011e-07, | |
| "loss": 0.387, | |
| "step": 315 | |
| }, | |
| { | |
| "epoch": 2.841661987647389, | |
| "grad_norm": 0.09250173345082788, | |
| "learning_rate": 6.364000849700791e-07, | |
| "loss": 0.391, | |
| "step": 316 | |
| }, | |
| { | |
| "epoch": 2.850645704660303, | |
| "grad_norm": 0.08839044291030243, | |
| "learning_rate": 5.639028193256257e-07, | |
| "loss": 0.3974, | |
| "step": 317 | |
| }, | |
| { | |
| "epoch": 2.859629421673217, | |
| "grad_norm": 0.08678885222396349, | |
| "learning_rate": 4.957591414217344e-07, | |
| "loss": 0.3912, | |
| "step": 318 | |
| }, | |
| { | |
| "epoch": 2.8686131386861313, | |
| "grad_norm": 0.09688413018501994, | |
| "learning_rate": 4.3197657404848935e-07, | |
| "loss": 0.3871, | |
| "step": 319 | |
| }, | |
| { | |
| "epoch": 2.8775968556990454, | |
| "grad_norm": 0.09442855901495942, | |
| "learning_rate": 3.725621585467698e-07, | |
| "loss": 0.3955, | |
| "step": 320 | |
| }, | |
| { | |
| "epoch": 2.8865805727119596, | |
| "grad_norm": 0.08776447719254343, | |
| "learning_rate": 3.1752245403092963e-07, | |
| "loss": 0.3889, | |
| "step": 321 | |
| }, | |
| { | |
| "epoch": 2.8955642897248737, | |
| "grad_norm": 0.08591645042050655, | |
| "learning_rate": 2.6686353666468323e-07, | |
| "loss": 0.3876, | |
| "step": 322 | |
| }, | |
| { | |
| "epoch": 2.904548006737788, | |
| "grad_norm": 0.08956097003735121, | |
| "learning_rate": 2.2059099899033098e-07, | |
| "loss": 0.3928, | |
| "step": 323 | |
| }, | |
| { | |
| "epoch": 2.913531723750702, | |
| "grad_norm": 0.08874803315016298, | |
| "learning_rate": 1.7870994931135977e-07, | |
| "loss": 0.3856, | |
| "step": 324 | |
| }, | |
| { | |
| "epoch": 2.922515440763616, | |
| "grad_norm": 0.09325131199811001, | |
| "learning_rate": 1.412250111285074e-07, | |
| "loss": 0.3976, | |
| "step": 325 | |
| }, | |
| { | |
| "epoch": 2.93149915777653, | |
| "grad_norm": 0.08572072856798726, | |
| "learning_rate": 1.0814032262935315e-07, | |
| "loss": 0.3809, | |
| "step": 326 | |
| }, | |
| { | |
| "epoch": 2.9404828747894443, | |
| "grad_norm": 0.08771028382043292, | |
| "learning_rate": 7.945953623146096e-08, | |
| "loss": 0.3791, | |
| "step": 327 | |
| }, | |
| { | |
| "epoch": 2.9494665918023584, | |
| "grad_norm": 0.08835820422720063, | |
| "learning_rate": 5.518581817918645e-08, | |
| "loss": 0.3928, | |
| "step": 328 | |
| }, | |
| { | |
| "epoch": 2.958450308815272, | |
| "grad_norm": 0.08971780481475433, | |
| "learning_rate": 3.532184819412532e-08, | |
| "loss": 0.3875, | |
| "step": 329 | |
| }, | |
| { | |
| "epoch": 2.9674340258281866, | |
| "grad_norm": 0.09086168349256409, | |
| "learning_rate": 1.9869819179292315e-08, | |
| "loss": 0.3902, | |
| "step": 330 | |
| }, | |
| { | |
| "epoch": 2.9764177428411003, | |
| "grad_norm": 0.09110615713231646, | |
| "learning_rate": 8.83143697702149e-09, | |
| "loss": 0.3928, | |
| "step": 331 | |
| }, | |
| { | |
| "epoch": 2.985401459854015, | |
| "grad_norm": 0.08717148170316144, | |
| "learning_rate": 2.2079201806501916e-09, | |
| "loss": 0.3867, | |
| "step": 332 | |
| }, | |
| { | |
| "epoch": 2.9943851768669285, | |
| "grad_norm": 0.09194318654950019, | |
| "learning_rate": 0.0, | |
| "loss": 0.3902, | |
| "step": 333 | |
| }, | |
| { | |
| "epoch": 2.9943851768669285, | |
| "step": 333, | |
| "total_flos": 4607052149424128.0, | |
| "train_loss": 0.4566754535869793, | |
| "train_runtime": 115300.1106, | |
| "train_samples_per_second": 2.965, | |
| "train_steps_per_second": 0.003 | |
| } | |
| ], | |
| "logging_steps": 1.0, | |
| "max_steps": 333, | |
| "num_input_tokens_seen": 0, | |
| "num_train_epochs": 3, | |
| "save_steps": 500, | |
| "stateful_callbacks": { | |
| "TrainerControl": { | |
| "args": { | |
| "should_epoch_stop": false, | |
| "should_evaluate": false, | |
| "should_log": false, | |
| "should_save": true, | |
| "should_training_stop": true | |
| }, | |
| "attributes": {} | |
| } | |
| }, | |
| "total_flos": 4607052149424128.0, | |
| "train_batch_size": 1, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |
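
The state above is the kind of `trainer_state.json` file the Hugging Face Trainer writes into a checkpoint directory: top-level run metadata plus a `log_history` list of per-step records and one final summary record. As a minimal sketch of how to read it back, assuming the JSON has been saved verbatim to a file named `trainer_state.json` (the filename and the printed summary are illustrative choices, not part of the log itself; the field names are taken from the data above):

```python
import json

# Illustrative filename: assumes the state above was saved as trainer_state.json,
# the location where the Hugging Face Trainer normally writes it.
with open("trainer_state.json") as f:
    state = json.load(f)

# Per-step records live in log_history; the last entry is a run summary
# (train_loss, train_runtime, ...) rather than a step log, so filter on the
# fields that only the step records carry.
steps = [e for e in state["log_history"] if "loss" in e and "learning_rate" in e]

print(f"logged steps:  {len(steps)} of max_steps={state['max_steps']}")
print(f"first loss:    {steps[0]['loss']:.4f} at step {steps[0]['step']}")
print(f"final loss:    {steps[-1]['loss']:.4f} at step {steps[-1]['step']}")

# The summary entry, when present, carries the aggregate training statistics.
summary = state["log_history"][-1]
if "train_loss" in summary:
    print(f"mean train loss: {summary['train_loss']:.4f}")
    print(f"runtime (s):     {summary['train_runtime']:.1f}")
```

The filter is needed because the final `log_history` entry has a different shape from the step records (it reports `train_loss`, `train_runtime`, and throughput instead of a per-step `loss` and `learning_rate`), so any downstream plotting or analysis should treat it separately.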