| { | |
| "best_metric": null, | |
| "best_model_checkpoint": null, | |
| "epoch": 6.0, | |
| "eval_steps": 500, | |
| "global_step": 1326, | |
| "is_hyper_param_search": false, | |
| "is_local_process_zero": true, | |
| "is_world_process_zero": true, | |
| "log_history": [ | |
| { | |
| "epoch": 0.004524886877828055, | |
| "grad_norm": 3.2510592937469482, | |
| "learning_rate": 4.524886877828055e-08, | |
| "loss": 0.8401, | |
| "step": 1 | |
| }, | |
| { | |
| "epoch": 0.00904977375565611, | |
| "grad_norm": 2.890446424484253, | |
| "learning_rate": 9.04977375565611e-08, | |
| "loss": 0.7699, | |
| "step": 2 | |
| }, | |
| { | |
| "epoch": 0.013574660633484163, | |
| "grad_norm": 3.4535956382751465, | |
| "learning_rate": 1.3574660633484163e-07, | |
| "loss": 0.8325, | |
| "step": 3 | |
| }, | |
| { | |
| "epoch": 0.01809954751131222, | |
| "grad_norm": 2.992128610610962, | |
| "learning_rate": 1.809954751131222e-07, | |
| "loss": 0.7607, | |
| "step": 4 | |
| }, | |
| { | |
| "epoch": 0.02262443438914027, | |
| "grad_norm": 2.929943561553955, | |
| "learning_rate": 2.2624434389140273e-07, | |
| "loss": 0.7356, | |
| "step": 5 | |
| }, | |
| { | |
| "epoch": 0.027149321266968326, | |
| "grad_norm": 3.091803550720215, | |
| "learning_rate": 2.7149321266968326e-07, | |
| "loss": 0.7924, | |
| "step": 6 | |
| }, | |
| { | |
| "epoch": 0.03167420814479638, | |
| "grad_norm": 2.760162591934204, | |
| "learning_rate": 3.167420814479638e-07, | |
| "loss": 0.7152, | |
| "step": 7 | |
| }, | |
| { | |
| "epoch": 0.03619909502262444, | |
| "grad_norm": 2.924128770828247, | |
| "learning_rate": 3.619909502262444e-07, | |
| "loss": 0.7361, | |
| "step": 8 | |
| }, | |
| { | |
| "epoch": 0.04072398190045249, | |
| "grad_norm": 3.2693638801574707, | |
| "learning_rate": 4.072398190045249e-07, | |
| "loss": 0.8098, | |
| "step": 9 | |
| }, | |
| { | |
| "epoch": 0.04524886877828054, | |
| "grad_norm": 2.807117462158203, | |
| "learning_rate": 4.5248868778280546e-07, | |
| "loss": 0.7414, | |
| "step": 10 | |
| }, | |
| { | |
| "epoch": 0.049773755656108594, | |
| "grad_norm": 2.837460994720459, | |
| "learning_rate": 4.977375565610859e-07, | |
| "loss": 0.7736, | |
| "step": 11 | |
| }, | |
| { | |
| "epoch": 0.05429864253393665, | |
| "grad_norm": 2.843400239944458, | |
| "learning_rate": 5.429864253393665e-07, | |
| "loss": 0.7254, | |
| "step": 12 | |
| }, | |
| { | |
| "epoch": 0.058823529411764705, | |
| "grad_norm": 2.7627649307250977, | |
| "learning_rate": 5.882352941176471e-07, | |
| "loss": 0.7314, | |
| "step": 13 | |
| }, | |
| { | |
| "epoch": 0.06334841628959276, | |
| "grad_norm": 2.5128955841064453, | |
| "learning_rate": 6.334841628959276e-07, | |
| "loss": 0.6816, | |
| "step": 14 | |
| }, | |
| { | |
| "epoch": 0.06787330316742081, | |
| "grad_norm": 2.2178218364715576, | |
| "learning_rate": 6.787330316742082e-07, | |
| "loss": 0.687, | |
| "step": 15 | |
| }, | |
| { | |
| "epoch": 0.07239819004524888, | |
| "grad_norm": 2.223222017288208, | |
| "learning_rate": 7.239819004524888e-07, | |
| "loss": 0.6834, | |
| "step": 16 | |
| }, | |
| { | |
| "epoch": 0.07692307692307693, | |
| "grad_norm": 2.177546501159668, | |
| "learning_rate": 7.692307692307694e-07, | |
| "loss": 0.7176, | |
| "step": 17 | |
| }, | |
| { | |
| "epoch": 0.08144796380090498, | |
| "grad_norm": 2.0788660049438477, | |
| "learning_rate": 8.144796380090498e-07, | |
| "loss": 0.6928, | |
| "step": 18 | |
| }, | |
| { | |
| "epoch": 0.08597285067873303, | |
| "grad_norm": 2.013267993927002, | |
| "learning_rate": 8.597285067873304e-07, | |
| "loss": 0.7033, | |
| "step": 19 | |
| }, | |
| { | |
| "epoch": 0.09049773755656108, | |
| "grad_norm": 1.640211820602417, | |
| "learning_rate": 9.049773755656109e-07, | |
| "loss": 0.7006, | |
| "step": 20 | |
| }, | |
| { | |
| "epoch": 0.09502262443438914, | |
| "grad_norm": 1.2833340167999268, | |
| "learning_rate": 9.502262443438914e-07, | |
| "loss": 0.6464, | |
| "step": 21 | |
| }, | |
| { | |
| "epoch": 0.09954751131221719, | |
| "grad_norm": 1.2619421482086182, | |
| "learning_rate": 9.954751131221719e-07, | |
| "loss": 0.6643, | |
| "step": 22 | |
| }, | |
| { | |
| "epoch": 0.10407239819004525, | |
| "grad_norm": 1.2324565649032593, | |
| "learning_rate": 1.0407239819004527e-06, | |
| "loss": 0.6724, | |
| "step": 23 | |
| }, | |
| { | |
| "epoch": 0.1085972850678733, | |
| "grad_norm": 1.1131088733673096, | |
| "learning_rate": 1.085972850678733e-06, | |
| "loss": 0.6059, | |
| "step": 24 | |
| }, | |
| { | |
| "epoch": 0.11312217194570136, | |
| "grad_norm": 1.1473792791366577, | |
| "learning_rate": 1.1312217194570136e-06, | |
| "loss": 0.6494, | |
| "step": 25 | |
| }, | |
| { | |
| "epoch": 0.11764705882352941, | |
| "grad_norm": 1.1130750179290771, | |
| "learning_rate": 1.1764705882352942e-06, | |
| "loss": 0.6693, | |
| "step": 26 | |
| }, | |
| { | |
| "epoch": 0.12217194570135746, | |
| "grad_norm": 1.1199308633804321, | |
| "learning_rate": 1.2217194570135748e-06, | |
| "loss": 0.7353, | |
| "step": 27 | |
| }, | |
| { | |
| "epoch": 0.12669683257918551, | |
| "grad_norm": 1.094309687614441, | |
| "learning_rate": 1.2669683257918552e-06, | |
| "loss": 0.6729, | |
| "step": 28 | |
| }, | |
| { | |
| "epoch": 0.13122171945701358, | |
| "grad_norm": 1.2093147039413452, | |
| "learning_rate": 1.312217194570136e-06, | |
| "loss": 0.6549, | |
| "step": 29 | |
| }, | |
| { | |
| "epoch": 0.13574660633484162, | |
| "grad_norm": 1.2095019817352295, | |
| "learning_rate": 1.3574660633484164e-06, | |
| "loss": 0.6317, | |
| "step": 30 | |
| }, | |
| { | |
| "epoch": 0.14027149321266968, | |
| "grad_norm": 1.43827223777771, | |
| "learning_rate": 1.402714932126697e-06, | |
| "loss": 0.6496, | |
| "step": 31 | |
| }, | |
| { | |
| "epoch": 0.14479638009049775, | |
| "grad_norm": 1.4549624919891357, | |
| "learning_rate": 1.4479638009049775e-06, | |
| "loss": 0.6697, | |
| "step": 32 | |
| }, | |
| { | |
| "epoch": 0.1493212669683258, | |
| "grad_norm": 1.165290117263794, | |
| "learning_rate": 1.493212669683258e-06, | |
| "loss": 0.6249, | |
| "step": 33 | |
| }, | |
| { | |
| "epoch": 0.15384615384615385, | |
| "grad_norm": 1.257883906364441, | |
| "learning_rate": 1.5384615384615387e-06, | |
| "loss": 0.6564, | |
| "step": 34 | |
| }, | |
| { | |
| "epoch": 0.1583710407239819, | |
| "grad_norm": 1.027738094329834, | |
| "learning_rate": 1.583710407239819e-06, | |
| "loss": 0.5368, | |
| "step": 35 | |
| }, | |
| { | |
| "epoch": 0.16289592760180996, | |
| "grad_norm": 0.9741559624671936, | |
| "learning_rate": 1.6289592760180997e-06, | |
| "loss": 0.6242, | |
| "step": 36 | |
| }, | |
| { | |
| "epoch": 0.167420814479638, | |
| "grad_norm": 0.8538191318511963, | |
| "learning_rate": 1.67420814479638e-06, | |
| "loss": 0.6142, | |
| "step": 37 | |
| }, | |
| { | |
| "epoch": 0.17194570135746606, | |
| "grad_norm": 0.6769871115684509, | |
| "learning_rate": 1.7194570135746609e-06, | |
| "loss": 0.5496, | |
| "step": 38 | |
| }, | |
| { | |
| "epoch": 0.17647058823529413, | |
| "grad_norm": 0.6637622117996216, | |
| "learning_rate": 1.7647058823529414e-06, | |
| "loss": 0.5751, | |
| "step": 39 | |
| }, | |
| { | |
| "epoch": 0.18099547511312217, | |
| "grad_norm": 0.6483653783798218, | |
| "learning_rate": 1.8099547511312218e-06, | |
| "loss": 0.6127, | |
| "step": 40 | |
| }, | |
| { | |
| "epoch": 0.18552036199095023, | |
| "grad_norm": 0.7997961044311523, | |
| "learning_rate": 1.8552036199095024e-06, | |
| "loss": 0.576, | |
| "step": 41 | |
| }, | |
| { | |
| "epoch": 0.19004524886877827, | |
| "grad_norm": 0.7297148704528809, | |
| "learning_rate": 1.9004524886877828e-06, | |
| "loss": 0.6254, | |
| "step": 42 | |
| }, | |
| { | |
| "epoch": 0.19457013574660634, | |
| "grad_norm": 0.6087789535522461, | |
| "learning_rate": 1.9457013574660634e-06, | |
| "loss": 0.5344, | |
| "step": 43 | |
| }, | |
| { | |
| "epoch": 0.19909502262443438, | |
| "grad_norm": 0.7219811677932739, | |
| "learning_rate": 1.9909502262443437e-06, | |
| "loss": 0.5776, | |
| "step": 44 | |
| }, | |
| { | |
| "epoch": 0.20361990950226244, | |
| "grad_norm": 0.7508623003959656, | |
| "learning_rate": 2.0361990950226245e-06, | |
| "loss": 0.5435, | |
| "step": 45 | |
| }, | |
| { | |
| "epoch": 0.2081447963800905, | |
| "grad_norm": 0.6041704416275024, | |
| "learning_rate": 2.0814479638009053e-06, | |
| "loss": 0.5094, | |
| "step": 46 | |
| }, | |
| { | |
| "epoch": 0.21266968325791855, | |
| "grad_norm": 0.6673488020896912, | |
| "learning_rate": 2.1266968325791857e-06, | |
| "loss": 0.556, | |
| "step": 47 | |
| }, | |
| { | |
| "epoch": 0.2171945701357466, | |
| "grad_norm": 0.6235842704772949, | |
| "learning_rate": 2.171945701357466e-06, | |
| "loss": 0.5766, | |
| "step": 48 | |
| }, | |
| { | |
| "epoch": 0.22171945701357465, | |
| "grad_norm": 0.5541844367980957, | |
| "learning_rate": 2.2171945701357465e-06, | |
| "loss": 0.5422, | |
| "step": 49 | |
| }, | |
| { | |
| "epoch": 0.22624434389140272, | |
| "grad_norm": 0.6511553525924683, | |
| "learning_rate": 2.2624434389140273e-06, | |
| "loss": 0.6076, | |
| "step": 50 | |
| }, | |
| { | |
| "epoch": 0.23076923076923078, | |
| "grad_norm": 0.5737389922142029, | |
| "learning_rate": 2.307692307692308e-06, | |
| "loss": 0.5804, | |
| "step": 51 | |
| }, | |
| { | |
| "epoch": 0.23529411764705882, | |
| "grad_norm": 0.45331427454948425, | |
| "learning_rate": 2.3529411764705885e-06, | |
| "loss": 0.4903, | |
| "step": 52 | |
| }, | |
| { | |
| "epoch": 0.2398190045248869, | |
| "grad_norm": 0.5451053977012634, | |
| "learning_rate": 2.3981900452488693e-06, | |
| "loss": 0.5716, | |
| "step": 53 | |
| }, | |
| { | |
| "epoch": 0.24434389140271492, | |
| "grad_norm": 0.5204625725746155, | |
| "learning_rate": 2.4434389140271496e-06, | |
| "loss": 0.5129, | |
| "step": 54 | |
| }, | |
| { | |
| "epoch": 0.248868778280543, | |
| "grad_norm": 0.4971032440662384, | |
| "learning_rate": 2.48868778280543e-06, | |
| "loss": 0.5389, | |
| "step": 55 | |
| }, | |
| { | |
| "epoch": 0.25339366515837103, | |
| "grad_norm": 0.5272262096405029, | |
| "learning_rate": 2.5339366515837104e-06, | |
| "loss": 0.5235, | |
| "step": 56 | |
| }, | |
| { | |
| "epoch": 0.2579185520361991, | |
| "grad_norm": 0.5696743130683899, | |
| "learning_rate": 2.5791855203619916e-06, | |
| "loss": 0.508, | |
| "step": 57 | |
| }, | |
| { | |
| "epoch": 0.26244343891402716, | |
| "grad_norm": 0.5246968269348145, | |
| "learning_rate": 2.624434389140272e-06, | |
| "loss": 0.5625, | |
| "step": 58 | |
| }, | |
| { | |
| "epoch": 0.2669683257918552, | |
| "grad_norm": 0.5081693530082703, | |
| "learning_rate": 2.6696832579185524e-06, | |
| "loss": 0.501, | |
| "step": 59 | |
| }, | |
| { | |
| "epoch": 0.27149321266968324, | |
| "grad_norm": 0.48727846145629883, | |
| "learning_rate": 2.7149321266968327e-06, | |
| "loss": 0.5259, | |
| "step": 60 | |
| }, | |
| { | |
| "epoch": 0.27601809954751133, | |
| "grad_norm": 0.4420638084411621, | |
| "learning_rate": 2.7601809954751135e-06, | |
| "loss": 0.5165, | |
| "step": 61 | |
| }, | |
| { | |
| "epoch": 0.28054298642533937, | |
| "grad_norm": 0.4388761818408966, | |
| "learning_rate": 2.805429864253394e-06, | |
| "loss": 0.4902, | |
| "step": 62 | |
| }, | |
| { | |
| "epoch": 0.2850678733031674, | |
| "grad_norm": 0.42472442984580994, | |
| "learning_rate": 2.8506787330316743e-06, | |
| "loss": 0.4561, | |
| "step": 63 | |
| }, | |
| { | |
| "epoch": 0.2895927601809955, | |
| "grad_norm": 0.4837457239627838, | |
| "learning_rate": 2.895927601809955e-06, | |
| "loss": 0.5295, | |
| "step": 64 | |
| }, | |
| { | |
| "epoch": 0.29411764705882354, | |
| "grad_norm": 0.4987254738807678, | |
| "learning_rate": 2.9411764705882355e-06, | |
| "loss": 0.515, | |
| "step": 65 | |
| }, | |
| { | |
| "epoch": 0.2986425339366516, | |
| "grad_norm": 0.4454256594181061, | |
| "learning_rate": 2.986425339366516e-06, | |
| "loss": 0.5238, | |
| "step": 66 | |
| }, | |
| { | |
| "epoch": 0.3031674208144796, | |
| "grad_norm": 0.5037821531295776, | |
| "learning_rate": 3.0316742081447962e-06, | |
| "loss": 0.5527, | |
| "step": 67 | |
| }, | |
| { | |
| "epoch": 0.3076923076923077, | |
| "grad_norm": 0.4538140296936035, | |
| "learning_rate": 3.0769230769230774e-06, | |
| "loss": 0.4905, | |
| "step": 68 | |
| }, | |
| { | |
| "epoch": 0.31221719457013575, | |
| "grad_norm": 0.5122717022895813, | |
| "learning_rate": 3.122171945701358e-06, | |
| "loss": 0.5493, | |
| "step": 69 | |
| }, | |
| { | |
| "epoch": 0.3167420814479638, | |
| "grad_norm": 0.44596371054649353, | |
| "learning_rate": 3.167420814479638e-06, | |
| "loss": 0.5484, | |
| "step": 70 | |
| }, | |
| { | |
| "epoch": 0.3212669683257919, | |
| "grad_norm": 0.4473898410797119, | |
| "learning_rate": 3.212669683257919e-06, | |
| "loss": 0.5218, | |
| "step": 71 | |
| }, | |
| { | |
| "epoch": 0.3257918552036199, | |
| "grad_norm": 0.3825899064540863, | |
| "learning_rate": 3.2579185520361994e-06, | |
| "loss": 0.4883, | |
| "step": 72 | |
| }, | |
| { | |
| "epoch": 0.33031674208144796, | |
| "grad_norm": 0.40941205620765686, | |
| "learning_rate": 3.3031674208144797e-06, | |
| "loss": 0.4127, | |
| "step": 73 | |
| }, | |
| { | |
| "epoch": 0.334841628959276, | |
| "grad_norm": 0.42915788292884827, | |
| "learning_rate": 3.34841628959276e-06, | |
| "loss": 0.5194, | |
| "step": 74 | |
| }, | |
| { | |
| "epoch": 0.3393665158371041, | |
| "grad_norm": 0.44509854912757874, | |
| "learning_rate": 3.3936651583710413e-06, | |
| "loss": 0.4664, | |
| "step": 75 | |
| }, | |
| { | |
| "epoch": 0.3438914027149321, | |
| "grad_norm": 0.41393935680389404, | |
| "learning_rate": 3.4389140271493217e-06, | |
| "loss": 0.5213, | |
| "step": 76 | |
| }, | |
| { | |
| "epoch": 0.34841628959276016, | |
| "grad_norm": 0.4840753674507141, | |
| "learning_rate": 3.484162895927602e-06, | |
| "loss": 0.5456, | |
| "step": 77 | |
| }, | |
| { | |
| "epoch": 0.35294117647058826, | |
| "grad_norm": 0.39024800062179565, | |
| "learning_rate": 3.529411764705883e-06, | |
| "loss": 0.446, | |
| "step": 78 | |
| }, | |
| { | |
| "epoch": 0.3574660633484163, | |
| "grad_norm": 0.4665883481502533, | |
| "learning_rate": 3.5746606334841633e-06, | |
| "loss": 0.504, | |
| "step": 79 | |
| }, | |
| { | |
| "epoch": 0.36199095022624433, | |
| "grad_norm": 0.38244664669036865, | |
| "learning_rate": 3.6199095022624436e-06, | |
| "loss": 0.4656, | |
| "step": 80 | |
| }, | |
| { | |
| "epoch": 0.3665158371040724, | |
| "grad_norm": 0.39441755414009094, | |
| "learning_rate": 3.665158371040724e-06, | |
| "loss": 0.4861, | |
| "step": 81 | |
| }, | |
| { | |
| "epoch": 0.37104072398190047, | |
| "grad_norm": 0.41697946190834045, | |
| "learning_rate": 3.710407239819005e-06, | |
| "loss": 0.4678, | |
| "step": 82 | |
| }, | |
| { | |
| "epoch": 0.3755656108597285, | |
| "grad_norm": 0.4145350456237793, | |
| "learning_rate": 3.755656108597285e-06, | |
| "loss": 0.4501, | |
| "step": 83 | |
| }, | |
| { | |
| "epoch": 0.38009049773755654, | |
| "grad_norm": 0.42326846718788147, | |
| "learning_rate": 3.8009049773755656e-06, | |
| "loss": 0.4617, | |
| "step": 84 | |
| }, | |
| { | |
| "epoch": 0.38461538461538464, | |
| "grad_norm": 0.44864463806152344, | |
| "learning_rate": 3.846153846153847e-06, | |
| "loss": 0.5503, | |
| "step": 85 | |
| }, | |
| { | |
| "epoch": 0.3891402714932127, | |
| "grad_norm": 0.4511263072490692, | |
| "learning_rate": 3.891402714932127e-06, | |
| "loss": 0.4581, | |
| "step": 86 | |
| }, | |
| { | |
| "epoch": 0.3936651583710407, | |
| "grad_norm": 0.5121440291404724, | |
| "learning_rate": 3.9366515837104075e-06, | |
| "loss": 0.4622, | |
| "step": 87 | |
| }, | |
| { | |
| "epoch": 0.39819004524886875, | |
| "grad_norm": 0.45248016715049744, | |
| "learning_rate": 3.9819004524886875e-06, | |
| "loss": 0.5035, | |
| "step": 88 | |
| }, | |
| { | |
| "epoch": 0.40271493212669685, | |
| "grad_norm": 0.4109788239002228, | |
| "learning_rate": 4.027149321266969e-06, | |
| "loss": 0.4743, | |
| "step": 89 | |
| }, | |
| { | |
| "epoch": 0.4072398190045249, | |
| "grad_norm": 0.4233633577823639, | |
| "learning_rate": 4.072398190045249e-06, | |
| "loss": 0.4829, | |
| "step": 90 | |
| }, | |
| { | |
| "epoch": 0.4117647058823529, | |
| "grad_norm": 0.47834792733192444, | |
| "learning_rate": 4.11764705882353e-06, | |
| "loss": 0.5588, | |
| "step": 91 | |
| }, | |
| { | |
| "epoch": 0.416289592760181, | |
| "grad_norm": 0.4849421977996826, | |
| "learning_rate": 4.162895927601811e-06, | |
| "loss": 0.4738, | |
| "step": 92 | |
| }, | |
| { | |
| "epoch": 0.42081447963800905, | |
| "grad_norm": 0.4980224370956421, | |
| "learning_rate": 4.208144796380091e-06, | |
| "loss": 0.5288, | |
| "step": 93 | |
| }, | |
| { | |
| "epoch": 0.4253393665158371, | |
| "grad_norm": 0.42526569962501526, | |
| "learning_rate": 4.2533936651583714e-06, | |
| "loss": 0.4128, | |
| "step": 94 | |
| }, | |
| { | |
| "epoch": 0.4298642533936652, | |
| "grad_norm": 0.483390748500824, | |
| "learning_rate": 4.298642533936652e-06, | |
| "loss": 0.4445, | |
| "step": 95 | |
| }, | |
| { | |
| "epoch": 0.4343891402714932, | |
| "grad_norm": 0.4766150116920471, | |
| "learning_rate": 4.343891402714932e-06, | |
| "loss": 0.5001, | |
| "step": 96 | |
| }, | |
| { | |
| "epoch": 0.43891402714932126, | |
| "grad_norm": 0.5314707159996033, | |
| "learning_rate": 4.389140271493213e-06, | |
| "loss": 0.4891, | |
| "step": 97 | |
| }, | |
| { | |
| "epoch": 0.4434389140271493, | |
| "grad_norm": 0.44213107228279114, | |
| "learning_rate": 4.434389140271493e-06, | |
| "loss": 0.5129, | |
| "step": 98 | |
| }, | |
| { | |
| "epoch": 0.4479638009049774, | |
| "grad_norm": 0.43551620841026306, | |
| "learning_rate": 4.479638009049775e-06, | |
| "loss": 0.4557, | |
| "step": 99 | |
| }, | |
| { | |
| "epoch": 0.45248868778280543, | |
| "grad_norm": 0.4333869516849518, | |
| "learning_rate": 4.5248868778280546e-06, | |
| "loss": 0.4746, | |
| "step": 100 | |
| }, | |
| { | |
| "epoch": 0.45701357466063347, | |
| "grad_norm": 0.42583024501800537, | |
| "learning_rate": 4.570135746606335e-06, | |
| "loss": 0.45, | |
| "step": 101 | |
| }, | |
| { | |
| "epoch": 0.46153846153846156, | |
| "grad_norm": 0.48476096987724304, | |
| "learning_rate": 4.615384615384616e-06, | |
| "loss": 0.5087, | |
| "step": 102 | |
| }, | |
| { | |
| "epoch": 0.4660633484162896, | |
| "grad_norm": 0.4503658413887024, | |
| "learning_rate": 4.660633484162896e-06, | |
| "loss": 0.4574, | |
| "step": 103 | |
| }, | |
| { | |
| "epoch": 0.47058823529411764, | |
| "grad_norm": 0.43352094292640686, | |
| "learning_rate": 4.705882352941177e-06, | |
| "loss": 0.4494, | |
| "step": 104 | |
| }, | |
| { | |
| "epoch": 0.4751131221719457, | |
| "grad_norm": 0.4933190643787384, | |
| "learning_rate": 4.751131221719457e-06, | |
| "loss": 0.4441, | |
| "step": 105 | |
| }, | |
| { | |
| "epoch": 0.4796380090497738, | |
| "grad_norm": 0.4639478325843811, | |
| "learning_rate": 4.7963800904977385e-06, | |
| "loss": 0.4721, | |
| "step": 106 | |
| }, | |
| { | |
| "epoch": 0.4841628959276018, | |
| "grad_norm": 0.4035486876964569, | |
| "learning_rate": 4.8416289592760185e-06, | |
| "loss": 0.4574, | |
| "step": 107 | |
| }, | |
| { | |
| "epoch": 0.48868778280542985, | |
| "grad_norm": 0.47403860092163086, | |
| "learning_rate": 4.886877828054299e-06, | |
| "loss": 0.4516, | |
| "step": 108 | |
| }, | |
| { | |
| "epoch": 0.49321266968325794, | |
| "grad_norm": 0.48216933012008667, | |
| "learning_rate": 4.93212669683258e-06, | |
| "loss": 0.4521, | |
| "step": 109 | |
| }, | |
| { | |
| "epoch": 0.497737556561086, | |
| "grad_norm": 0.43759751319885254, | |
| "learning_rate": 4.97737556561086e-06, | |
| "loss": 0.4409, | |
| "step": 110 | |
| }, | |
| { | |
| "epoch": 0.502262443438914, | |
| "grad_norm": 0.4080127775669098, | |
| "learning_rate": 5.022624434389141e-06, | |
| "loss": 0.4762, | |
| "step": 111 | |
| }, | |
| { | |
| "epoch": 0.5067873303167421, | |
| "grad_norm": 0.5794436931610107, | |
| "learning_rate": 5.067873303167421e-06, | |
| "loss": 0.4954, | |
| "step": 112 | |
| }, | |
| { | |
| "epoch": 0.5113122171945701, | |
| "grad_norm": 0.5085976123809814, | |
| "learning_rate": 5.1131221719457016e-06, | |
| "loss": 0.4635, | |
| "step": 113 | |
| }, | |
| { | |
| "epoch": 0.5158371040723982, | |
| "grad_norm": 0.41183438897132874, | |
| "learning_rate": 5.158371040723983e-06, | |
| "loss": 0.46, | |
| "step": 114 | |
| }, | |
| { | |
| "epoch": 0.5203619909502263, | |
| "grad_norm": 0.4061497747898102, | |
| "learning_rate": 5.203619909502263e-06, | |
| "loss": 0.4559, | |
| "step": 115 | |
| }, | |
| { | |
| "epoch": 0.5248868778280543, | |
| "grad_norm": 0.4753948748111725, | |
| "learning_rate": 5.248868778280544e-06, | |
| "loss": 0.4398, | |
| "step": 116 | |
| }, | |
| { | |
| "epoch": 0.5294117647058824, | |
| "grad_norm": 0.47879111766815186, | |
| "learning_rate": 5.294117647058824e-06, | |
| "loss": 0.4709, | |
| "step": 117 | |
| }, | |
| { | |
| "epoch": 0.5339366515837104, | |
| "grad_norm": 0.4345899820327759, | |
| "learning_rate": 5.339366515837105e-06, | |
| "loss": 0.4576, | |
| "step": 118 | |
| }, | |
| { | |
| "epoch": 0.5384615384615384, | |
| "grad_norm": 0.4419268071651459, | |
| "learning_rate": 5.384615384615385e-06, | |
| "loss": 0.4489, | |
| "step": 119 | |
| }, | |
| { | |
| "epoch": 0.5429864253393665, | |
| "grad_norm": 0.46015602350234985, | |
| "learning_rate": 5.4298642533936655e-06, | |
| "loss": 0.4463, | |
| "step": 120 | |
| }, | |
| { | |
| "epoch": 0.5475113122171946, | |
| "grad_norm": 0.4941313862800598, | |
| "learning_rate": 5.475113122171946e-06, | |
| "loss": 0.5041, | |
| "step": 121 | |
| }, | |
| { | |
| "epoch": 0.5520361990950227, | |
| "grad_norm": 0.4165358245372772, | |
| "learning_rate": 5.520361990950227e-06, | |
| "loss": 0.4749, | |
| "step": 122 | |
| }, | |
| { | |
| "epoch": 0.5565610859728507, | |
| "grad_norm": 0.4693758189678192, | |
| "learning_rate": 5.565610859728508e-06, | |
| "loss": 0.4536, | |
| "step": 123 | |
| }, | |
| { | |
| "epoch": 0.5610859728506787, | |
| "grad_norm": 0.4350532293319702, | |
| "learning_rate": 5.610859728506788e-06, | |
| "loss": 0.5089, | |
| "step": 124 | |
| }, | |
| { | |
| "epoch": 0.5656108597285068, | |
| "grad_norm": 0.45343929529190063, | |
| "learning_rate": 5.656108597285069e-06, | |
| "loss": 0.4014, | |
| "step": 125 | |
| }, | |
| { | |
| "epoch": 0.5701357466063348, | |
| "grad_norm": 0.507248044013977, | |
| "learning_rate": 5.7013574660633486e-06, | |
| "loss": 0.4932, | |
| "step": 126 | |
| }, | |
| { | |
| "epoch": 0.5746606334841629, | |
| "grad_norm": 0.48399487137794495, | |
| "learning_rate": 5.746606334841629e-06, | |
| "loss": 0.5221, | |
| "step": 127 | |
| }, | |
| { | |
| "epoch": 0.579185520361991, | |
| "grad_norm": 0.39801326394081116, | |
| "learning_rate": 5.79185520361991e-06, | |
| "loss": 0.4325, | |
| "step": 128 | |
| }, | |
| { | |
| "epoch": 0.583710407239819, | |
| "grad_norm": 0.4183308780193329, | |
| "learning_rate": 5.837104072398191e-06, | |
| "loss": 0.4293, | |
| "step": 129 | |
| }, | |
| { | |
| "epoch": 0.5882352941176471, | |
| "grad_norm": 0.4360859990119934, | |
| "learning_rate": 5.882352941176471e-06, | |
| "loss": 0.4135, | |
| "step": 130 | |
| }, | |
| { | |
| "epoch": 0.5927601809954751, | |
| "grad_norm": 0.4908457398414612, | |
| "learning_rate": 5.927601809954752e-06, | |
| "loss": 0.4765, | |
| "step": 131 | |
| }, | |
| { | |
| "epoch": 0.5972850678733032, | |
| "grad_norm": 0.4215766489505768, | |
| "learning_rate": 5.972850678733032e-06, | |
| "loss": 0.4622, | |
| "step": 132 | |
| }, | |
| { | |
| "epoch": 0.6018099547511312, | |
| "grad_norm": 0.4052114188671112, | |
| "learning_rate": 6.0180995475113125e-06, | |
| "loss": 0.4317, | |
| "step": 133 | |
| }, | |
| { | |
| "epoch": 0.6063348416289592, | |
| "grad_norm": 0.46360260248184204, | |
| "learning_rate": 6.0633484162895924e-06, | |
| "loss": 0.4827, | |
| "step": 134 | |
| }, | |
| { | |
| "epoch": 0.6108597285067874, | |
| "grad_norm": 0.40575292706489563, | |
| "learning_rate": 6.108597285067874e-06, | |
| "loss": 0.4281, | |
| "step": 135 | |
| }, | |
| { | |
| "epoch": 0.6153846153846154, | |
| "grad_norm": 0.5005738139152527, | |
| "learning_rate": 6.153846153846155e-06, | |
| "loss": 0.5005, | |
| "step": 136 | |
| }, | |
| { | |
| "epoch": 0.6199095022624435, | |
| "grad_norm": 0.451043963432312, | |
| "learning_rate": 6.199095022624435e-06, | |
| "loss": 0.4006, | |
| "step": 137 | |
| }, | |
| { | |
| "epoch": 0.6244343891402715, | |
| "grad_norm": 0.4629691541194916, | |
| "learning_rate": 6.244343891402716e-06, | |
| "loss": 0.4941, | |
| "step": 138 | |
| }, | |
| { | |
| "epoch": 0.6289592760180995, | |
| "grad_norm": 0.4519931375980377, | |
| "learning_rate": 6.2895927601809956e-06, | |
| "loss": 0.4438, | |
| "step": 139 | |
| }, | |
| { | |
| "epoch": 0.6334841628959276, | |
| "grad_norm": 0.4885016679763794, | |
| "learning_rate": 6.334841628959276e-06, | |
| "loss": 0.4642, | |
| "step": 140 | |
| }, | |
| { | |
| "epoch": 0.6380090497737556, | |
| "grad_norm": 0.4142250418663025, | |
| "learning_rate": 6.380090497737556e-06, | |
| "loss": 0.4164, | |
| "step": 141 | |
| }, | |
| { | |
| "epoch": 0.6425339366515838, | |
| "grad_norm": 0.4803358316421509, | |
| "learning_rate": 6.425339366515838e-06, | |
| "loss": 0.4564, | |
| "step": 142 | |
| }, | |
| { | |
| "epoch": 0.6470588235294118, | |
| "grad_norm": 0.47766202688217163, | |
| "learning_rate": 6.470588235294119e-06, | |
| "loss": 0.4106, | |
| "step": 143 | |
| }, | |
| { | |
| "epoch": 0.6515837104072398, | |
| "grad_norm": 0.449381947517395, | |
| "learning_rate": 6.515837104072399e-06, | |
| "loss": 0.4301, | |
| "step": 144 | |
| }, | |
| { | |
| "epoch": 0.6561085972850679, | |
| "grad_norm": 0.3938814103603363, | |
| "learning_rate": 6.5610859728506795e-06, | |
| "loss": 0.4916, | |
| "step": 145 | |
| }, | |
| { | |
| "epoch": 0.6606334841628959, | |
| "grad_norm": 0.39036545157432556, | |
| "learning_rate": 6.6063348416289595e-06, | |
| "loss": 0.4538, | |
| "step": 146 | |
| }, | |
| { | |
| "epoch": 0.665158371040724, | |
| "grad_norm": 0.4129018485546112, | |
| "learning_rate": 6.65158371040724e-06, | |
| "loss": 0.4414, | |
| "step": 147 | |
| }, | |
| { | |
| "epoch": 0.669683257918552, | |
| "grad_norm": 0.4925534725189209, | |
| "learning_rate": 6.69683257918552e-06, | |
| "loss": 0.4393, | |
| "step": 148 | |
| }, | |
| { | |
| "epoch": 0.6742081447963801, | |
| "grad_norm": 0.4131447374820709, | |
| "learning_rate": 6.742081447963802e-06, | |
| "loss": 0.4944, | |
| "step": 149 | |
| }, | |
| { | |
| "epoch": 0.6787330316742082, | |
| "grad_norm": 0.4236401319503784, | |
| "learning_rate": 6.787330316742083e-06, | |
| "loss": 0.4603, | |
| "step": 150 | |
| }, | |
| { | |
| "epoch": 0.6832579185520362, | |
| "grad_norm": 0.4191535413265228, | |
| "learning_rate": 6.832579185520363e-06, | |
| "loss": 0.4195, | |
| "step": 151 | |
| }, | |
| { | |
| "epoch": 0.6877828054298643, | |
| "grad_norm": 0.36520934104919434, | |
| "learning_rate": 6.8778280542986434e-06, | |
| "loss": 0.4602, | |
| "step": 152 | |
| }, | |
| { | |
| "epoch": 0.6923076923076923, | |
| "grad_norm": 0.5145202875137329, | |
| "learning_rate": 6.923076923076923e-06, | |
| "loss": 0.508, | |
| "step": 153 | |
| }, | |
| { | |
| "epoch": 0.6968325791855203, | |
| "grad_norm": 0.5549051761627197, | |
| "learning_rate": 6.968325791855204e-06, | |
| "loss": 0.4974, | |
| "step": 154 | |
| }, | |
| { | |
| "epoch": 0.7013574660633484, | |
| "grad_norm": 0.42621299624443054, | |
| "learning_rate": 7.013574660633484e-06, | |
| "loss": 0.4783, | |
| "step": 155 | |
| }, | |
| { | |
| "epoch": 0.7058823529411765, | |
| "grad_norm": 0.562082052230835, | |
| "learning_rate": 7.058823529411766e-06, | |
| "loss": 0.519, | |
| "step": 156 | |
| }, | |
| { | |
| "epoch": 0.7104072398190046, | |
| "grad_norm": 0.4220001697540283, | |
| "learning_rate": 7.104072398190046e-06, | |
| "loss": 0.4909, | |
| "step": 157 | |
| }, | |
| { | |
| "epoch": 0.7149321266968326, | |
| "grad_norm": 0.450371652841568, | |
| "learning_rate": 7.1493212669683265e-06, | |
| "loss": 0.441, | |
| "step": 158 | |
| }, | |
| { | |
| "epoch": 0.7194570135746606, | |
| "grad_norm": 0.4640924036502838, | |
| "learning_rate": 7.1945701357466065e-06, | |
| "loss": 0.4315, | |
| "step": 159 | |
| }, | |
| { | |
| "epoch": 0.7239819004524887, | |
| "grad_norm": 0.417269229888916, | |
| "learning_rate": 7.239819004524887e-06, | |
| "loss": 0.4071, | |
| "step": 160 | |
| }, | |
| { | |
| "epoch": 0.7285067873303167, | |
| "grad_norm": 0.5066007971763611, | |
| "learning_rate": 7.285067873303168e-06, | |
| "loss": 0.4618, | |
| "step": 161 | |
| }, | |
| { | |
| "epoch": 0.7330316742081447, | |
| "grad_norm": 0.412210613489151, | |
| "learning_rate": 7.330316742081448e-06, | |
| "loss": 0.4871, | |
| "step": 162 | |
| }, | |
| { | |
| "epoch": 0.7375565610859729, | |
| "grad_norm": 0.41878026723861694, | |
| "learning_rate": 7.37556561085973e-06, | |
| "loss": 0.437, | |
| "step": 163 | |
| }, | |
| { | |
| "epoch": 0.7420814479638009, | |
| "grad_norm": 0.48414674401283264, | |
| "learning_rate": 7.42081447963801e-06, | |
| "loss": 0.4682, | |
| "step": 164 | |
| }, | |
| { | |
| "epoch": 0.746606334841629, | |
| "grad_norm": 0.45116230845451355, | |
| "learning_rate": 7.4660633484162904e-06, | |
| "loss": 0.4137, | |
| "step": 165 | |
| }, | |
| { | |
| "epoch": 0.751131221719457, | |
| "grad_norm": 0.4211224615573883, | |
| "learning_rate": 7.51131221719457e-06, | |
| "loss": 0.4063, | |
| "step": 166 | |
| }, | |
| { | |
| "epoch": 0.755656108597285, | |
| "grad_norm": 0.46173834800720215, | |
| "learning_rate": 7.556561085972851e-06, | |
| "loss": 0.4924, | |
| "step": 167 | |
| }, | |
| { | |
| "epoch": 0.7601809954751131, | |
| "grad_norm": 0.4191412925720215, | |
| "learning_rate": 7.601809954751131e-06, | |
| "loss": 0.4572, | |
| "step": 168 | |
| }, | |
| { | |
| "epoch": 0.7647058823529411, | |
| "grad_norm": 0.4505658745765686, | |
| "learning_rate": 7.647058823529411e-06, | |
| "loss": 0.4481, | |
| "step": 169 | |
| }, | |
| { | |
| "epoch": 0.7692307692307693, | |
| "grad_norm": 0.5168994069099426, | |
| "learning_rate": 7.692307692307694e-06, | |
| "loss": 0.4449, | |
| "step": 170 | |
| }, | |
| { | |
| "epoch": 0.7737556561085973, | |
| "grad_norm": 0.5589344501495361, | |
| "learning_rate": 7.737556561085974e-06, | |
| "loss": 0.4761, | |
| "step": 171 | |
| }, | |
| { | |
| "epoch": 0.7782805429864253, | |
| "grad_norm": 0.4831865429878235, | |
| "learning_rate": 7.782805429864253e-06, | |
| "loss": 0.4805, | |
| "step": 172 | |
| }, | |
| { | |
| "epoch": 0.7828054298642534, | |
| "grad_norm": 0.46204084157943726, | |
| "learning_rate": 7.828054298642534e-06, | |
| "loss": 0.4535, | |
| "step": 173 | |
| }, | |
| { | |
| "epoch": 0.7873303167420814, | |
| "grad_norm": 0.44106197357177734, | |
| "learning_rate": 7.873303167420815e-06, | |
| "loss": 0.4072, | |
| "step": 174 | |
| }, | |
| { | |
| "epoch": 0.7918552036199095, | |
| "grad_norm": 0.5106877088546753, | |
| "learning_rate": 7.918552036199096e-06, | |
| "loss": 0.4125, | |
| "step": 175 | |
| }, | |
| { | |
| "epoch": 0.7963800904977375, | |
| "grad_norm": 0.5074198842048645, | |
| "learning_rate": 7.963800904977375e-06, | |
| "loss": 0.434, | |
| "step": 176 | |
| }, | |
| { | |
| "epoch": 0.8009049773755657, | |
| "grad_norm": 0.40606701374053955, | |
| "learning_rate": 8.009049773755657e-06, | |
| "loss": 0.4624, | |
| "step": 177 | |
| }, | |
| { | |
| "epoch": 0.8054298642533937, | |
| "grad_norm": 0.5007057189941406, | |
| "learning_rate": 8.054298642533938e-06, | |
| "loss": 0.4476, | |
| "step": 178 | |
| }, | |
| { | |
| "epoch": 0.8099547511312217, | |
| "grad_norm": 0.5165268778800964, | |
| "learning_rate": 8.099547511312217e-06, | |
| "loss": 0.4746, | |
| "step": 179 | |
| }, | |
| { | |
| "epoch": 0.8144796380090498, | |
| "grad_norm": 0.46993181109428406, | |
| "learning_rate": 8.144796380090498e-06, | |
| "loss": 0.4432, | |
| "step": 180 | |
| }, | |
| { | |
| "epoch": 0.8190045248868778, | |
| "grad_norm": 0.5677555799484253, | |
| "learning_rate": 8.190045248868779e-06, | |
| "loss": 0.4422, | |
| "step": 181 | |
| }, | |
| { | |
| "epoch": 0.8235294117647058, | |
| "grad_norm": 0.5643273591995239, | |
| "learning_rate": 8.23529411764706e-06, | |
| "loss": 0.5143, | |
| "step": 182 | |
| }, | |
| { | |
| "epoch": 0.8280542986425339, | |
| "grad_norm": 0.5022807717323303, | |
| "learning_rate": 8.280542986425339e-06, | |
| "loss": 0.4544, | |
| "step": 183 | |
| }, | |
| { | |
| "epoch": 0.832579185520362, | |
| "grad_norm": 0.49226853251457214, | |
| "learning_rate": 8.325791855203621e-06, | |
| "loss": 0.4637, | |
| "step": 184 | |
| }, | |
| { | |
| "epoch": 0.8371040723981901, | |
| "grad_norm": 0.48023420572280884, | |
| "learning_rate": 8.371040723981902e-06, | |
| "loss": 0.4264, | |
| "step": 185 | |
| }, | |
| { | |
| "epoch": 0.8416289592760181, | |
| "grad_norm": 0.5742875337600708, | |
| "learning_rate": 8.416289592760181e-06, | |
| "loss": 0.4761, | |
| "step": 186 | |
| }, | |
| { | |
| "epoch": 0.8461538461538461, | |
| "grad_norm": 0.5855184197425842, | |
| "learning_rate": 8.461538461538462e-06, | |
| "loss": 0.4668, | |
| "step": 187 | |
| }, | |
| { | |
| "epoch": 0.8506787330316742, | |
| "grad_norm": 0.5004088282585144, | |
| "learning_rate": 8.506787330316743e-06, | |
| "loss": 0.4354, | |
| "step": 188 | |
| }, | |
| { | |
| "epoch": 0.8552036199095022, | |
| "grad_norm": 0.5111898183822632, | |
| "learning_rate": 8.552036199095024e-06, | |
| "loss": 0.4494, | |
| "step": 189 | |
| }, | |
| { | |
| "epoch": 0.8597285067873304, | |
| "grad_norm": 0.5159589052200317, | |
| "learning_rate": 8.597285067873304e-06, | |
| "loss": 0.493, | |
| "step": 190 | |
| }, | |
| { | |
| "epoch": 0.8642533936651584, | |
| "grad_norm": 0.5572559833526611, | |
| "learning_rate": 8.642533936651585e-06, | |
| "loss": 0.4845, | |
| "step": 191 | |
| }, | |
| { | |
| "epoch": 0.8687782805429864, | |
| "grad_norm": 0.47993189096450806, | |
| "learning_rate": 8.687782805429864e-06, | |
| "loss": 0.4709, | |
| "step": 192 | |
| }, | |
| { | |
| "epoch": 0.8733031674208145, | |
| "grad_norm": 0.4479634463787079, | |
| "learning_rate": 8.733031674208145e-06, | |
| "loss": 0.4192, | |
| "step": 193 | |
| }, | |
| { | |
| "epoch": 0.8778280542986425, | |
| "grad_norm": 0.48463767766952515, | |
| "learning_rate": 8.778280542986426e-06, | |
| "loss": 0.4468, | |
| "step": 194 | |
| }, | |
| { | |
| "epoch": 0.8823529411764706, | |
| "grad_norm": 0.4300975203514099, | |
| "learning_rate": 8.823529411764707e-06, | |
| "loss": 0.4546, | |
| "step": 195 | |
| }, | |
| { | |
| "epoch": 0.8868778280542986, | |
| "grad_norm": 0.43377694487571716, | |
| "learning_rate": 8.868778280542986e-06, | |
| "loss": 0.4517, | |
| "step": 196 | |
| }, | |
| { | |
| "epoch": 0.8914027149321267, | |
| "grad_norm": 0.4832284152507782, | |
| "learning_rate": 8.914027149321268e-06, | |
| "loss": 0.4296, | |
| "step": 197 | |
| }, | |
| { | |
| "epoch": 0.8959276018099548, | |
| "grad_norm": 0.47211429476737976, | |
| "learning_rate": 8.95927601809955e-06, | |
| "loss": 0.4487, | |
| "step": 198 | |
| }, | |
| { | |
| "epoch": 0.9004524886877828, | |
| "grad_norm": 0.4101850688457489, | |
| "learning_rate": 9.004524886877828e-06, | |
| "loss": 0.4392, | |
| "step": 199 | |
| }, | |
| { | |
| "epoch": 0.9049773755656109, | |
| "grad_norm": 0.5208500623703003, | |
| "learning_rate": 9.049773755656109e-06, | |
| "loss": 0.4276, | |
| "step": 200 | |
| }, | |
| { | |
| "epoch": 0.9095022624434389, | |
| "grad_norm": 0.49378854036331177, | |
| "learning_rate": 9.09502262443439e-06, | |
| "loss": 0.454, | |
| "step": 201 | |
| }, | |
| { | |
| "epoch": 0.9140271493212669, | |
| "grad_norm": 0.4764745831489563, | |
| "learning_rate": 9.14027149321267e-06, | |
| "loss": 0.4176, | |
| "step": 202 | |
| }, | |
| { | |
| "epoch": 0.918552036199095, | |
| "grad_norm": 0.45192357897758484, | |
| "learning_rate": 9.18552036199095e-06, | |
| "loss": 0.4762, | |
| "step": 203 | |
| }, | |
| { | |
| "epoch": 0.9230769230769231, | |
| "grad_norm": 0.4330678880214691, | |
| "learning_rate": 9.230769230769232e-06, | |
| "loss": 0.4158, | |
| "step": 204 | |
| }, | |
| { | |
| "epoch": 0.9276018099547512, | |
| "grad_norm": 0.5406427383422852, | |
| "learning_rate": 9.276018099547513e-06, | |
| "loss": 0.4351, | |
| "step": 205 | |
| }, | |
| { | |
| "epoch": 0.9321266968325792, | |
| "grad_norm": 0.4686480760574341, | |
| "learning_rate": 9.321266968325792e-06, | |
| "loss": 0.4683, | |
| "step": 206 | |
| }, | |
| { | |
| "epoch": 0.9366515837104072, | |
| "grad_norm": 0.430961549282074, | |
| "learning_rate": 9.366515837104073e-06, | |
| "loss": 0.4077, | |
| "step": 207 | |
| }, | |
| { | |
| "epoch": 0.9411764705882353, | |
| "grad_norm": 0.4596535563468933, | |
| "learning_rate": 9.411764705882354e-06, | |
| "loss": 0.4357, | |
| "step": 208 | |
| }, | |
| { | |
| "epoch": 0.9457013574660633, | |
| "grad_norm": 0.49825915694236755, | |
| "learning_rate": 9.457013574660635e-06, | |
| "loss": 0.4438, | |
| "step": 209 | |
| }, | |
| { | |
| "epoch": 0.9502262443438914, | |
| "grad_norm": 0.4968724846839905, | |
| "learning_rate": 9.502262443438914e-06, | |
| "loss": 0.4391, | |
| "step": 210 | |
| }, | |
| { | |
| "epoch": 0.9547511312217195, | |
| "grad_norm": 0.5227912664413452, | |
| "learning_rate": 9.547511312217196e-06, | |
| "loss": 0.4682, | |
| "step": 211 | |
| }, | |
| { | |
| "epoch": 0.9592760180995475, | |
| "grad_norm": 0.593227744102478, | |
| "learning_rate": 9.592760180995477e-06, | |
| "loss": 0.4375, | |
| "step": 212 | |
| }, | |
| { | |
| "epoch": 0.9638009049773756, | |
| "grad_norm": 0.46191665530204773, | |
| "learning_rate": 9.638009049773756e-06, | |
| "loss": 0.4469, | |
| "step": 213 | |
| }, | |
| { | |
| "epoch": 0.9683257918552036, | |
| "grad_norm": 0.49439120292663574, | |
| "learning_rate": 9.683257918552037e-06, | |
| "loss": 0.4167, | |
| "step": 214 | |
| }, | |
| { | |
| "epoch": 0.9728506787330317, | |
| "grad_norm": 0.5323225855827332, | |
| "learning_rate": 9.728506787330318e-06, | |
| "loss": 0.462, | |
| "step": 215 | |
| }, | |
| { | |
| "epoch": 0.9773755656108597, | |
| "grad_norm": 0.40613311529159546, | |
| "learning_rate": 9.773755656108599e-06, | |
| "loss": 0.3551, | |
| "step": 216 | |
| }, | |
| { | |
| "epoch": 0.9819004524886877, | |
| "grad_norm": 0.523820161819458, | |
| "learning_rate": 9.819004524886878e-06, | |
| "loss": 0.4303, | |
| "step": 217 | |
| }, | |
| { | |
| "epoch": 0.9864253393665159, | |
| "grad_norm": 0.4367105960845947, | |
| "learning_rate": 9.86425339366516e-06, | |
| "loss": 0.4617, | |
| "step": 218 | |
| }, | |
| { | |
| "epoch": 0.9909502262443439, | |
| "grad_norm": 0.474623441696167, | |
| "learning_rate": 9.90950226244344e-06, | |
| "loss": 0.4098, | |
| "step": 219 | |
| }, | |
| { | |
| "epoch": 0.995475113122172, | |
| "grad_norm": 0.5020592212677002, | |
| "learning_rate": 9.95475113122172e-06, | |
| "loss": 0.3816, | |
| "step": 220 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "grad_norm": 0.4186856150627136, | |
| "learning_rate": 1e-05, | |
| "loss": 0.4303, | |
| "step": 221 | |
| }, | |
| { | |
| "epoch": 1.004524886877828, | |
| "grad_norm": 0.5000538229942322, | |
| "learning_rate": 9.999993763081091e-06, | |
| "loss": 0.4021, | |
| "step": 222 | |
| }, | |
| { | |
| "epoch": 1.009049773755656, | |
| "grad_norm": 0.4233403205871582, | |
| "learning_rate": 9.999975052339923e-06, | |
| "loss": 0.3917, | |
| "step": 223 | |
| }, | |
| { | |
| "epoch": 1.0135746606334841, | |
| "grad_norm": 0.44955193996429443, | |
| "learning_rate": 9.999943867823174e-06, | |
| "loss": 0.4112, | |
| "step": 224 | |
| }, | |
| { | |
| "epoch": 1.0180995475113122, | |
| "grad_norm": 0.5060259699821472, | |
| "learning_rate": 9.999900209608642e-06, | |
| "loss": 0.3585, | |
| "step": 225 | |
| }, | |
| { | |
| "epoch": 1.0226244343891402, | |
| "grad_norm": 0.46285244822502136, | |
| "learning_rate": 9.999844077805245e-06, | |
| "loss": 0.3972, | |
| "step": 226 | |
| }, | |
| { | |
| "epoch": 1.0271493212669682, | |
| "grad_norm": 0.519055962562561, | |
| "learning_rate": 9.999775472553019e-06, | |
| "loss": 0.3439, | |
| "step": 227 | |
| }, | |
| { | |
| "epoch": 1.0316742081447963, | |
| "grad_norm": 0.49448850750923157, | |
| "learning_rate": 9.999694394023119e-06, | |
| "loss": 0.3777, | |
| "step": 228 | |
| }, | |
| { | |
| "epoch": 1.0361990950226245, | |
| "grad_norm": 0.502755343914032, | |
| "learning_rate": 9.999600842417815e-06, | |
| "loss": 0.3685, | |
| "step": 229 | |
| }, | |
| { | |
| "epoch": 1.0407239819004526, | |
| "grad_norm": 0.5343412756919861, | |
| "learning_rate": 9.999494817970498e-06, | |
| "loss": 0.3402, | |
| "step": 230 | |
| }, | |
| { | |
| "epoch": 1.0452488687782806, | |
| "grad_norm": 0.5237849354743958, | |
| "learning_rate": 9.999376320945673e-06, | |
| "loss": 0.3453, | |
| "step": 231 | |
| }, | |
| { | |
| "epoch": 1.0497737556561086, | |
| "grad_norm": 0.5596418380737305, | |
| "learning_rate": 9.999245351638964e-06, | |
| "loss": 0.3945, | |
| "step": 232 | |
| }, | |
| { | |
| "epoch": 1.0542986425339367, | |
| "grad_norm": 0.3989311456680298, | |
| "learning_rate": 9.999101910377107e-06, | |
| "loss": 0.3383, | |
| "step": 233 | |
| }, | |
| { | |
| "epoch": 1.0588235294117647, | |
| "grad_norm": 0.4172278642654419, | |
| "learning_rate": 9.998945997517957e-06, | |
| "loss": 0.392, | |
| "step": 234 | |
| }, | |
| { | |
| "epoch": 1.0633484162895928, | |
| "grad_norm": 0.5389004349708557, | |
| "learning_rate": 9.998777613450478e-06, | |
| "loss": 0.3883, | |
| "step": 235 | |
| }, | |
| { | |
| "epoch": 1.0678733031674208, | |
| "grad_norm": 0.5382748246192932, | |
| "learning_rate": 9.998596758594752e-06, | |
| "loss": 0.3582, | |
| "step": 236 | |
| }, | |
| { | |
| "epoch": 1.0723981900452488, | |
| "grad_norm": 0.5459226965904236, | |
| "learning_rate": 9.998403433401969e-06, | |
| "loss": 0.4137, | |
| "step": 237 | |
| }, | |
| { | |
| "epoch": 1.0769230769230769, | |
| "grad_norm": 0.48526811599731445, | |
| "learning_rate": 9.998197638354428e-06, | |
| "loss": 0.3618, | |
| "step": 238 | |
| }, | |
| { | |
| "epoch": 1.081447963800905, | |
| "grad_norm": 0.47047311067581177, | |
| "learning_rate": 9.997979373965542e-06, | |
| "loss": 0.3097, | |
| "step": 239 | |
| }, | |
| { | |
| "epoch": 1.085972850678733, | |
| "grad_norm": 0.5708703398704529, | |
| "learning_rate": 9.997748640779829e-06, | |
| "loss": 0.4371, | |
| "step": 240 | |
| }, | |
| { | |
| "epoch": 1.090497737556561, | |
| "grad_norm": 0.5245316028594971, | |
| "learning_rate": 9.997505439372914e-06, | |
| "loss": 0.3988, | |
| "step": 241 | |
| }, | |
| { | |
| "epoch": 1.0950226244343892, | |
| "grad_norm": 0.5281980633735657, | |
| "learning_rate": 9.997249770351531e-06, | |
| "loss": 0.414, | |
| "step": 242 | |
| }, | |
| { | |
| "epoch": 1.0995475113122173, | |
| "grad_norm": 0.6034480333328247, | |
| "learning_rate": 9.99698163435351e-06, | |
| "loss": 0.4268, | |
| "step": 243 | |
| }, | |
| { | |
| "epoch": 1.1040723981900453, | |
| "grad_norm": 0.479973703622818, | |
| "learning_rate": 9.996701032047795e-06, | |
| "loss": 0.4261, | |
| "step": 244 | |
| }, | |
| { | |
| "epoch": 1.1085972850678734, | |
| "grad_norm": 0.4137464165687561, | |
| "learning_rate": 9.996407964134416e-06, | |
| "loss": 0.369, | |
| "step": 245 | |
| }, | |
| { | |
| "epoch": 1.1131221719457014, | |
| "grad_norm": 0.7938151359558105, | |
| "learning_rate": 9.996102431344514e-06, | |
| "loss": 0.4152, | |
| "step": 246 | |
| }, | |
| { | |
| "epoch": 1.1176470588235294, | |
| "grad_norm": 0.5222673416137695, | |
| "learning_rate": 9.99578443444032e-06, | |
| "loss": 0.3956, | |
| "step": 247 | |
| }, | |
| { | |
| "epoch": 1.1221719457013575, | |
| "grad_norm": 0.41782158613204956, | |
| "learning_rate": 9.995453974215164e-06, | |
| "loss": 0.3423, | |
| "step": 248 | |
| }, | |
| { | |
| "epoch": 1.1266968325791855, | |
| "grad_norm": 0.6398916244506836, | |
| "learning_rate": 9.995111051493468e-06, | |
| "loss": 0.3975, | |
| "step": 249 | |
| }, | |
| { | |
| "epoch": 1.1312217194570136, | |
| "grad_norm": 0.5231488943099976, | |
| "learning_rate": 9.99475566713074e-06, | |
| "loss": 0.4051, | |
| "step": 250 | |
| }, | |
| { | |
| "epoch": 1.1357466063348416, | |
| "grad_norm": 0.48015347123146057, | |
| "learning_rate": 9.994387822013586e-06, | |
| "loss": 0.4183, | |
| "step": 251 | |
| }, | |
| { | |
| "epoch": 1.1402714932126696, | |
| "grad_norm": 0.44875526428222656, | |
| "learning_rate": 9.994007517059693e-06, | |
| "loss": 0.37, | |
| "step": 252 | |
| }, | |
| { | |
| "epoch": 1.1447963800904977, | |
| "grad_norm": 0.6783897876739502, | |
| "learning_rate": 9.993614753217833e-06, | |
| "loss": 0.4063, | |
| "step": 253 | |
| }, | |
| { | |
| "epoch": 1.1493212669683257, | |
| "grad_norm": 0.5309958457946777, | |
| "learning_rate": 9.99320953146786e-06, | |
| "loss": 0.3896, | |
| "step": 254 | |
| }, | |
| { | |
| "epoch": 1.1538461538461537, | |
| "grad_norm": 0.5052987933158875, | |
| "learning_rate": 9.992791852820709e-06, | |
| "loss": 0.4243, | |
| "step": 255 | |
| }, | |
| { | |
| "epoch": 1.1583710407239818, | |
| "grad_norm": 0.47427037358283997, | |
| "learning_rate": 9.992361718318393e-06, | |
| "loss": 0.3601, | |
| "step": 256 | |
| }, | |
| { | |
| "epoch": 1.16289592760181, | |
| "grad_norm": 0.4682779312133789, | |
| "learning_rate": 9.991919129033994e-06, | |
| "loss": 0.3564, | |
| "step": 257 | |
| }, | |
| { | |
| "epoch": 1.167420814479638, | |
| "grad_norm": 0.5049699544906616, | |
| "learning_rate": 9.991464086071669e-06, | |
| "loss": 0.3362, | |
| "step": 258 | |
| }, | |
| { | |
| "epoch": 1.1719457013574661, | |
| "grad_norm": 0.43032440543174744, | |
| "learning_rate": 9.990996590566648e-06, | |
| "loss": 0.3884, | |
| "step": 259 | |
| }, | |
| { | |
| "epoch": 1.1764705882352942, | |
| "grad_norm": 0.48647770285606384, | |
| "learning_rate": 9.990516643685222e-06, | |
| "loss": 0.3797, | |
| "step": 260 | |
| }, | |
| { | |
| "epoch": 1.1809954751131222, | |
| "grad_norm": 0.6087546944618225, | |
| "learning_rate": 9.990024246624745e-06, | |
| "loss": 0.4037, | |
| "step": 261 | |
| }, | |
| { | |
| "epoch": 1.1855203619909502, | |
| "grad_norm": 0.4716806709766388, | |
| "learning_rate": 9.989519400613636e-06, | |
| "loss": 0.4145, | |
| "step": 262 | |
| }, | |
| { | |
| "epoch": 1.1900452488687783, | |
| "grad_norm": 0.47347491979599, | |
| "learning_rate": 9.989002106911368e-06, | |
| "loss": 0.3781, | |
| "step": 263 | |
| }, | |
| { | |
| "epoch": 1.1945701357466063, | |
| "grad_norm": 0.6511465907096863, | |
| "learning_rate": 9.988472366808468e-06, | |
| "loss": 0.4168, | |
| "step": 264 | |
| }, | |
| { | |
| "epoch": 1.1990950226244343, | |
| "grad_norm": 0.4355635941028595, | |
| "learning_rate": 9.987930181626515e-06, | |
| "loss": 0.3703, | |
| "step": 265 | |
| }, | |
| { | |
| "epoch": 1.2036199095022624, | |
| "grad_norm": 0.4431900382041931, | |
| "learning_rate": 9.987375552718133e-06, | |
| "loss": 0.4355, | |
| "step": 266 | |
| }, | |
| { | |
| "epoch": 1.2081447963800904, | |
| "grad_norm": 0.4137621223926544, | |
| "learning_rate": 9.986808481466994e-06, | |
| "loss": 0.3837, | |
| "step": 267 | |
| }, | |
| { | |
| "epoch": 1.2126696832579185, | |
| "grad_norm": 0.49528124928474426, | |
| "learning_rate": 9.98622896928781e-06, | |
| "loss": 0.3732, | |
| "step": 268 | |
| }, | |
| { | |
| "epoch": 1.2171945701357467, | |
| "grad_norm": 0.45777904987335205, | |
| "learning_rate": 9.985637017626326e-06, | |
| "loss": 0.4144, | |
| "step": 269 | |
| }, | |
| { | |
| "epoch": 1.2217194570135748, | |
| "grad_norm": 0.5112358331680298, | |
| "learning_rate": 9.985032627959325e-06, | |
| "loss": 0.4616, | |
| "step": 270 | |
| }, | |
| { | |
| "epoch": 1.2262443438914028, | |
| "grad_norm": 0.5006369948387146, | |
| "learning_rate": 9.984415801794622e-06, | |
| "loss": 0.4317, | |
| "step": 271 | |
| }, | |
| { | |
| "epoch": 1.2307692307692308, | |
| "grad_norm": 0.48134738206863403, | |
| "learning_rate": 9.983786540671052e-06, | |
| "loss": 0.3928, | |
| "step": 272 | |
| }, | |
| { | |
| "epoch": 1.2352941176470589, | |
| "grad_norm": 0.4563564956188202, | |
| "learning_rate": 9.983144846158472e-06, | |
| "loss": 0.3907, | |
| "step": 273 | |
| }, | |
| { | |
| "epoch": 1.239819004524887, | |
| "grad_norm": 0.43982940912246704, | |
| "learning_rate": 9.982490719857766e-06, | |
| "loss": 0.41, | |
| "step": 274 | |
| }, | |
| { | |
| "epoch": 1.244343891402715, | |
| "grad_norm": 0.4124949872493744, | |
| "learning_rate": 9.981824163400827e-06, | |
| "loss": 0.3775, | |
| "step": 275 | |
| }, | |
| { | |
| "epoch": 1.248868778280543, | |
| "grad_norm": 0.44366535544395447, | |
| "learning_rate": 9.981145178450555e-06, | |
| "loss": 0.3958, | |
| "step": 276 | |
| }, | |
| { | |
| "epoch": 1.253393665158371, | |
| "grad_norm": 0.4540594220161438, | |
| "learning_rate": 9.980453766700861e-06, | |
| "loss": 0.4276, | |
| "step": 277 | |
| }, | |
| { | |
| "epoch": 1.257918552036199, | |
| "grad_norm": 0.4591408967971802, | |
| "learning_rate": 9.979749929876658e-06, | |
| "loss": 0.3899, | |
| "step": 278 | |
| }, | |
| { | |
| "epoch": 1.262443438914027, | |
| "grad_norm": 0.4709743857383728, | |
| "learning_rate": 9.979033669733855e-06, | |
| "loss": 0.4155, | |
| "step": 279 | |
| }, | |
| { | |
| "epoch": 1.2669683257918551, | |
| "grad_norm": 0.5153703689575195, | |
| "learning_rate": 9.978304988059352e-06, | |
| "loss": 0.355, | |
| "step": 280 | |
| }, | |
| { | |
| "epoch": 1.2714932126696832, | |
| "grad_norm": 0.5748324990272522, | |
| "learning_rate": 9.977563886671043e-06, | |
| "loss": 0.3903, | |
| "step": 281 | |
| }, | |
| { | |
| "epoch": 1.2760180995475112, | |
| "grad_norm": 0.42208483815193176, | |
| "learning_rate": 9.976810367417801e-06, | |
| "loss": 0.3711, | |
| "step": 282 | |
| }, | |
| { | |
| "epoch": 1.2805429864253393, | |
| "grad_norm": 0.4175834059715271, | |
| "learning_rate": 9.976044432179486e-06, | |
| "loss": 0.3714, | |
| "step": 283 | |
| }, | |
| { | |
| "epoch": 1.2850678733031673, | |
| "grad_norm": 0.48363685607910156, | |
| "learning_rate": 9.975266082866923e-06, | |
| "loss": 0.359, | |
| "step": 284 | |
| }, | |
| { | |
| "epoch": 1.2895927601809956, | |
| "grad_norm": 0.4426751732826233, | |
| "learning_rate": 9.974475321421918e-06, | |
| "loss": 0.3497, | |
| "step": 285 | |
| }, | |
| { | |
| "epoch": 1.2941176470588236, | |
| "grad_norm": 0.49176037311553955, | |
| "learning_rate": 9.973672149817232e-06, | |
| "loss": 0.4077, | |
| "step": 286 | |
| }, | |
| { | |
| "epoch": 1.2986425339366516, | |
| "grad_norm": 0.47568652033805847, | |
| "learning_rate": 9.972856570056594e-06, | |
| "loss": 0.3892, | |
| "step": 287 | |
| }, | |
| { | |
| "epoch": 1.3031674208144797, | |
| "grad_norm": 0.484466552734375, | |
| "learning_rate": 9.972028584174687e-06, | |
| "loss": 0.4417, | |
| "step": 288 | |
| }, | |
| { | |
| "epoch": 1.3076923076923077, | |
| "grad_norm": 0.41258934140205383, | |
| "learning_rate": 9.971188194237141e-06, | |
| "loss": 0.3803, | |
| "step": 289 | |
| }, | |
| { | |
| "epoch": 1.3122171945701357, | |
| "grad_norm": 0.463830828666687, | |
| "learning_rate": 9.970335402340534e-06, | |
| "loss": 0.4389, | |
| "step": 290 | |
| }, | |
| { | |
| "epoch": 1.3167420814479638, | |
| "grad_norm": 0.425300270318985, | |
| "learning_rate": 9.969470210612384e-06, | |
| "loss": 0.4108, | |
| "step": 291 | |
| }, | |
| { | |
| "epoch": 1.3212669683257918, | |
| "grad_norm": 0.5346444249153137, | |
| "learning_rate": 9.968592621211146e-06, | |
| "loss": 0.3969, | |
| "step": 292 | |
| }, | |
| { | |
| "epoch": 1.3257918552036199, | |
| "grad_norm": 0.4658094644546509, | |
| "learning_rate": 9.967702636326195e-06, | |
| "loss": 0.4434, | |
| "step": 293 | |
| }, | |
| { | |
| "epoch": 1.330316742081448, | |
| "grad_norm": 0.48898160457611084, | |
| "learning_rate": 9.966800258177842e-06, | |
| "loss": 0.4164, | |
| "step": 294 | |
| }, | |
| { | |
| "epoch": 1.334841628959276, | |
| "grad_norm": 0.5616970062255859, | |
| "learning_rate": 9.96588548901731e-06, | |
| "loss": 0.4472, | |
| "step": 295 | |
| }, | |
| { | |
| "epoch": 1.3393665158371042, | |
| "grad_norm": 0.43943265080451965, | |
| "learning_rate": 9.964958331126735e-06, | |
| "loss": 0.4284, | |
| "step": 296 | |
| }, | |
| { | |
| "epoch": 1.3438914027149322, | |
| "grad_norm": 0.5051960945129395, | |
| "learning_rate": 9.964018786819158e-06, | |
| "loss": 0.3999, | |
| "step": 297 | |
| }, | |
| { | |
| "epoch": 1.3484162895927603, | |
| "grad_norm": 0.5062891840934753, | |
| "learning_rate": 9.963066858438525e-06, | |
| "loss": 0.4063, | |
| "step": 298 | |
| }, | |
| { | |
| "epoch": 1.3529411764705883, | |
| "grad_norm": 0.5116050243377686, | |
| "learning_rate": 9.96210254835968e-06, | |
| "loss": 0.3752, | |
| "step": 299 | |
| }, | |
| { | |
| "epoch": 1.3574660633484164, | |
| "grad_norm": 0.4431830048561096, | |
| "learning_rate": 9.961125858988348e-06, | |
| "loss": 0.3763, | |
| "step": 300 | |
| }, | |
| { | |
| "epoch": 1.3619909502262444, | |
| "grad_norm": 0.4586993455886841, | |
| "learning_rate": 9.96013679276114e-06, | |
| "loss": 0.3983, | |
| "step": 301 | |
| }, | |
| { | |
| "epoch": 1.3665158371040724, | |
| "grad_norm": 0.4054921567440033, | |
| "learning_rate": 9.959135352145552e-06, | |
| "loss": 0.3679, | |
| "step": 302 | |
| }, | |
| { | |
| "epoch": 1.3710407239819005, | |
| "grad_norm": 0.4635688066482544, | |
| "learning_rate": 9.958121539639945e-06, | |
| "loss": 0.3835, | |
| "step": 303 | |
| }, | |
| { | |
| "epoch": 1.3755656108597285, | |
| "grad_norm": 0.4249536395072937, | |
| "learning_rate": 9.95709535777354e-06, | |
| "loss": 0.3947, | |
| "step": 304 | |
| }, | |
| { | |
| "epoch": 1.3800904977375565, | |
| "grad_norm": 0.43373027443885803, | |
| "learning_rate": 9.956056809106426e-06, | |
| "loss": 0.3603, | |
| "step": 305 | |
| }, | |
| { | |
| "epoch": 1.3846153846153846, | |
| "grad_norm": 0.45955413579940796, | |
| "learning_rate": 9.955005896229543e-06, | |
| "loss": 0.3909, | |
| "step": 306 | |
| }, | |
| { | |
| "epoch": 1.3891402714932126, | |
| "grad_norm": 0.44556981325149536, | |
| "learning_rate": 9.95394262176467e-06, | |
| "loss": 0.3668, | |
| "step": 307 | |
| }, | |
| { | |
| "epoch": 1.3936651583710407, | |
| "grad_norm": 0.4461539685726166, | |
| "learning_rate": 9.952866988364431e-06, | |
| "loss": 0.3902, | |
| "step": 308 | |
| }, | |
| { | |
| "epoch": 1.3981900452488687, | |
| "grad_norm": 0.4445544481277466, | |
| "learning_rate": 9.951778998712282e-06, | |
| "loss": 0.407, | |
| "step": 309 | |
| }, | |
| { | |
| "epoch": 1.4027149321266967, | |
| "grad_norm": 0.40608200430870056, | |
| "learning_rate": 9.950678655522505e-06, | |
| "loss": 0.3472, | |
| "step": 310 | |
| }, | |
| { | |
| "epoch": 1.4072398190045248, | |
| "grad_norm": 0.4478733241558075, | |
| "learning_rate": 9.9495659615402e-06, | |
| "loss": 0.4088, | |
| "step": 311 | |
| }, | |
| { | |
| "epoch": 1.4117647058823528, | |
| "grad_norm": 0.5241243243217468, | |
| "learning_rate": 9.948440919541277e-06, | |
| "loss": 0.3937, | |
| "step": 312 | |
| }, | |
| { | |
| "epoch": 1.416289592760181, | |
| "grad_norm": 0.44417575001716614, | |
| "learning_rate": 9.947303532332457e-06, | |
| "loss": 0.3508, | |
| "step": 313 | |
| }, | |
| { | |
| "epoch": 1.420814479638009, | |
| "grad_norm": 0.46424350142478943, | |
| "learning_rate": 9.946153802751257e-06, | |
| "loss": 0.4189, | |
| "step": 314 | |
| }, | |
| { | |
| "epoch": 1.4253393665158371, | |
| "grad_norm": 0.41135942935943604, | |
| "learning_rate": 9.944991733665986e-06, | |
| "loss": 0.3924, | |
| "step": 315 | |
| }, | |
| { | |
| "epoch": 1.4298642533936652, | |
| "grad_norm": 0.4415293037891388, | |
| "learning_rate": 9.943817327975732e-06, | |
| "loss": 0.3657, | |
| "step": 316 | |
| }, | |
| { | |
| "epoch": 1.4343891402714932, | |
| "grad_norm": 0.40911784768104553, | |
| "learning_rate": 9.942630588610368e-06, | |
| "loss": 0.383, | |
| "step": 317 | |
| }, | |
| { | |
| "epoch": 1.4389140271493213, | |
| "grad_norm": 0.43340831995010376, | |
| "learning_rate": 9.94143151853053e-06, | |
| "loss": 0.3657, | |
| "step": 318 | |
| }, | |
| { | |
| "epoch": 1.4434389140271493, | |
| "grad_norm": 0.46907198429107666, | |
| "learning_rate": 9.940220120727624e-06, | |
| "loss": 0.4239, | |
| "step": 319 | |
| }, | |
| { | |
| "epoch": 1.4479638009049773, | |
| "grad_norm": 0.4872211813926697, | |
| "learning_rate": 9.938996398223802e-06, | |
| "loss": 0.4008, | |
| "step": 320 | |
| }, | |
| { | |
| "epoch": 1.4524886877828054, | |
| "grad_norm": 0.46110352873802185, | |
| "learning_rate": 9.937760354071966e-06, | |
| "loss": 0.3792, | |
| "step": 321 | |
| }, | |
| { | |
| "epoch": 1.4570135746606334, | |
| "grad_norm": 0.46933513879776, | |
| "learning_rate": 9.936511991355764e-06, | |
| "loss": 0.3626, | |
| "step": 322 | |
| }, | |
| { | |
| "epoch": 1.4615384615384617, | |
| "grad_norm": 0.47610121965408325, | |
| "learning_rate": 9.935251313189564e-06, | |
| "loss": 0.4133, | |
| "step": 323 | |
| }, | |
| { | |
| "epoch": 1.4660633484162897, | |
| "grad_norm": 0.4467121660709381, | |
| "learning_rate": 9.933978322718472e-06, | |
| "loss": 0.4247, | |
| "step": 324 | |
| }, | |
| { | |
| "epoch": 1.4705882352941178, | |
| "grad_norm": 0.4571016728878021, | |
| "learning_rate": 9.932693023118299e-06, | |
| "loss": 0.4107, | |
| "step": 325 | |
| }, | |
| { | |
| "epoch": 1.4751131221719458, | |
| "grad_norm": 0.533100962638855, | |
| "learning_rate": 9.931395417595568e-06, | |
| "loss": 0.4091, | |
| "step": 326 | |
| }, | |
| { | |
| "epoch": 1.4796380090497738, | |
| "grad_norm": 0.4366355538368225, | |
| "learning_rate": 9.930085509387509e-06, | |
| "loss": 0.3467, | |
| "step": 327 | |
| }, | |
| { | |
| "epoch": 1.4841628959276019, | |
| "grad_norm": 0.41149038076400757, | |
| "learning_rate": 9.92876330176203e-06, | |
| "loss": 0.387, | |
| "step": 328 | |
| }, | |
| { | |
| "epoch": 1.48868778280543, | |
| "grad_norm": 0.4341404438018799, | |
| "learning_rate": 9.927428798017738e-06, | |
| "loss": 0.4285, | |
| "step": 329 | |
| }, | |
| { | |
| "epoch": 1.493212669683258, | |
| "grad_norm": 0.46218249201774597, | |
| "learning_rate": 9.926082001483909e-06, | |
| "loss": 0.4224, | |
| "step": 330 | |
| }, | |
| { | |
| "epoch": 1.497737556561086, | |
| "grad_norm": 0.42733314633369446, | |
| "learning_rate": 9.924722915520484e-06, | |
| "loss": 0.375, | |
| "step": 331 | |
| }, | |
| { | |
| "epoch": 1.502262443438914, | |
| "grad_norm": 0.4854833781719208, | |
| "learning_rate": 9.92335154351807e-06, | |
| "loss": 0.4287, | |
| "step": 332 | |
| }, | |
| { | |
| "epoch": 1.506787330316742, | |
| "grad_norm": 0.4013662040233612, | |
| "learning_rate": 9.92196788889792e-06, | |
| "loss": 0.3262, | |
| "step": 333 | |
| }, | |
| { | |
| "epoch": 1.51131221719457, | |
| "grad_norm": 0.4636831283569336, | |
| "learning_rate": 9.92057195511193e-06, | |
| "loss": 0.4486, | |
| "step": 334 | |
| }, | |
| { | |
| "epoch": 1.5158371040723981, | |
| "grad_norm": 0.3883086144924164, | |
| "learning_rate": 9.919163745642633e-06, | |
| "loss": 0.3536, | |
| "step": 335 | |
| }, | |
| { | |
| "epoch": 1.5203619909502262, | |
| "grad_norm": 0.47283634543418884, | |
| "learning_rate": 9.91774326400318e-06, | |
| "loss": 0.3475, | |
| "step": 336 | |
| }, | |
| { | |
| "epoch": 1.5248868778280542, | |
| "grad_norm": 0.5383743047714233, | |
| "learning_rate": 9.916310513737345e-06, | |
| "loss": 0.5013, | |
| "step": 337 | |
| }, | |
| { | |
| "epoch": 1.5294117647058822, | |
| "grad_norm": 0.4297215938568115, | |
| "learning_rate": 9.91486549841951e-06, | |
| "loss": 0.3726, | |
| "step": 338 | |
| }, | |
| { | |
| "epoch": 1.5339366515837103, | |
| "grad_norm": 0.5066801905632019, | |
| "learning_rate": 9.913408221654647e-06, | |
| "loss": 0.3823, | |
| "step": 339 | |
| }, | |
| { | |
| "epoch": 1.5384615384615383, | |
| "grad_norm": 0.4163861870765686, | |
| "learning_rate": 9.911938687078324e-06, | |
| "loss": 0.3728, | |
| "step": 340 | |
| }, | |
| { | |
| "epoch": 1.5429864253393664, | |
| "grad_norm": 0.46085265278816223, | |
| "learning_rate": 9.91045689835669e-06, | |
| "loss": 0.3822, | |
| "step": 341 | |
| }, | |
| { | |
| "epoch": 1.5475113122171946, | |
| "grad_norm": 0.4126754105091095, | |
| "learning_rate": 9.908962859186465e-06, | |
| "loss": 0.3634, | |
| "step": 342 | |
| }, | |
| { | |
| "epoch": 1.5520361990950227, | |
| "grad_norm": 0.4993842542171478, | |
| "learning_rate": 9.907456573294924e-06, | |
| "loss": 0.3835, | |
| "step": 343 | |
| }, | |
| { | |
| "epoch": 1.5565610859728507, | |
| "grad_norm": 0.4884220063686371, | |
| "learning_rate": 9.905938044439904e-06, | |
| "loss": 0.436, | |
| "step": 344 | |
| }, | |
| { | |
| "epoch": 1.5610859728506787, | |
| "grad_norm": 0.3982529044151306, | |
| "learning_rate": 9.90440727640978e-06, | |
| "loss": 0.389, | |
| "step": 345 | |
| }, | |
| { | |
| "epoch": 1.5656108597285068, | |
| "grad_norm": 0.4887678921222687, | |
| "learning_rate": 9.902864273023465e-06, | |
| "loss": 0.344, | |
| "step": 346 | |
| }, | |
| { | |
| "epoch": 1.5701357466063348, | |
| "grad_norm": 0.4793776869773865, | |
| "learning_rate": 9.901309038130392e-06, | |
| "loss": 0.4078, | |
| "step": 347 | |
| }, | |
| { | |
| "epoch": 1.5746606334841629, | |
| "grad_norm": 0.44910818338394165, | |
| "learning_rate": 9.89974157561051e-06, | |
| "loss": 0.3894, | |
| "step": 348 | |
| }, | |
| { | |
| "epoch": 1.5791855203619911, | |
| "grad_norm": 0.45025259256362915, | |
| "learning_rate": 9.898161889374272e-06, | |
| "loss": 0.4182, | |
| "step": 349 | |
| }, | |
| { | |
| "epoch": 1.5837104072398192, | |
| "grad_norm": 0.45233941078186035, | |
| "learning_rate": 9.896569983362632e-06, | |
| "loss": 0.3846, | |
| "step": 350 | |
| }, | |
| { | |
| "epoch": 1.5882352941176472, | |
| "grad_norm": 0.4646618664264679, | |
| "learning_rate": 9.894965861547023e-06, | |
| "loss": 0.364, | |
| "step": 351 | |
| }, | |
| { | |
| "epoch": 1.5927601809954752, | |
| "grad_norm": 0.44090190529823303, | |
| "learning_rate": 9.893349527929357e-06, | |
| "loss": 0.3949, | |
| "step": 352 | |
| }, | |
| { | |
| "epoch": 1.5972850678733033, | |
| "grad_norm": 0.5286484360694885, | |
| "learning_rate": 9.891720986542011e-06, | |
| "loss": 0.4311, | |
| "step": 353 | |
| }, | |
| { | |
| "epoch": 1.6018099547511313, | |
| "grad_norm": 0.44429534673690796, | |
| "learning_rate": 9.890080241447816e-06, | |
| "loss": 0.3906, | |
| "step": 354 | |
| }, | |
| { | |
| "epoch": 1.6063348416289593, | |
| "grad_norm": 0.5068367123603821, | |
| "learning_rate": 9.888427296740054e-06, | |
| "loss": 0.4052, | |
| "step": 355 | |
| }, | |
| { | |
| "epoch": 1.6108597285067874, | |
| "grad_norm": 0.507785975933075, | |
| "learning_rate": 9.886762156542428e-06, | |
| "loss": 0.4063, | |
| "step": 356 | |
| }, | |
| { | |
| "epoch": 1.6153846153846154, | |
| "grad_norm": 0.49715733528137207, | |
| "learning_rate": 9.885084825009085e-06, | |
| "loss": 0.3965, | |
| "step": 357 | |
| }, | |
| { | |
| "epoch": 1.6199095022624435, | |
| "grad_norm": 0.4482433497905731, | |
| "learning_rate": 9.883395306324575e-06, | |
| "loss": 0.4066, | |
| "step": 358 | |
| }, | |
| { | |
| "epoch": 1.6244343891402715, | |
| "grad_norm": 0.41335636377334595, | |
| "learning_rate": 9.881693604703853e-06, | |
| "loss": 0.3674, | |
| "step": 359 | |
| }, | |
| { | |
| "epoch": 1.6289592760180995, | |
| "grad_norm": 0.5023950934410095, | |
| "learning_rate": 9.87997972439227e-06, | |
| "loss": 0.3444, | |
| "step": 360 | |
| }, | |
| { | |
| "epoch": 1.6334841628959276, | |
| "grad_norm": 0.5373045802116394, | |
| "learning_rate": 9.878253669665557e-06, | |
| "loss": 0.3853, | |
| "step": 361 | |
| }, | |
| { | |
| "epoch": 1.6380090497737556, | |
| "grad_norm": 0.4574486315250397, | |
| "learning_rate": 9.876515444829822e-06, | |
| "loss": 0.4254, | |
| "step": 362 | |
| }, | |
| { | |
| "epoch": 1.6425339366515836, | |
| "grad_norm": 0.5090571045875549, | |
| "learning_rate": 9.874765054221532e-06, | |
| "loss": 0.391, | |
| "step": 363 | |
| }, | |
| { | |
| "epoch": 1.6470588235294117, | |
| "grad_norm": 0.4528499245643616, | |
| "learning_rate": 9.873002502207502e-06, | |
| "loss": 0.4292, | |
| "step": 364 | |
| }, | |
| { | |
| "epoch": 1.6515837104072397, | |
| "grad_norm": 0.4229438304901123, | |
| "learning_rate": 9.871227793184893e-06, | |
| "loss": 0.3829, | |
| "step": 365 | |
| }, | |
| { | |
| "epoch": 1.6561085972850678, | |
| "grad_norm": 0.4995672404766083, | |
| "learning_rate": 9.869440931581186e-06, | |
| "loss": 0.3534, | |
| "step": 366 | |
| }, | |
| { | |
| "epoch": 1.6606334841628958, | |
| "grad_norm": 0.527503490447998, | |
| "learning_rate": 9.867641921854192e-06, | |
| "loss": 0.4057, | |
| "step": 367 | |
| }, | |
| { | |
| "epoch": 1.6651583710407238, | |
| "grad_norm": 0.4658501446247101, | |
| "learning_rate": 9.865830768492019e-06, | |
| "loss": 0.4088, | |
| "step": 368 | |
| }, | |
| { | |
| "epoch": 1.6696832579185519, | |
| "grad_norm": 0.5441451668739319, | |
| "learning_rate": 9.864007476013075e-06, | |
| "loss": 0.4149, | |
| "step": 369 | |
| }, | |
| { | |
| "epoch": 1.6742081447963801, | |
| "grad_norm": 0.48692771792411804, | |
| "learning_rate": 9.862172048966048e-06, | |
| "loss": 0.3818, | |
| "step": 370 | |
| }, | |
| { | |
| "epoch": 1.6787330316742082, | |
| "grad_norm": 0.40074610710144043, | |
| "learning_rate": 9.860324491929905e-06, | |
| "loss": 0.3657, | |
| "step": 371 | |
| }, | |
| { | |
| "epoch": 1.6832579185520362, | |
| "grad_norm": 0.4921663999557495, | |
| "learning_rate": 9.858464809513867e-06, | |
| "loss": 0.4022, | |
| "step": 372 | |
| }, | |
| { | |
| "epoch": 1.6877828054298643, | |
| "grad_norm": 0.46872252225875854, | |
| "learning_rate": 9.856593006357415e-06, | |
| "loss": 0.4099, | |
| "step": 373 | |
| }, | |
| { | |
| "epoch": 1.6923076923076923, | |
| "grad_norm": 0.5141183137893677, | |
| "learning_rate": 9.854709087130261e-06, | |
| "loss": 0.3961, | |
| "step": 374 | |
| }, | |
| { | |
| "epoch": 1.6968325791855203, | |
| "grad_norm": 0.46619221568107605, | |
| "learning_rate": 9.852813056532345e-06, | |
| "loss": 0.3172, | |
| "step": 375 | |
| }, | |
| { | |
| "epoch": 1.7013574660633484, | |
| "grad_norm": 0.5484828352928162, | |
| "learning_rate": 9.85090491929382e-06, | |
| "loss": 0.4182, | |
| "step": 376 | |
| }, | |
| { | |
| "epoch": 1.7058823529411766, | |
| "grad_norm": 0.5394891500473022, | |
| "learning_rate": 9.848984680175049e-06, | |
| "loss": 0.3898, | |
| "step": 377 | |
| }, | |
| { | |
| "epoch": 1.7104072398190047, | |
| "grad_norm": 0.48792046308517456, | |
| "learning_rate": 9.84705234396658e-06, | |
| "loss": 0.3373, | |
| "step": 378 | |
| }, | |
| { | |
| "epoch": 1.7149321266968327, | |
| "grad_norm": 0.3987761437892914, | |
| "learning_rate": 9.845107915489143e-06, | |
| "loss": 0.334, | |
| "step": 379 | |
| }, | |
| { | |
| "epoch": 1.7194570135746607, | |
| "grad_norm": 0.525193452835083, | |
| "learning_rate": 9.843151399593636e-06, | |
| "loss": 0.3718, | |
| "step": 380 | |
| }, | |
| { | |
| "epoch": 1.7239819004524888, | |
| "grad_norm": 0.48990345001220703, | |
| "learning_rate": 9.84118280116111e-06, | |
| "loss": 0.3763, | |
| "step": 381 | |
| }, | |
| { | |
| "epoch": 1.7285067873303168, | |
| "grad_norm": 0.4889325797557831, | |
| "learning_rate": 9.839202125102761e-06, | |
| "loss": 0.3785, | |
| "step": 382 | |
| }, | |
| { | |
| "epoch": 1.7330316742081449, | |
| "grad_norm": 0.5154342651367188, | |
| "learning_rate": 9.837209376359918e-06, | |
| "loss": 0.3883, | |
| "step": 383 | |
| }, | |
| { | |
| "epoch": 1.737556561085973, | |
| "grad_norm": 0.5224171280860901, | |
| "learning_rate": 9.835204559904021e-06, | |
| "loss": 0.3666, | |
| "step": 384 | |
| }, | |
| { | |
| "epoch": 1.742081447963801, | |
| "grad_norm": 0.5012906193733215, | |
| "learning_rate": 9.833187680736624e-06, | |
| "loss": 0.4093, | |
| "step": 385 | |
| }, | |
| { | |
| "epoch": 1.746606334841629, | |
| "grad_norm": 0.5155039429664612, | |
| "learning_rate": 9.831158743889373e-06, | |
| "loss": 0.3768, | |
| "step": 386 | |
| }, | |
| { | |
| "epoch": 1.751131221719457, | |
| "grad_norm": 0.44377192854881287, | |
| "learning_rate": 9.829117754423991e-06, | |
| "loss": 0.4136, | |
| "step": 387 | |
| }, | |
| { | |
| "epoch": 1.755656108597285, | |
| "grad_norm": 0.6181144714355469, | |
| "learning_rate": 9.827064717432272e-06, | |
| "loss": 0.4698, | |
| "step": 388 | |
| }, | |
| { | |
| "epoch": 1.760180995475113, | |
| "grad_norm": 0.5261528491973877, | |
| "learning_rate": 9.82499963803607e-06, | |
| "loss": 0.3924, | |
| "step": 389 | |
| }, | |
| { | |
| "epoch": 1.7647058823529411, | |
| "grad_norm": 0.43123725056648254, | |
| "learning_rate": 9.822922521387277e-06, | |
| "loss": 0.4207, | |
| "step": 390 | |
| }, | |
| { | |
| "epoch": 1.7692307692307692, | |
| "grad_norm": 0.4998953342437744, | |
| "learning_rate": 9.820833372667813e-06, | |
| "loss": 0.3933, | |
| "step": 391 | |
| }, | |
| { | |
| "epoch": 1.7737556561085972, | |
| "grad_norm": 0.5654891133308411, | |
| "learning_rate": 9.81873219708962e-06, | |
| "loss": 0.3732, | |
| "step": 392 | |
| }, | |
| { | |
| "epoch": 1.7782805429864252, | |
| "grad_norm": 0.4899405539035797, | |
| "learning_rate": 9.816618999894645e-06, | |
| "loss": 0.3752, | |
| "step": 393 | |
| }, | |
| { | |
| "epoch": 1.7828054298642533, | |
| "grad_norm": 0.5240674614906311, | |
| "learning_rate": 9.81449378635482e-06, | |
| "loss": 0.4596, | |
| "step": 394 | |
| }, | |
| { | |
| "epoch": 1.7873303167420813, | |
| "grad_norm": 0.46099403500556946, | |
| "learning_rate": 9.81235656177206e-06, | |
| "loss": 0.3748, | |
| "step": 395 | |
| }, | |
| { | |
| "epoch": 1.7918552036199094, | |
| "grad_norm": 0.46495547890663147, | |
| "learning_rate": 9.810207331478247e-06, | |
| "loss": 0.4091, | |
| "step": 396 | |
| }, | |
| { | |
| "epoch": 1.7963800904977374, | |
| "grad_norm": 0.5413225293159485, | |
| "learning_rate": 9.808046100835207e-06, | |
| "loss": 0.3903, | |
| "step": 397 | |
| }, | |
| { | |
| "epoch": 1.8009049773755657, | |
| "grad_norm": 0.5968965888023376, | |
| "learning_rate": 9.80587287523471e-06, | |
| "loss": 0.3925, | |
| "step": 398 | |
| }, | |
| { | |
| "epoch": 1.8054298642533937, | |
| "grad_norm": 0.46063658595085144, | |
| "learning_rate": 9.803687660098446e-06, | |
| "loss": 0.4055, | |
| "step": 399 | |
| }, | |
| { | |
| "epoch": 1.8099547511312217, | |
| "grad_norm": 0.4856792390346527, | |
| "learning_rate": 9.801490460878023e-06, | |
| "loss": 0.3594, | |
| "step": 400 | |
| }, | |
| { | |
| "epoch": 1.8144796380090498, | |
| "grad_norm": 0.4093053638935089, | |
| "learning_rate": 9.79928128305494e-06, | |
| "loss": 0.353, | |
| "step": 401 | |
| }, | |
| { | |
| "epoch": 1.8190045248868778, | |
| "grad_norm": 0.5286359786987305, | |
| "learning_rate": 9.797060132140584e-06, | |
| "loss": 0.3798, | |
| "step": 402 | |
| }, | |
| { | |
| "epoch": 1.8235294117647058, | |
| "grad_norm": 0.49250075221061707, | |
| "learning_rate": 9.794827013676206e-06, | |
| "loss": 0.412, | |
| "step": 403 | |
| }, | |
| { | |
| "epoch": 1.8280542986425339, | |
| "grad_norm": 0.43814513087272644, | |
| "learning_rate": 9.792581933232924e-06, | |
| "loss": 0.386, | |
| "step": 404 | |
| }, | |
| { | |
| "epoch": 1.8325791855203621, | |
| "grad_norm": 0.4019566774368286, | |
| "learning_rate": 9.790324896411684e-06, | |
| "loss": 0.3668, | |
| "step": 405 | |
| }, | |
| { | |
| "epoch": 1.8371040723981902, | |
| "grad_norm": 0.44960689544677734, | |
| "learning_rate": 9.788055908843275e-06, | |
| "loss": 0.3761, | |
| "step": 406 | |
| }, | |
| { | |
| "epoch": 1.8416289592760182, | |
| "grad_norm": 0.40887507796287537, | |
| "learning_rate": 9.78577497618829e-06, | |
| "loss": 0.3824, | |
| "step": 407 | |
| }, | |
| { | |
| "epoch": 1.8461538461538463, | |
| "grad_norm": 0.5183314085006714, | |
| "learning_rate": 9.783482104137127e-06, | |
| "loss": 0.3466, | |
| "step": 408 | |
| }, | |
| { | |
| "epoch": 1.8506787330316743, | |
| "grad_norm": 0.4178057312965393, | |
| "learning_rate": 9.781177298409969e-06, | |
| "loss": 0.3428, | |
| "step": 409 | |
| }, | |
| { | |
| "epoch": 1.8552036199095023, | |
| "grad_norm": 0.45295459032058716, | |
| "learning_rate": 9.778860564756769e-06, | |
| "loss": 0.3886, | |
| "step": 410 | |
| }, | |
| { | |
| "epoch": 1.8597285067873304, | |
| "grad_norm": 0.5001879334449768, | |
| "learning_rate": 9.776531908957241e-06, | |
| "loss": 0.4219, | |
| "step": 411 | |
| }, | |
| { | |
| "epoch": 1.8642533936651584, | |
| "grad_norm": 0.47920331358909607, | |
| "learning_rate": 9.77419133682084e-06, | |
| "loss": 0.4203, | |
| "step": 412 | |
| }, | |
| { | |
| "epoch": 1.8687782805429864, | |
| "grad_norm": 0.48827502131462097, | |
| "learning_rate": 9.771838854186748e-06, | |
| "loss": 0.4053, | |
| "step": 413 | |
| }, | |
| { | |
| "epoch": 1.8733031674208145, | |
| "grad_norm": 0.42105311155319214, | |
| "learning_rate": 9.769474466923864e-06, | |
| "loss": 0.3749, | |
| "step": 414 | |
| }, | |
| { | |
| "epoch": 1.8778280542986425, | |
| "grad_norm": 0.41811907291412354, | |
| "learning_rate": 9.76709818093078e-06, | |
| "loss": 0.3489, | |
| "step": 415 | |
| }, | |
| { | |
| "epoch": 1.8823529411764706, | |
| "grad_norm": 0.42248275876045227, | |
| "learning_rate": 9.764710002135784e-06, | |
| "loss": 0.4341, | |
| "step": 416 | |
| }, | |
| { | |
| "epoch": 1.8868778280542986, | |
| "grad_norm": 0.4599759578704834, | |
| "learning_rate": 9.762309936496824e-06, | |
| "loss": 0.3931, | |
| "step": 417 | |
| }, | |
| { | |
| "epoch": 1.8914027149321266, | |
| "grad_norm": 0.47709447145462036, | |
| "learning_rate": 9.759897990001504e-06, | |
| "loss": 0.3697, | |
| "step": 418 | |
| }, | |
| { | |
| "epoch": 1.8959276018099547, | |
| "grad_norm": 0.44299864768981934, | |
| "learning_rate": 9.757474168667072e-06, | |
| "loss": 0.4235, | |
| "step": 419 | |
| }, | |
| { | |
| "epoch": 1.9004524886877827, | |
| "grad_norm": 0.45147445797920227, | |
| "learning_rate": 9.7550384785404e-06, | |
| "loss": 0.3915, | |
| "step": 420 | |
| }, | |
| { | |
| "epoch": 1.9049773755656108, | |
| "grad_norm": 0.4743190109729767, | |
| "learning_rate": 9.752590925697964e-06, | |
| "loss": 0.4003, | |
| "step": 421 | |
| }, | |
| { | |
| "epoch": 1.9095022624434388, | |
| "grad_norm": 0.515183687210083, | |
| "learning_rate": 9.750131516245844e-06, | |
| "loss": 0.4147, | |
| "step": 422 | |
| }, | |
| { | |
| "epoch": 1.9140271493212668, | |
| "grad_norm": 0.4393065273761749, | |
| "learning_rate": 9.747660256319695e-06, | |
| "loss": 0.4065, | |
| "step": 423 | |
| }, | |
| { | |
| "epoch": 1.9185520361990949, | |
| "grad_norm": 0.4030460715293884, | |
| "learning_rate": 9.745177152084733e-06, | |
| "loss": 0.3824, | |
| "step": 424 | |
| }, | |
| { | |
| "epoch": 1.9230769230769231, | |
| "grad_norm": 0.47524887323379517, | |
| "learning_rate": 9.742682209735727e-06, | |
| "loss": 0.4089, | |
| "step": 425 | |
| }, | |
| { | |
| "epoch": 1.9276018099547512, | |
| "grad_norm": 0.4686073660850525, | |
| "learning_rate": 9.74017543549698e-06, | |
| "loss": 0.3935, | |
| "step": 426 | |
| }, | |
| { | |
| "epoch": 1.9321266968325792, | |
| "grad_norm": 0.3981013894081116, | |
| "learning_rate": 9.737656835622312e-06, | |
| "loss": 0.3755, | |
| "step": 427 | |
| }, | |
| { | |
| "epoch": 1.9366515837104072, | |
| "grad_norm": 0.46402475237846375, | |
| "learning_rate": 9.73512641639504e-06, | |
| "loss": 0.3673, | |
| "step": 428 | |
| }, | |
| { | |
| "epoch": 1.9411764705882353, | |
| "grad_norm": 0.502984881401062, | |
| "learning_rate": 9.732584184127973e-06, | |
| "loss": 0.3981, | |
| "step": 429 | |
| }, | |
| { | |
| "epoch": 1.9457013574660633, | |
| "grad_norm": 0.42585644125938416, | |
| "learning_rate": 9.730030145163392e-06, | |
| "loss": 0.4047, | |
| "step": 430 | |
| }, | |
| { | |
| "epoch": 1.9502262443438914, | |
| "grad_norm": 0.39187467098236084, | |
| "learning_rate": 9.72746430587303e-06, | |
| "loss": 0.391, | |
| "step": 431 | |
| }, | |
| { | |
| "epoch": 1.9547511312217196, | |
| "grad_norm": 0.3843924105167389, | |
| "learning_rate": 9.724886672658056e-06, | |
| "loss": 0.4038, | |
| "step": 432 | |
| }, | |
| { | |
| "epoch": 1.9592760180995477, | |
| "grad_norm": 0.47524020075798035, | |
| "learning_rate": 9.72229725194907e-06, | |
| "loss": 0.4075, | |
| "step": 433 | |
| }, | |
| { | |
| "epoch": 1.9638009049773757, | |
| "grad_norm": 0.46401557326316833, | |
| "learning_rate": 9.719696050206072e-06, | |
| "loss": 0.3564, | |
| "step": 434 | |
| }, | |
| { | |
| "epoch": 1.9683257918552037, | |
| "grad_norm": 0.4504929482936859, | |
| "learning_rate": 9.717083073918459e-06, | |
| "loss": 0.401, | |
| "step": 435 | |
| }, | |
| { | |
| "epoch": 1.9728506787330318, | |
| "grad_norm": 0.4272751212120056, | |
| "learning_rate": 9.714458329604997e-06, | |
| "loss": 0.4096, | |
| "step": 436 | |
| }, | |
| { | |
| "epoch": 1.9773755656108598, | |
| "grad_norm": 0.4029974341392517, | |
| "learning_rate": 9.711821823813812e-06, | |
| "loss": 0.4362, | |
| "step": 437 | |
| }, | |
| { | |
| "epoch": 1.9819004524886878, | |
| "grad_norm": 0.4730750322341919, | |
| "learning_rate": 9.709173563122377e-06, | |
| "loss": 0.3892, | |
| "step": 438 | |
| }, | |
| { | |
| "epoch": 1.9864253393665159, | |
| "grad_norm": 0.4282892644405365, | |
| "learning_rate": 9.706513554137482e-06, | |
| "loss": 0.4198, | |
| "step": 439 | |
| }, | |
| { | |
| "epoch": 1.990950226244344, | |
| "grad_norm": 0.4743562340736389, | |
| "learning_rate": 9.703841803495234e-06, | |
| "loss": 0.4141, | |
| "step": 440 | |
| }, | |
| { | |
| "epoch": 1.995475113122172, | |
| "grad_norm": 0.4112869203090668, | |
| "learning_rate": 9.701158317861031e-06, | |
| "loss": 0.3836, | |
| "step": 441 | |
| }, | |
| { | |
| "epoch": 2.0, | |
| "grad_norm": 0.41886061429977417, | |
| "learning_rate": 9.698463103929542e-06, | |
| "loss": 0.3317, | |
| "step": 442 | |
| }, | |
| { | |
| "epoch": 2.004524886877828, | |
| "grad_norm": 0.5465108752250671, | |
| "learning_rate": 9.695756168424703e-06, | |
| "loss": 0.3391, | |
| "step": 443 | |
| }, | |
| { | |
| "epoch": 2.009049773755656, | |
| "grad_norm": 0.4423621892929077, | |
| "learning_rate": 9.693037518099689e-06, | |
| "loss": 0.2949, | |
| "step": 444 | |
| }, | |
| { | |
| "epoch": 2.013574660633484, | |
| "grad_norm": 0.43876439332962036, | |
| "learning_rate": 9.690307159736899e-06, | |
| "loss": 0.3114, | |
| "step": 445 | |
| }, | |
| { | |
| "epoch": 2.018099547511312, | |
| "grad_norm": 0.5939359664916992, | |
| "learning_rate": 9.68756510014794e-06, | |
| "loss": 0.3299, | |
| "step": 446 | |
| }, | |
| { | |
| "epoch": 2.02262443438914, | |
| "grad_norm": 0.5051030516624451, | |
| "learning_rate": 9.684811346173617e-06, | |
| "loss": 0.2708, | |
| "step": 447 | |
| }, | |
| { | |
| "epoch": 2.0271493212669682, | |
| "grad_norm": 0.46658316254615784, | |
| "learning_rate": 9.682045904683905e-06, | |
| "loss": 0.2798, | |
| "step": 448 | |
| }, | |
| { | |
| "epoch": 2.0316742081447963, | |
| "grad_norm": 0.5176125168800354, | |
| "learning_rate": 9.67926878257794e-06, | |
| "loss": 0.3084, | |
| "step": 449 | |
| }, | |
| { | |
| "epoch": 2.0361990950226243, | |
| "grad_norm": 0.5656089186668396, | |
| "learning_rate": 9.676479986783992e-06, | |
| "loss": 0.3079, | |
| "step": 450 | |
| }, | |
| { | |
| "epoch": 2.0407239819004523, | |
| "grad_norm": 0.47683894634246826, | |
| "learning_rate": 9.67367952425946e-06, | |
| "loss": 0.2702, | |
| "step": 451 | |
| }, | |
| { | |
| "epoch": 2.0452488687782804, | |
| "grad_norm": 0.4187275767326355, | |
| "learning_rate": 9.67086740199085e-06, | |
| "loss": 0.2688, | |
| "step": 452 | |
| }, | |
| { | |
| "epoch": 2.0497737556561084, | |
| "grad_norm": 0.5175010561943054, | |
| "learning_rate": 9.668043626993748e-06, | |
| "loss": 0.3128, | |
| "step": 453 | |
| }, | |
| { | |
| "epoch": 2.0542986425339365, | |
| "grad_norm": 0.43686679005622864, | |
| "learning_rate": 9.66520820631282e-06, | |
| "loss": 0.2665, | |
| "step": 454 | |
| }, | |
| { | |
| "epoch": 2.0588235294117645, | |
| "grad_norm": 0.39973190426826477, | |
| "learning_rate": 9.66236114702178e-06, | |
| "loss": 0.2743, | |
| "step": 455 | |
| }, | |
| { | |
| "epoch": 2.0633484162895925, | |
| "grad_norm": 0.3550076186656952, | |
| "learning_rate": 9.65950245622338e-06, | |
| "loss": 0.2471, | |
| "step": 456 | |
| }, | |
| { | |
| "epoch": 2.067873303167421, | |
| "grad_norm": 0.408397376537323, | |
| "learning_rate": 9.65663214104939e-06, | |
| "loss": 0.2556, | |
| "step": 457 | |
| }, | |
| { | |
| "epoch": 2.072398190045249, | |
| "grad_norm": 0.4247797131538391, | |
| "learning_rate": 9.653750208660577e-06, | |
| "loss": 0.2887, | |
| "step": 458 | |
| }, | |
| { | |
| "epoch": 2.076923076923077, | |
| "grad_norm": 0.4115331172943115, | |
| "learning_rate": 9.650856666246693e-06, | |
| "loss": 0.285, | |
| "step": 459 | |
| }, | |
| { | |
| "epoch": 2.081447963800905, | |
| "grad_norm": 0.40403059124946594, | |
| "learning_rate": 9.647951521026453e-06, | |
| "loss": 0.2477, | |
| "step": 460 | |
| }, | |
| { | |
| "epoch": 2.085972850678733, | |
| "grad_norm": 0.42889419198036194, | |
| "learning_rate": 9.645034780247521e-06, | |
| "loss": 0.2836, | |
| "step": 461 | |
| }, | |
| { | |
| "epoch": 2.090497737556561, | |
| "grad_norm": 0.39554914832115173, | |
| "learning_rate": 9.642106451186489e-06, | |
| "loss": 0.2984, | |
| "step": 462 | |
| }, | |
| { | |
| "epoch": 2.0950226244343892, | |
| "grad_norm": 0.4248838722705841, | |
| "learning_rate": 9.639166541148855e-06, | |
| "loss": 0.2922, | |
| "step": 463 | |
| }, | |
| { | |
| "epoch": 2.0995475113122173, | |
| "grad_norm": 0.39132872223854065, | |
| "learning_rate": 9.636215057469009e-06, | |
| "loss": 0.3063, | |
| "step": 464 | |
| }, | |
| { | |
| "epoch": 2.1040723981900453, | |
| "grad_norm": 0.40757620334625244, | |
| "learning_rate": 9.63325200751022e-06, | |
| "loss": 0.2808, | |
| "step": 465 | |
| }, | |
| { | |
| "epoch": 2.1085972850678734, | |
| "grad_norm": 0.4235598146915436, | |
| "learning_rate": 9.630277398664605e-06, | |
| "loss": 0.2934, | |
| "step": 466 | |
| }, | |
| { | |
| "epoch": 2.1131221719457014, | |
| "grad_norm": 0.38248857855796814, | |
| "learning_rate": 9.627291238353127e-06, | |
| "loss": 0.2453, | |
| "step": 467 | |
| }, | |
| { | |
| "epoch": 2.1176470588235294, | |
| "grad_norm": 0.4244171679019928, | |
| "learning_rate": 9.62429353402556e-06, | |
| "loss": 0.2371, | |
| "step": 468 | |
| }, | |
| { | |
| "epoch": 2.1221719457013575, | |
| "grad_norm": 0.41888427734375, | |
| "learning_rate": 9.621284293160478e-06, | |
| "loss": 0.3136, | |
| "step": 469 | |
| }, | |
| { | |
| "epoch": 2.1266968325791855, | |
| "grad_norm": 0.3960363566875458, | |
| "learning_rate": 9.618263523265238e-06, | |
| "loss": 0.2585, | |
| "step": 470 | |
| }, | |
| { | |
| "epoch": 2.1312217194570136, | |
| "grad_norm": 0.4441540241241455, | |
| "learning_rate": 9.61523123187596e-06, | |
| "loss": 0.2777, | |
| "step": 471 | |
| }, | |
| { | |
| "epoch": 2.1357466063348416, | |
| "grad_norm": 0.4612972140312195, | |
| "learning_rate": 9.612187426557505e-06, | |
| "loss": 0.3011, | |
| "step": 472 | |
| }, | |
| { | |
| "epoch": 2.1402714932126696, | |
| "grad_norm": 0.49506399035453796, | |
| "learning_rate": 9.609132114903458e-06, | |
| "loss": 0.289, | |
| "step": 473 | |
| }, | |
| { | |
| "epoch": 2.1447963800904977, | |
| "grad_norm": 0.45658251643180847, | |
| "learning_rate": 9.606065304536116e-06, | |
| "loss": 0.3009, | |
| "step": 474 | |
| }, | |
| { | |
| "epoch": 2.1493212669683257, | |
| "grad_norm": 0.47934991121292114, | |
| "learning_rate": 9.602987003106455e-06, | |
| "loss": 0.2878, | |
| "step": 475 | |
| }, | |
| { | |
| "epoch": 2.1538461538461537, | |
| "grad_norm": 0.3920869529247284, | |
| "learning_rate": 9.599897218294122e-06, | |
| "loss": 0.2379, | |
| "step": 476 | |
| }, | |
| { | |
| "epoch": 2.158371040723982, | |
| "grad_norm": 0.46996328234672546, | |
| "learning_rate": 9.596795957807412e-06, | |
| "loss": 0.287, | |
| "step": 477 | |
| }, | |
| { | |
| "epoch": 2.16289592760181, | |
| "grad_norm": 0.4109254777431488, | |
| "learning_rate": 9.593683229383249e-06, | |
| "loss": 0.2708, | |
| "step": 478 | |
| }, | |
| { | |
| "epoch": 2.167420814479638, | |
| "grad_norm": 0.37143900990486145, | |
| "learning_rate": 9.590559040787168e-06, | |
| "loss": 0.2497, | |
| "step": 479 | |
| }, | |
| { | |
| "epoch": 2.171945701357466, | |
| "grad_norm": 0.42445284128189087, | |
| "learning_rate": 9.587423399813292e-06, | |
| "loss": 0.2806, | |
| "step": 480 | |
| }, | |
| { | |
| "epoch": 2.176470588235294, | |
| "grad_norm": 0.3920595645904541, | |
| "learning_rate": 9.584276314284316e-06, | |
| "loss": 0.265, | |
| "step": 481 | |
| }, | |
| { | |
| "epoch": 2.180995475113122, | |
| "grad_norm": 0.46008458733558655, | |
| "learning_rate": 9.581117792051487e-06, | |
| "loss": 0.3138, | |
| "step": 482 | |
| }, | |
| { | |
| "epoch": 2.1855203619909505, | |
| "grad_norm": 0.39173129200935364, | |
| "learning_rate": 9.577947840994585e-06, | |
| "loss": 0.2933, | |
| "step": 483 | |
| }, | |
| { | |
| "epoch": 2.1900452488687785, | |
| "grad_norm": 0.37566521763801575, | |
| "learning_rate": 9.574766469021901e-06, | |
| "loss": 0.2465, | |
| "step": 484 | |
| }, | |
| { | |
| "epoch": 2.1945701357466065, | |
| "grad_norm": 0.43578964471817017, | |
| "learning_rate": 9.57157368407022e-06, | |
| "loss": 0.2658, | |
| "step": 485 | |
| }, | |
| { | |
| "epoch": 2.1990950226244346, | |
| "grad_norm": 0.4081728160381317, | |
| "learning_rate": 9.568369494104793e-06, | |
| "loss": 0.2881, | |
| "step": 486 | |
| }, | |
| { | |
| "epoch": 2.2036199095022626, | |
| "grad_norm": 0.3810422718524933, | |
| "learning_rate": 9.565153907119336e-06, | |
| "loss": 0.2875, | |
| "step": 487 | |
| }, | |
| { | |
| "epoch": 2.2081447963800906, | |
| "grad_norm": 0.631112277507782, | |
| "learning_rate": 9.561926931135985e-06, | |
| "loss": 0.3056, | |
| "step": 488 | |
| }, | |
| { | |
| "epoch": 2.2126696832579187, | |
| "grad_norm": 0.4002344310283661, | |
| "learning_rate": 9.5586885742053e-06, | |
| "loss": 0.2806, | |
| "step": 489 | |
| }, | |
| { | |
| "epoch": 2.2171945701357467, | |
| "grad_norm": 0.46340012550354004, | |
| "learning_rate": 9.555438844406226e-06, | |
| "loss": 0.2862, | |
| "step": 490 | |
| }, | |
| { | |
| "epoch": 2.2217194570135748, | |
| "grad_norm": 0.4309289753437042, | |
| "learning_rate": 9.552177749846083e-06, | |
| "loss": 0.2899, | |
| "step": 491 | |
| }, | |
| { | |
| "epoch": 2.226244343891403, | |
| "grad_norm": 0.4052945375442505, | |
| "learning_rate": 9.548905298660547e-06, | |
| "loss": 0.257, | |
| "step": 492 | |
| }, | |
| { | |
| "epoch": 2.230769230769231, | |
| "grad_norm": 0.445239782333374, | |
| "learning_rate": 9.54562149901362e-06, | |
| "loss": 0.2582, | |
| "step": 493 | |
| }, | |
| { | |
| "epoch": 2.235294117647059, | |
| "grad_norm": 0.4149002730846405, | |
| "learning_rate": 9.542326359097619e-06, | |
| "loss": 0.2558, | |
| "step": 494 | |
| }, | |
| { | |
| "epoch": 2.239819004524887, | |
| "grad_norm": 0.5213009715080261, | |
| "learning_rate": 9.539019887133154e-06, | |
| "loss": 0.3235, | |
| "step": 495 | |
| }, | |
| { | |
| "epoch": 2.244343891402715, | |
| "grad_norm": 0.4313045144081116, | |
| "learning_rate": 9.535702091369103e-06, | |
| "loss": 0.308, | |
| "step": 496 | |
| }, | |
| { | |
| "epoch": 2.248868778280543, | |
| "grad_norm": 0.4291783273220062, | |
| "learning_rate": 9.532372980082598e-06, | |
| "loss": 0.273, | |
| "step": 497 | |
| }, | |
| { | |
| "epoch": 2.253393665158371, | |
| "grad_norm": 0.4001004099845886, | |
| "learning_rate": 9.529032561578992e-06, | |
| "loss": 0.282, | |
| "step": 498 | |
| }, | |
| { | |
| "epoch": 2.257918552036199, | |
| "grad_norm": 0.46362540125846863, | |
| "learning_rate": 9.525680844191858e-06, | |
| "loss": 0.3265, | |
| "step": 499 | |
| }, | |
| { | |
| "epoch": 2.262443438914027, | |
| "grad_norm": 0.43766117095947266, | |
| "learning_rate": 9.522317836282949e-06, | |
| "loss": 0.3059, | |
| "step": 500 | |
| }, | |
| { | |
| "epoch": 2.266968325791855, | |
| "grad_norm": 0.4003988206386566, | |
| "learning_rate": 9.518943546242189e-06, | |
| "loss": 0.3059, | |
| "step": 501 | |
| }, | |
| { | |
| "epoch": 2.271493212669683, | |
| "grad_norm": 0.3825046420097351, | |
| "learning_rate": 9.515557982487647e-06, | |
| "loss": 0.2901, | |
| "step": 502 | |
| }, | |
| { | |
| "epoch": 2.276018099547511, | |
| "grad_norm": 0.4281090199947357, | |
| "learning_rate": 9.512161153465518e-06, | |
| "loss": 0.3094, | |
| "step": 503 | |
| }, | |
| { | |
| "epoch": 2.2805429864253393, | |
| "grad_norm": 0.4918365478515625, | |
| "learning_rate": 9.5087530676501e-06, | |
| "loss": 0.3387, | |
| "step": 504 | |
| }, | |
| { | |
| "epoch": 2.2850678733031673, | |
| "grad_norm": 0.38389474153518677, | |
| "learning_rate": 9.505333733543777e-06, | |
| "loss": 0.2812, | |
| "step": 505 | |
| }, | |
| { | |
| "epoch": 2.2895927601809953, | |
| "grad_norm": 0.5115017294883728, | |
| "learning_rate": 9.501903159676993e-06, | |
| "loss": 0.2665, | |
| "step": 506 | |
| }, | |
| { | |
| "epoch": 2.2941176470588234, | |
| "grad_norm": 0.385798841714859, | |
| "learning_rate": 9.498461354608228e-06, | |
| "loss": 0.2978, | |
| "step": 507 | |
| }, | |
| { | |
| "epoch": 2.2986425339366514, | |
| "grad_norm": 0.4701876938343048, | |
| "learning_rate": 9.495008326923989e-06, | |
| "loss": 0.3222, | |
| "step": 508 | |
| }, | |
| { | |
| "epoch": 2.3031674208144794, | |
| "grad_norm": 0.37049078941345215, | |
| "learning_rate": 9.491544085238778e-06, | |
| "loss": 0.2672, | |
| "step": 509 | |
| }, | |
| { | |
| "epoch": 2.3076923076923075, | |
| "grad_norm": 0.4914953410625458, | |
| "learning_rate": 9.488068638195072e-06, | |
| "loss": 0.3117, | |
| "step": 510 | |
| }, | |
| { | |
| "epoch": 2.3122171945701355, | |
| "grad_norm": 0.3973662853240967, | |
| "learning_rate": 9.484581994463303e-06, | |
| "loss": 0.2915, | |
| "step": 511 | |
| }, | |
| { | |
| "epoch": 2.3167420814479636, | |
| "grad_norm": 0.3646693825721741, | |
| "learning_rate": 9.481084162741835e-06, | |
| "loss": 0.2562, | |
| "step": 512 | |
| }, | |
| { | |
| "epoch": 2.321266968325792, | |
| "grad_norm": 0.3842124938964844, | |
| "learning_rate": 9.477575151756951e-06, | |
| "loss": 0.263, | |
| "step": 513 | |
| }, | |
| { | |
| "epoch": 2.32579185520362, | |
| "grad_norm": 0.4494537115097046, | |
| "learning_rate": 9.474054970262811e-06, | |
| "loss": 0.3218, | |
| "step": 514 | |
| }, | |
| { | |
| "epoch": 2.330316742081448, | |
| "grad_norm": 0.46450093388557434, | |
| "learning_rate": 9.470523627041452e-06, | |
| "loss": 0.2917, | |
| "step": 515 | |
| }, | |
| { | |
| "epoch": 2.334841628959276, | |
| "grad_norm": 0.4324088394641876, | |
| "learning_rate": 9.466981130902758e-06, | |
| "loss": 0.2871, | |
| "step": 516 | |
| }, | |
| { | |
| "epoch": 2.339366515837104, | |
| "grad_norm": 0.4018588066101074, | |
| "learning_rate": 9.46342749068443e-06, | |
| "loss": 0.2783, | |
| "step": 517 | |
| }, | |
| { | |
| "epoch": 2.3438914027149322, | |
| "grad_norm": 0.39708182215690613, | |
| "learning_rate": 9.459862715251973e-06, | |
| "loss": 0.2681, | |
| "step": 518 | |
| }, | |
| { | |
| "epoch": 2.3484162895927603, | |
| "grad_norm": 0.403104305267334, | |
| "learning_rate": 9.456286813498677e-06, | |
| "loss": 0.276, | |
| "step": 519 | |
| }, | |
| { | |
| "epoch": 2.3529411764705883, | |
| "grad_norm": 0.365902304649353, | |
| "learning_rate": 9.452699794345583e-06, | |
| "loss": 0.2397, | |
| "step": 520 | |
| }, | |
| { | |
| "epoch": 2.3574660633484164, | |
| "grad_norm": 0.39722907543182373, | |
| "learning_rate": 9.44910166674147e-06, | |
| "loss": 0.2581, | |
| "step": 521 | |
| }, | |
| { | |
| "epoch": 2.3619909502262444, | |
| "grad_norm": 0.3249555230140686, | |
| "learning_rate": 9.445492439662832e-06, | |
| "loss": 0.2592, | |
| "step": 522 | |
| }, | |
| { | |
| "epoch": 2.3665158371040724, | |
| "grad_norm": 0.36296772956848145, | |
| "learning_rate": 9.441872122113848e-06, | |
| "loss": 0.3029, | |
| "step": 523 | |
| }, | |
| { | |
| "epoch": 2.3710407239819005, | |
| "grad_norm": 0.44991156458854675, | |
| "learning_rate": 9.438240723126376e-06, | |
| "loss": 0.2628, | |
| "step": 524 | |
| }, | |
| { | |
| "epoch": 2.3755656108597285, | |
| "grad_norm": 0.37070232629776, | |
| "learning_rate": 9.434598251759904e-06, | |
| "loss": 0.3199, | |
| "step": 525 | |
| }, | |
| { | |
| "epoch": 2.3800904977375565, | |
| "grad_norm": 0.45003408193588257, | |
| "learning_rate": 9.430944717101556e-06, | |
| "loss": 0.2697, | |
| "step": 526 | |
| }, | |
| { | |
| "epoch": 2.3846153846153846, | |
| "grad_norm": 0.49059706926345825, | |
| "learning_rate": 9.427280128266049e-06, | |
| "loss": 0.3067, | |
| "step": 527 | |
| }, | |
| { | |
| "epoch": 2.3891402714932126, | |
| "grad_norm": 0.3652385175228119, | |
| "learning_rate": 9.423604494395686e-06, | |
| "loss": 0.336, | |
| "step": 528 | |
| }, | |
| { | |
| "epoch": 2.3936651583710407, | |
| "grad_norm": 0.37585943937301636, | |
| "learning_rate": 9.419917824660311e-06, | |
| "loss": 0.297, | |
| "step": 529 | |
| }, | |
| { | |
| "epoch": 2.3981900452488687, | |
| "grad_norm": 0.4154417812824249, | |
| "learning_rate": 9.416220128257317e-06, | |
| "loss": 0.3214, | |
| "step": 530 | |
| }, | |
| { | |
| "epoch": 2.4027149321266967, | |
| "grad_norm": 0.3846287131309509, | |
| "learning_rate": 9.41251141441159e-06, | |
| "loss": 0.3087, | |
| "step": 531 | |
| }, | |
| { | |
| "epoch": 2.4072398190045248, | |
| "grad_norm": 0.3685553967952728, | |
| "learning_rate": 9.408791692375511e-06, | |
| "loss": 0.2663, | |
| "step": 532 | |
| }, | |
| { | |
| "epoch": 2.411764705882353, | |
| "grad_norm": 0.3650501072406769, | |
| "learning_rate": 9.405060971428924e-06, | |
| "loss": 0.2475, | |
| "step": 533 | |
| }, | |
| { | |
| "epoch": 2.416289592760181, | |
| "grad_norm": 0.4251144230365753, | |
| "learning_rate": 9.401319260879107e-06, | |
| "loss": 0.346, | |
| "step": 534 | |
| }, | |
| { | |
| "epoch": 2.420814479638009, | |
| "grad_norm": 0.3708731532096863, | |
| "learning_rate": 9.397566570060761e-06, | |
| "loss": 0.252, | |
| "step": 535 | |
| }, | |
| { | |
| "epoch": 2.425339366515837, | |
| "grad_norm": 0.3827893137931824, | |
| "learning_rate": 9.393802908335978e-06, | |
| "loss": 0.284, | |
| "step": 536 | |
| }, | |
| { | |
| "epoch": 2.4298642533936654, | |
| "grad_norm": 0.401019424200058, | |
| "learning_rate": 9.390028285094215e-06, | |
| "loss": 0.2954, | |
| "step": 537 | |
| }, | |
| { | |
| "epoch": 2.4343891402714934, | |
| "grad_norm": 0.4581804871559143, | |
| "learning_rate": 9.386242709752283e-06, | |
| "loss": 0.3333, | |
| "step": 538 | |
| }, | |
| { | |
| "epoch": 2.4389140271493215, | |
| "grad_norm": 0.3979438245296478, | |
| "learning_rate": 9.382446191754313e-06, | |
| "loss": 0.2901, | |
| "step": 539 | |
| }, | |
| { | |
| "epoch": 2.4434389140271495, | |
| "grad_norm": 0.3686637580394745, | |
| "learning_rate": 9.378638740571733e-06, | |
| "loss": 0.275, | |
| "step": 540 | |
| }, | |
| { | |
| "epoch": 2.4479638009049776, | |
| "grad_norm": 0.42701080441474915, | |
| "learning_rate": 9.37482036570325e-06, | |
| "loss": 0.3166, | |
| "step": 541 | |
| }, | |
| { | |
| "epoch": 2.4524886877828056, | |
| "grad_norm": 0.39671456813812256, | |
| "learning_rate": 9.370991076674821e-06, | |
| "loss": 0.2416, | |
| "step": 542 | |
| }, | |
| { | |
| "epoch": 2.4570135746606336, | |
| "grad_norm": 0.4166645109653473, | |
| "learning_rate": 9.36715088303963e-06, | |
| "loss": 0.2876, | |
| "step": 543 | |
| }, | |
| { | |
| "epoch": 2.4615384615384617, | |
| "grad_norm": 0.4189673066139221, | |
| "learning_rate": 9.363299794378072e-06, | |
| "loss": 0.2872, | |
| "step": 544 | |
| }, | |
| { | |
| "epoch": 2.4660633484162897, | |
| "grad_norm": 0.4147026836872101, | |
| "learning_rate": 9.359437820297716e-06, | |
| "loss": 0.2781, | |
| "step": 545 | |
| }, | |
| { | |
| "epoch": 2.4705882352941178, | |
| "grad_norm": 0.40569254755973816, | |
| "learning_rate": 9.355564970433288e-06, | |
| "loss": 0.2911, | |
| "step": 546 | |
| }, | |
| { | |
| "epoch": 2.475113122171946, | |
| "grad_norm": 0.40611332654953003, | |
| "learning_rate": 9.351681254446651e-06, | |
| "loss": 0.2691, | |
| "step": 547 | |
| }, | |
| { | |
| "epoch": 2.479638009049774, | |
| "grad_norm": 0.3574923276901245, | |
| "learning_rate": 9.347786682026774e-06, | |
| "loss": 0.2476, | |
| "step": 548 | |
| }, | |
| { | |
| "epoch": 2.484162895927602, | |
| "grad_norm": 0.44315120577812195, | |
| "learning_rate": 9.343881262889706e-06, | |
| "loss": 0.2976, | |
| "step": 549 | |
| }, | |
| { | |
| "epoch": 2.48868778280543, | |
| "grad_norm": 0.45607924461364746, | |
| "learning_rate": 9.339965006778564e-06, | |
| "loss": 0.2874, | |
| "step": 550 | |
| }, | |
| { | |
| "epoch": 2.493212669683258, | |
| "grad_norm": 0.4020167291164398, | |
| "learning_rate": 9.336037923463494e-06, | |
| "loss": 0.2975, | |
| "step": 551 | |
| }, | |
| { | |
| "epoch": 2.497737556561086, | |
| "grad_norm": 0.3714078962802887, | |
| "learning_rate": 9.332100022741657e-06, | |
| "loss": 0.276, | |
| "step": 552 | |
| }, | |
| { | |
| "epoch": 2.502262443438914, | |
| "grad_norm": 0.4652123749256134, | |
| "learning_rate": 9.328151314437201e-06, | |
| "loss": 0.3213, | |
| "step": 553 | |
| }, | |
| { | |
| "epoch": 2.506787330316742, | |
| "grad_norm": 0.4106157124042511, | |
| "learning_rate": 9.324191808401235e-06, | |
| "loss": 0.3131, | |
| "step": 554 | |
| }, | |
| { | |
| "epoch": 2.51131221719457, | |
| "grad_norm": 0.41118085384368896, | |
| "learning_rate": 9.320221514511805e-06, | |
| "loss": 0.3477, | |
| "step": 555 | |
| }, | |
| { | |
| "epoch": 2.515837104072398, | |
| "grad_norm": 0.39603039622306824, | |
| "learning_rate": 9.316240442673872e-06, | |
| "loss": 0.3234, | |
| "step": 556 | |
| }, | |
| { | |
| "epoch": 2.520361990950226, | |
| "grad_norm": 0.4552370309829712, | |
| "learning_rate": 9.312248602819284e-06, | |
| "loss": 0.3104, | |
| "step": 557 | |
| }, | |
| { | |
| "epoch": 2.524886877828054, | |
| "grad_norm": 0.4341452717781067, | |
| "learning_rate": 9.308246004906758e-06, | |
| "loss": 0.3191, | |
| "step": 558 | |
| }, | |
| { | |
| "epoch": 2.5294117647058822, | |
| "grad_norm": 0.33837419748306274, | |
| "learning_rate": 9.30423265892184e-06, | |
| "loss": 0.287, | |
| "step": 559 | |
| }, | |
| { | |
| "epoch": 2.5339366515837103, | |
| "grad_norm": 0.4650215804576874, | |
| "learning_rate": 9.300208574876897e-06, | |
| "loss": 0.2909, | |
| "step": 560 | |
| }, | |
| { | |
| "epoch": 2.5384615384615383, | |
| "grad_norm": 0.3926837146282196, | |
| "learning_rate": 9.296173762811084e-06, | |
| "loss": 0.2819, | |
| "step": 561 | |
| }, | |
| { | |
| "epoch": 2.5429864253393664, | |
| "grad_norm": 0.4534710943698883, | |
| "learning_rate": 9.292128232790321e-06, | |
| "loss": 0.3077, | |
| "step": 562 | |
| }, | |
| { | |
| "epoch": 2.5475113122171944, | |
| "grad_norm": 0.38600146770477295, | |
| "learning_rate": 9.288071994907262e-06, | |
| "loss": 0.3145, | |
| "step": 563 | |
| }, | |
| { | |
| "epoch": 2.5520361990950224, | |
| "grad_norm": 0.4256378710269928, | |
| "learning_rate": 9.284005059281278e-06, | |
| "loss": 0.2503, | |
| "step": 564 | |
| }, | |
| { | |
| "epoch": 2.5565610859728505, | |
| "grad_norm": 0.4025668799877167, | |
| "learning_rate": 9.27992743605843e-06, | |
| "loss": 0.3042, | |
| "step": 565 | |
| }, | |
| { | |
| "epoch": 2.5610859728506785, | |
| "grad_norm": 0.43262800574302673, | |
| "learning_rate": 9.275839135411439e-06, | |
| "loss": 0.3031, | |
| "step": 566 | |
| }, | |
| { | |
| "epoch": 2.5656108597285066, | |
| "grad_norm": 0.42043566703796387, | |
| "learning_rate": 9.271740167539665e-06, | |
| "loss": 0.2848, | |
| "step": 567 | |
| }, | |
| { | |
| "epoch": 2.5701357466063346, | |
| "grad_norm": 0.43189898133277893, | |
| "learning_rate": 9.26763054266908e-06, | |
| "loss": 0.278, | |
| "step": 568 | |
| }, | |
| { | |
| "epoch": 2.5746606334841626, | |
| "grad_norm": 0.40669795870780945, | |
| "learning_rate": 9.263510271052243e-06, | |
| "loss": 0.3034, | |
| "step": 569 | |
| }, | |
| { | |
| "epoch": 2.579185520361991, | |
| "grad_norm": 0.3897744119167328, | |
| "learning_rate": 9.259379362968276e-06, | |
| "loss": 0.2455, | |
| "step": 570 | |
| }, | |
| { | |
| "epoch": 2.583710407239819, | |
| "grad_norm": 0.4340456426143646, | |
| "learning_rate": 9.255237828722828e-06, | |
| "loss": 0.2672, | |
| "step": 571 | |
| }, | |
| { | |
| "epoch": 2.588235294117647, | |
| "grad_norm": 0.4333140254020691, | |
| "learning_rate": 9.251085678648072e-06, | |
| "loss": 0.3283, | |
| "step": 572 | |
| }, | |
| { | |
| "epoch": 2.5927601809954752, | |
| "grad_norm": 0.5001205205917358, | |
| "learning_rate": 9.246922923102652e-06, | |
| "loss": 0.3178, | |
| "step": 573 | |
| }, | |
| { | |
| "epoch": 2.5972850678733033, | |
| "grad_norm": 0.42825013399124146, | |
| "learning_rate": 9.242749572471679e-06, | |
| "loss": 0.3468, | |
| "step": 574 | |
| }, | |
| { | |
| "epoch": 2.6018099547511313, | |
| "grad_norm": 0.4305206835269928, | |
| "learning_rate": 9.238565637166692e-06, | |
| "loss": 0.2952, | |
| "step": 575 | |
| }, | |
| { | |
| "epoch": 2.6063348416289593, | |
| "grad_norm": 0.4100704789161682, | |
| "learning_rate": 9.234371127625634e-06, | |
| "loss": 0.3348, | |
| "step": 576 | |
| }, | |
| { | |
| "epoch": 2.6108597285067874, | |
| "grad_norm": 0.37542393803596497, | |
| "learning_rate": 9.230166054312834e-06, | |
| "loss": 0.3066, | |
| "step": 577 | |
| }, | |
| { | |
| "epoch": 2.6153846153846154, | |
| "grad_norm": 0.38587042689323425, | |
| "learning_rate": 9.225950427718974e-06, | |
| "loss": 0.3205, | |
| "step": 578 | |
| }, | |
| { | |
| "epoch": 2.6199095022624435, | |
| "grad_norm": 0.4810059368610382, | |
| "learning_rate": 9.22172425836106e-06, | |
| "loss": 0.3374, | |
| "step": 579 | |
| }, | |
| { | |
| "epoch": 2.6244343891402715, | |
| "grad_norm": 0.43957462906837463, | |
| "learning_rate": 9.217487556782402e-06, | |
| "loss": 0.3029, | |
| "step": 580 | |
| }, | |
| { | |
| "epoch": 2.6289592760180995, | |
| "grad_norm": 0.39742496609687805, | |
| "learning_rate": 9.213240333552589e-06, | |
| "loss": 0.3118, | |
| "step": 581 | |
| }, | |
| { | |
| "epoch": 2.6334841628959276, | |
| "grad_norm": 0.4322555661201477, | |
| "learning_rate": 9.208982599267454e-06, | |
| "loss": 0.3043, | |
| "step": 582 | |
| }, | |
| { | |
| "epoch": 2.6380090497737556, | |
| "grad_norm": 0.43412262201309204, | |
| "learning_rate": 9.20471436454905e-06, | |
| "loss": 0.3009, | |
| "step": 583 | |
| }, | |
| { | |
| "epoch": 2.6425339366515836, | |
| "grad_norm": 0.35117828845977783, | |
| "learning_rate": 9.200435640045637e-06, | |
| "loss": 0.3099, | |
| "step": 584 | |
| }, | |
| { | |
| "epoch": 2.6470588235294117, | |
| "grad_norm": 0.4399445354938507, | |
| "learning_rate": 9.196146436431635e-06, | |
| "loss": 0.3394, | |
| "step": 585 | |
| }, | |
| { | |
| "epoch": 2.6515837104072397, | |
| "grad_norm": 0.4506850242614746, | |
| "learning_rate": 9.191846764407609e-06, | |
| "loss": 0.3124, | |
| "step": 586 | |
| }, | |
| { | |
| "epoch": 2.6561085972850678, | |
| "grad_norm": 0.43107202649116516, | |
| "learning_rate": 9.187536634700244e-06, | |
| "loss": 0.2903, | |
| "step": 587 | |
| }, | |
| { | |
| "epoch": 2.660633484162896, | |
| "grad_norm": 0.44276267290115356, | |
| "learning_rate": 9.18321605806231e-06, | |
| "loss": 0.2783, | |
| "step": 588 | |
| }, | |
| { | |
| "epoch": 2.665158371040724, | |
| "grad_norm": 0.4580158293247223, | |
| "learning_rate": 9.178885045272642e-06, | |
| "loss": 0.3157, | |
| "step": 589 | |
| }, | |
| { | |
| "epoch": 2.669683257918552, | |
| "grad_norm": 0.39571404457092285, | |
| "learning_rate": 9.174543607136111e-06, | |
| "loss": 0.3387, | |
| "step": 590 | |
| }, | |
| { | |
| "epoch": 2.6742081447963804, | |
| "grad_norm": 0.4113115072250366, | |
| "learning_rate": 9.170191754483594e-06, | |
| "loss": 0.2723, | |
| "step": 591 | |
| }, | |
| { | |
| "epoch": 2.6787330316742084, | |
| "grad_norm": 0.3653103709220886, | |
| "learning_rate": 9.165829498171956e-06, | |
| "loss": 0.2986, | |
| "step": 592 | |
| }, | |
| { | |
| "epoch": 2.6832579185520364, | |
| "grad_norm": 0.381362646818161, | |
| "learning_rate": 9.161456849084007e-06, | |
| "loss": 0.2845, | |
| "step": 593 | |
| }, | |
| { | |
| "epoch": 2.6877828054298645, | |
| "grad_norm": 0.38520559668540955, | |
| "learning_rate": 9.157073818128495e-06, | |
| "loss": 0.2455, | |
| "step": 594 | |
| }, | |
| { | |
| "epoch": 2.6923076923076925, | |
| "grad_norm": 0.4085952043533325, | |
| "learning_rate": 9.152680416240059e-06, | |
| "loss": 0.3021, | |
| "step": 595 | |
| }, | |
| { | |
| "epoch": 2.6968325791855206, | |
| "grad_norm": 0.3726223409175873, | |
| "learning_rate": 9.14827665437922e-06, | |
| "loss": 0.2836, | |
| "step": 596 | |
| }, | |
| { | |
| "epoch": 2.7013574660633486, | |
| "grad_norm": 0.40436556935310364, | |
| "learning_rate": 9.143862543532336e-06, | |
| "loss": 0.3051, | |
| "step": 597 | |
| }, | |
| { | |
| "epoch": 2.7058823529411766, | |
| "grad_norm": 0.46403786540031433, | |
| "learning_rate": 9.13943809471159e-06, | |
| "loss": 0.2604, | |
| "step": 598 | |
| }, | |
| { | |
| "epoch": 2.7104072398190047, | |
| "grad_norm": 0.4343224763870239, | |
| "learning_rate": 9.135003318954954e-06, | |
| "loss": 0.3354, | |
| "step": 599 | |
| }, | |
| { | |
| "epoch": 2.7149321266968327, | |
| "grad_norm": 0.463319331407547, | |
| "learning_rate": 9.13055822732616e-06, | |
| "loss": 0.3098, | |
| "step": 600 | |
| }, | |
| { | |
| "epoch": 2.7194570135746607, | |
| "grad_norm": 0.48439162969589233, | |
| "learning_rate": 9.126102830914682e-06, | |
| "loss": 0.2805, | |
| "step": 601 | |
| }, | |
| { | |
| "epoch": 2.723981900452489, | |
| "grad_norm": 0.46056264638900757, | |
| "learning_rate": 9.121637140835696e-06, | |
| "loss": 0.3309, | |
| "step": 602 | |
| }, | |
| { | |
| "epoch": 2.728506787330317, | |
| "grad_norm": 0.44612205028533936, | |
| "learning_rate": 9.11716116823006e-06, | |
| "loss": 0.3131, | |
| "step": 603 | |
| }, | |
| { | |
| "epoch": 2.733031674208145, | |
| "grad_norm": 0.45993444323539734, | |
| "learning_rate": 9.112674924264287e-06, | |
| "loss": 0.2702, | |
| "step": 604 | |
| }, | |
| { | |
| "epoch": 2.737556561085973, | |
| "grad_norm": 0.41719764471054077, | |
| "learning_rate": 9.108178420130514e-06, | |
| "loss": 0.288, | |
| "step": 605 | |
| }, | |
| { | |
| "epoch": 2.742081447963801, | |
| "grad_norm": 0.49494820833206177, | |
| "learning_rate": 9.103671667046472e-06, | |
| "loss": 0.3831, | |
| "step": 606 | |
| }, | |
| { | |
| "epoch": 2.746606334841629, | |
| "grad_norm": 0.47351089119911194, | |
| "learning_rate": 9.099154676255462e-06, | |
| "loss": 0.3197, | |
| "step": 607 | |
| }, | |
| { | |
| "epoch": 2.751131221719457, | |
| "grad_norm": 0.4318673312664032, | |
| "learning_rate": 9.094627459026326e-06, | |
| "loss": 0.278, | |
| "step": 608 | |
| }, | |
| { | |
| "epoch": 2.755656108597285, | |
| "grad_norm": 0.4779452383518219, | |
| "learning_rate": 9.09009002665342e-06, | |
| "loss": 0.3151, | |
| "step": 609 | |
| }, | |
| { | |
| "epoch": 2.760180995475113, | |
| "grad_norm": 0.4163115322589874, | |
| "learning_rate": 9.085542390456582e-06, | |
| "loss": 0.3073, | |
| "step": 610 | |
| }, | |
| { | |
| "epoch": 2.764705882352941, | |
| "grad_norm": 0.39201343059539795, | |
| "learning_rate": 9.08098456178111e-06, | |
| "loss": 0.3137, | |
| "step": 611 | |
| }, | |
| { | |
| "epoch": 2.769230769230769, | |
| "grad_norm": 0.5050286650657654, | |
| "learning_rate": 9.076416551997721e-06, | |
| "loss": 0.3052, | |
| "step": 612 | |
| }, | |
| { | |
| "epoch": 2.773755656108597, | |
| "grad_norm": 0.4312725365161896, | |
| "learning_rate": 9.071838372502546e-06, | |
| "loss": 0.266, | |
| "step": 613 | |
| }, | |
| { | |
| "epoch": 2.7782805429864252, | |
| "grad_norm": 0.42680007219314575, | |
| "learning_rate": 9.067250034717072e-06, | |
| "loss": 0.2631, | |
| "step": 614 | |
| }, | |
| { | |
| "epoch": 2.7828054298642533, | |
| "grad_norm": 0.40380024909973145, | |
| "learning_rate": 9.062651550088137e-06, | |
| "loss": 0.2268, | |
| "step": 615 | |
| }, | |
| { | |
| "epoch": 2.7873303167420813, | |
| "grad_norm": 0.395770788192749, | |
| "learning_rate": 9.058042930087894e-06, | |
| "loss": 0.2925, | |
| "step": 616 | |
| }, | |
| { | |
| "epoch": 2.7918552036199094, | |
| "grad_norm": 0.47229576110839844, | |
| "learning_rate": 9.053424186213776e-06, | |
| "loss": 0.3023, | |
| "step": 617 | |
| }, | |
| { | |
| "epoch": 2.7963800904977374, | |
| "grad_norm": 0.4000561833381653, | |
| "learning_rate": 9.048795329988473e-06, | |
| "loss": 0.2983, | |
| "step": 618 | |
| }, | |
| { | |
| "epoch": 2.8009049773755654, | |
| "grad_norm": 0.39670804142951965, | |
| "learning_rate": 9.044156372959911e-06, | |
| "loss": 0.2657, | |
| "step": 619 | |
| }, | |
| { | |
| "epoch": 2.8054298642533935, | |
| "grad_norm": 0.38643792271614075, | |
| "learning_rate": 9.039507326701207e-06, | |
| "loss": 0.2793, | |
| "step": 620 | |
| }, | |
| { | |
| "epoch": 2.8099547511312215, | |
| "grad_norm": 0.4341120421886444, | |
| "learning_rate": 9.03484820281065e-06, | |
| "loss": 0.3408, | |
| "step": 621 | |
| }, | |
| { | |
| "epoch": 2.8144796380090495, | |
| "grad_norm": 0.3975633382797241, | |
| "learning_rate": 9.030179012911672e-06, | |
| "loss": 0.2507, | |
| "step": 622 | |
| }, | |
| { | |
| "epoch": 2.8190045248868776, | |
| "grad_norm": 0.3742230236530304, | |
| "learning_rate": 9.025499768652817e-06, | |
| "loss": 0.2768, | |
| "step": 623 | |
| }, | |
| { | |
| "epoch": 2.8235294117647056, | |
| "grad_norm": 0.40196242928504944, | |
| "learning_rate": 9.020810481707709e-06, | |
| "loss": 0.2508, | |
| "step": 624 | |
| }, | |
| { | |
| "epoch": 2.8280542986425337, | |
| "grad_norm": 0.41893255710601807, | |
| "learning_rate": 9.016111163775032e-06, | |
| "loss": 0.3294, | |
| "step": 625 | |
| }, | |
| { | |
| "epoch": 2.832579185520362, | |
| "grad_norm": 0.3998293876647949, | |
| "learning_rate": 9.011401826578492e-06, | |
| "loss": 0.2652, | |
| "step": 626 | |
| }, | |
| { | |
| "epoch": 2.83710407239819, | |
| "grad_norm": 0.502552330493927, | |
| "learning_rate": 9.00668248186679e-06, | |
| "loss": 0.2816, | |
| "step": 627 | |
| }, | |
| { | |
| "epoch": 2.841628959276018, | |
| "grad_norm": 0.4417928457260132, | |
| "learning_rate": 9.001953141413593e-06, | |
| "loss": 0.3273, | |
| "step": 628 | |
| }, | |
| { | |
| "epoch": 2.8461538461538463, | |
| "grad_norm": 0.40362390875816345, | |
| "learning_rate": 8.997213817017508e-06, | |
| "loss": 0.3004, | |
| "step": 629 | |
| }, | |
| { | |
| "epoch": 2.8506787330316743, | |
| "grad_norm": 0.39725273847579956, | |
| "learning_rate": 8.992464520502045e-06, | |
| "loss": 0.2706, | |
| "step": 630 | |
| }, | |
| { | |
| "epoch": 2.8552036199095023, | |
| "grad_norm": 0.407359778881073, | |
| "learning_rate": 8.987705263715598e-06, | |
| "loss": 0.2857, | |
| "step": 631 | |
| }, | |
| { | |
| "epoch": 2.8597285067873304, | |
| "grad_norm": 0.4246702790260315, | |
| "learning_rate": 8.982936058531403e-06, | |
| "loss": 0.2658, | |
| "step": 632 | |
| }, | |
| { | |
| "epoch": 2.8642533936651584, | |
| "grad_norm": 0.3781566619873047, | |
| "learning_rate": 8.978156916847523e-06, | |
| "loss": 0.2845, | |
| "step": 633 | |
| }, | |
| { | |
| "epoch": 2.8687782805429864, | |
| "grad_norm": 0.4160442650318146, | |
| "learning_rate": 8.973367850586802e-06, | |
| "loss": 0.3012, | |
| "step": 634 | |
| }, | |
| { | |
| "epoch": 2.8733031674208145, | |
| "grad_norm": 0.44970279932022095, | |
| "learning_rate": 8.968568871696847e-06, | |
| "loss": 0.3212, | |
| "step": 635 | |
| }, | |
| { | |
| "epoch": 2.8778280542986425, | |
| "grad_norm": 0.38865357637405396, | |
| "learning_rate": 8.96375999215e-06, | |
| "loss": 0.2997, | |
| "step": 636 | |
| }, | |
| { | |
| "epoch": 2.8823529411764706, | |
| "grad_norm": 0.4079996943473816, | |
| "learning_rate": 8.958941223943292e-06, | |
| "loss": 0.3396, | |
| "step": 637 | |
| }, | |
| { | |
| "epoch": 2.8868778280542986, | |
| "grad_norm": 0.40862202644348145, | |
| "learning_rate": 8.95411257909843e-06, | |
| "loss": 0.2484, | |
| "step": 638 | |
| }, | |
| { | |
| "epoch": 2.8914027149321266, | |
| "grad_norm": 0.39957407116889954, | |
| "learning_rate": 8.949274069661765e-06, | |
| "loss": 0.2696, | |
| "step": 639 | |
| }, | |
| { | |
| "epoch": 2.8959276018099547, | |
| "grad_norm": 0.412178635597229, | |
| "learning_rate": 8.944425707704247e-06, | |
| "loss": 0.2805, | |
| "step": 640 | |
| }, | |
| { | |
| "epoch": 2.9004524886877827, | |
| "grad_norm": 0.40401560068130493, | |
| "learning_rate": 8.939567505321418e-06, | |
| "loss": 0.26, | |
| "step": 641 | |
| }, | |
| { | |
| "epoch": 2.9049773755656108, | |
| "grad_norm": 0.42878180742263794, | |
| "learning_rate": 8.934699474633362e-06, | |
| "loss": 0.3284, | |
| "step": 642 | |
| }, | |
| { | |
| "epoch": 2.909502262443439, | |
| "grad_norm": 0.4750427007675171, | |
| "learning_rate": 8.929821627784682e-06, | |
| "loss": 0.3018, | |
| "step": 643 | |
| }, | |
| { | |
| "epoch": 2.914027149321267, | |
| "grad_norm": 0.42947152256965637, | |
| "learning_rate": 8.924933976944474e-06, | |
| "loss": 0.3251, | |
| "step": 644 | |
| }, | |
| { | |
| "epoch": 2.918552036199095, | |
| "grad_norm": 0.36342573165893555, | |
| "learning_rate": 8.92003653430629e-06, | |
| "loss": 0.2878, | |
| "step": 645 | |
| }, | |
| { | |
| "epoch": 2.9230769230769234, | |
| "grad_norm": 0.4305476248264313, | |
| "learning_rate": 8.915129312088112e-06, | |
| "loss": 0.2904, | |
| "step": 646 | |
| }, | |
| { | |
| "epoch": 2.9276018099547514, | |
| "grad_norm": 0.4355884790420532, | |
| "learning_rate": 8.910212322532317e-06, | |
| "loss": 0.2952, | |
| "step": 647 | |
| }, | |
| { | |
| "epoch": 2.9321266968325794, | |
| "grad_norm": 0.39653098583221436, | |
| "learning_rate": 8.905285577905653e-06, | |
| "loss": 0.3174, | |
| "step": 648 | |
| }, | |
| { | |
| "epoch": 2.9366515837104075, | |
| "grad_norm": 0.38590750098228455, | |
| "learning_rate": 8.900349090499203e-06, | |
| "loss": 0.2551, | |
| "step": 649 | |
| }, | |
| { | |
| "epoch": 2.9411764705882355, | |
| "grad_norm": 0.43237945437431335, | |
| "learning_rate": 8.895402872628352e-06, | |
| "loss": 0.3424, | |
| "step": 650 | |
| }, | |
| { | |
| "epoch": 2.9457013574660635, | |
| "grad_norm": 0.4289698004722595, | |
| "learning_rate": 8.890446936632768e-06, | |
| "loss": 0.3434, | |
| "step": 651 | |
| }, | |
| { | |
| "epoch": 2.9502262443438916, | |
| "grad_norm": 0.426191121339798, | |
| "learning_rate": 8.88548129487636e-06, | |
| "loss": 0.3057, | |
| "step": 652 | |
| }, | |
| { | |
| "epoch": 2.9547511312217196, | |
| "grad_norm": 0.4147898852825165, | |
| "learning_rate": 8.880505959747245e-06, | |
| "loss": 0.262, | |
| "step": 653 | |
| }, | |
| { | |
| "epoch": 2.9592760180995477, | |
| "grad_norm": 0.4329768717288971, | |
| "learning_rate": 8.87552094365773e-06, | |
| "loss": 0.2902, | |
| "step": 654 | |
| }, | |
| { | |
| "epoch": 2.9638009049773757, | |
| "grad_norm": 0.4282679855823517, | |
| "learning_rate": 8.870526259044275e-06, | |
| "loss": 0.2878, | |
| "step": 655 | |
| }, | |
| { | |
| "epoch": 2.9683257918552037, | |
| "grad_norm": 0.3876934349536896, | |
| "learning_rate": 8.86552191836745e-06, | |
| "loss": 0.3095, | |
| "step": 656 | |
| }, | |
| { | |
| "epoch": 2.9728506787330318, | |
| "grad_norm": 0.44530197978019714, | |
| "learning_rate": 8.860507934111929e-06, | |
| "loss": 0.2807, | |
| "step": 657 | |
| }, | |
| { | |
| "epoch": 2.97737556561086, | |
| "grad_norm": 0.4089668095111847, | |
| "learning_rate": 8.855484318786433e-06, | |
| "loss": 0.323, | |
| "step": 658 | |
| }, | |
| { | |
| "epoch": 2.981900452488688, | |
| "grad_norm": 0.4198194146156311, | |
| "learning_rate": 8.850451084923717e-06, | |
| "loss": 0.3207, | |
| "step": 659 | |
| }, | |
| { | |
| "epoch": 2.986425339366516, | |
| "grad_norm": 0.4462484121322632, | |
| "learning_rate": 8.845408245080527e-06, | |
| "loss": 0.3145, | |
| "step": 660 | |
| }, | |
| { | |
| "epoch": 2.990950226244344, | |
| "grad_norm": 0.39047619700431824, | |
| "learning_rate": 8.840355811837578e-06, | |
| "loss": 0.3429, | |
| "step": 661 | |
| }, | |
| { | |
| "epoch": 2.995475113122172, | |
| "grad_norm": 0.38414013385772705, | |
| "learning_rate": 8.835293797799517e-06, | |
| "loss": 0.3076, | |
| "step": 662 | |
| }, | |
| { | |
| "epoch": 3.0, | |
| "grad_norm": 0.41144996881484985, | |
| "learning_rate": 8.83022221559489e-06, | |
| "loss": 0.2834, | |
| "step": 663 | |
| }, | |
| { | |
| "epoch": 3.004524886877828, | |
| "grad_norm": 0.4691222012042999, | |
| "learning_rate": 8.82514107787612e-06, | |
| "loss": 0.2059, | |
| "step": 664 | |
| }, | |
| { | |
| "epoch": 3.009049773755656, | |
| "grad_norm": 0.40257298946380615, | |
| "learning_rate": 8.82005039731946e-06, | |
| "loss": 0.1973, | |
| "step": 665 | |
| }, | |
| { | |
| "epoch": 3.013574660633484, | |
| "grad_norm": 0.40091562271118164, | |
| "learning_rate": 8.814950186624978e-06, | |
| "loss": 0.1897, | |
| "step": 666 | |
| }, | |
| { | |
| "epoch": 3.018099547511312, | |
| "grad_norm": 0.5339998006820679, | |
| "learning_rate": 8.809840458516511e-06, | |
| "loss": 0.1607, | |
| "step": 667 | |
| }, | |
| { | |
| "epoch": 3.02262443438914, | |
| "grad_norm": 0.5496724247932434, | |
| "learning_rate": 8.804721225741646e-06, | |
| "loss": 0.2262, | |
| "step": 668 | |
| }, | |
| { | |
| "epoch": 3.0271493212669682, | |
| "grad_norm": 0.4552634060382843, | |
| "learning_rate": 8.799592501071679e-06, | |
| "loss": 0.2015, | |
| "step": 669 | |
| }, | |
| { | |
| "epoch": 3.0316742081447963, | |
| "grad_norm": 0.4297693073749542, | |
| "learning_rate": 8.794454297301583e-06, | |
| "loss": 0.1849, | |
| "step": 670 | |
| }, | |
| { | |
| "epoch": 3.0361990950226243, | |
| "grad_norm": 0.46762946248054504, | |
| "learning_rate": 8.789306627249985e-06, | |
| "loss": 0.2071, | |
| "step": 671 | |
| }, | |
| { | |
| "epoch": 3.0407239819004523, | |
| "grad_norm": 0.43196043372154236, | |
| "learning_rate": 8.784149503759124e-06, | |
| "loss": 0.1807, | |
| "step": 672 | |
| }, | |
| { | |
| "epoch": 3.0452488687782804, | |
| "grad_norm": 0.39062273502349854, | |
| "learning_rate": 8.778982939694822e-06, | |
| "loss": 0.2068, | |
| "step": 673 | |
| }, | |
| { | |
| "epoch": 3.0497737556561084, | |
| "grad_norm": 0.4284631907939911, | |
| "learning_rate": 8.77380694794646e-06, | |
| "loss": 0.1752, | |
| "step": 674 | |
| }, | |
| { | |
| "epoch": 3.0542986425339365, | |
| "grad_norm": 0.5311867594718933, | |
| "learning_rate": 8.768621541426933e-06, | |
| "loss": 0.2363, | |
| "step": 675 | |
| }, | |
| { | |
| "epoch": 3.0588235294117645, | |
| "grad_norm": 0.4057539701461792, | |
| "learning_rate": 8.763426733072624e-06, | |
| "loss": 0.1695, | |
| "step": 676 | |
| }, | |
| { | |
| "epoch": 3.0633484162895925, | |
| "grad_norm": 0.4523632526397705, | |
| "learning_rate": 8.75822253584337e-06, | |
| "loss": 0.1925, | |
| "step": 677 | |
| }, | |
| { | |
| "epoch": 3.067873303167421, | |
| "grad_norm": 0.40796959400177, | |
| "learning_rate": 8.75300896272244e-06, | |
| "loss": 0.2148, | |
| "step": 678 | |
| }, | |
| { | |
| "epoch": 3.072398190045249, | |
| "grad_norm": 0.39213889837265015, | |
| "learning_rate": 8.747786026716481e-06, | |
| "loss": 0.2128, | |
| "step": 679 | |
| }, | |
| { | |
| "epoch": 3.076923076923077, | |
| "grad_norm": 0.48423635959625244, | |
| "learning_rate": 8.742553740855507e-06, | |
| "loss": 0.2061, | |
| "step": 680 | |
| }, | |
| { | |
| "epoch": 3.081447963800905, | |
| "grad_norm": 0.4347093403339386, | |
| "learning_rate": 8.737312118192853e-06, | |
| "loss": 0.2512, | |
| "step": 681 | |
| }, | |
| { | |
| "epoch": 3.085972850678733, | |
| "grad_norm": 0.4026263356208801, | |
| "learning_rate": 8.732061171805154e-06, | |
| "loss": 0.1843, | |
| "step": 682 | |
| }, | |
| { | |
| "epoch": 3.090497737556561, | |
| "grad_norm": 0.4047137200832367, | |
| "learning_rate": 8.726800914792296e-06, | |
| "loss": 0.1738, | |
| "step": 683 | |
| }, | |
| { | |
| "epoch": 3.0950226244343892, | |
| "grad_norm": 0.4355809688568115, | |
| "learning_rate": 8.721531360277398e-06, | |
| "loss": 0.2185, | |
| "step": 684 | |
| }, | |
| { | |
| "epoch": 3.0995475113122173, | |
| "grad_norm": 0.4358494281768799, | |
| "learning_rate": 8.716252521406776e-06, | |
| "loss": 0.2087, | |
| "step": 685 | |
| }, | |
| { | |
| "epoch": 3.1040723981900453, | |
| "grad_norm": 0.4449985921382904, | |
| "learning_rate": 8.710964411349902e-06, | |
| "loss": 0.2285, | |
| "step": 686 | |
| }, | |
| { | |
| "epoch": 3.1085972850678734, | |
| "grad_norm": 0.38388368487358093, | |
| "learning_rate": 8.705667043299388e-06, | |
| "loss": 0.1556, | |
| "step": 687 | |
| }, | |
| { | |
| "epoch": 3.1131221719457014, | |
| "grad_norm": 0.4281388521194458, | |
| "learning_rate": 8.70036043047093e-06, | |
| "loss": 0.2041, | |
| "step": 688 | |
| }, | |
| { | |
| "epoch": 3.1176470588235294, | |
| "grad_norm": 0.37215033173561096, | |
| "learning_rate": 8.695044586103297e-06, | |
| "loss": 0.2017, | |
| "step": 689 | |
| }, | |
| { | |
| "epoch": 3.1221719457013575, | |
| "grad_norm": 0.4040149748325348, | |
| "learning_rate": 8.689719523458282e-06, | |
| "loss": 0.1838, | |
| "step": 690 | |
| }, | |
| { | |
| "epoch": 3.1266968325791855, | |
| "grad_norm": 0.3721625804901123, | |
| "learning_rate": 8.684385255820682e-06, | |
| "loss": 0.1982, | |
| "step": 691 | |
| }, | |
| { | |
| "epoch": 3.1312217194570136, | |
| "grad_norm": 0.37194955348968506, | |
| "learning_rate": 8.679041796498253e-06, | |
| "loss": 0.1832, | |
| "step": 692 | |
| }, | |
| { | |
| "epoch": 3.1357466063348416, | |
| "grad_norm": 0.3825089633464813, | |
| "learning_rate": 8.673689158821684e-06, | |
| "loss": 0.1753, | |
| "step": 693 | |
| }, | |
| { | |
| "epoch": 3.1402714932126696, | |
| "grad_norm": 0.41846585273742676, | |
| "learning_rate": 8.668327356144562e-06, | |
| "loss": 0.1979, | |
| "step": 694 | |
| }, | |
| { | |
| "epoch": 3.1447963800904977, | |
| "grad_norm": 0.4502934515476227, | |
| "learning_rate": 8.66295640184334e-06, | |
| "loss": 0.2344, | |
| "step": 695 | |
| }, | |
| { | |
| "epoch": 3.1493212669683257, | |
| "grad_norm": 0.41532212495803833, | |
| "learning_rate": 8.657576309317297e-06, | |
| "loss": 0.1949, | |
| "step": 696 | |
| }, | |
| { | |
| "epoch": 3.1538461538461537, | |
| "grad_norm": 0.4130610525608063, | |
| "learning_rate": 8.652187091988516e-06, | |
| "loss": 0.2038, | |
| "step": 697 | |
| }, | |
| { | |
| "epoch": 3.158371040723982, | |
| "grad_norm": 0.42346063256263733, | |
| "learning_rate": 8.646788763301842e-06, | |
| "loss": 0.2081, | |
| "step": 698 | |
| }, | |
| { | |
| "epoch": 3.16289592760181, | |
| "grad_norm": 0.3540644645690918, | |
| "learning_rate": 8.641381336724848e-06, | |
| "loss": 0.1972, | |
| "step": 699 | |
| }, | |
| { | |
| "epoch": 3.167420814479638, | |
| "grad_norm": 0.4263281226158142, | |
| "learning_rate": 8.635964825747809e-06, | |
| "loss": 0.1815, | |
| "step": 700 | |
| }, | |
| { | |
| "epoch": 3.171945701357466, | |
| "grad_norm": 0.442663311958313, | |
| "learning_rate": 8.630539243883659e-06, | |
| "loss": 0.1829, | |
| "step": 701 | |
| }, | |
| { | |
| "epoch": 3.176470588235294, | |
| "grad_norm": 0.4172739088535309, | |
| "learning_rate": 8.625104604667965e-06, | |
| "loss": 0.203, | |
| "step": 702 | |
| }, | |
| { | |
| "epoch": 3.180995475113122, | |
| "grad_norm": 0.38168448209762573, | |
| "learning_rate": 8.619660921658885e-06, | |
| "loss": 0.2247, | |
| "step": 703 | |
| }, | |
| { | |
| "epoch": 3.1855203619909505, | |
| "grad_norm": 0.3923109769821167, | |
| "learning_rate": 8.61420820843715e-06, | |
| "loss": 0.1861, | |
| "step": 704 | |
| }, | |
| { | |
| "epoch": 3.1900452488687785, | |
| "grad_norm": 0.4268626272678375, | |
| "learning_rate": 8.608746478606004e-06, | |
| "loss": 0.1889, | |
| "step": 705 | |
| }, | |
| { | |
| "epoch": 3.1945701357466065, | |
| "grad_norm": 0.567819356918335, | |
| "learning_rate": 8.6032757457912e-06, | |
| "loss": 0.2458, | |
| "step": 706 | |
| }, | |
| { | |
| "epoch": 3.1990950226244346, | |
| "grad_norm": 0.4022853970527649, | |
| "learning_rate": 8.59779602364094e-06, | |
| "loss": 0.2101, | |
| "step": 707 | |
| }, | |
| { | |
| "epoch": 3.2036199095022626, | |
| "grad_norm": 0.3837186098098755, | |
| "learning_rate": 8.592307325825861e-06, | |
| "loss": 0.1734, | |
| "step": 708 | |
| }, | |
| { | |
| "epoch": 3.2081447963800906, | |
| "grad_norm": 0.3684345483779907, | |
| "learning_rate": 8.586809666038986e-06, | |
| "loss": 0.1862, | |
| "step": 709 | |
| }, | |
| { | |
| "epoch": 3.2126696832579187, | |
| "grad_norm": 0.4099283814430237, | |
| "learning_rate": 8.581303057995697e-06, | |
| "loss": 0.1787, | |
| "step": 710 | |
| }, | |
| { | |
| "epoch": 3.2171945701357467, | |
| "grad_norm": 0.4496215879917145, | |
| "learning_rate": 8.575787515433705e-06, | |
| "loss": 0.2169, | |
| "step": 711 | |
| }, | |
| { | |
| "epoch": 3.2217194570135748, | |
| "grad_norm": 0.3973177969455719, | |
| "learning_rate": 8.570263052113005e-06, | |
| "loss": 0.2127, | |
| "step": 712 | |
| }, | |
| { | |
| "epoch": 3.226244343891403, | |
| "grad_norm": 0.40007996559143066, | |
| "learning_rate": 8.564729681815846e-06, | |
| "loss": 0.1987, | |
| "step": 713 | |
| }, | |
| { | |
| "epoch": 3.230769230769231, | |
| "grad_norm": 0.37854504585266113, | |
| "learning_rate": 8.559187418346703e-06, | |
| "loss": 0.2099, | |
| "step": 714 | |
| }, | |
| { | |
| "epoch": 3.235294117647059, | |
| "grad_norm": 0.4037870466709137, | |
| "learning_rate": 8.553636275532236e-06, | |
| "loss": 0.2049, | |
| "step": 715 | |
| }, | |
| { | |
| "epoch": 3.239819004524887, | |
| "grad_norm": 0.4421459436416626, | |
| "learning_rate": 8.548076267221258e-06, | |
| "loss": 0.1905, | |
| "step": 716 | |
| }, | |
| { | |
| "epoch": 3.244343891402715, | |
| "grad_norm": 0.9329970479011536, | |
| "learning_rate": 8.54250740728469e-06, | |
| "loss": 0.1844, | |
| "step": 717 | |
| }, | |
| { | |
| "epoch": 3.248868778280543, | |
| "grad_norm": 0.44006776809692383, | |
| "learning_rate": 8.536929709615552e-06, | |
| "loss": 0.1995, | |
| "step": 718 | |
| }, | |
| { | |
| "epoch": 3.253393665158371, | |
| "grad_norm": 0.3820533752441406, | |
| "learning_rate": 8.531343188128896e-06, | |
| "loss": 0.1971, | |
| "step": 719 | |
| }, | |
| { | |
| "epoch": 3.257918552036199, | |
| "grad_norm": 0.3708641231060028, | |
| "learning_rate": 8.525747856761799e-06, | |
| "loss": 0.1673, | |
| "step": 720 | |
| }, | |
| { | |
| "epoch": 3.262443438914027, | |
| "grad_norm": 0.37453895807266235, | |
| "learning_rate": 8.520143729473312e-06, | |
| "loss": 0.2085, | |
| "step": 721 | |
| }, | |
| { | |
| "epoch": 3.266968325791855, | |
| "grad_norm": 0.44639456272125244, | |
| "learning_rate": 8.514530820244427e-06, | |
| "loss": 0.2165, | |
| "step": 722 | |
| }, | |
| { | |
| "epoch": 3.271493212669683, | |
| "grad_norm": 0.3881756365299225, | |
| "learning_rate": 8.50890914307805e-06, | |
| "loss": 0.1891, | |
| "step": 723 | |
| }, | |
| { | |
| "epoch": 3.276018099547511, | |
| "grad_norm": 0.42670780420303345, | |
| "learning_rate": 8.503278711998958e-06, | |
| "loss": 0.2126, | |
| "step": 724 | |
| }, | |
| { | |
| "epoch": 3.2805429864253393, | |
| "grad_norm": 0.4481564164161682, | |
| "learning_rate": 8.497639541053769e-06, | |
| "loss": 0.2376, | |
| "step": 725 | |
| }, | |
| { | |
| "epoch": 3.2850678733031673, | |
| "grad_norm": 0.4192044138908386, | |
| "learning_rate": 8.491991644310903e-06, | |
| "loss": 0.2133, | |
| "step": 726 | |
| }, | |
| { | |
| "epoch": 3.2895927601809953, | |
| "grad_norm": 0.4070059657096863, | |
| "learning_rate": 8.48633503586055e-06, | |
| "loss": 0.1873, | |
| "step": 727 | |
| }, | |
| { | |
| "epoch": 3.2941176470588234, | |
| "grad_norm": 0.3769683539867401, | |
| "learning_rate": 8.480669729814635e-06, | |
| "loss": 0.2125, | |
| "step": 728 | |
| }, | |
| { | |
| "epoch": 3.2986425339366514, | |
| "grad_norm": 0.45319512486457825, | |
| "learning_rate": 8.474995740306775e-06, | |
| "loss": 0.2039, | |
| "step": 729 | |
| }, | |
| { | |
| "epoch": 3.3031674208144794, | |
| "grad_norm": 0.38497766852378845, | |
| "learning_rate": 8.469313081492258e-06, | |
| "loss": 0.2126, | |
| "step": 730 | |
| }, | |
| { | |
| "epoch": 3.3076923076923075, | |
| "grad_norm": 0.3879552483558655, | |
| "learning_rate": 8.463621767547998e-06, | |
| "loss": 0.2104, | |
| "step": 731 | |
| }, | |
| { | |
| "epoch": 3.3122171945701355, | |
| "grad_norm": 0.4097852110862732, | |
| "learning_rate": 8.457921812672498e-06, | |
| "loss": 0.1755, | |
| "step": 732 | |
| }, | |
| { | |
| "epoch": 3.3167420814479636, | |
| "grad_norm": 0.4118267595767975, | |
| "learning_rate": 8.452213231085823e-06, | |
| "loss": 0.1905, | |
| "step": 733 | |
| }, | |
| { | |
| "epoch": 3.321266968325792, | |
| "grad_norm": 0.42877197265625, | |
| "learning_rate": 8.446496037029555e-06, | |
| "loss": 0.2186, | |
| "step": 734 | |
| }, | |
| { | |
| "epoch": 3.32579185520362, | |
| "grad_norm": 0.4173864424228668, | |
| "learning_rate": 8.440770244766764e-06, | |
| "loss": 0.1567, | |
| "step": 735 | |
| }, | |
| { | |
| "epoch": 3.330316742081448, | |
| "grad_norm": 0.4193741977214813, | |
| "learning_rate": 8.435035868581974e-06, | |
| "loss": 0.2107, | |
| "step": 736 | |
| }, | |
| { | |
| "epoch": 3.334841628959276, | |
| "grad_norm": 0.44753381609916687, | |
| "learning_rate": 8.429292922781115e-06, | |
| "loss": 0.2011, | |
| "step": 737 | |
| }, | |
| { | |
| "epoch": 3.339366515837104, | |
| "grad_norm": 0.3999078869819641, | |
| "learning_rate": 8.42354142169151e-06, | |
| "loss": 0.2168, | |
| "step": 738 | |
| }, | |
| { | |
| "epoch": 3.3438914027149322, | |
| "grad_norm": 0.3881414532661438, | |
| "learning_rate": 8.417781379661811e-06, | |
| "loss": 0.2272, | |
| "step": 739 | |
| }, | |
| { | |
| "epoch": 3.3484162895927603, | |
| "grad_norm": 0.3872552216053009, | |
| "learning_rate": 8.412012811061985e-06, | |
| "loss": 0.2018, | |
| "step": 740 | |
| }, | |
| { | |
| "epoch": 3.3529411764705883, | |
| "grad_norm": 0.4065379798412323, | |
| "learning_rate": 8.40623573028327e-06, | |
| "loss": 0.1935, | |
| "step": 741 | |
| }, | |
| { | |
| "epoch": 3.3574660633484164, | |
| "grad_norm": 0.3988831639289856, | |
| "learning_rate": 8.400450151738143e-06, | |
| "loss": 0.2169, | |
| "step": 742 | |
| }, | |
| { | |
| "epoch": 3.3619909502262444, | |
| "grad_norm": 0.3781244456768036, | |
| "learning_rate": 8.394656089860274e-06, | |
| "loss": 0.1622, | |
| "step": 743 | |
| }, | |
| { | |
| "epoch": 3.3665158371040724, | |
| "grad_norm": 0.4144964814186096, | |
| "learning_rate": 8.388853559104502e-06, | |
| "loss": 0.2296, | |
| "step": 744 | |
| }, | |
| { | |
| "epoch": 3.3710407239819005, | |
| "grad_norm": 0.38190963864326477, | |
| "learning_rate": 8.383042573946792e-06, | |
| "loss": 0.209, | |
| "step": 745 | |
| }, | |
| { | |
| "epoch": 3.3755656108597285, | |
| "grad_norm": 0.4152965247631073, | |
| "learning_rate": 8.377223148884202e-06, | |
| "loss": 0.211, | |
| "step": 746 | |
| }, | |
| { | |
| "epoch": 3.3800904977375565, | |
| "grad_norm": 0.4132832884788513, | |
| "learning_rate": 8.371395298434844e-06, | |
| "loss": 0.2005, | |
| "step": 747 | |
| }, | |
| { | |
| "epoch": 3.3846153846153846, | |
| "grad_norm": 0.37585464119911194, | |
| "learning_rate": 8.36555903713785e-06, | |
| "loss": 0.1745, | |
| "step": 748 | |
| }, | |
| { | |
| "epoch": 3.3891402714932126, | |
| "grad_norm": 0.40911996364593506, | |
| "learning_rate": 8.359714379553338e-06, | |
| "loss": 0.2016, | |
| "step": 749 | |
| }, | |
| { | |
| "epoch": 3.3936651583710407, | |
| "grad_norm": 0.5231017470359802, | |
| "learning_rate": 8.353861340262367e-06, | |
| "loss": 0.2217, | |
| "step": 750 | |
| }, | |
| { | |
| "epoch": 3.3981900452488687, | |
| "grad_norm": 0.4193516671657562, | |
| "learning_rate": 8.347999933866912e-06, | |
| "loss": 0.2042, | |
| "step": 751 | |
| }, | |
| { | |
| "epoch": 3.4027149321266967, | |
| "grad_norm": 0.40086203813552856, | |
| "learning_rate": 8.342130174989819e-06, | |
| "loss": 0.1837, | |
| "step": 752 | |
| }, | |
| { | |
| "epoch": 3.4072398190045248, | |
| "grad_norm": 0.44360044598579407, | |
| "learning_rate": 8.33625207827477e-06, | |
| "loss": 0.1675, | |
| "step": 753 | |
| }, | |
| { | |
| "epoch": 3.411764705882353, | |
| "grad_norm": 0.3777754604816437, | |
| "learning_rate": 8.330365658386252e-06, | |
| "loss": 0.1793, | |
| "step": 754 | |
| }, | |
| { | |
| "epoch": 3.416289592760181, | |
| "grad_norm": 0.42608344554901123, | |
| "learning_rate": 8.324470930009514e-06, | |
| "loss": 0.2048, | |
| "step": 755 | |
| }, | |
| { | |
| "epoch": 3.420814479638009, | |
| "grad_norm": 0.46820250153541565, | |
| "learning_rate": 8.318567907850533e-06, | |
| "loss": 0.1656, | |
| "step": 756 | |
| }, | |
| { | |
| "epoch": 3.425339366515837, | |
| "grad_norm": 0.3514660596847534, | |
| "learning_rate": 8.312656606635976e-06, | |
| "loss": 0.2045, | |
| "step": 757 | |
| }, | |
| { | |
| "epoch": 3.4298642533936654, | |
| "grad_norm": 0.38363519310951233, | |
| "learning_rate": 8.306737041113169e-06, | |
| "loss": 0.2201, | |
| "step": 758 | |
| }, | |
| { | |
| "epoch": 3.4343891402714934, | |
| "grad_norm": 0.39812731742858887, | |
| "learning_rate": 8.300809226050049e-06, | |
| "loss": 0.1857, | |
| "step": 759 | |
| }, | |
| { | |
| "epoch": 3.4389140271493215, | |
| "grad_norm": 0.4550320506095886, | |
| "learning_rate": 8.294873176235137e-06, | |
| "loss": 0.2109, | |
| "step": 760 | |
| }, | |
| { | |
| "epoch": 3.4434389140271495, | |
| "grad_norm": 0.34271740913391113, | |
| "learning_rate": 8.288928906477497e-06, | |
| "loss": 0.1732, | |
| "step": 761 | |
| }, | |
| { | |
| "epoch": 3.4479638009049776, | |
| "grad_norm": 0.41642773151397705, | |
| "learning_rate": 8.282976431606703e-06, | |
| "loss": 0.2115, | |
| "step": 762 | |
| }, | |
| { | |
| "epoch": 3.4524886877828056, | |
| "grad_norm": 0.40923258662223816, | |
| "learning_rate": 8.277015766472794e-06, | |
| "loss": 0.1746, | |
| "step": 763 | |
| }, | |
| { | |
| "epoch": 3.4570135746606336, | |
| "grad_norm": 0.40351900458335876, | |
| "learning_rate": 8.271046925946247e-06, | |
| "loss": 0.2355, | |
| "step": 764 | |
| }, | |
| { | |
| "epoch": 3.4615384615384617, | |
| "grad_norm": 0.4274236261844635, | |
| "learning_rate": 8.265069924917925e-06, | |
| "loss": 0.1954, | |
| "step": 765 | |
| }, | |
| { | |
| "epoch": 3.4660633484162897, | |
| "grad_norm": 0.4109535217285156, | |
| "learning_rate": 8.259084778299064e-06, | |
| "loss": 0.2156, | |
| "step": 766 | |
| }, | |
| { | |
| "epoch": 3.4705882352941178, | |
| "grad_norm": 0.4322621524333954, | |
| "learning_rate": 8.25309150102121e-06, | |
| "loss": 0.2329, | |
| "step": 767 | |
| }, | |
| { | |
| "epoch": 3.475113122171946, | |
| "grad_norm": 0.42734619975090027, | |
| "learning_rate": 8.247090108036198e-06, | |
| "loss": 0.2, | |
| "step": 768 | |
| }, | |
| { | |
| "epoch": 3.479638009049774, | |
| "grad_norm": 0.41158321499824524, | |
| "learning_rate": 8.241080614316108e-06, | |
| "loss": 0.1893, | |
| "step": 769 | |
| }, | |
| { | |
| "epoch": 3.484162895927602, | |
| "grad_norm": 0.39836597442626953, | |
| "learning_rate": 8.235063034853228e-06, | |
| "loss": 0.2268, | |
| "step": 770 | |
| }, | |
| { | |
| "epoch": 3.48868778280543, | |
| "grad_norm": 0.4076845943927765, | |
| "learning_rate": 8.229037384660023e-06, | |
| "loss": 0.2043, | |
| "step": 771 | |
| }, | |
| { | |
| "epoch": 3.493212669683258, | |
| "grad_norm": 0.40793871879577637, | |
| "learning_rate": 8.223003678769089e-06, | |
| "loss": 0.2147, | |
| "step": 772 | |
| }, | |
| { | |
| "epoch": 3.497737556561086, | |
| "grad_norm": 0.4273108243942261, | |
| "learning_rate": 8.216961932233118e-06, | |
| "loss": 0.2211, | |
| "step": 773 | |
| }, | |
| { | |
| "epoch": 3.502262443438914, | |
| "grad_norm": 0.4290085434913635, | |
| "learning_rate": 8.210912160124866e-06, | |
| "loss": 0.1792, | |
| "step": 774 | |
| }, | |
| { | |
| "epoch": 3.506787330316742, | |
| "grad_norm": 0.4068942964076996, | |
| "learning_rate": 8.204854377537105e-06, | |
| "loss": 0.1925, | |
| "step": 775 | |
| }, | |
| { | |
| "epoch": 3.51131221719457, | |
| "grad_norm": 0.4647829830646515, | |
| "learning_rate": 8.198788599582596e-06, | |
| "loss": 0.2074, | |
| "step": 776 | |
| }, | |
| { | |
| "epoch": 3.515837104072398, | |
| "grad_norm": 0.4103906452655792, | |
| "learning_rate": 8.192714841394048e-06, | |
| "loss": 0.2492, | |
| "step": 777 | |
| }, | |
| { | |
| "epoch": 3.520361990950226, | |
| "grad_norm": 0.3793966472148895, | |
| "learning_rate": 8.18663311812407e-06, | |
| "loss": 0.2051, | |
| "step": 778 | |
| }, | |
| { | |
| "epoch": 3.524886877828054, | |
| "grad_norm": 0.37239229679107666, | |
| "learning_rate": 8.180543444945154e-06, | |
| "loss": 0.1596, | |
| "step": 779 | |
| }, | |
| { | |
| "epoch": 3.5294117647058822, | |
| "grad_norm": 0.43737536668777466, | |
| "learning_rate": 8.174445837049614e-06, | |
| "loss": 0.1427, | |
| "step": 780 | |
| }, | |
| { | |
| "epoch": 3.5339366515837103, | |
| "grad_norm": 0.41861864924430847, | |
| "learning_rate": 8.168340309649569e-06, | |
| "loss": 0.1594, | |
| "step": 781 | |
| }, | |
| { | |
| "epoch": 3.5384615384615383, | |
| "grad_norm": 0.3884759247303009, | |
| "learning_rate": 8.162226877976886e-06, | |
| "loss": 0.2057, | |
| "step": 782 | |
| }, | |
| { | |
| "epoch": 3.5429864253393664, | |
| "grad_norm": 0.38496264815330505, | |
| "learning_rate": 8.156105557283161e-06, | |
| "loss": 0.1997, | |
| "step": 783 | |
| }, | |
| { | |
| "epoch": 3.5475113122171944, | |
| "grad_norm": 0.3633590638637543, | |
| "learning_rate": 8.149976362839662e-06, | |
| "loss": 0.2228, | |
| "step": 784 | |
| }, | |
| { | |
| "epoch": 3.5520361990950224, | |
| "grad_norm": 0.3853006362915039, | |
| "learning_rate": 8.143839309937307e-06, | |
| "loss": 0.2287, | |
| "step": 785 | |
| }, | |
| { | |
| "epoch": 3.5565610859728505, | |
| "grad_norm": 0.41965270042419434, | |
| "learning_rate": 8.137694413886617e-06, | |
| "loss": 0.21, | |
| "step": 786 | |
| }, | |
| { | |
| "epoch": 3.5610859728506785, | |
| "grad_norm": 0.45740389823913574, | |
| "learning_rate": 8.131541690017676e-06, | |
| "loss": 0.1703, | |
| "step": 787 | |
| }, | |
| { | |
| "epoch": 3.5656108597285066, | |
| "grad_norm": 0.38977181911468506, | |
| "learning_rate": 8.125381153680103e-06, | |
| "loss": 0.1868, | |
| "step": 788 | |
| }, | |
| { | |
| "epoch": 3.5701357466063346, | |
| "grad_norm": 0.7932807207107544, | |
| "learning_rate": 8.119212820243002e-06, | |
| "loss": 0.2025, | |
| "step": 789 | |
| }, | |
| { | |
| "epoch": 3.5746606334841626, | |
| "grad_norm": 0.40560823678970337, | |
| "learning_rate": 8.113036705094935e-06, | |
| "loss": 0.2107, | |
| "step": 790 | |
| }, | |
| { | |
| "epoch": 3.579185520361991, | |
| "grad_norm": 0.3959384262561798, | |
| "learning_rate": 8.10685282364387e-06, | |
| "loss": 0.2358, | |
| "step": 791 | |
| }, | |
| { | |
| "epoch": 3.583710407239819, | |
| "grad_norm": 0.4452955424785614, | |
| "learning_rate": 8.100661191317153e-06, | |
| "loss": 0.2314, | |
| "step": 792 | |
| }, | |
| { | |
| "epoch": 3.588235294117647, | |
| "grad_norm": 0.3842819929122925, | |
| "learning_rate": 8.094461823561473e-06, | |
| "loss": 0.233, | |
| "step": 793 | |
| }, | |
| { | |
| "epoch": 3.5927601809954752, | |
| "grad_norm": 0.45402851700782776, | |
| "learning_rate": 8.088254735842808e-06, | |
| "loss": 0.1525, | |
| "step": 794 | |
| }, | |
| { | |
| "epoch": 3.5972850678733033, | |
| "grad_norm": 0.4239247739315033, | |
| "learning_rate": 8.082039943646398e-06, | |
| "loss": 0.1896, | |
| "step": 795 | |
| }, | |
| { | |
| "epoch": 3.6018099547511313, | |
| "grad_norm": 0.42828717827796936, | |
| "learning_rate": 8.075817462476706e-06, | |
| "loss": 0.2204, | |
| "step": 796 | |
| }, | |
| { | |
| "epoch": 3.6063348416289593, | |
| "grad_norm": 0.39242568612098694, | |
| "learning_rate": 8.069587307857377e-06, | |
| "loss": 0.2161, | |
| "step": 797 | |
| }, | |
| { | |
| "epoch": 3.6108597285067874, | |
| "grad_norm": 0.4356323480606079, | |
| "learning_rate": 8.063349495331197e-06, | |
| "loss": 0.2312, | |
| "step": 798 | |
| }, | |
| { | |
| "epoch": 3.6153846153846154, | |
| "grad_norm": 0.42608317732810974, | |
| "learning_rate": 8.057104040460062e-06, | |
| "loss": 0.1939, | |
| "step": 799 | |
| }, | |
| { | |
| "epoch": 3.6199095022624435, | |
| "grad_norm": 0.3582107126712799, | |
| "learning_rate": 8.050850958824926e-06, | |
| "loss": 0.1914, | |
| "step": 800 | |
| }, | |
| { | |
| "epoch": 3.6244343891402715, | |
| "grad_norm": 0.396219938993454, | |
| "learning_rate": 8.044590266025776e-06, | |
| "loss": 0.141, | |
| "step": 801 | |
| }, | |
| { | |
| "epoch": 3.6289592760180995, | |
| "grad_norm": 0.38731884956359863, | |
| "learning_rate": 8.038321977681586e-06, | |
| "loss": 0.2041, | |
| "step": 802 | |
| }, | |
| { | |
| "epoch": 3.6334841628959276, | |
| "grad_norm": 0.4682024121284485, | |
| "learning_rate": 8.032046109430276e-06, | |
| "loss": 0.2546, | |
| "step": 803 | |
| }, | |
| { | |
| "epoch": 3.6380090497737556, | |
| "grad_norm": 0.44411900639533997, | |
| "learning_rate": 8.025762676928682e-06, | |
| "loss": 0.2448, | |
| "step": 804 | |
| }, | |
| { | |
| "epoch": 3.6425339366515836, | |
| "grad_norm": 0.3928527534008026, | |
| "learning_rate": 8.019471695852505e-06, | |
| "loss": 0.2319, | |
| "step": 805 | |
| }, | |
| { | |
| "epoch": 3.6470588235294117, | |
| "grad_norm": 0.41334059834480286, | |
| "learning_rate": 8.013173181896283e-06, | |
| "loss": 0.1934, | |
| "step": 806 | |
| }, | |
| { | |
| "epoch": 3.6515837104072397, | |
| "grad_norm": 0.4208468198776245, | |
| "learning_rate": 8.006867150773341e-06, | |
| "loss": 0.2124, | |
| "step": 807 | |
| }, | |
| { | |
| "epoch": 3.6561085972850678, | |
| "grad_norm": 0.38747453689575195, | |
| "learning_rate": 8.000553618215762e-06, | |
| "loss": 0.2031, | |
| "step": 808 | |
| }, | |
| { | |
| "epoch": 3.660633484162896, | |
| "grad_norm": 0.3957184851169586, | |
| "learning_rate": 7.994232599974346e-06, | |
| "loss": 0.2107, | |
| "step": 809 | |
| }, | |
| { | |
| "epoch": 3.665158371040724, | |
| "grad_norm": 0.37880805134773254, | |
| "learning_rate": 7.987904111818559e-06, | |
| "loss": 0.2105, | |
| "step": 810 | |
| }, | |
| { | |
| "epoch": 3.669683257918552, | |
| "grad_norm": 0.3965386152267456, | |
| "learning_rate": 7.981568169536512e-06, | |
| "loss": 0.1898, | |
| "step": 811 | |
| }, | |
| { | |
| "epoch": 3.6742081447963804, | |
| "grad_norm": 0.3958094120025635, | |
| "learning_rate": 7.975224788934903e-06, | |
| "loss": 0.206, | |
| "step": 812 | |
| }, | |
| { | |
| "epoch": 3.6787330316742084, | |
| "grad_norm": 0.3693002164363861, | |
| "learning_rate": 7.968873985838998e-06, | |
| "loss": 0.1915, | |
| "step": 813 | |
| }, | |
| { | |
| "epoch": 3.6832579185520364, | |
| "grad_norm": 0.4172878563404083, | |
| "learning_rate": 7.96251577609257e-06, | |
| "loss": 0.2276, | |
| "step": 814 | |
| }, | |
| { | |
| "epoch": 3.6877828054298645, | |
| "grad_norm": 0.3927733898162842, | |
| "learning_rate": 7.95615017555788e-06, | |
| "loss": 0.1971, | |
| "step": 815 | |
| }, | |
| { | |
| "epoch": 3.6923076923076925, | |
| "grad_norm": 0.417679101228714, | |
| "learning_rate": 7.949777200115617e-06, | |
| "loss": 0.2045, | |
| "step": 816 | |
| }, | |
| { | |
| "epoch": 3.6968325791855206, | |
| "grad_norm": 0.394901841878891, | |
| "learning_rate": 7.943396865664872e-06, | |
| "loss": 0.2073, | |
| "step": 817 | |
| }, | |
| { | |
| "epoch": 3.7013574660633486, | |
| "grad_norm": 0.41202467679977417, | |
| "learning_rate": 7.937009188123102e-06, | |
| "loss": 0.2414, | |
| "step": 818 | |
| }, | |
| { | |
| "epoch": 3.7058823529411766, | |
| "grad_norm": 0.3696572184562683, | |
| "learning_rate": 7.930614183426074e-06, | |
| "loss": 0.2078, | |
| "step": 819 | |
| }, | |
| { | |
| "epoch": 3.7104072398190047, | |
| "grad_norm": 0.4119456112384796, | |
| "learning_rate": 7.92421186752784e-06, | |
| "loss": 0.2133, | |
| "step": 820 | |
| }, | |
| { | |
| "epoch": 3.7149321266968327, | |
| "grad_norm": 0.48625221848487854, | |
| "learning_rate": 7.917802256400688e-06, | |
| "loss": 0.2036, | |
| "step": 821 | |
| }, | |
| { | |
| "epoch": 3.7194570135746607, | |
| "grad_norm": 0.43132078647613525, | |
| "learning_rate": 7.911385366035111e-06, | |
| "loss": 0.2037, | |
| "step": 822 | |
| }, | |
| { | |
| "epoch": 3.723981900452489, | |
| "grad_norm": 0.44492626190185547, | |
| "learning_rate": 7.904961212439756e-06, | |
| "loss": 0.235, | |
| "step": 823 | |
| }, | |
| { | |
| "epoch": 3.728506787330317, | |
| "grad_norm": 0.37732723355293274, | |
| "learning_rate": 7.898529811641393e-06, | |
| "loss": 0.2191, | |
| "step": 824 | |
| }, | |
| { | |
| "epoch": 3.733031674208145, | |
| "grad_norm": 0.3524717688560486, | |
| "learning_rate": 7.892091179684876e-06, | |
| "loss": 0.215, | |
| "step": 825 | |
| }, | |
| { | |
| "epoch": 3.737556561085973, | |
| "grad_norm": 0.4359301030635834, | |
| "learning_rate": 7.88564533263309e-06, | |
| "loss": 0.2027, | |
| "step": 826 | |
| }, | |
| { | |
| "epoch": 3.742081447963801, | |
| "grad_norm": 0.36626332998275757, | |
| "learning_rate": 7.879192286566929e-06, | |
| "loss": 0.1884, | |
| "step": 827 | |
| }, | |
| { | |
| "epoch": 3.746606334841629, | |
| "grad_norm": 0.36521342396736145, | |
| "learning_rate": 7.87273205758524e-06, | |
| "loss": 0.1919, | |
| "step": 828 | |
| }, | |
| { | |
| "epoch": 3.751131221719457, | |
| "grad_norm": 0.38231372833251953, | |
| "learning_rate": 7.866264661804795e-06, | |
| "loss": 0.2165, | |
| "step": 829 | |
| }, | |
| { | |
| "epoch": 3.755656108597285, | |
| "grad_norm": 0.5153652429580688, | |
| "learning_rate": 7.859790115360243e-06, | |
| "loss": 0.2336, | |
| "step": 830 | |
| }, | |
| { | |
| "epoch": 3.760180995475113, | |
| "grad_norm": 0.3765285909175873, | |
| "learning_rate": 7.853308434404071e-06, | |
| "loss": 0.1948, | |
| "step": 831 | |
| }, | |
| { | |
| "epoch": 3.764705882352941, | |
| "grad_norm": 0.40210476517677307, | |
| "learning_rate": 7.846819635106569e-06, | |
| "loss": 0.2053, | |
| "step": 832 | |
| }, | |
| { | |
| "epoch": 3.769230769230769, | |
| "grad_norm": 0.3615150451660156, | |
| "learning_rate": 7.84032373365578e-06, | |
| "loss": 0.2148, | |
| "step": 833 | |
| }, | |
| { | |
| "epoch": 3.773755656108597, | |
| "grad_norm": 0.3671107590198517, | |
| "learning_rate": 7.833820746257471e-06, | |
| "loss": 0.1812, | |
| "step": 834 | |
| }, | |
| { | |
| "epoch": 3.7782805429864252, | |
| "grad_norm": 0.48968544602394104, | |
| "learning_rate": 7.82731068913508e-06, | |
| "loss": 0.2532, | |
| "step": 835 | |
| }, | |
| { | |
| "epoch": 3.7828054298642533, | |
| "grad_norm": 0.378062903881073, | |
| "learning_rate": 7.82079357852969e-06, | |
| "loss": 0.2263, | |
| "step": 836 | |
| }, | |
| { | |
| "epoch": 3.7873303167420813, | |
| "grad_norm": 0.3558998107910156, | |
| "learning_rate": 7.814269430699978e-06, | |
| "loss": 0.2033, | |
| "step": 837 | |
| }, | |
| { | |
| "epoch": 3.7918552036199094, | |
| "grad_norm": 0.4033679664134979, | |
| "learning_rate": 7.807738261922173e-06, | |
| "loss": 0.2005, | |
| "step": 838 | |
| }, | |
| { | |
| "epoch": 3.7963800904977374, | |
| "grad_norm": 0.40575459599494934, | |
| "learning_rate": 7.801200088490026e-06, | |
| "loss": 0.2269, | |
| "step": 839 | |
| }, | |
| { | |
| "epoch": 3.8009049773755654, | |
| "grad_norm": 0.42206406593322754, | |
| "learning_rate": 7.794654926714755e-06, | |
| "loss": 0.2176, | |
| "step": 840 | |
| }, | |
| { | |
| "epoch": 3.8054298642533935, | |
| "grad_norm": 0.4195837080478668, | |
| "learning_rate": 7.788102792925026e-06, | |
| "loss": 0.1968, | |
| "step": 841 | |
| }, | |
| { | |
| "epoch": 3.8099547511312215, | |
| "grad_norm": 0.3960534930229187, | |
| "learning_rate": 7.781543703466881e-06, | |
| "loss": 0.183, | |
| "step": 842 | |
| }, | |
| { | |
| "epoch": 3.8144796380090495, | |
| "grad_norm": 0.3833674192428589, | |
| "learning_rate": 7.774977674703728e-06, | |
| "loss": 0.2106, | |
| "step": 843 | |
| }, | |
| { | |
| "epoch": 3.8190045248868776, | |
| "grad_norm": 0.4212116301059723, | |
| "learning_rate": 7.768404723016281e-06, | |
| "loss": 0.1978, | |
| "step": 844 | |
| }, | |
| { | |
| "epoch": 3.8235294117647056, | |
| "grad_norm": 0.38558632135391235, | |
| "learning_rate": 7.76182486480253e-06, | |
| "loss": 0.164, | |
| "step": 845 | |
| }, | |
| { | |
| "epoch": 3.8280542986425337, | |
| "grad_norm": 0.4841013252735138, | |
| "learning_rate": 7.755238116477688e-06, | |
| "loss": 0.2152, | |
| "step": 846 | |
| }, | |
| { | |
| "epoch": 3.832579185520362, | |
| "grad_norm": 0.41625547409057617, | |
| "learning_rate": 7.748644494474163e-06, | |
| "loss": 0.2322, | |
| "step": 847 | |
| }, | |
| { | |
| "epoch": 3.83710407239819, | |
| "grad_norm": 0.3868163228034973, | |
| "learning_rate": 7.742044015241508e-06, | |
| "loss": 0.1888, | |
| "step": 848 | |
| }, | |
| { | |
| "epoch": 3.841628959276018, | |
| "grad_norm": 0.3938911259174347, | |
| "learning_rate": 7.735436695246385e-06, | |
| "loss": 0.209, | |
| "step": 849 | |
| }, | |
| { | |
| "epoch": 3.8461538461538463, | |
| "grad_norm": 0.4158743619918823, | |
| "learning_rate": 7.728822550972523e-06, | |
| "loss": 0.1966, | |
| "step": 850 | |
| }, | |
| { | |
| "epoch": 3.8506787330316743, | |
| "grad_norm": 0.46179747581481934, | |
| "learning_rate": 7.722201598920673e-06, | |
| "loss": 0.2185, | |
| "step": 851 | |
| }, | |
| { | |
| "epoch": 3.8552036199095023, | |
| "grad_norm": 0.3782907724380493, | |
| "learning_rate": 7.715573855608574e-06, | |
| "loss": 0.1929, | |
| "step": 852 | |
| }, | |
| { | |
| "epoch": 3.8597285067873304, | |
| "grad_norm": 0.3639270067214966, | |
| "learning_rate": 7.7089393375709e-06, | |
| "loss": 0.2075, | |
| "step": 853 | |
| }, | |
| { | |
| "epoch": 3.8642533936651584, | |
| "grad_norm": 0.3893643021583557, | |
| "learning_rate": 7.702298061359236e-06, | |
| "loss": 0.2073, | |
| "step": 854 | |
| }, | |
| { | |
| "epoch": 3.8687782805429864, | |
| "grad_norm": 0.41035643219947815, | |
| "learning_rate": 7.69565004354202e-06, | |
| "loss": 0.2165, | |
| "step": 855 | |
| }, | |
| { | |
| "epoch": 3.8733031674208145, | |
| "grad_norm": 0.3843505382537842, | |
| "learning_rate": 7.688995300704511e-06, | |
| "loss": 0.1806, | |
| "step": 856 | |
| }, | |
| { | |
| "epoch": 3.8778280542986425, | |
| "grad_norm": 0.4357762038707733, | |
| "learning_rate": 7.682333849448749e-06, | |
| "loss": 0.2137, | |
| "step": 857 | |
| }, | |
| { | |
| "epoch": 3.8823529411764706, | |
| "grad_norm": 0.424322247505188, | |
| "learning_rate": 7.675665706393502e-06, | |
| "loss": 0.2281, | |
| "step": 858 | |
| }, | |
| { | |
| "epoch": 3.8868778280542986, | |
| "grad_norm": 0.46023404598236084, | |
| "learning_rate": 7.668990888174242e-06, | |
| "loss": 0.1564, | |
| "step": 859 | |
| }, | |
| { | |
| "epoch": 3.8914027149321266, | |
| "grad_norm": 0.4409455358982086, | |
| "learning_rate": 7.662309411443084e-06, | |
| "loss": 0.2128, | |
| "step": 860 | |
| }, | |
| { | |
| "epoch": 3.8959276018099547, | |
| "grad_norm": 0.4324515163898468, | |
| "learning_rate": 7.65562129286876e-06, | |
| "loss": 0.2044, | |
| "step": 861 | |
| }, | |
| { | |
| "epoch": 3.9004524886877827, | |
| "grad_norm": 0.41723108291625977, | |
| "learning_rate": 7.648926549136574e-06, | |
| "loss": 0.1992, | |
| "step": 862 | |
| }, | |
| { | |
| "epoch": 3.9049773755656108, | |
| "grad_norm": 0.40275150537490845, | |
| "learning_rate": 7.642225196948357e-06, | |
| "loss": 0.222, | |
| "step": 863 | |
| }, | |
| { | |
| "epoch": 3.909502262443439, | |
| "grad_norm": 0.3896505832672119, | |
| "learning_rate": 7.635517253022419e-06, | |
| "loss": 0.2029, | |
| "step": 864 | |
| }, | |
| { | |
| "epoch": 3.914027149321267, | |
| "grad_norm": 0.41698864102363586, | |
| "learning_rate": 7.628802734093528e-06, | |
| "loss": 0.1867, | |
| "step": 865 | |
| }, | |
| { | |
| "epoch": 3.918552036199095, | |
| "grad_norm": 0.3895374536514282, | |
| "learning_rate": 7.622081656912842e-06, | |
| "loss": 0.2302, | |
| "step": 866 | |
| }, | |
| { | |
| "epoch": 3.9230769230769234, | |
| "grad_norm": 0.42431965470314026, | |
| "learning_rate": 7.615354038247889e-06, | |
| "loss": 0.2197, | |
| "step": 867 | |
| }, | |
| { | |
| "epoch": 3.9276018099547514, | |
| "grad_norm": 0.4233725965023041, | |
| "learning_rate": 7.608619894882514e-06, | |
| "loss": 0.1918, | |
| "step": 868 | |
| }, | |
| { | |
| "epoch": 3.9321266968325794, | |
| "grad_norm": 0.39451637864112854, | |
| "learning_rate": 7.601879243616838e-06, | |
| "loss": 0.1807, | |
| "step": 869 | |
| }, | |
| { | |
| "epoch": 3.9366515837104075, | |
| "grad_norm": 0.42328158020973206, | |
| "learning_rate": 7.595132101267221e-06, | |
| "loss": 0.179, | |
| "step": 870 | |
| }, | |
| { | |
| "epoch": 3.9411764705882355, | |
| "grad_norm": 0.49399158358573914, | |
| "learning_rate": 7.588378484666214e-06, | |
| "loss": 0.2318, | |
| "step": 871 | |
| }, | |
| { | |
| "epoch": 3.9457013574660635, | |
| "grad_norm": 0.4472160041332245, | |
| "learning_rate": 7.581618410662519e-06, | |
| "loss": 0.1861, | |
| "step": 872 | |
| }, | |
| { | |
| "epoch": 3.9502262443438916, | |
| "grad_norm": 0.4840329587459564, | |
| "learning_rate": 7.574851896120951e-06, | |
| "loss": 0.2355, | |
| "step": 873 | |
| }, | |
| { | |
| "epoch": 3.9547511312217196, | |
| "grad_norm": 0.40697556734085083, | |
| "learning_rate": 7.568078957922394e-06, | |
| "loss": 0.1865, | |
| "step": 874 | |
| }, | |
| { | |
| "epoch": 3.9592760180995477, | |
| "grad_norm": 0.4209546744823456, | |
| "learning_rate": 7.56129961296375e-06, | |
| "loss": 0.231, | |
| "step": 875 | |
| }, | |
| { | |
| "epoch": 3.9638009049773757, | |
| "grad_norm": 0.36913007497787476, | |
| "learning_rate": 7.55451387815791e-06, | |
| "loss": 0.1939, | |
| "step": 876 | |
| }, | |
| { | |
| "epoch": 3.9683257918552037, | |
| "grad_norm": 0.45089685916900635, | |
| "learning_rate": 7.547721770433706e-06, | |
| "loss": 0.2428, | |
| "step": 877 | |
| }, | |
| { | |
| "epoch": 3.9728506787330318, | |
| "grad_norm": 0.39802390336990356, | |
| "learning_rate": 7.540923306735868e-06, | |
| "loss": 0.1777, | |
| "step": 878 | |
| }, | |
| { | |
| "epoch": 3.97737556561086, | |
| "grad_norm": 0.4216815233230591, | |
| "learning_rate": 7.534118504024981e-06, | |
| "loss": 0.2052, | |
| "step": 879 | |
| }, | |
| { | |
| "epoch": 3.981900452488688, | |
| "grad_norm": 0.5024846792221069, | |
| "learning_rate": 7.527307379277448e-06, | |
| "loss": 0.2161, | |
| "step": 880 | |
| }, | |
| { | |
| "epoch": 3.986425339366516, | |
| "grad_norm": 0.3847920000553131, | |
| "learning_rate": 7.5204899494854415e-06, | |
| "loss": 0.1682, | |
| "step": 881 | |
| }, | |
| { | |
| "epoch": 3.990950226244344, | |
| "grad_norm": 0.41892680525779724, | |
| "learning_rate": 7.5136662316568645e-06, | |
| "loss": 0.2123, | |
| "step": 882 | |
| }, | |
| { | |
| "epoch": 3.995475113122172, | |
| "grad_norm": 0.40700384974479675, | |
| "learning_rate": 7.5068362428153055e-06, | |
| "loss": 0.2115, | |
| "step": 883 | |
| }, | |
| { | |
| "epoch": 4.0, | |
| "grad_norm": 0.40372100472450256, | |
| "learning_rate": 7.500000000000001e-06, | |
| "loss": 0.1805, | |
| "step": 884 | |
| }, | |
| { | |
| "epoch": 4.004524886877828, | |
| "grad_norm": 0.4641391932964325, | |
| "learning_rate": 7.493157520265787e-06, | |
| "loss": 0.1209, | |
| "step": 885 | |
| }, | |
| { | |
| "epoch": 4.009049773755656, | |
| "grad_norm": 0.47821393609046936, | |
| "learning_rate": 7.48630882068306e-06, | |
| "loss": 0.1517, | |
| "step": 886 | |
| }, | |
| { | |
| "epoch": 4.013574660633484, | |
| "grad_norm": 0.3496209979057312, | |
| "learning_rate": 7.479453918337733e-06, | |
| "loss": 0.0967, | |
| "step": 887 | |
| }, | |
| { | |
| "epoch": 4.018099547511312, | |
| "grad_norm": 0.5180306434631348, | |
| "learning_rate": 7.472592830331196e-06, | |
| "loss": 0.1436, | |
| "step": 888 | |
| }, | |
| { | |
| "epoch": 4.02262443438914, | |
| "grad_norm": 0.6377127766609192, | |
| "learning_rate": 7.465725573780267e-06, | |
| "loss": 0.1437, | |
| "step": 889 | |
| }, | |
| { | |
| "epoch": 4.027149321266968, | |
| "grad_norm": 0.4262060225009918, | |
| "learning_rate": 7.458852165817153e-06, | |
| "loss": 0.1164, | |
| "step": 890 | |
| }, | |
| { | |
| "epoch": 4.031674208144796, | |
| "grad_norm": 0.46363940834999084, | |
| "learning_rate": 7.451972623589414e-06, | |
| "loss": 0.1251, | |
| "step": 891 | |
| }, | |
| { | |
| "epoch": 4.036199095022624, | |
| "grad_norm": 0.38100776076316833, | |
| "learning_rate": 7.445086964259905e-06, | |
| "loss": 0.1282, | |
| "step": 892 | |
| }, | |
| { | |
| "epoch": 4.040723981900452, | |
| "grad_norm": 0.37024420499801636, | |
| "learning_rate": 7.438195205006749e-06, | |
| "loss": 0.121, | |
| "step": 893 | |
| }, | |
| { | |
| "epoch": 4.04524886877828, | |
| "grad_norm": 0.3664066791534424, | |
| "learning_rate": 7.43129736302328e-06, | |
| "loss": 0.0924, | |
| "step": 894 | |
| }, | |
| { | |
| "epoch": 4.049773755656108, | |
| "grad_norm": 0.4302886426448822, | |
| "learning_rate": 7.4243934555180116e-06, | |
| "loss": 0.1134, | |
| "step": 895 | |
| }, | |
| { | |
| "epoch": 4.0542986425339365, | |
| "grad_norm": 0.3795878291130066, | |
| "learning_rate": 7.417483499714589e-06, | |
| "loss": 0.1179, | |
| "step": 896 | |
| }, | |
| { | |
| "epoch": 4.0588235294117645, | |
| "grad_norm": 0.3966153562068939, | |
| "learning_rate": 7.4105675128517456e-06, | |
| "loss": 0.1404, | |
| "step": 897 | |
| }, | |
| { | |
| "epoch": 4.0633484162895925, | |
| "grad_norm": 0.4186801314353943, | |
| "learning_rate": 7.4036455121832595e-06, | |
| "loss": 0.1253, | |
| "step": 898 | |
| }, | |
| { | |
| "epoch": 4.067873303167421, | |
| "grad_norm": 0.45986229181289673, | |
| "learning_rate": 7.396717514977916e-06, | |
| "loss": 0.1458, | |
| "step": 899 | |
| }, | |
| { | |
| "epoch": 4.072398190045249, | |
| "grad_norm": 0.5305169820785522, | |
| "learning_rate": 7.389783538519456e-06, | |
| "loss": 0.1608, | |
| "step": 900 | |
| }, | |
| { | |
| "epoch": 4.076923076923077, | |
| "grad_norm": 0.42427879571914673, | |
| "learning_rate": 7.382843600106539e-06, | |
| "loss": 0.1207, | |
| "step": 901 | |
| }, | |
| { | |
| "epoch": 4.081447963800905, | |
| "grad_norm": 0.3799537420272827, | |
| "learning_rate": 7.3758977170527e-06, | |
| "loss": 0.1235, | |
| "step": 902 | |
| }, | |
| { | |
| "epoch": 4.085972850678733, | |
| "grad_norm": 0.3693038821220398, | |
| "learning_rate": 7.368945906686303e-06, | |
| "loss": 0.1144, | |
| "step": 903 | |
| }, | |
| { | |
| "epoch": 4.090497737556561, | |
| "grad_norm": 0.3847089111804962, | |
| "learning_rate": 7.361988186350495e-06, | |
| "loss": 0.1265, | |
| "step": 904 | |
| }, | |
| { | |
| "epoch": 4.095022624434389, | |
| "grad_norm": 0.448414146900177, | |
| "learning_rate": 7.355024573403174e-06, | |
| "loss": 0.1451, | |
| "step": 905 | |
| }, | |
| { | |
| "epoch": 4.099547511312217, | |
| "grad_norm": 0.42488551139831543, | |
| "learning_rate": 7.348055085216937e-06, | |
| "loss": 0.0954, | |
| "step": 906 | |
| }, | |
| { | |
| "epoch": 4.104072398190045, | |
| "grad_norm": 0.45263928174972534, | |
| "learning_rate": 7.341079739179033e-06, | |
| "loss": 0.143, | |
| "step": 907 | |
| }, | |
| { | |
| "epoch": 4.108597285067873, | |
| "grad_norm": 0.40875062346458435, | |
| "learning_rate": 7.3340985526913335e-06, | |
| "loss": 0.1438, | |
| "step": 908 | |
| }, | |
| { | |
| "epoch": 4.113122171945701, | |
| "grad_norm": 0.4271523356437683, | |
| "learning_rate": 7.327111543170274e-06, | |
| "loss": 0.1377, | |
| "step": 909 | |
| }, | |
| { | |
| "epoch": 4.117647058823529, | |
| "grad_norm": 0.38401153683662415, | |
| "learning_rate": 7.320118728046818e-06, | |
| "loss": 0.1004, | |
| "step": 910 | |
| }, | |
| { | |
| "epoch": 4.122171945701357, | |
| "grad_norm": 0.42597001791000366, | |
| "learning_rate": 7.313120124766417e-06, | |
| "loss": 0.1505, | |
| "step": 911 | |
| }, | |
| { | |
| "epoch": 4.126696832579185, | |
| "grad_norm": 0.40540316700935364, | |
| "learning_rate": 7.306115750788956e-06, | |
| "loss": 0.1144, | |
| "step": 912 | |
| }, | |
| { | |
| "epoch": 4.131221719457014, | |
| "grad_norm": 0.3972390592098236, | |
| "learning_rate": 7.299105623588722e-06, | |
| "loss": 0.1068, | |
| "step": 913 | |
| }, | |
| { | |
| "epoch": 4.135746606334842, | |
| "grad_norm": 0.39872103929519653, | |
| "learning_rate": 7.292089760654352e-06, | |
| "loss": 0.1106, | |
| "step": 914 | |
| }, | |
| { | |
| "epoch": 4.14027149321267, | |
| "grad_norm": 0.4083847403526306, | |
| "learning_rate": 7.2850681794887946e-06, | |
| "loss": 0.1402, | |
| "step": 915 | |
| }, | |
| { | |
| "epoch": 4.144796380090498, | |
| "grad_norm": 0.4313944876194, | |
| "learning_rate": 7.278040897609262e-06, | |
| "loss": 0.1467, | |
| "step": 916 | |
| }, | |
| { | |
| "epoch": 4.149321266968326, | |
| "grad_norm": 0.4161936640739441, | |
| "learning_rate": 7.271007932547188e-06, | |
| "loss": 0.1135, | |
| "step": 917 | |
| }, | |
| { | |
| "epoch": 4.153846153846154, | |
| "grad_norm": 0.385703444480896, | |
| "learning_rate": 7.263969301848188e-06, | |
| "loss": 0.1019, | |
| "step": 918 | |
| }, | |
| { | |
| "epoch": 4.158371040723982, | |
| "grad_norm": 0.41860657930374146, | |
| "learning_rate": 7.256925023072008e-06, | |
| "loss": 0.129, | |
| "step": 919 | |
| }, | |
| { | |
| "epoch": 4.16289592760181, | |
| "grad_norm": 0.4360624849796295, | |
| "learning_rate": 7.249875113792485e-06, | |
| "loss": 0.1349, | |
| "step": 920 | |
| }, | |
| { | |
| "epoch": 4.167420814479638, | |
| "grad_norm": 0.39161208271980286, | |
| "learning_rate": 7.242819591597507e-06, | |
| "loss": 0.1048, | |
| "step": 921 | |
| }, | |
| { | |
| "epoch": 4.171945701357466, | |
| "grad_norm": 0.36276566982269287, | |
| "learning_rate": 7.235758474088961e-06, | |
| "loss": 0.1253, | |
| "step": 922 | |
| }, | |
| { | |
| "epoch": 4.176470588235294, | |
| "grad_norm": 0.414889931678772, | |
| "learning_rate": 7.2286917788826926e-06, | |
| "loss": 0.1252, | |
| "step": 923 | |
| }, | |
| { | |
| "epoch": 4.180995475113122, | |
| "grad_norm": 0.38982105255126953, | |
| "learning_rate": 7.221619523608464e-06, | |
| "loss": 0.135, | |
| "step": 924 | |
| }, | |
| { | |
| "epoch": 4.1855203619909505, | |
| "grad_norm": 0.3900318145751953, | |
| "learning_rate": 7.21454172590991e-06, | |
| "loss": 0.1232, | |
| "step": 925 | |
| }, | |
| { | |
| "epoch": 4.1900452488687785, | |
| "grad_norm": 0.4147709608078003, | |
| "learning_rate": 7.207458403444488e-06, | |
| "loss": 0.1217, | |
| "step": 926 | |
| }, | |
| { | |
| "epoch": 4.1945701357466065, | |
| "grad_norm": 0.43696555495262146, | |
| "learning_rate": 7.200369573883444e-06, | |
| "loss": 0.1292, | |
| "step": 927 | |
| }, | |
| { | |
| "epoch": 4.199095022624435, | |
| "grad_norm": 0.4161735475063324, | |
| "learning_rate": 7.193275254911758e-06, | |
| "loss": 0.1456, | |
| "step": 928 | |
| }, | |
| { | |
| "epoch": 4.203619909502263, | |
| "grad_norm": 0.4888911545276642, | |
| "learning_rate": 7.186175464228109e-06, | |
| "loss": 0.1266, | |
| "step": 929 | |
| }, | |
| { | |
| "epoch": 4.208144796380091, | |
| "grad_norm": 0.39078477025032043, | |
| "learning_rate": 7.179070219544822e-06, | |
| "loss": 0.1055, | |
| "step": 930 | |
| }, | |
| { | |
| "epoch": 4.212669683257919, | |
| "grad_norm": 0.4553499221801758, | |
| "learning_rate": 7.171959538587831e-06, | |
| "loss": 0.1048, | |
| "step": 931 | |
| }, | |
| { | |
| "epoch": 4.217194570135747, | |
| "grad_norm": 0.4161282479763031, | |
| "learning_rate": 7.1648434390966356e-06, | |
| "loss": 0.1229, | |
| "step": 932 | |
| }, | |
| { | |
| "epoch": 4.221719457013575, | |
| "grad_norm": 0.3798969089984894, | |
| "learning_rate": 7.157721938824247e-06, | |
| "loss": 0.1178, | |
| "step": 933 | |
| }, | |
| { | |
| "epoch": 4.226244343891403, | |
| "grad_norm": 0.3456389009952545, | |
| "learning_rate": 7.150595055537153e-06, | |
| "loss": 0.1244, | |
| "step": 934 | |
| }, | |
| { | |
| "epoch": 4.230769230769231, | |
| "grad_norm": 0.4065212607383728, | |
| "learning_rate": 7.143462807015271e-06, | |
| "loss": 0.1242, | |
| "step": 935 | |
| }, | |
| { | |
| "epoch": 4.235294117647059, | |
| "grad_norm": 0.43408218026161194, | |
| "learning_rate": 7.136325211051905e-06, | |
| "loss": 0.1324, | |
| "step": 936 | |
| }, | |
| { | |
| "epoch": 4.239819004524887, | |
| "grad_norm": 0.4018978774547577, | |
| "learning_rate": 7.129182285453697e-06, | |
| "loss": 0.1435, | |
| "step": 937 | |
| }, | |
| { | |
| "epoch": 4.244343891402715, | |
| "grad_norm": 0.38995468616485596, | |
| "learning_rate": 7.122034048040586e-06, | |
| "loss": 0.105, | |
| "step": 938 | |
| }, | |
| { | |
| "epoch": 4.248868778280543, | |
| "grad_norm": 0.40599551796913147, | |
| "learning_rate": 7.114880516645761e-06, | |
| "loss": 0.122, | |
| "step": 939 | |
| }, | |
| { | |
| "epoch": 4.253393665158371, | |
| "grad_norm": 0.40751129388809204, | |
| "learning_rate": 7.107721709115624e-06, | |
| "loss": 0.1338, | |
| "step": 940 | |
| }, | |
| { | |
| "epoch": 4.257918552036199, | |
| "grad_norm": 0.38541626930236816, | |
| "learning_rate": 7.100557643309732e-06, | |
| "loss": 0.1121, | |
| "step": 941 | |
| }, | |
| { | |
| "epoch": 4.262443438914027, | |
| "grad_norm": 0.38623547554016113, | |
| "learning_rate": 7.0933883371007665e-06, | |
| "loss": 0.1053, | |
| "step": 942 | |
| }, | |
| { | |
| "epoch": 4.266968325791855, | |
| "grad_norm": 0.3839960992336273, | |
| "learning_rate": 7.086213808374478e-06, | |
| "loss": 0.1153, | |
| "step": 943 | |
| }, | |
| { | |
| "epoch": 4.271493212669683, | |
| "grad_norm": 0.35866132378578186, | |
| "learning_rate": 7.079034075029651e-06, | |
| "loss": 0.1032, | |
| "step": 944 | |
| }, | |
| { | |
| "epoch": 4.276018099547511, | |
| "grad_norm": 0.4211701452732086, | |
| "learning_rate": 7.071849154978048e-06, | |
| "loss": 0.1228, | |
| "step": 945 | |
| }, | |
| { | |
| "epoch": 4.280542986425339, | |
| "grad_norm": 0.4204184114933014, | |
| "learning_rate": 7.064659066144376e-06, | |
| "loss": 0.1409, | |
| "step": 946 | |
| }, | |
| { | |
| "epoch": 4.285067873303167, | |
| "grad_norm": 0.39663875102996826, | |
| "learning_rate": 7.057463826466235e-06, | |
| "loss": 0.1291, | |
| "step": 947 | |
| }, | |
| { | |
| "epoch": 4.289592760180995, | |
| "grad_norm": 0.4302012622356415, | |
| "learning_rate": 7.050263453894078e-06, | |
| "loss": 0.1281, | |
| "step": 948 | |
| }, | |
| { | |
| "epoch": 4.294117647058823, | |
| "grad_norm": 0.39377084374427795, | |
| "learning_rate": 7.043057966391158e-06, | |
| "loss": 0.1471, | |
| "step": 949 | |
| }, | |
| { | |
| "epoch": 4.298642533936651, | |
| "grad_norm": 0.4178546369075775, | |
| "learning_rate": 7.035847381933494e-06, | |
| "loss": 0.1139, | |
| "step": 950 | |
| }, | |
| { | |
| "epoch": 4.3031674208144794, | |
| "grad_norm": 0.4286903738975525, | |
| "learning_rate": 7.028631718509816e-06, | |
| "loss": 0.1389, | |
| "step": 951 | |
| }, | |
| { | |
| "epoch": 4.3076923076923075, | |
| "grad_norm": 0.4679263234138489, | |
| "learning_rate": 7.021410994121525e-06, | |
| "loss": 0.141, | |
| "step": 952 | |
| }, | |
| { | |
| "epoch": 4.3122171945701355, | |
| "grad_norm": 0.38693973422050476, | |
| "learning_rate": 7.014185226782655e-06, | |
| "loss": 0.1114, | |
| "step": 953 | |
| }, | |
| { | |
| "epoch": 4.316742081447964, | |
| "grad_norm": 0.514302670955658, | |
| "learning_rate": 7.006954434519814e-06, | |
| "loss": 0.0925, | |
| "step": 954 | |
| }, | |
| { | |
| "epoch": 4.321266968325792, | |
| "grad_norm": 0.36845308542251587, | |
| "learning_rate": 6.999718635372148e-06, | |
| "loss": 0.1151, | |
| "step": 955 | |
| }, | |
| { | |
| "epoch": 4.32579185520362, | |
| "grad_norm": 0.4446454346179962, | |
| "learning_rate": 6.992477847391292e-06, | |
| "loss": 0.1057, | |
| "step": 956 | |
| }, | |
| { | |
| "epoch": 4.330316742081448, | |
| "grad_norm": 0.39890649914741516, | |
| "learning_rate": 6.985232088641333e-06, | |
| "loss": 0.1156, | |
| "step": 957 | |
| }, | |
| { | |
| "epoch": 4.334841628959276, | |
| "grad_norm": 0.4039772152900696, | |
| "learning_rate": 6.9779813771987525e-06, | |
| "loss": 0.1212, | |
| "step": 958 | |
| }, | |
| { | |
| "epoch": 4.339366515837104, | |
| "grad_norm": 0.38077545166015625, | |
| "learning_rate": 6.970725731152389e-06, | |
| "loss": 0.0997, | |
| "step": 959 | |
| }, | |
| { | |
| "epoch": 4.343891402714932, | |
| "grad_norm": 0.44557592272758484, | |
| "learning_rate": 6.963465168603395e-06, | |
| "loss": 0.1263, | |
| "step": 960 | |
| }, | |
| { | |
| "epoch": 4.34841628959276, | |
| "grad_norm": 0.45519551634788513, | |
| "learning_rate": 6.9561997076651854e-06, | |
| "loss": 0.1457, | |
| "step": 961 | |
| }, | |
| { | |
| "epoch": 4.352941176470588, | |
| "grad_norm": 0.37018322944641113, | |
| "learning_rate": 6.948929366463397e-06, | |
| "loss": 0.0994, | |
| "step": 962 | |
| }, | |
| { | |
| "epoch": 4.357466063348416, | |
| "grad_norm": 0.40303707122802734, | |
| "learning_rate": 6.941654163135841e-06, | |
| "loss": 0.1476, | |
| "step": 963 | |
| }, | |
| { | |
| "epoch": 4.361990950226244, | |
| "grad_norm": 0.39125677943229675, | |
| "learning_rate": 6.934374115832459e-06, | |
| "loss": 0.1516, | |
| "step": 964 | |
| }, | |
| { | |
| "epoch": 4.366515837104072, | |
| "grad_norm": 0.3679739534854889, | |
| "learning_rate": 6.927089242715277e-06, | |
| "loss": 0.1074, | |
| "step": 965 | |
| }, | |
| { | |
| "epoch": 4.371040723981901, | |
| "grad_norm": 0.38703691959381104, | |
| "learning_rate": 6.919799561958359e-06, | |
| "loss": 0.1351, | |
| "step": 966 | |
| }, | |
| { | |
| "epoch": 4.375565610859729, | |
| "grad_norm": 0.45326265692710876, | |
| "learning_rate": 6.912505091747765e-06, | |
| "loss": 0.1347, | |
| "step": 967 | |
| }, | |
| { | |
| "epoch": 4.380090497737557, | |
| "grad_norm": 0.39937224984169006, | |
| "learning_rate": 6.905205850281502e-06, | |
| "loss": 0.1658, | |
| "step": 968 | |
| }, | |
| { | |
| "epoch": 4.384615384615385, | |
| "grad_norm": 0.39014557003974915, | |
| "learning_rate": 6.897901855769483e-06, | |
| "loss": 0.1213, | |
| "step": 969 | |
| }, | |
| { | |
| "epoch": 4.389140271493213, | |
| "grad_norm": 0.41682887077331543, | |
| "learning_rate": 6.890593126433474e-06, | |
| "loss": 0.1256, | |
| "step": 970 | |
| }, | |
| { | |
| "epoch": 4.393665158371041, | |
| "grad_norm": 0.37030866742134094, | |
| "learning_rate": 6.883279680507057e-06, | |
| "loss": 0.1239, | |
| "step": 971 | |
| }, | |
| { | |
| "epoch": 4.398190045248869, | |
| "grad_norm": 0.4141203463077545, | |
| "learning_rate": 6.8759615362355795e-06, | |
| "loss": 0.1365, | |
| "step": 972 | |
| }, | |
| { | |
| "epoch": 4.402714932126697, | |
| "grad_norm": 0.5501455664634705, | |
| "learning_rate": 6.868638711876109e-06, | |
| "loss": 0.1241, | |
| "step": 973 | |
| }, | |
| { | |
| "epoch": 4.407239819004525, | |
| "grad_norm": 0.42243292927742004, | |
| "learning_rate": 6.861311225697392e-06, | |
| "loss": 0.1347, | |
| "step": 974 | |
| }, | |
| { | |
| "epoch": 4.411764705882353, | |
| "grad_norm": 0.43161842226982117, | |
| "learning_rate": 6.8539790959798045e-06, | |
| "loss": 0.1299, | |
| "step": 975 | |
| }, | |
| { | |
| "epoch": 4.416289592760181, | |
| "grad_norm": 0.3907506465911865, | |
| "learning_rate": 6.846642341015304e-06, | |
| "loss": 0.1083, | |
| "step": 976 | |
| }, | |
| { | |
| "epoch": 4.420814479638009, | |
| "grad_norm": 0.39436179399490356, | |
| "learning_rate": 6.8393009791073895e-06, | |
| "loss": 0.1177, | |
| "step": 977 | |
| }, | |
| { | |
| "epoch": 4.425339366515837, | |
| "grad_norm": 0.3935137689113617, | |
| "learning_rate": 6.831955028571051e-06, | |
| "loss": 0.1141, | |
| "step": 978 | |
| }, | |
| { | |
| "epoch": 4.429864253393665, | |
| "grad_norm": 0.3807559609413147, | |
| "learning_rate": 6.82460450773273e-06, | |
| "loss": 0.1033, | |
| "step": 979 | |
| }, | |
| { | |
| "epoch": 4.4343891402714934, | |
| "grad_norm": 0.3852434456348419, | |
| "learning_rate": 6.817249434930267e-06, | |
| "loss": 0.134, | |
| "step": 980 | |
| }, | |
| { | |
| "epoch": 4.4389140271493215, | |
| "grad_norm": 0.4558684825897217, | |
| "learning_rate": 6.809889828512857e-06, | |
| "loss": 0.1246, | |
| "step": 981 | |
| }, | |
| { | |
| "epoch": 4.4434389140271495, | |
| "grad_norm": 0.3894815742969513, | |
| "learning_rate": 6.80252570684101e-06, | |
| "loss": 0.1376, | |
| "step": 982 | |
| }, | |
| { | |
| "epoch": 4.447963800904978, | |
| "grad_norm": 0.43874591588974, | |
| "learning_rate": 6.7951570882864944e-06, | |
| "loss": 0.1458, | |
| "step": 983 | |
| }, | |
| { | |
| "epoch": 4.452488687782806, | |
| "grad_norm": 0.39900916814804077, | |
| "learning_rate": 6.787783991232305e-06, | |
| "loss": 0.1398, | |
| "step": 984 | |
| }, | |
| { | |
| "epoch": 4.457013574660634, | |
| "grad_norm": 0.4251669645309448, | |
| "learning_rate": 6.780406434072603e-06, | |
| "loss": 0.1445, | |
| "step": 985 | |
| }, | |
| { | |
| "epoch": 4.461538461538462, | |
| "grad_norm": 0.42457953095436096, | |
| "learning_rate": 6.773024435212678e-06, | |
| "loss": 0.1344, | |
| "step": 986 | |
| }, | |
| { | |
| "epoch": 4.46606334841629, | |
| "grad_norm": 0.48800379037857056, | |
| "learning_rate": 6.765638013068903e-06, | |
| "loss": 0.1183, | |
| "step": 987 | |
| }, | |
| { | |
| "epoch": 4.470588235294118, | |
| "grad_norm": 0.3914732336997986, | |
| "learning_rate": 6.758247186068684e-06, | |
| "loss": 0.1176, | |
| "step": 988 | |
| }, | |
| { | |
| "epoch": 4.475113122171946, | |
| "grad_norm": 0.40365973114967346, | |
| "learning_rate": 6.750851972650416e-06, | |
| "loss": 0.1166, | |
| "step": 989 | |
| }, | |
| { | |
| "epoch": 4.479638009049774, | |
| "grad_norm": 0.4439378082752228, | |
| "learning_rate": 6.743452391263438e-06, | |
| "loss": 0.1109, | |
| "step": 990 | |
| }, | |
| { | |
| "epoch": 4.484162895927602, | |
| "grad_norm": 0.365218847990036, | |
| "learning_rate": 6.736048460367983e-06, | |
| "loss": 0.1292, | |
| "step": 991 | |
| }, | |
| { | |
| "epoch": 4.48868778280543, | |
| "grad_norm": 0.37418243288993835, | |
| "learning_rate": 6.728640198435143e-06, | |
| "loss": 0.1404, | |
| "step": 992 | |
| }, | |
| { | |
| "epoch": 4.493212669683258, | |
| "grad_norm": 0.3743608891963959, | |
| "learning_rate": 6.721227623946804e-06, | |
| "loss": 0.115, | |
| "step": 993 | |
| }, | |
| { | |
| "epoch": 4.497737556561086, | |
| "grad_norm": 0.384598046541214, | |
| "learning_rate": 6.71381075539562e-06, | |
| "loss": 0.1036, | |
| "step": 994 | |
| }, | |
| { | |
| "epoch": 4.502262443438914, | |
| "grad_norm": 0.4858543276786804, | |
| "learning_rate": 6.706389611284953e-06, | |
| "loss": 0.1347, | |
| "step": 995 | |
| }, | |
| { | |
| "epoch": 4.506787330316742, | |
| "grad_norm": 0.3937552869319916, | |
| "learning_rate": 6.698964210128832e-06, | |
| "loss": 0.1274, | |
| "step": 996 | |
| }, | |
| { | |
| "epoch": 4.51131221719457, | |
| "grad_norm": 0.4109607934951782, | |
| "learning_rate": 6.691534570451908e-06, | |
| "loss": 0.1214, | |
| "step": 997 | |
| }, | |
| { | |
| "epoch": 4.515837104072398, | |
| "grad_norm": 0.37876155972480774, | |
| "learning_rate": 6.684100710789405e-06, | |
| "loss": 0.1353, | |
| "step": 998 | |
| }, | |
| { | |
| "epoch": 4.520361990950226, | |
| "grad_norm": 0.4157622158527374, | |
| "learning_rate": 6.676662649687074e-06, | |
| "loss": 0.143, | |
| "step": 999 | |
| }, | |
| { | |
| "epoch": 4.524886877828054, | |
| "grad_norm": 0.41489532589912415, | |
| "learning_rate": 6.669220405701149e-06, | |
| "loss": 0.1336, | |
| "step": 1000 | |
| }, | |
| { | |
| "epoch": 4.529411764705882, | |
| "grad_norm": 0.3667726516723633, | |
| "learning_rate": 6.6617739973982985e-06, | |
| "loss": 0.1218, | |
| "step": 1001 | |
| }, | |
| { | |
| "epoch": 4.53393665158371, | |
| "grad_norm": 0.4074230194091797, | |
| "learning_rate": 6.6543234433555815e-06, | |
| "loss": 0.1255, | |
| "step": 1002 | |
| }, | |
| { | |
| "epoch": 4.538461538461538, | |
| "grad_norm": 0.358986496925354, | |
| "learning_rate": 6.646868762160399e-06, | |
| "loss": 0.1037, | |
| "step": 1003 | |
| }, | |
| { | |
| "epoch": 4.542986425339366, | |
| "grad_norm": 0.4198194742202759, | |
| "learning_rate": 6.639409972410446e-06, | |
| "loss": 0.1726, | |
| "step": 1004 | |
| }, | |
| { | |
| "epoch": 4.547511312217194, | |
| "grad_norm": 0.3871258497238159, | |
| "learning_rate": 6.6319470927136694e-06, | |
| "loss": 0.104, | |
| "step": 1005 | |
| }, | |
| { | |
| "epoch": 4.552036199095022, | |
| "grad_norm": 0.40250852704048157, | |
| "learning_rate": 6.624480141688221e-06, | |
| "loss": 0.1248, | |
| "step": 1006 | |
| }, | |
| { | |
| "epoch": 4.5565610859728505, | |
| "grad_norm": 0.38853251934051514, | |
| "learning_rate": 6.617009137962407e-06, | |
| "loss": 0.1078, | |
| "step": 1007 | |
| }, | |
| { | |
| "epoch": 4.5610859728506785, | |
| "grad_norm": 0.45916301012039185, | |
| "learning_rate": 6.609534100174646e-06, | |
| "loss": 0.1556, | |
| "step": 1008 | |
| }, | |
| { | |
| "epoch": 4.5656108597285066, | |
| "grad_norm": 0.43607795238494873, | |
| "learning_rate": 6.6020550469734175e-06, | |
| "loss": 0.1119, | |
| "step": 1009 | |
| }, | |
| { | |
| "epoch": 4.570135746606335, | |
| "grad_norm": 0.3727249503135681, | |
| "learning_rate": 6.594571997017224e-06, | |
| "loss": 0.1095, | |
| "step": 1010 | |
| }, | |
| { | |
| "epoch": 4.574660633484163, | |
| "grad_norm": 0.4152163863182068, | |
| "learning_rate": 6.587084968974533e-06, | |
| "loss": 0.141, | |
| "step": 1011 | |
| }, | |
| { | |
| "epoch": 4.579185520361991, | |
| "grad_norm": 0.3729671835899353, | |
| "learning_rate": 6.5795939815237405e-06, | |
| "loss": 0.1133, | |
| "step": 1012 | |
| }, | |
| { | |
| "epoch": 4.583710407239819, | |
| "grad_norm": 0.3653905391693115, | |
| "learning_rate": 6.57209905335312e-06, | |
| "loss": 0.1186, | |
| "step": 1013 | |
| }, | |
| { | |
| "epoch": 4.588235294117647, | |
| "grad_norm": 0.406921923160553, | |
| "learning_rate": 6.5646002031607726e-06, | |
| "loss": 0.1338, | |
| "step": 1014 | |
| }, | |
| { | |
| "epoch": 4.592760180995475, | |
| "grad_norm": 0.46261557936668396, | |
| "learning_rate": 6.557097449654588e-06, | |
| "loss": 0.1515, | |
| "step": 1015 | |
| }, | |
| { | |
| "epoch": 4.597285067873303, | |
| "grad_norm": 0.43040263652801514, | |
| "learning_rate": 6.549590811552193e-06, | |
| "loss": 0.1087, | |
| "step": 1016 | |
| }, | |
| { | |
| "epoch": 4.601809954751131, | |
| "grad_norm": 0.4118295907974243, | |
| "learning_rate": 6.542080307580905e-06, | |
| "loss": 0.1161, | |
| "step": 1017 | |
| }, | |
| { | |
| "epoch": 4.606334841628959, | |
| "grad_norm": 0.5026023387908936, | |
| "learning_rate": 6.534565956477684e-06, | |
| "loss": 0.1373, | |
| "step": 1018 | |
| }, | |
| { | |
| "epoch": 4.610859728506787, | |
| "grad_norm": 0.37630221247673035, | |
| "learning_rate": 6.5270477769890906e-06, | |
| "loss": 0.1281, | |
| "step": 1019 | |
| }, | |
| { | |
| "epoch": 4.615384615384615, | |
| "grad_norm": 0.4093388617038727, | |
| "learning_rate": 6.519525787871235e-06, | |
| "loss": 0.1177, | |
| "step": 1020 | |
| }, | |
| { | |
| "epoch": 4.619909502262443, | |
| "grad_norm": 0.38547810912132263, | |
| "learning_rate": 6.512000007889731e-06, | |
| "loss": 0.1268, | |
| "step": 1021 | |
| }, | |
| { | |
| "epoch": 4.624434389140271, | |
| "grad_norm": 0.3508959412574768, | |
| "learning_rate": 6.504470455819651e-06, | |
| "loss": 0.1081, | |
| "step": 1022 | |
| }, | |
| { | |
| "epoch": 4.628959276018099, | |
| "grad_norm": 0.40760180354118347, | |
| "learning_rate": 6.496937150445478e-06, | |
| "loss": 0.1152, | |
| "step": 1023 | |
| }, | |
| { | |
| "epoch": 4.633484162895927, | |
| "grad_norm": 0.42812004685401917, | |
| "learning_rate": 6.489400110561056e-06, | |
| "loss": 0.1307, | |
| "step": 1024 | |
| }, | |
| { | |
| "epoch": 4.638009049773755, | |
| "grad_norm": 0.3820403218269348, | |
| "learning_rate": 6.481859354969549e-06, | |
| "loss": 0.131, | |
| "step": 1025 | |
| }, | |
| { | |
| "epoch": 4.642533936651584, | |
| "grad_norm": 0.374654620885849, | |
| "learning_rate": 6.474314902483388e-06, | |
| "loss": 0.1177, | |
| "step": 1026 | |
| }, | |
| { | |
| "epoch": 4.647058823529412, | |
| "grad_norm": 0.3797123432159424, | |
| "learning_rate": 6.466766771924231e-06, | |
| "loss": 0.1328, | |
| "step": 1027 | |
| }, | |
| { | |
| "epoch": 4.65158371040724, | |
| "grad_norm": 0.42189446091651917, | |
| "learning_rate": 6.4592149821229064e-06, | |
| "loss": 0.1309, | |
| "step": 1028 | |
| }, | |
| { | |
| "epoch": 4.656108597285068, | |
| "grad_norm": 0.4253498911857605, | |
| "learning_rate": 6.451659551919378e-06, | |
| "loss": 0.1297, | |
| "step": 1029 | |
| }, | |
| { | |
| "epoch": 4.660633484162896, | |
| "grad_norm": 0.4357294738292694, | |
| "learning_rate": 6.444100500162684e-06, | |
| "loss": 0.1669, | |
| "step": 1030 | |
| }, | |
| { | |
| "epoch": 4.665158371040724, | |
| "grad_norm": 0.3664080798625946, | |
| "learning_rate": 6.436537845710904e-06, | |
| "loss": 0.1158, | |
| "step": 1031 | |
| }, | |
| { | |
| "epoch": 4.669683257918552, | |
| "grad_norm": 0.4295387268066406, | |
| "learning_rate": 6.428971607431102e-06, | |
| "loss": 0.1322, | |
| "step": 1032 | |
| }, | |
| { | |
| "epoch": 4.67420814479638, | |
| "grad_norm": 0.3990268409252167, | |
| "learning_rate": 6.421401804199284e-06, | |
| "loss": 0.1383, | |
| "step": 1033 | |
| }, | |
| { | |
| "epoch": 4.678733031674208, | |
| "grad_norm": 0.4077833890914917, | |
| "learning_rate": 6.413828454900351e-06, | |
| "loss": 0.1474, | |
| "step": 1034 | |
| }, | |
| { | |
| "epoch": 4.683257918552036, | |
| "grad_norm": 0.4128705859184265, | |
| "learning_rate": 6.406251578428047e-06, | |
| "loss": 0.1417, | |
| "step": 1035 | |
| }, | |
| { | |
| "epoch": 4.6877828054298645, | |
| "grad_norm": 0.41966959834098816, | |
| "learning_rate": 6.398671193684919e-06, | |
| "loss": 0.0998, | |
| "step": 1036 | |
| }, | |
| { | |
| "epoch": 4.6923076923076925, | |
| "grad_norm": 0.379185289144516, | |
| "learning_rate": 6.391087319582264e-06, | |
| "loss": 0.1333, | |
| "step": 1037 | |
| }, | |
| { | |
| "epoch": 4.6968325791855206, | |
| "grad_norm": 0.38707613945007324, | |
| "learning_rate": 6.383499975040086e-06, | |
| "loss": 0.1288, | |
| "step": 1038 | |
| }, | |
| { | |
| "epoch": 4.701357466063349, | |
| "grad_norm": 0.4196775257587433, | |
| "learning_rate": 6.375909178987046e-06, | |
| "loss": 0.1327, | |
| "step": 1039 | |
| }, | |
| { | |
| "epoch": 4.705882352941177, | |
| "grad_norm": 0.36592400074005127, | |
| "learning_rate": 6.368314950360416e-06, | |
| "loss": 0.1233, | |
| "step": 1040 | |
| }, | |
| { | |
| "epoch": 4.710407239819005, | |
| "grad_norm": 0.35522153973579407, | |
| "learning_rate": 6.36071730810603e-06, | |
| "loss": 0.1071, | |
| "step": 1041 | |
| }, | |
| { | |
| "epoch": 4.714932126696833, | |
| "grad_norm": 0.3890746533870697, | |
| "learning_rate": 6.35311627117824e-06, | |
| "loss": 0.1321, | |
| "step": 1042 | |
| }, | |
| { | |
| "epoch": 4.719457013574661, | |
| "grad_norm": 0.3768482506275177, | |
| "learning_rate": 6.3455118585398676e-06, | |
| "loss": 0.1092, | |
| "step": 1043 | |
| }, | |
| { | |
| "epoch": 4.723981900452489, | |
| "grad_norm": 0.3709065318107605, | |
| "learning_rate": 6.337904089162152e-06, | |
| "loss": 0.1207, | |
| "step": 1044 | |
| }, | |
| { | |
| "epoch": 4.728506787330317, | |
| "grad_norm": 0.4227345585823059, | |
| "learning_rate": 6.3302929820247125e-06, | |
| "loss": 0.1046, | |
| "step": 1045 | |
| }, | |
| { | |
| "epoch": 4.733031674208145, | |
| "grad_norm": 0.44126981496810913, | |
| "learning_rate": 6.3226785561154914e-06, | |
| "loss": 0.1215, | |
| "step": 1046 | |
| }, | |
| { | |
| "epoch": 4.737556561085973, | |
| "grad_norm": 0.3676055669784546, | |
| "learning_rate": 6.315060830430711e-06, | |
| "loss": 0.1379, | |
| "step": 1047 | |
| }, | |
| { | |
| "epoch": 4.742081447963801, | |
| "grad_norm": 0.41453203558921814, | |
| "learning_rate": 6.307439823974826e-06, | |
| "loss": 0.1357, | |
| "step": 1048 | |
| }, | |
| { | |
| "epoch": 4.746606334841629, | |
| "grad_norm": 0.43501806259155273, | |
| "learning_rate": 6.299815555760478e-06, | |
| "loss": 0.1295, | |
| "step": 1049 | |
| }, | |
| { | |
| "epoch": 4.751131221719457, | |
| "grad_norm": 0.38064759969711304, | |
| "learning_rate": 6.29218804480844e-06, | |
| "loss": 0.132, | |
| "step": 1050 | |
| }, | |
| { | |
| "epoch": 4.755656108597285, | |
| "grad_norm": 0.41559624671936035, | |
| "learning_rate": 6.284557310147583e-06, | |
| "loss": 0.1299, | |
| "step": 1051 | |
| }, | |
| { | |
| "epoch": 4.760180995475113, | |
| "grad_norm": 0.40272775292396545, | |
| "learning_rate": 6.276923370814815e-06, | |
| "loss": 0.1378, | |
| "step": 1052 | |
| }, | |
| { | |
| "epoch": 4.764705882352941, | |
| "grad_norm": 0.38200849294662476, | |
| "learning_rate": 6.269286245855039e-06, | |
| "loss": 0.121, | |
| "step": 1053 | |
| }, | |
| { | |
| "epoch": 4.769230769230769, | |
| "grad_norm": 0.42263320088386536, | |
| "learning_rate": 6.261645954321109e-06, | |
| "loss": 0.1377, | |
| "step": 1054 | |
| }, | |
| { | |
| "epoch": 4.773755656108597, | |
| "grad_norm": 0.44799476861953735, | |
| "learning_rate": 6.254002515273775e-06, | |
| "loss": 0.1183, | |
| "step": 1055 | |
| }, | |
| { | |
| "epoch": 4.778280542986425, | |
| "grad_norm": 0.39172661304473877, | |
| "learning_rate": 6.246355947781641e-06, | |
| "loss": 0.1239, | |
| "step": 1056 | |
| }, | |
| { | |
| "epoch": 4.782805429864253, | |
| "grad_norm": 0.3744019567966461, | |
| "learning_rate": 6.2387062709211155e-06, | |
| "loss": 0.1221, | |
| "step": 1057 | |
| }, | |
| { | |
| "epoch": 4.787330316742081, | |
| "grad_norm": 0.3859163820743561, | |
| "learning_rate": 6.231053503776363e-06, | |
| "loss": 0.1416, | |
| "step": 1058 | |
| }, | |
| { | |
| "epoch": 4.791855203619909, | |
| "grad_norm": 0.39975130558013916, | |
| "learning_rate": 6.223397665439261e-06, | |
| "loss": 0.1323, | |
| "step": 1059 | |
| }, | |
| { | |
| "epoch": 4.796380090497737, | |
| "grad_norm": 0.41266360878944397, | |
| "learning_rate": 6.2157387750093455e-06, | |
| "loss": 0.1067, | |
| "step": 1060 | |
| }, | |
| { | |
| "epoch": 4.800904977375565, | |
| "grad_norm": 0.3963589668273926, | |
| "learning_rate": 6.208076851593768e-06, | |
| "loss": 0.1338, | |
| "step": 1061 | |
| }, | |
| { | |
| "epoch": 4.8054298642533935, | |
| "grad_norm": 0.42241835594177246, | |
| "learning_rate": 6.200411914307247e-06, | |
| "loss": 0.123, | |
| "step": 1062 | |
| }, | |
| { | |
| "epoch": 4.8099547511312215, | |
| "grad_norm": 0.350961834192276, | |
| "learning_rate": 6.192743982272019e-06, | |
| "loss": 0.1125, | |
| "step": 1063 | |
| }, | |
| { | |
| "epoch": 4.8144796380090495, | |
| "grad_norm": 0.39947742223739624, | |
| "learning_rate": 6.185073074617793e-06, | |
| "loss": 0.1147, | |
| "step": 1064 | |
| }, | |
| { | |
| "epoch": 4.819004524886878, | |
| "grad_norm": 0.36715415120124817, | |
| "learning_rate": 6.177399210481698e-06, | |
| "loss": 0.1121, | |
| "step": 1065 | |
| }, | |
| { | |
| "epoch": 4.823529411764706, | |
| "grad_norm": 0.3727978467941284, | |
| "learning_rate": 6.169722409008244e-06, | |
| "loss": 0.1188, | |
| "step": 1066 | |
| }, | |
| { | |
| "epoch": 4.828054298642534, | |
| "grad_norm": 0.39884573221206665, | |
| "learning_rate": 6.1620426893492645e-06, | |
| "loss": 0.1229, | |
| "step": 1067 | |
| }, | |
| { | |
| "epoch": 4.832579185520362, | |
| "grad_norm": 0.38297924399375916, | |
| "learning_rate": 6.1543600706638766e-06, | |
| "loss": 0.135, | |
| "step": 1068 | |
| }, | |
| { | |
| "epoch": 4.83710407239819, | |
| "grad_norm": 0.4219057559967041, | |
| "learning_rate": 6.1466745721184275e-06, | |
| "loss": 0.1362, | |
| "step": 1069 | |
| }, | |
| { | |
| "epoch": 4.841628959276018, | |
| "grad_norm": 0.3759216070175171, | |
| "learning_rate": 6.13898621288645e-06, | |
| "loss": 0.1149, | |
| "step": 1070 | |
| }, | |
| { | |
| "epoch": 4.846153846153846, | |
| "grad_norm": 0.44968006014823914, | |
| "learning_rate": 6.131295012148613e-06, | |
| "loss": 0.1403, | |
| "step": 1071 | |
| }, | |
| { | |
| "epoch": 4.850678733031674, | |
| "grad_norm": 0.3693656027317047, | |
| "learning_rate": 6.123600989092673e-06, | |
| "loss": 0.1185, | |
| "step": 1072 | |
| }, | |
| { | |
| "epoch": 4.855203619909502, | |
| "grad_norm": 0.42584556341171265, | |
| "learning_rate": 6.115904162913431e-06, | |
| "loss": 0.1186, | |
| "step": 1073 | |
| }, | |
| { | |
| "epoch": 4.859728506787331, | |
| "grad_norm": 0.4135635495185852, | |
| "learning_rate": 6.10820455281268e-06, | |
| "loss": 0.11, | |
| "step": 1074 | |
| }, | |
| { | |
| "epoch": 4.864253393665159, | |
| "grad_norm": 0.40252724289894104, | |
| "learning_rate": 6.100502177999156e-06, | |
| "loss": 0.1194, | |
| "step": 1075 | |
| }, | |
| { | |
| "epoch": 4.868778280542987, | |
| "grad_norm": 0.4541674852371216, | |
| "learning_rate": 6.092797057688496e-06, | |
| "loss": 0.127, | |
| "step": 1076 | |
| }, | |
| { | |
| "epoch": 4.873303167420815, | |
| "grad_norm": 0.37728506326675415, | |
| "learning_rate": 6.085089211103181e-06, | |
| "loss": 0.128, | |
| "step": 1077 | |
| }, | |
| { | |
| "epoch": 4.877828054298643, | |
| "grad_norm": 0.3978901505470276, | |
| "learning_rate": 6.077378657472498e-06, | |
| "loss": 0.1358, | |
| "step": 1078 | |
| }, | |
| { | |
| "epoch": 4.882352941176471, | |
| "grad_norm": 0.3870754837989807, | |
| "learning_rate": 6.0696654160324875e-06, | |
| "loss": 0.1281, | |
| "step": 1079 | |
| }, | |
| { | |
| "epoch": 4.886877828054299, | |
| "grad_norm": 0.40275850892066956, | |
| "learning_rate": 6.061949506025892e-06, | |
| "loss": 0.1183, | |
| "step": 1080 | |
| }, | |
| { | |
| "epoch": 4.891402714932127, | |
| "grad_norm": 0.37450602650642395, | |
| "learning_rate": 6.0542309467021156e-06, | |
| "loss": 0.1533, | |
| "step": 1081 | |
| }, | |
| { | |
| "epoch": 4.895927601809955, | |
| "grad_norm": 0.41943442821502686, | |
| "learning_rate": 6.046509757317168e-06, | |
| "loss": 0.1318, | |
| "step": 1082 | |
| }, | |
| { | |
| "epoch": 4.900452488687783, | |
| "grad_norm": 0.3888932466506958, | |
| "learning_rate": 6.038785957133624e-06, | |
| "loss": 0.1224, | |
| "step": 1083 | |
| }, | |
| { | |
| "epoch": 4.904977375565611, | |
| "grad_norm": 0.47233960032463074, | |
| "learning_rate": 6.0310595654205674e-06, | |
| "loss": 0.1557, | |
| "step": 1084 | |
| }, | |
| { | |
| "epoch": 4.909502262443439, | |
| "grad_norm": 0.43002673983573914, | |
| "learning_rate": 6.0233306014535505e-06, | |
| "loss": 0.11, | |
| "step": 1085 | |
| }, | |
| { | |
| "epoch": 4.914027149321267, | |
| "grad_norm": 0.39190760254859924, | |
| "learning_rate": 6.015599084514542e-06, | |
| "loss": 0.1115, | |
| "step": 1086 | |
| }, | |
| { | |
| "epoch": 4.918552036199095, | |
| "grad_norm": 0.36419686675071716, | |
| "learning_rate": 6.00786503389188e-06, | |
| "loss": 0.1156, | |
| "step": 1087 | |
| }, | |
| { | |
| "epoch": 4.923076923076923, | |
| "grad_norm": 0.3950363099575043, | |
| "learning_rate": 6.000128468880223e-06, | |
| "loss": 0.1399, | |
| "step": 1088 | |
| }, | |
| { | |
| "epoch": 4.927601809954751, | |
| "grad_norm": 0.44508564472198486, | |
| "learning_rate": 5.992389408780501e-06, | |
| "loss": 0.1134, | |
| "step": 1089 | |
| }, | |
| { | |
| "epoch": 4.932126696832579, | |
| "grad_norm": 0.4318121373653412, | |
| "learning_rate": 5.984647872899873e-06, | |
| "loss": 0.1034, | |
| "step": 1090 | |
| }, | |
| { | |
| "epoch": 4.9366515837104075, | |
| "grad_norm": 0.37849506735801697, | |
| "learning_rate": 5.976903880551669e-06, | |
| "loss": 0.11, | |
| "step": 1091 | |
| }, | |
| { | |
| "epoch": 4.9411764705882355, | |
| "grad_norm": 0.44689616560935974, | |
| "learning_rate": 5.9691574510553505e-06, | |
| "loss": 0.1248, | |
| "step": 1092 | |
| }, | |
| { | |
| "epoch": 4.9457013574660635, | |
| "grad_norm": 0.37326931953430176, | |
| "learning_rate": 5.961408603736461e-06, | |
| "loss": 0.1354, | |
| "step": 1093 | |
| }, | |
| { | |
| "epoch": 4.950226244343892, | |
| "grad_norm": 0.3781431019306183, | |
| "learning_rate": 5.953657357926569e-06, | |
| "loss": 0.1296, | |
| "step": 1094 | |
| }, | |
| { | |
| "epoch": 4.95475113122172, | |
| "grad_norm": 0.41875743865966797, | |
| "learning_rate": 5.945903732963234e-06, | |
| "loss": 0.1351, | |
| "step": 1095 | |
| }, | |
| { | |
| "epoch": 4.959276018099548, | |
| "grad_norm": 0.3916160464286804, | |
| "learning_rate": 5.938147748189947e-06, | |
| "loss": 0.1383, | |
| "step": 1096 | |
| }, | |
| { | |
| "epoch": 4.963800904977376, | |
| "grad_norm": 0.42897456884384155, | |
| "learning_rate": 5.930389422956088e-06, | |
| "loss": 0.1494, | |
| "step": 1097 | |
| }, | |
| { | |
| "epoch": 4.968325791855204, | |
| "grad_norm": 0.4615175127983093, | |
| "learning_rate": 5.922628776616876e-06, | |
| "loss": 0.1263, | |
| "step": 1098 | |
| }, | |
| { | |
| "epoch": 4.972850678733032, | |
| "grad_norm": 0.43058305978775024, | |
| "learning_rate": 5.914865828533319e-06, | |
| "loss": 0.1356, | |
| "step": 1099 | |
| }, | |
| { | |
| "epoch": 4.97737556561086, | |
| "grad_norm": 0.42625221610069275, | |
| "learning_rate": 5.907100598072166e-06, | |
| "loss": 0.1388, | |
| "step": 1100 | |
| }, | |
| { | |
| "epoch": 4.981900452488688, | |
| "grad_norm": 0.42407146096229553, | |
| "learning_rate": 5.899333104605864e-06, | |
| "loss": 0.1476, | |
| "step": 1101 | |
| }, | |
| { | |
| "epoch": 4.986425339366516, | |
| "grad_norm": 0.3559970557689667, | |
| "learning_rate": 5.891563367512505e-06, | |
| "loss": 0.1148, | |
| "step": 1102 | |
| }, | |
| { | |
| "epoch": 4.990950226244344, | |
| "grad_norm": 0.3517310917377472, | |
| "learning_rate": 5.883791406175775e-06, | |
| "loss": 0.11, | |
| "step": 1103 | |
| }, | |
| { | |
| "epoch": 4.995475113122172, | |
| "grad_norm": 0.3917599320411682, | |
| "learning_rate": 5.876017239984911e-06, | |
| "loss": 0.1138, | |
| "step": 1104 | |
| }, | |
| { | |
| "epoch": 5.0, | |
| "grad_norm": 0.39552751183509827, | |
| "learning_rate": 5.8682408883346535e-06, | |
| "loss": 0.1195, | |
| "step": 1105 | |
| }, | |
| { | |
| "epoch": 5.004524886877828, | |
| "grad_norm": 0.3723715841770172, | |
| "learning_rate": 5.860462370625189e-06, | |
| "loss": 0.067, | |
| "step": 1106 | |
| }, | |
| { | |
| "epoch": 5.009049773755656, | |
| "grad_norm": 0.4061602056026459, | |
| "learning_rate": 5.85268170626211e-06, | |
| "loss": 0.0793, | |
| "step": 1107 | |
| }, | |
| { | |
| "epoch": 5.013574660633484, | |
| "grad_norm": 0.3349854052066803, | |
| "learning_rate": 5.844898914656371e-06, | |
| "loss": 0.0649, | |
| "step": 1108 | |
| }, | |
| { | |
| "epoch": 5.018099547511312, | |
| "grad_norm": 0.383832186460495, | |
| "learning_rate": 5.837114015224223e-06, | |
| "loss": 0.0664, | |
| "step": 1109 | |
| }, | |
| { | |
| "epoch": 5.02262443438914, | |
| "grad_norm": 0.4438365399837494, | |
| "learning_rate": 5.829327027387184e-06, | |
| "loss": 0.0767, | |
| "step": 1110 | |
| }, | |
| { | |
| "epoch": 5.027149321266968, | |
| "grad_norm": 0.4521051049232483, | |
| "learning_rate": 5.821537970571978e-06, | |
| "loss": 0.0728, | |
| "step": 1111 | |
| }, | |
| { | |
| "epoch": 5.031674208144796, | |
| "grad_norm": 0.411251038312912, | |
| "learning_rate": 5.813746864210489e-06, | |
| "loss": 0.0652, | |
| "step": 1112 | |
| }, | |
| { | |
| "epoch": 5.036199095022624, | |
| "grad_norm": 0.42150846123695374, | |
| "learning_rate": 5.805953727739719e-06, | |
| "loss": 0.0615, | |
| "step": 1113 | |
| }, | |
| { | |
| "epoch": 5.040723981900452, | |
| "grad_norm": 0.3648434281349182, | |
| "learning_rate": 5.79815858060173e-06, | |
| "loss": 0.0527, | |
| "step": 1114 | |
| }, | |
| { | |
| "epoch": 5.04524886877828, | |
| "grad_norm": 0.37746983766555786, | |
| "learning_rate": 5.790361442243605e-06, | |
| "loss": 0.0689, | |
| "step": 1115 | |
| }, | |
| { | |
| "epoch": 5.049773755656108, | |
| "grad_norm": 0.35608813166618347, | |
| "learning_rate": 5.78256233211739e-06, | |
| "loss": 0.0752, | |
| "step": 1116 | |
| }, | |
| { | |
| "epoch": 5.0542986425339365, | |
| "grad_norm": 0.36897188425064087, | |
| "learning_rate": 5.774761269680052e-06, | |
| "loss": 0.0575, | |
| "step": 1117 | |
| }, | |
| { | |
| "epoch": 5.0588235294117645, | |
| "grad_norm": 0.334196537733078, | |
| "learning_rate": 5.766958274393428e-06, | |
| "loss": 0.0578, | |
| "step": 1118 | |
| }, | |
| { | |
| "epoch": 5.0633484162895925, | |
| "grad_norm": 0.3506363034248352, | |
| "learning_rate": 5.759153365724178e-06, | |
| "loss": 0.0642, | |
| "step": 1119 | |
| }, | |
| { | |
| "epoch": 5.067873303167421, | |
| "grad_norm": 0.3522595763206482, | |
| "learning_rate": 5.751346563143736e-06, | |
| "loss": 0.0635, | |
| "step": 1120 | |
| }, | |
| { | |
| "epoch": 5.072398190045249, | |
| "grad_norm": 0.6559270620346069, | |
| "learning_rate": 5.743537886128258e-06, | |
| "loss": 0.1065, | |
| "step": 1121 | |
| }, | |
| { | |
| "epoch": 5.076923076923077, | |
| "grad_norm": 0.339643657207489, | |
| "learning_rate": 5.735727354158581e-06, | |
| "loss": 0.0674, | |
| "step": 1122 | |
| }, | |
| { | |
| "epoch": 5.081447963800905, | |
| "grad_norm": 0.3840213418006897, | |
| "learning_rate": 5.727914986720164e-06, | |
| "loss": 0.0742, | |
| "step": 1123 | |
| }, | |
| { | |
| "epoch": 5.085972850678733, | |
| "grad_norm": 0.3815678656101227, | |
| "learning_rate": 5.72010080330305e-06, | |
| "loss": 0.0602, | |
| "step": 1124 | |
| }, | |
| { | |
| "epoch": 5.090497737556561, | |
| "grad_norm": 0.3994493782520294, | |
| "learning_rate": 5.712284823401808e-06, | |
| "loss": 0.0595, | |
| "step": 1125 | |
| }, | |
| { | |
| "epoch": 5.095022624434389, | |
| "grad_norm": 0.40029487013816833, | |
| "learning_rate": 5.704467066515492e-06, | |
| "loss": 0.0728, | |
| "step": 1126 | |
| }, | |
| { | |
| "epoch": 5.099547511312217, | |
| "grad_norm": 0.3628596067428589, | |
| "learning_rate": 5.696647552147589e-06, | |
| "loss": 0.0598, | |
| "step": 1127 | |
| }, | |
| { | |
| "epoch": 5.104072398190045, | |
| "grad_norm": 0.36752235889434814, | |
| "learning_rate": 5.688826299805971e-06, | |
| "loss": 0.074, | |
| "step": 1128 | |
| }, | |
| { | |
| "epoch": 5.108597285067873, | |
| "grad_norm": 0.3554799556732178, | |
| "learning_rate": 5.681003329002842e-06, | |
| "loss": 0.0614, | |
| "step": 1129 | |
| }, | |
| { | |
| "epoch": 5.113122171945701, | |
| "grad_norm": 0.41899964213371277, | |
| "learning_rate": 5.673178659254698e-06, | |
| "loss": 0.0974, | |
| "step": 1130 | |
| }, | |
| { | |
| "epoch": 5.117647058823529, | |
| "grad_norm": 0.31798073649406433, | |
| "learning_rate": 5.66535231008227e-06, | |
| "loss": 0.0629, | |
| "step": 1131 | |
| }, | |
| { | |
| "epoch": 5.122171945701357, | |
| "grad_norm": 0.3718262016773224, | |
| "learning_rate": 5.657524301010478e-06, | |
| "loss": 0.0696, | |
| "step": 1132 | |
| }, | |
| { | |
| "epoch": 5.126696832579185, | |
| "grad_norm": 0.3424396216869354, | |
| "learning_rate": 5.64969465156839e-06, | |
| "loss": 0.0553, | |
| "step": 1133 | |
| }, | |
| { | |
| "epoch": 5.131221719457014, | |
| "grad_norm": 0.3795858323574066, | |
| "learning_rate": 5.641863381289158e-06, | |
| "loss": 0.0805, | |
| "step": 1134 | |
| }, | |
| { | |
| "epoch": 5.135746606334842, | |
| "grad_norm": 0.40230146050453186, | |
| "learning_rate": 5.634030509709982e-06, | |
| "loss": 0.0814, | |
| "step": 1135 | |
| }, | |
| { | |
| "epoch": 5.14027149321267, | |
| "grad_norm": 0.3625279366970062, | |
| "learning_rate": 5.626196056372056e-06, | |
| "loss": 0.0819, | |
| "step": 1136 | |
| }, | |
| { | |
| "epoch": 5.144796380090498, | |
| "grad_norm": 0.37393632531166077, | |
| "learning_rate": 5.618360040820521e-06, | |
| "loss": 0.0704, | |
| "step": 1137 | |
| }, | |
| { | |
| "epoch": 5.149321266968326, | |
| "grad_norm": 0.3487889766693115, | |
| "learning_rate": 5.6105224826044105e-06, | |
| "loss": 0.0496, | |
| "step": 1138 | |
| }, | |
| { | |
| "epoch": 5.153846153846154, | |
| "grad_norm": 0.3692089319229126, | |
| "learning_rate": 5.6026834012766155e-06, | |
| "loss": 0.0672, | |
| "step": 1139 | |
| }, | |
| { | |
| "epoch": 5.158371040723982, | |
| "grad_norm": 0.4027572572231293, | |
| "learning_rate": 5.594842816393819e-06, | |
| "loss": 0.0616, | |
| "step": 1140 | |
| }, | |
| { | |
| "epoch": 5.16289592760181, | |
| "grad_norm": 0.44265252351760864, | |
| "learning_rate": 5.58700074751646e-06, | |
| "loss": 0.0778, | |
| "step": 1141 | |
| }, | |
| { | |
| "epoch": 5.167420814479638, | |
| "grad_norm": 0.36350980401039124, | |
| "learning_rate": 5.579157214208675e-06, | |
| "loss": 0.0637, | |
| "step": 1142 | |
| }, | |
| { | |
| "epoch": 5.171945701357466, | |
| "grad_norm": 0.35687702894210815, | |
| "learning_rate": 5.571312236038259e-06, | |
| "loss": 0.0753, | |
| "step": 1143 | |
| }, | |
| { | |
| "epoch": 5.176470588235294, | |
| "grad_norm": 0.35756856203079224, | |
| "learning_rate": 5.5634658325766066e-06, | |
| "loss": 0.0804, | |
| "step": 1144 | |
| }, | |
| { | |
| "epoch": 5.180995475113122, | |
| "grad_norm": 0.35187193751335144, | |
| "learning_rate": 5.555618023398671e-06, | |
| "loss": 0.0776, | |
| "step": 1145 | |
| }, | |
| { | |
| "epoch": 5.1855203619909505, | |
| "grad_norm": 0.361777126789093, | |
| "learning_rate": 5.547768828082915e-06, | |
| "loss": 0.0708, | |
| "step": 1146 | |
| }, | |
| { | |
| "epoch": 5.1900452488687785, | |
| "grad_norm": 0.40918031334877014, | |
| "learning_rate": 5.539918266211253e-06, | |
| "loss": 0.0959, | |
| "step": 1147 | |
| }, | |
| { | |
| "epoch": 5.1945701357466065, | |
| "grad_norm": 0.37231284379959106, | |
| "learning_rate": 5.532066357369012e-06, | |
| "loss": 0.0646, | |
| "step": 1148 | |
| }, | |
| { | |
| "epoch": 5.199095022624435, | |
| "grad_norm": 0.3823784589767456, | |
| "learning_rate": 5.524213121144883e-06, | |
| "loss": 0.064, | |
| "step": 1149 | |
| }, | |
| { | |
| "epoch": 5.203619909502263, | |
| "grad_norm": 0.3881456255912781, | |
| "learning_rate": 5.516358577130863e-06, | |
| "loss": 0.0799, | |
| "step": 1150 | |
| }, | |
| { | |
| "epoch": 5.208144796380091, | |
| "grad_norm": 0.38889485597610474, | |
| "learning_rate": 5.508502744922212e-06, | |
| "loss": 0.0866, | |
| "step": 1151 | |
| }, | |
| { | |
| "epoch": 5.212669683257919, | |
| "grad_norm": 0.34559571743011475, | |
| "learning_rate": 5.500645644117406e-06, | |
| "loss": 0.0659, | |
| "step": 1152 | |
| }, | |
| { | |
| "epoch": 5.217194570135747, | |
| "grad_norm": 0.4092608392238617, | |
| "learning_rate": 5.492787294318087e-06, | |
| "loss": 0.0876, | |
| "step": 1153 | |
| }, | |
| { | |
| "epoch": 5.221719457013575, | |
| "grad_norm": 0.3414454460144043, | |
| "learning_rate": 5.484927715129011e-06, | |
| "loss": 0.0681, | |
| "step": 1154 | |
| }, | |
| { | |
| "epoch": 5.226244343891403, | |
| "grad_norm": 0.3446487486362457, | |
| "learning_rate": 5.4770669261580014e-06, | |
| "loss": 0.0532, | |
| "step": 1155 | |
| }, | |
| { | |
| "epoch": 5.230769230769231, | |
| "grad_norm": 0.38337475061416626, | |
| "learning_rate": 5.469204947015897e-06, | |
| "loss": 0.0765, | |
| "step": 1156 | |
| }, | |
| { | |
| "epoch": 5.235294117647059, | |
| "grad_norm": 0.41466450691223145, | |
| "learning_rate": 5.46134179731651e-06, | |
| "loss": 0.0788, | |
| "step": 1157 | |
| }, | |
| { | |
| "epoch": 5.239819004524887, | |
| "grad_norm": 0.3642111122608185, | |
| "learning_rate": 5.453477496676573e-06, | |
| "loss": 0.0809, | |
| "step": 1158 | |
| }, | |
| { | |
| "epoch": 5.244343891402715, | |
| "grad_norm": 0.3510957360267639, | |
| "learning_rate": 5.445612064715684e-06, | |
| "loss": 0.0683, | |
| "step": 1159 | |
| }, | |
| { | |
| "epoch": 5.248868778280543, | |
| "grad_norm": 0.390898734331131, | |
| "learning_rate": 5.437745521056272e-06, | |
| "loss": 0.0803, | |
| "step": 1160 | |
| }, | |
| { | |
| "epoch": 5.253393665158371, | |
| "grad_norm": 0.4196690618991852, | |
| "learning_rate": 5.4298778853235315e-06, | |
| "loss": 0.0717, | |
| "step": 1161 | |
| }, | |
| { | |
| "epoch": 5.257918552036199, | |
| "grad_norm": 0.32665446400642395, | |
| "learning_rate": 5.422009177145388e-06, | |
| "loss": 0.0527, | |
| "step": 1162 | |
| }, | |
| { | |
| "epoch": 5.262443438914027, | |
| "grad_norm": 0.35169556736946106, | |
| "learning_rate": 5.414139416152435e-06, | |
| "loss": 0.0507, | |
| "step": 1163 | |
| }, | |
| { | |
| "epoch": 5.266968325791855, | |
| "grad_norm": 0.36220046877861023, | |
| "learning_rate": 5.406268621977902e-06, | |
| "loss": 0.0782, | |
| "step": 1164 | |
| }, | |
| { | |
| "epoch": 5.271493212669683, | |
| "grad_norm": 0.3889884054660797, | |
| "learning_rate": 5.398396814257588e-06, | |
| "loss": 0.0694, | |
| "step": 1165 | |
| }, | |
| { | |
| "epoch": 5.276018099547511, | |
| "grad_norm": 0.3402976095676422, | |
| "learning_rate": 5.390524012629824e-06, | |
| "loss": 0.0572, | |
| "step": 1166 | |
| }, | |
| { | |
| "epoch": 5.280542986425339, | |
| "grad_norm": 0.3195909559726715, | |
| "learning_rate": 5.382650236735421e-06, | |
| "loss": 0.0651, | |
| "step": 1167 | |
| }, | |
| { | |
| "epoch": 5.285067873303167, | |
| "grad_norm": 0.3863244354724884, | |
| "learning_rate": 5.3747755062176185e-06, | |
| "loss": 0.0598, | |
| "step": 1168 | |
| }, | |
| { | |
| "epoch": 5.289592760180995, | |
| "grad_norm": 0.33079132437705994, | |
| "learning_rate": 5.3668998407220385e-06, | |
| "loss": 0.0635, | |
| "step": 1169 | |
| }, | |
| { | |
| "epoch": 5.294117647058823, | |
| "grad_norm": 0.3895846903324127, | |
| "learning_rate": 5.359023259896638e-06, | |
| "loss": 0.065, | |
| "step": 1170 | |
| }, | |
| { | |
| "epoch": 5.298642533936651, | |
| "grad_norm": 0.3789190948009491, | |
| "learning_rate": 5.351145783391654e-06, | |
| "loss": 0.0744, | |
| "step": 1171 | |
| }, | |
| { | |
| "epoch": 5.3031674208144794, | |
| "grad_norm": 0.35495415329933167, | |
| "learning_rate": 5.343267430859559e-06, | |
| "loss": 0.0834, | |
| "step": 1172 | |
| }, | |
| { | |
| "epoch": 5.3076923076923075, | |
| "grad_norm": 0.38719555735588074, | |
| "learning_rate": 5.335388221955012e-06, | |
| "loss": 0.0666, | |
| "step": 1173 | |
| }, | |
| { | |
| "epoch": 5.3122171945701355, | |
| "grad_norm": 0.3902130126953125, | |
| "learning_rate": 5.327508176334808e-06, | |
| "loss": 0.0681, | |
| "step": 1174 | |
| }, | |
| { | |
| "epoch": 5.316742081447964, | |
| "grad_norm": 0.391261488199234, | |
| "learning_rate": 5.319627313657829e-06, | |
| "loss": 0.0768, | |
| "step": 1175 | |
| }, | |
| { | |
| "epoch": 5.321266968325792, | |
| "grad_norm": 0.3392927348613739, | |
| "learning_rate": 5.311745653584995e-06, | |
| "loss": 0.0679, | |
| "step": 1176 | |
| }, | |
| { | |
| "epoch": 5.32579185520362, | |
| "grad_norm": 0.3632453978061676, | |
| "learning_rate": 5.3038632157792155e-06, | |
| "loss": 0.06, | |
| "step": 1177 | |
| }, | |
| { | |
| "epoch": 5.330316742081448, | |
| "grad_norm": 0.3581770062446594, | |
| "learning_rate": 5.295980019905342e-06, | |
| "loss": 0.0695, | |
| "step": 1178 | |
| }, | |
| { | |
| "epoch": 5.334841628959276, | |
| "grad_norm": 0.35193902254104614, | |
| "learning_rate": 5.288096085630114e-06, | |
| "loss": 0.0689, | |
| "step": 1179 | |
| }, | |
| { | |
| "epoch": 5.339366515837104, | |
| "grad_norm": 0.356852650642395, | |
| "learning_rate": 5.280211432622118e-06, | |
| "loss": 0.0716, | |
| "step": 1180 | |
| }, | |
| { | |
| "epoch": 5.343891402714932, | |
| "grad_norm": 0.389583021402359, | |
| "learning_rate": 5.272326080551729e-06, | |
| "loss": 0.0669, | |
| "step": 1181 | |
| }, | |
| { | |
| "epoch": 5.34841628959276, | |
| "grad_norm": 0.3774275779724121, | |
| "learning_rate": 5.264440049091068e-06, | |
| "loss": 0.0414, | |
| "step": 1182 | |
| }, | |
| { | |
| "epoch": 5.352941176470588, | |
| "grad_norm": 0.35961687564849854, | |
| "learning_rate": 5.2565533579139484e-06, | |
| "loss": 0.0772, | |
| "step": 1183 | |
| }, | |
| { | |
| "epoch": 5.357466063348416, | |
| "grad_norm": 0.35463571548461914, | |
| "learning_rate": 5.248666026695835e-06, | |
| "loss": 0.07, | |
| "step": 1184 | |
| }, | |
| { | |
| "epoch": 5.361990950226244, | |
| "grad_norm": 0.3400146961212158, | |
| "learning_rate": 5.240778075113781e-06, | |
| "loss": 0.0631, | |
| "step": 1185 | |
| }, | |
| { | |
| "epoch": 5.366515837104072, | |
| "grad_norm": 0.40522345900535583, | |
| "learning_rate": 5.232889522846398e-06, | |
| "loss": 0.0726, | |
| "step": 1186 | |
| }, | |
| { | |
| "epoch": 5.371040723981901, | |
| "grad_norm": 0.40270382165908813, | |
| "learning_rate": 5.2250003895737865e-06, | |
| "loss": 0.0655, | |
| "step": 1187 | |
| }, | |
| { | |
| "epoch": 5.375565610859729, | |
| "grad_norm": 0.35652971267700195, | |
| "learning_rate": 5.217110694977501e-06, | |
| "loss": 0.0656, | |
| "step": 1188 | |
| }, | |
| { | |
| "epoch": 5.380090497737557, | |
| "grad_norm": 0.36236080527305603, | |
| "learning_rate": 5.209220458740496e-06, | |
| "loss": 0.0645, | |
| "step": 1189 | |
| }, | |
| { | |
| "epoch": 5.384615384615385, | |
| "grad_norm": 0.39016634225845337, | |
| "learning_rate": 5.201329700547077e-06, | |
| "loss": 0.0681, | |
| "step": 1190 | |
| }, | |
| { | |
| "epoch": 5.389140271493213, | |
| "grad_norm": 0.3703252971172333, | |
| "learning_rate": 5.193438440082851e-06, | |
| "loss": 0.0579, | |
| "step": 1191 | |
| }, | |
| { | |
| "epoch": 5.393665158371041, | |
| "grad_norm": 0.3513895571231842, | |
| "learning_rate": 5.185546697034679e-06, | |
| "loss": 0.0689, | |
| "step": 1192 | |
| }, | |
| { | |
| "epoch": 5.398190045248869, | |
| "grad_norm": 0.3632599413394928, | |
| "learning_rate": 5.177654491090627e-06, | |
| "loss": 0.069, | |
| "step": 1193 | |
| }, | |
| { | |
| "epoch": 5.402714932126697, | |
| "grad_norm": 0.46198567748069763, | |
| "learning_rate": 5.169761841939912e-06, | |
| "loss": 0.0813, | |
| "step": 1194 | |
| }, | |
| { | |
| "epoch": 5.407239819004525, | |
| "grad_norm": 0.37140151858329773, | |
| "learning_rate": 5.16186876927286e-06, | |
| "loss": 0.0728, | |
| "step": 1195 | |
| }, | |
| { | |
| "epoch": 5.411764705882353, | |
| "grad_norm": 0.3435964584350586, | |
| "learning_rate": 5.153975292780852e-06, | |
| "loss": 0.0605, | |
| "step": 1196 | |
| }, | |
| { | |
| "epoch": 5.416289592760181, | |
| "grad_norm": 0.3748542368412018, | |
| "learning_rate": 5.14608143215628e-06, | |
| "loss": 0.06, | |
| "step": 1197 | |
| }, | |
| { | |
| "epoch": 5.420814479638009, | |
| "grad_norm": 0.3335817754268646, | |
| "learning_rate": 5.1381872070924884e-06, | |
| "loss": 0.05, | |
| "step": 1198 | |
| }, | |
| { | |
| "epoch": 5.425339366515837, | |
| "grad_norm": 0.4469960033893585, | |
| "learning_rate": 5.130292637283735e-06, | |
| "loss": 0.0653, | |
| "step": 1199 | |
| }, | |
| { | |
| "epoch": 5.429864253393665, | |
| "grad_norm": 0.3295464813709259, | |
| "learning_rate": 5.122397742425136e-06, | |
| "loss": 0.0711, | |
| "step": 1200 | |
| }, | |
| { | |
| "epoch": 5.4343891402714934, | |
| "grad_norm": 0.36631765961647034, | |
| "learning_rate": 5.11450254221262e-06, | |
| "loss": 0.0726, | |
| "step": 1201 | |
| }, | |
| { | |
| "epoch": 5.4389140271493215, | |
| "grad_norm": 0.3538479208946228, | |
| "learning_rate": 5.1066070563428736e-06, | |
| "loss": 0.0586, | |
| "step": 1202 | |
| }, | |
| { | |
| "epoch": 5.4434389140271495, | |
| "grad_norm": 0.3970729112625122, | |
| "learning_rate": 5.098711304513303e-06, | |
| "loss": 0.0701, | |
| "step": 1203 | |
| }, | |
| { | |
| "epoch": 5.447963800904978, | |
| "grad_norm": 0.36112064123153687, | |
| "learning_rate": 5.090815306421971e-06, | |
| "loss": 0.065, | |
| "step": 1204 | |
| }, | |
| { | |
| "epoch": 5.452488687782806, | |
| "grad_norm": 0.34354132413864136, | |
| "learning_rate": 5.082919081767558e-06, | |
| "loss": 0.0515, | |
| "step": 1205 | |
| }, | |
| { | |
| "epoch": 5.457013574660634, | |
| "grad_norm": 0.3775116801261902, | |
| "learning_rate": 5.075022650249309e-06, | |
| "loss": 0.0684, | |
| "step": 1206 | |
| }, | |
| { | |
| "epoch": 5.461538461538462, | |
| "grad_norm": 0.35393577814102173, | |
| "learning_rate": 5.067126031566988e-06, | |
| "loss": 0.0737, | |
| "step": 1207 | |
| }, | |
| { | |
| "epoch": 5.46606334841629, | |
| "grad_norm": 0.36637890338897705, | |
| "learning_rate": 5.059229245420819e-06, | |
| "loss": 0.0708, | |
| "step": 1208 | |
| }, | |
| { | |
| "epoch": 5.470588235294118, | |
| "grad_norm": 0.3605267405509949, | |
| "learning_rate": 5.05133231151145e-06, | |
| "loss": 0.0555, | |
| "step": 1209 | |
| }, | |
| { | |
| "epoch": 5.475113122171946, | |
| "grad_norm": 0.3860551118850708, | |
| "learning_rate": 5.0434352495398956e-06, | |
| "loss": 0.0706, | |
| "step": 1210 | |
| }, | |
| { | |
| "epoch": 5.479638009049774, | |
| "grad_norm": 0.36745893955230713, | |
| "learning_rate": 5.035538079207488e-06, | |
| "loss": 0.0734, | |
| "step": 1211 | |
| }, | |
| { | |
| "epoch": 5.484162895927602, | |
| "grad_norm": 0.3685935437679291, | |
| "learning_rate": 5.027640820215834e-06, | |
| "loss": 0.0793, | |
| "step": 1212 | |
| }, | |
| { | |
| "epoch": 5.48868778280543, | |
| "grad_norm": 0.3923707604408264, | |
| "learning_rate": 5.01974349226676e-06, | |
| "loss": 0.0827, | |
| "step": 1213 | |
| }, | |
| { | |
| "epoch": 5.493212669683258, | |
| "grad_norm": 0.3473922312259674, | |
| "learning_rate": 5.01184611506226e-06, | |
| "loss": 0.0614, | |
| "step": 1214 | |
| }, | |
| { | |
| "epoch": 5.497737556561086, | |
| "grad_norm": 0.3433387279510498, | |
| "learning_rate": 5.003948708304457e-06, | |
| "loss": 0.0716, | |
| "step": 1215 | |
| }, | |
| { | |
| "epoch": 5.502262443438914, | |
| "grad_norm": 0.3532765805721283, | |
| "learning_rate": 4.996051291695544e-06, | |
| "loss": 0.0719, | |
| "step": 1216 | |
| }, | |
| { | |
| "epoch": 5.506787330316742, | |
| "grad_norm": 0.3672707974910736, | |
| "learning_rate": 4.988153884937742e-06, | |
| "loss": 0.0691, | |
| "step": 1217 | |
| }, | |
| { | |
| "epoch": 5.51131221719457, | |
| "grad_norm": 0.46894240379333496, | |
| "learning_rate": 4.980256507733242e-06, | |
| "loss": 0.0923, | |
| "step": 1218 | |
| }, | |
| { | |
| "epoch": 5.515837104072398, | |
| "grad_norm": 0.390020489692688, | |
| "learning_rate": 4.9723591797841665e-06, | |
| "loss": 0.0738, | |
| "step": 1219 | |
| }, | |
| { | |
| "epoch": 5.520361990950226, | |
| "grad_norm": 0.3452206254005432, | |
| "learning_rate": 4.964461920792512e-06, | |
| "loss": 0.0492, | |
| "step": 1220 | |
| }, | |
| { | |
| "epoch": 5.524886877828054, | |
| "grad_norm": 0.3586297333240509, | |
| "learning_rate": 4.956564750460107e-06, | |
| "loss": 0.0609, | |
| "step": 1221 | |
| }, | |
| { | |
| "epoch": 5.529411764705882, | |
| "grad_norm": 0.3771961033344269, | |
| "learning_rate": 4.948667688488552e-06, | |
| "loss": 0.0786, | |
| "step": 1222 | |
| }, | |
| { | |
| "epoch": 5.53393665158371, | |
| "grad_norm": 0.37323540449142456, | |
| "learning_rate": 4.940770754579183e-06, | |
| "loss": 0.0733, | |
| "step": 1223 | |
| }, | |
| { | |
| "epoch": 5.538461538461538, | |
| "grad_norm": 0.353660523891449, | |
| "learning_rate": 4.932873968433014e-06, | |
| "loss": 0.0729, | |
| "step": 1224 | |
| }, | |
| { | |
| "epoch": 5.542986425339366, | |
| "grad_norm": 0.35802799463272095, | |
| "learning_rate": 4.924977349750692e-06, | |
| "loss": 0.0716, | |
| "step": 1225 | |
| }, | |
| { | |
| "epoch": 5.547511312217194, | |
| "grad_norm": 0.4036112129688263, | |
| "learning_rate": 4.917080918232444e-06, | |
| "loss": 0.0836, | |
| "step": 1226 | |
| }, | |
| { | |
| "epoch": 5.552036199095022, | |
| "grad_norm": 0.3766706585884094, | |
| "learning_rate": 4.90918469357803e-06, | |
| "loss": 0.0638, | |
| "step": 1227 | |
| }, | |
| { | |
| "epoch": 5.5565610859728505, | |
| "grad_norm": 0.3591664135456085, | |
| "learning_rate": 4.9012886954866985e-06, | |
| "loss": 0.0677, | |
| "step": 1228 | |
| }, | |
| { | |
| "epoch": 5.5610859728506785, | |
| "grad_norm": 0.37841859459877014, | |
| "learning_rate": 4.893392943657127e-06, | |
| "loss": 0.0624, | |
| "step": 1229 | |
| }, | |
| { | |
| "epoch": 5.5656108597285066, | |
| "grad_norm": 0.3366260826587677, | |
| "learning_rate": 4.885497457787383e-06, | |
| "loss": 0.0855, | |
| "step": 1230 | |
| }, | |
| { | |
| "epoch": 5.570135746606335, | |
| "grad_norm": 0.4003104269504547, | |
| "learning_rate": 4.877602257574866e-06, | |
| "loss": 0.0731, | |
| "step": 1231 | |
| }, | |
| { | |
| "epoch": 5.574660633484163, | |
| "grad_norm": 0.3531114459037781, | |
| "learning_rate": 4.8697073627162675e-06, | |
| "loss": 0.0574, | |
| "step": 1232 | |
| }, | |
| { | |
| "epoch": 5.579185520361991, | |
| "grad_norm": 0.3736576437950134, | |
| "learning_rate": 4.861812792907514e-06, | |
| "loss": 0.0599, | |
| "step": 1233 | |
| }, | |
| { | |
| "epoch": 5.583710407239819, | |
| "grad_norm": 0.34564992785453796, | |
| "learning_rate": 4.853918567843722e-06, | |
| "loss": 0.0538, | |
| "step": 1234 | |
| }, | |
| { | |
| "epoch": 5.588235294117647, | |
| "grad_norm": 0.32957449555397034, | |
| "learning_rate": 4.846024707219149e-06, | |
| "loss": 0.0594, | |
| "step": 1235 | |
| }, | |
| { | |
| "epoch": 5.592760180995475, | |
| "grad_norm": 0.35310038924217224, | |
| "learning_rate": 4.8381312307271425e-06, | |
| "loss": 0.0728, | |
| "step": 1236 | |
| }, | |
| { | |
| "epoch": 5.597285067873303, | |
| "grad_norm": 0.36492466926574707, | |
| "learning_rate": 4.830238158060091e-06, | |
| "loss": 0.0726, | |
| "step": 1237 | |
| }, | |
| { | |
| "epoch": 5.601809954751131, | |
| "grad_norm": 0.35988250374794006, | |
| "learning_rate": 4.822345508909376e-06, | |
| "loss": 0.0552, | |
| "step": 1238 | |
| }, | |
| { | |
| "epoch": 5.606334841628959, | |
| "grad_norm": 0.394717276096344, | |
| "learning_rate": 4.814453302965323e-06, | |
| "loss": 0.0817, | |
| "step": 1239 | |
| }, | |
| { | |
| "epoch": 5.610859728506787, | |
| "grad_norm": 0.374766081571579, | |
| "learning_rate": 4.806561559917152e-06, | |
| "loss": 0.0695, | |
| "step": 1240 | |
| }, | |
| { | |
| "epoch": 5.615384615384615, | |
| "grad_norm": 0.3558162450790405, | |
| "learning_rate": 4.798670299452926e-06, | |
| "loss": 0.077, | |
| "step": 1241 | |
| }, | |
| { | |
| "epoch": 5.619909502262443, | |
| "grad_norm": 0.3893064558506012, | |
| "learning_rate": 4.790779541259507e-06, | |
| "loss": 0.077, | |
| "step": 1242 | |
| }, | |
| { | |
| "epoch": 5.624434389140271, | |
| "grad_norm": 0.34312954545021057, | |
| "learning_rate": 4.7828893050225015e-06, | |
| "loss": 0.0788, | |
| "step": 1243 | |
| }, | |
| { | |
| "epoch": 5.628959276018099, | |
| "grad_norm": 0.38066574931144714, | |
| "learning_rate": 4.774999610426216e-06, | |
| "loss": 0.0922, | |
| "step": 1244 | |
| }, | |
| { | |
| "epoch": 5.633484162895927, | |
| "grad_norm": 0.3637709617614746, | |
| "learning_rate": 4.767110477153605e-06, | |
| "loss": 0.0565, | |
| "step": 1245 | |
| }, | |
| { | |
| "epoch": 5.638009049773755, | |
| "grad_norm": 0.3352390229701996, | |
| "learning_rate": 4.7592219248862205e-06, | |
| "loss": 0.0703, | |
| "step": 1246 | |
| }, | |
| { | |
| "epoch": 5.642533936651584, | |
| "grad_norm": 0.3382686376571655, | |
| "learning_rate": 4.751333973304166e-06, | |
| "loss": 0.0536, | |
| "step": 1247 | |
| }, | |
| { | |
| "epoch": 5.647058823529412, | |
| "grad_norm": 0.3596508502960205, | |
| "learning_rate": 4.7434466420860515e-06, | |
| "loss": 0.0509, | |
| "step": 1248 | |
| }, | |
| { | |
| "epoch": 5.65158371040724, | |
| "grad_norm": 0.3635534942150116, | |
| "learning_rate": 4.735559950908933e-06, | |
| "loss": 0.0577, | |
| "step": 1249 | |
| }, | |
| { | |
| "epoch": 5.656108597285068, | |
| "grad_norm": 0.3797168731689453, | |
| "learning_rate": 4.727673919448271e-06, | |
| "loss": 0.0629, | |
| "step": 1250 | |
| }, | |
| { | |
| "epoch": 5.660633484162896, | |
| "grad_norm": 0.3910801410675049, | |
| "learning_rate": 4.7197885673778816e-06, | |
| "loss": 0.0746, | |
| "step": 1251 | |
| }, | |
| { | |
| "epoch": 5.665158371040724, | |
| "grad_norm": 0.35556650161743164, | |
| "learning_rate": 4.711903914369885e-06, | |
| "loss": 0.0784, | |
| "step": 1252 | |
| }, | |
| { | |
| "epoch": 5.669683257918552, | |
| "grad_norm": 0.3415828347206116, | |
| "learning_rate": 4.704019980094659e-06, | |
| "loss": 0.0533, | |
| "step": 1253 | |
| }, | |
| { | |
| "epoch": 5.67420814479638, | |
| "grad_norm": 0.34802836179733276, | |
| "learning_rate": 4.696136784220785e-06, | |
| "loss": 0.0734, | |
| "step": 1254 | |
| }, | |
| { | |
| "epoch": 5.678733031674208, | |
| "grad_norm": 0.34953394532203674, | |
| "learning_rate": 4.688254346415006e-06, | |
| "loss": 0.0618, | |
| "step": 1255 | |
| }, | |
| { | |
| "epoch": 5.683257918552036, | |
| "grad_norm": 0.3640146255493164, | |
| "learning_rate": 4.680372686342173e-06, | |
| "loss": 0.0708, | |
| "step": 1256 | |
| }, | |
| { | |
| "epoch": 5.6877828054298645, | |
| "grad_norm": 0.5030450224876404, | |
| "learning_rate": 4.672491823665193e-06, | |
| "loss": 0.0661, | |
| "step": 1257 | |
| }, | |
| { | |
| "epoch": 5.6923076923076925, | |
| "grad_norm": 0.3879057466983795, | |
| "learning_rate": 4.664611778044988e-06, | |
| "loss": 0.0769, | |
| "step": 1258 | |
| }, | |
| { | |
| "epoch": 5.6968325791855206, | |
| "grad_norm": 0.353974848985672, | |
| "learning_rate": 4.656732569140441e-06, | |
| "loss": 0.0788, | |
| "step": 1259 | |
| }, | |
| { | |
| "epoch": 5.701357466063349, | |
| "grad_norm": 0.3681681156158447, | |
| "learning_rate": 4.648854216608346e-06, | |
| "loss": 0.0546, | |
| "step": 1260 | |
| }, | |
| { | |
| "epoch": 5.705882352941177, | |
| "grad_norm": 0.36380332708358765, | |
| "learning_rate": 4.640976740103363e-06, | |
| "loss": 0.0619, | |
| "step": 1261 | |
| }, | |
| { | |
| "epoch": 5.710407239819005, | |
| "grad_norm": 0.3659432530403137, | |
| "learning_rate": 4.6331001592779615e-06, | |
| "loss": 0.0613, | |
| "step": 1262 | |
| }, | |
| { | |
| "epoch": 5.714932126696833, | |
| "grad_norm": 0.3356708884239197, | |
| "learning_rate": 4.625224493782382e-06, | |
| "loss": 0.0357, | |
| "step": 1263 | |
| }, | |
| { | |
| "epoch": 5.719457013574661, | |
| "grad_norm": 0.3507138192653656, | |
| "learning_rate": 4.61734976326458e-06, | |
| "loss": 0.0715, | |
| "step": 1264 | |
| }, | |
| { | |
| "epoch": 5.723981900452489, | |
| "grad_norm": 0.3713688552379608, | |
| "learning_rate": 4.609475987370177e-06, | |
| "loss": 0.0763, | |
| "step": 1265 | |
| }, | |
| { | |
| "epoch": 5.728506787330317, | |
| "grad_norm": 0.3401196599006653, | |
| "learning_rate": 4.6016031857424135e-06, | |
| "loss": 0.0674, | |
| "step": 1266 | |
| }, | |
| { | |
| "epoch": 5.733031674208145, | |
| "grad_norm": 0.3747742772102356, | |
| "learning_rate": 4.593731378022099e-06, | |
| "loss": 0.0655, | |
| "step": 1267 | |
| }, | |
| { | |
| "epoch": 5.737556561085973, | |
| "grad_norm": 0.3921152353286743, | |
| "learning_rate": 4.585860583847566e-06, | |
| "loss": 0.079, | |
| "step": 1268 | |
| }, | |
| { | |
| "epoch": 5.742081447963801, | |
| "grad_norm": 0.4036964178085327, | |
| "learning_rate": 4.577990822854615e-06, | |
| "loss": 0.0728, | |
| "step": 1269 | |
| }, | |
| { | |
| "epoch": 5.746606334841629, | |
| "grad_norm": 0.36047136783599854, | |
| "learning_rate": 4.570122114676469e-06, | |
| "loss": 0.0569, | |
| "step": 1270 | |
| }, | |
| { | |
| "epoch": 5.751131221719457, | |
| "grad_norm": 0.3767181932926178, | |
| "learning_rate": 4.562254478943729e-06, | |
| "loss": 0.0691, | |
| "step": 1271 | |
| }, | |
| { | |
| "epoch": 5.755656108597285, | |
| "grad_norm": 0.3421269953250885, | |
| "learning_rate": 4.554387935284317e-06, | |
| "loss": 0.0587, | |
| "step": 1272 | |
| }, | |
| { | |
| "epoch": 5.760180995475113, | |
| "grad_norm": 0.3528580367565155, | |
| "learning_rate": 4.546522503323429e-06, | |
| "loss": 0.0531, | |
| "step": 1273 | |
| }, | |
| { | |
| "epoch": 5.764705882352941, | |
| "grad_norm": 0.3879212737083435, | |
| "learning_rate": 4.53865820268349e-06, | |
| "loss": 0.0752, | |
| "step": 1274 | |
| }, | |
| { | |
| "epoch": 5.769230769230769, | |
| "grad_norm": 0.34203389286994934, | |
| "learning_rate": 4.530795052984104e-06, | |
| "loss": 0.0619, | |
| "step": 1275 | |
| }, | |
| { | |
| "epoch": 5.773755656108597, | |
| "grad_norm": 0.32171866297721863, | |
| "learning_rate": 4.522933073842001e-06, | |
| "loss": 0.0615, | |
| "step": 1276 | |
| }, | |
| { | |
| "epoch": 5.778280542986425, | |
| "grad_norm": 0.409665584564209, | |
| "learning_rate": 4.51507228487099e-06, | |
| "loss": 0.0706, | |
| "step": 1277 | |
| }, | |
| { | |
| "epoch": 5.782805429864253, | |
| "grad_norm": 0.341505765914917, | |
| "learning_rate": 4.507212705681914e-06, | |
| "loss": 0.0503, | |
| "step": 1278 | |
| }, | |
| { | |
| "epoch": 5.787330316742081, | |
| "grad_norm": 0.34372374415397644, | |
| "learning_rate": 4.499354355882595e-06, | |
| "loss": 0.0685, | |
| "step": 1279 | |
| }, | |
| { | |
| "epoch": 5.791855203619909, | |
| "grad_norm": 0.35839352011680603, | |
| "learning_rate": 4.49149725507779e-06, | |
| "loss": 0.0768, | |
| "step": 1280 | |
| }, | |
| { | |
| "epoch": 5.796380090497737, | |
| "grad_norm": 0.331247478723526, | |
| "learning_rate": 4.483641422869139e-06, | |
| "loss": 0.0563, | |
| "step": 1281 | |
| }, | |
| { | |
| "epoch": 5.800904977375565, | |
| "grad_norm": 0.3905595541000366, | |
| "learning_rate": 4.475786878855118e-06, | |
| "loss": 0.08, | |
| "step": 1282 | |
| }, | |
| { | |
| "epoch": 5.8054298642533935, | |
| "grad_norm": 0.37202346324920654, | |
| "learning_rate": 4.467933642630989e-06, | |
| "loss": 0.0612, | |
| "step": 1283 | |
| }, | |
| { | |
| "epoch": 5.8099547511312215, | |
| "grad_norm": 0.3457815945148468, | |
| "learning_rate": 4.460081733788749e-06, | |
| "loss": 0.064, | |
| "step": 1284 | |
| }, | |
| { | |
| "epoch": 5.8144796380090495, | |
| "grad_norm": 0.3689752221107483, | |
| "learning_rate": 4.452231171917087e-06, | |
| "loss": 0.055, | |
| "step": 1285 | |
| }, | |
| { | |
| "epoch": 5.819004524886878, | |
| "grad_norm": 0.3966318666934967, | |
| "learning_rate": 4.44438197660133e-06, | |
| "loss": 0.0654, | |
| "step": 1286 | |
| }, | |
| { | |
| "epoch": 5.823529411764706, | |
| "grad_norm": 0.34534600377082825, | |
| "learning_rate": 4.436534167423395e-06, | |
| "loss": 0.0671, | |
| "step": 1287 | |
| }, | |
| { | |
| "epoch": 5.828054298642534, | |
| "grad_norm": 0.3827734887599945, | |
| "learning_rate": 4.4286877639617435e-06, | |
| "loss": 0.0857, | |
| "step": 1288 | |
| }, | |
| { | |
| "epoch": 5.832579185520362, | |
| "grad_norm": 0.3259190022945404, | |
| "learning_rate": 4.420842785791326e-06, | |
| "loss": 0.0556, | |
| "step": 1289 | |
| }, | |
| { | |
| "epoch": 5.83710407239819, | |
| "grad_norm": 0.3990061581134796, | |
| "learning_rate": 4.412999252483542e-06, | |
| "loss": 0.0803, | |
| "step": 1290 | |
| }, | |
| { | |
| "epoch": 5.841628959276018, | |
| "grad_norm": 0.37944766879081726, | |
| "learning_rate": 4.405157183606182e-06, | |
| "loss": 0.0729, | |
| "step": 1291 | |
| }, | |
| { | |
| "epoch": 5.846153846153846, | |
| "grad_norm": 0.362619012594223, | |
| "learning_rate": 4.397316598723385e-06, | |
| "loss": 0.0605, | |
| "step": 1292 | |
| }, | |
| { | |
| "epoch": 5.850678733031674, | |
| "grad_norm": 0.3391781747341156, | |
| "learning_rate": 4.38947751739559e-06, | |
| "loss": 0.0606, | |
| "step": 1293 | |
| }, | |
| { | |
| "epoch": 5.855203619909502, | |
| "grad_norm": 0.33791670203208923, | |
| "learning_rate": 4.381639959179482e-06, | |
| "loss": 0.0655, | |
| "step": 1294 | |
| }, | |
| { | |
| "epoch": 5.859728506787331, | |
| "grad_norm": 0.3713356852531433, | |
| "learning_rate": 4.373803943627946e-06, | |
| "loss": 0.0553, | |
| "step": 1295 | |
| }, | |
| { | |
| "epoch": 5.864253393665159, | |
| "grad_norm": 0.33650147914886475, | |
| "learning_rate": 4.36596949029002e-06, | |
| "loss": 0.068, | |
| "step": 1296 | |
| }, | |
| { | |
| "epoch": 5.868778280542987, | |
| "grad_norm": 0.39711567759513855, | |
| "learning_rate": 4.358136618710844e-06, | |
| "loss": 0.0789, | |
| "step": 1297 | |
| }, | |
| { | |
| "epoch": 5.873303167420815, | |
| "grad_norm": 0.416520893573761, | |
| "learning_rate": 4.350305348431612e-06, | |
| "loss": 0.0667, | |
| "step": 1298 | |
| }, | |
| { | |
| "epoch": 5.877828054298643, | |
| "grad_norm": 0.37820732593536377, | |
| "learning_rate": 4.342475698989524e-06, | |
| "loss": 0.0515, | |
| "step": 1299 | |
| }, | |
| { | |
| "epoch": 5.882352941176471, | |
| "grad_norm": 0.388907253742218, | |
| "learning_rate": 4.334647689917734e-06, | |
| "loss": 0.0599, | |
| "step": 1300 | |
| }, | |
| { | |
| "epoch": 5.886877828054299, | |
| "grad_norm": 0.3462461233139038, | |
| "learning_rate": 4.326821340745304e-06, | |
| "loss": 0.0792, | |
| "step": 1301 | |
| }, | |
| { | |
| "epoch": 5.891402714932127, | |
| "grad_norm": 0.3591066896915436, | |
| "learning_rate": 4.318996670997159e-06, | |
| "loss": 0.0595, | |
| "step": 1302 | |
| }, | |
| { | |
| "epoch": 5.895927601809955, | |
| "grad_norm": 0.38075944781303406, | |
| "learning_rate": 4.31117370019403e-06, | |
| "loss": 0.0688, | |
| "step": 1303 | |
| }, | |
| { | |
| "epoch": 5.900452488687783, | |
| "grad_norm": 0.39448288083076477, | |
| "learning_rate": 4.303352447852412e-06, | |
| "loss": 0.0901, | |
| "step": 1304 | |
| }, | |
| { | |
| "epoch": 5.904977375565611, | |
| "grad_norm": 0.3826497495174408, | |
| "learning_rate": 4.295532933484509e-06, | |
| "loss": 0.0745, | |
| "step": 1305 | |
| }, | |
| { | |
| "epoch": 5.909502262443439, | |
| "grad_norm": 0.34782737493515015, | |
| "learning_rate": 4.287715176598194e-06, | |
| "loss": 0.0619, | |
| "step": 1306 | |
| }, | |
| { | |
| "epoch": 5.914027149321267, | |
| "grad_norm": 0.3616624176502228, | |
| "learning_rate": 4.279899196696953e-06, | |
| "loss": 0.0722, | |
| "step": 1307 | |
| }, | |
| { | |
| "epoch": 5.918552036199095, | |
| "grad_norm": 0.3477957546710968, | |
| "learning_rate": 4.2720850132798375e-06, | |
| "loss": 0.0705, | |
| "step": 1308 | |
| }, | |
| { | |
| "epoch": 5.923076923076923, | |
| "grad_norm": 0.3332456350326538, | |
| "learning_rate": 4.264272645841419e-06, | |
| "loss": 0.0538, | |
| "step": 1309 | |
| }, | |
| { | |
| "epoch": 5.927601809954751, | |
| "grad_norm": 0.34810853004455566, | |
| "learning_rate": 4.256462113871741e-06, | |
| "loss": 0.071, | |
| "step": 1310 | |
| }, | |
| { | |
| "epoch": 5.932126696832579, | |
| "grad_norm": 0.39874809980392456, | |
| "learning_rate": 4.2486534368562645e-06, | |
| "loss": 0.0814, | |
| "step": 1311 | |
| }, | |
| { | |
| "epoch": 5.9366515837104075, | |
| "grad_norm": 0.3651580214500427, | |
| "learning_rate": 4.240846634275822e-06, | |
| "loss": 0.0521, | |
| "step": 1312 | |
| }, | |
| { | |
| "epoch": 5.9411764705882355, | |
| "grad_norm": 0.37812861800193787, | |
| "learning_rate": 4.233041725606573e-06, | |
| "loss": 0.0438, | |
| "step": 1313 | |
| }, | |
| { | |
| "epoch": 5.9457013574660635, | |
| "grad_norm": 0.3588254153728485, | |
| "learning_rate": 4.225238730319949e-06, | |
| "loss": 0.078, | |
| "step": 1314 | |
| }, | |
| { | |
| "epoch": 5.950226244343892, | |
| "grad_norm": 0.34432655572891235, | |
| "learning_rate": 4.217437667882611e-06, | |
| "loss": 0.0602, | |
| "step": 1315 | |
| }, | |
| { | |
| "epoch": 5.95475113122172, | |
| "grad_norm": 0.33063921332359314, | |
| "learning_rate": 4.209638557756396e-06, | |
| "loss": 0.0564, | |
| "step": 1316 | |
| }, | |
| { | |
| "epoch": 5.959276018099548, | |
| "grad_norm": 0.4011344909667969, | |
| "learning_rate": 4.2018414193982695e-06, | |
| "loss": 0.0671, | |
| "step": 1317 | |
| }, | |
| { | |
| "epoch": 5.963800904977376, | |
| "grad_norm": 0.40701520442962646, | |
| "learning_rate": 4.1940462722602815e-06, | |
| "loss": 0.0851, | |
| "step": 1318 | |
| }, | |
| { | |
| "epoch": 5.968325791855204, | |
| "grad_norm": 0.374459445476532, | |
| "learning_rate": 4.186253135789511e-06, | |
| "loss": 0.076, | |
| "step": 1319 | |
| }, | |
| { | |
| "epoch": 5.972850678733032, | |
| "grad_norm": 0.3491608798503876, | |
| "learning_rate": 4.178462029428023e-06, | |
| "loss": 0.0718, | |
| "step": 1320 | |
| }, | |
| { | |
| "epoch": 5.97737556561086, | |
| "grad_norm": 0.3967466354370117, | |
| "learning_rate": 4.170672972612816e-06, | |
| "loss": 0.0753, | |
| "step": 1321 | |
| }, | |
| { | |
| "epoch": 5.981900452488688, | |
| "grad_norm": 0.3280808627605438, | |
| "learning_rate": 4.162885984775777e-06, | |
| "loss": 0.0594, | |
| "step": 1322 | |
| }, | |
| { | |
| "epoch": 5.986425339366516, | |
| "grad_norm": 0.4042797088623047, | |
| "learning_rate": 4.15510108534363e-06, | |
| "loss": 0.0878, | |
| "step": 1323 | |
| }, | |
| { | |
| "epoch": 5.990950226244344, | |
| "grad_norm": 0.36680734157562256, | |
| "learning_rate": 4.1473182937378905e-06, | |
| "loss": 0.0672, | |
| "step": 1324 | |
| }, | |
| { | |
| "epoch": 5.995475113122172, | |
| "grad_norm": 0.3289303779602051, | |
| "learning_rate": 4.139537629374814e-06, | |
| "loss": 0.0632, | |
| "step": 1325 | |
| }, | |
| { | |
| "epoch": 6.0, | |
| "grad_norm": 0.3161666989326477, | |
| "learning_rate": 4.131759111665349e-06, | |
| "loss": 0.0474, | |
| "step": 1326 | |
| } | |
| ], | |
| "logging_steps": 1, | |
| "max_steps": 2210, | |
| "num_input_tokens_seen": 0, | |
| "num_train_epochs": 10, | |
| "save_steps": 442, | |
| "stateful_callbacks": { | |
| "TrainerControl": { | |
| "args": { | |
| "should_epoch_stop": false, | |
| "should_evaluate": false, | |
| "should_log": false, | |
| "should_save": true, | |
| "should_training_stop": false | |
| }, | |
| "attributes": {} | |
| } | |
| }, | |
| "total_flos": 272662612566016.0, | |
| "train_batch_size": 1, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |
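
The state above is the standard Trainer checkpoint format: `log_history` is a list of per-step records (`epoch`, `step`, `loss`, `learning_rate`, `grad_norm`), followed by run-level metadata such as `global_step`, `max_steps`, and `num_train_epochs`. As a minimal sketch (not part of the original file), the snippet below shows one way such a file could be loaded and summarised per epoch; the filename `trainer_state.json` is an assumption and should be adjusted to wherever the checkpoint was written.

```python
# Minimal sketch: summarise the logged loss curve from a Trainer state file.
# Assumes the JSON shown above is saved as "trainer_state.json" (hypothetical path).
import json
import math
from collections import defaultdict

with open("trainer_state.json") as f:
    state = json.load(f)

# Group per-step training losses by epoch number (entries logged at an exact
# epoch boundary, e.g. epoch 6.0, belong to that epoch via ceil()).
per_epoch = defaultdict(list)
for entry in state["log_history"]:
    if "loss" in entry:
        per_epoch[math.ceil(entry["epoch"])].append(entry["loss"])

print(f"global_step={state['global_step']}  max_steps={state['max_steps']}")
for epoch in sorted(per_epoch):
    losses = per_epoch[epoch]
    print(f"epoch {epoch}: mean loss {sum(losses) / len(losses):.4f} over {len(losses)} steps")
```

For the section shown here, this would report epoch 6 averaging roughly 0.06-0.07 training loss per step, consistent with the individual entries at steps 1266 through 1326.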