{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 2.990202482037884,
  "eval_steps": 500,
  "global_step": 381,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.007838014369693011,
      "grad_norm": 3.012711763381958,
      "learning_rate": 1.282051282051282e-06,
      "loss": 0.5662,
      "step": 1
    },
    {
      "epoch": 0.015676028739386023,
      "grad_norm": 3.1244778633117676,
      "learning_rate": 2.564102564102564e-06,
      "loss": 0.5951,
      "step": 2
    },
    {
      "epoch": 0.023514043109079032,
      "grad_norm": 3.0221784114837646,
      "learning_rate": 3.846153846153847e-06,
      "loss": 0.5891,
      "step": 3
    },
    {
      "epoch": 0.031352057478772045,
      "grad_norm": 2.6240241527557373,
      "learning_rate": 5.128205128205128e-06,
      "loss": 0.5812,
      "step": 4
    },
    {
      "epoch": 0.039190071848465055,
      "grad_norm": 1.920555591583252,
      "learning_rate": 6.41025641025641e-06,
      "loss": 0.54,
      "step": 5
    },
    {
      "epoch": 0.047028086218158065,
      "grad_norm": 1.4635577201843262,
      "learning_rate": 7.692307692307694e-06,
      "loss": 0.5043,
      "step": 6
    },
    {
      "epoch": 0.054866100587851074,
      "grad_norm": 1.418139100074768,
      "learning_rate": 8.974358974358976e-06,
      "loss": 0.491,
      "step": 7
    },
    {
      "epoch": 0.06270411495754409,
      "grad_norm": 1.3632681369781494,
      "learning_rate": 1.0256410256410256e-05,
      "loss": 0.4744,
      "step": 8
    },
    {
      "epoch": 0.0705421293272371,
      "grad_norm": 1.7710790634155273,
      "learning_rate": 1.153846153846154e-05,
      "loss": 0.4128,
      "step": 9
    },
    {
      "epoch": 0.07838014369693011,
      "grad_norm": 1.3074016571044922,
      "learning_rate": 1.282051282051282e-05,
      "loss": 0.3723,
      "step": 10
    },
    {
      "epoch": 0.08621815806662313,
      "grad_norm": 0.8031821250915527,
      "learning_rate": 1.4102564102564104e-05,
      "loss": 0.3585,
      "step": 11
    },
    {
      "epoch": 0.09405617243631613,
      "grad_norm": 1.116591453552246,
      "learning_rate": 1.5384615384615387e-05,
      "loss": 0.3427,
      "step": 12
    },
    {
      "epoch": 0.10189418680600915,
      "grad_norm": 0.970167875289917,
      "learning_rate": 1.6666666666666667e-05,
      "loss": 0.3333,
      "step": 13
    },
    {
      "epoch": 0.10973220117570215,
      "grad_norm": 0.8765995502471924,
      "learning_rate": 1.794871794871795e-05,
      "loss": 0.3051,
      "step": 14
    },
    {
      "epoch": 0.11757021554539517,
      "grad_norm": 0.7228055000305176,
      "learning_rate": 1.923076923076923e-05,
      "loss": 0.2992,
      "step": 15
    },
    {
      "epoch": 0.12540822991508818,
      "grad_norm": 0.6161549687385559,
      "learning_rate": 2.0512820512820512e-05,
      "loss": 0.2884,
      "step": 16
    },
    {
      "epoch": 0.13324624428478118,
      "grad_norm": 0.5537469983100891,
      "learning_rate": 2.1794871794871795e-05,
      "loss": 0.2829,
      "step": 17
    },
    {
      "epoch": 0.1410842586544742,
      "grad_norm": 0.5031456351280212,
      "learning_rate": 2.307692307692308e-05,
      "loss": 0.2651,
      "step": 18
    },
    {
      "epoch": 0.14892227302416722,
      "grad_norm": 0.49946239590644836,
      "learning_rate": 2.435897435897436e-05,
      "loss": 0.2594,
      "step": 19
    },
    {
      "epoch": 0.15676028739386022,
      "grad_norm": 0.434522420167923,
      "learning_rate": 2.564102564102564e-05,
      "loss": 0.2491,
      "step": 20
    },
    {
      "epoch": 0.16459830176355322,
      "grad_norm": 0.5493834614753723,
      "learning_rate": 2.6923076923076923e-05,
      "loss": 0.2474,
      "step": 21
    },
    {
      "epoch": 0.17243631613324625,
      "grad_norm": 0.40903612971305847,
      "learning_rate": 2.8205128205128207e-05,
      "loss": 0.2534,
      "step": 22
    },
    {
      "epoch": 0.18027433050293926,
      "grad_norm": 0.46355342864990234,
      "learning_rate": 2.948717948717949e-05,
      "loss": 0.2547,
      "step": 23
    },
    {
      "epoch": 0.18811234487263226,
      "grad_norm": 0.38935673236846924,
      "learning_rate": 3.0769230769230774e-05,
      "loss": 0.238,
      "step": 24
    },
    {
      "epoch": 0.1959503592423253,
      "grad_norm": 0.43248051404953003,
      "learning_rate": 3.205128205128206e-05,
      "loss": 0.243,
      "step": 25
    },
    {
      "epoch": 0.2037883736120183,
      "grad_norm": 0.3940970301628113,
      "learning_rate": 3.3333333333333335e-05,
      "loss": 0.2365,
      "step": 26
    },
    {
      "epoch": 0.2116263879817113,
      "grad_norm": 0.39611291885375977,
      "learning_rate": 3.461538461538462e-05,
      "loss": 0.2299,
      "step": 27
    },
    {
      "epoch": 0.2194644023514043,
      "grad_norm": 0.36743637919425964,
      "learning_rate": 3.58974358974359e-05,
      "loss": 0.235,
      "step": 28
    },
    {
      "epoch": 0.22730241672109733,
      "grad_norm": 0.35339489579200745,
      "learning_rate": 3.717948717948718e-05,
      "loss": 0.224,
      "step": 29
    },
    {
      "epoch": 0.23514043109079033,
      "grad_norm": 0.33060234785079956,
      "learning_rate": 3.846153846153846e-05,
      "loss": 0.2276,
      "step": 30
    },
    {
      "epoch": 0.24297844546048333,
      "grad_norm": 0.3493947386741638,
      "learning_rate": 3.974358974358974e-05,
      "loss": 0.228,
      "step": 31
    },
    {
      "epoch": 0.25081645983017636,
      "grad_norm": 0.34183913469314575,
      "learning_rate": 4.1025641025641023e-05,
      "loss": 0.2245,
      "step": 32
    },
    {
      "epoch": 0.25865447419986937,
      "grad_norm": 0.3201765716075897,
      "learning_rate": 4.230769230769231e-05,
      "loss": 0.2303,
      "step": 33
    },
    {
      "epoch": 0.26649248856956237,
      "grad_norm": 0.3946562707424164,
      "learning_rate": 4.358974358974359e-05,
      "loss": 0.2266,
      "step": 34
    },
    {
      "epoch": 0.27433050293925537,
      "grad_norm": 0.36500218510627747,
      "learning_rate": 4.4871794871794874e-05,
      "loss": 0.2103,
      "step": 35
    },
    {
      "epoch": 0.2821685173089484,
      "grad_norm": 0.3446497321128845,
      "learning_rate": 4.615384615384616e-05,
      "loss": 0.2309,
      "step": 36
    },
    {
      "epoch": 0.29000653167864143,
      "grad_norm": 0.3353784382343292,
      "learning_rate": 4.7435897435897435e-05,
      "loss": 0.2294,
      "step": 37
    },
    {
      "epoch": 0.29784454604833444,
      "grad_norm": 0.3502955138683319,
      "learning_rate": 4.871794871794872e-05,
      "loss": 0.2279,
      "step": 38
    },
    {
      "epoch": 0.30568256041802744,
      "grad_norm": 0.3561626076698303,
      "learning_rate": 5e-05,
      "loss": 0.2289,
      "step": 39
    },
    {
      "epoch": 0.31352057478772044,
      "grad_norm": 0.3842853009700775,
      "learning_rate": 4.999894523795781e-05,
      "loss": 0.2257,
      "step": 40
    },
    {
      "epoch": 0.32135858915741344,
      "grad_norm": 0.4691725969314575,
      "learning_rate": 4.999578104083307e-05,
      "loss": 0.2227,
      "step": 41
    },
    {
      "epoch": 0.32919660352710645,
      "grad_norm": 0.4886467754840851,
      "learning_rate": 4.999050767562379e-05,
      "loss": 0.2187,
      "step": 42
    },
    {
      "epoch": 0.3370346178967995,
      "grad_norm": 0.38050761818885803,
      "learning_rate": 4.998312558730159e-05,
      "loss": 0.2301,
      "step": 43
    },
    {
      "epoch": 0.3448726322664925,
      "grad_norm": 0.33100268244743347,
      "learning_rate": 4.997363539877422e-05,
      "loss": 0.2289,
      "step": 44
    },
    {
      "epoch": 0.3527106466361855,
      "grad_norm": 0.36914992332458496,
      "learning_rate": 4.996203791083291e-05,
      "loss": 0.2301,
      "step": 45
    },
    {
      "epoch": 0.3605486610058785,
      "grad_norm": 0.32373785972595215,
      "learning_rate": 4.994833410208487e-05,
      "loss": 0.2212,
      "step": 46
    },
    {
      "epoch": 0.3683866753755715,
      "grad_norm": 0.3918166160583496,
      "learning_rate": 4.993252512887069e-05,
      "loss": 0.229,
      "step": 47
    },
    {
      "epoch": 0.3762246897452645,
      "grad_norm": 0.3503532111644745,
      "learning_rate": 4.991461232516675e-05,
      "loss": 0.2323,
      "step": 48
    },
    {
      "epoch": 0.3840627041149575,
      "grad_norm": 0.3073519766330719,
      "learning_rate": 4.9894597202472696e-05,
      "loss": 0.2248,
      "step": 49
    },
    {
      "epoch": 0.3919007184846506,
      "grad_norm": 0.30389830470085144,
      "learning_rate": 4.9872481449683844e-05,
      "loss": 0.234,
      "step": 50
    },
    {
      "epoch": 0.3997387328543436,
      "grad_norm": 0.3088521957397461,
      "learning_rate": 4.984826693294874e-05,
      "loss": 0.232,
      "step": 51
    },
    {
      "epoch": 0.4075767472240366,
      "grad_norm": 0.2747111916542053,
      "learning_rate": 4.982195569551162e-05,
      "loss": 0.238,
      "step": 52
    },
    {
      "epoch": 0.4154147615937296,
      "grad_norm": 0.33293288946151733,
      "learning_rate": 4.979354995754006e-05,
      "loss": 0.2407,
      "step": 53
    },
    {
      "epoch": 0.4232527759634226,
      "grad_norm": 0.3552456796169281,
      "learning_rate": 4.976305211593758e-05,
      "loss": 0.2422,
      "step": 54
    },
    {
      "epoch": 0.4310907903331156,
      "grad_norm": 0.29555025696754456,
      "learning_rate": 4.9730464744141445e-05,
      "loss": 0.2311,
      "step": 55
    },
    {
      "epoch": 0.4389288047028086,
      "grad_norm": 0.314091295003891,
      "learning_rate": 4.969579059190549e-05,
      "loss": 0.2329,
      "step": 56
    },
    {
      "epoch": 0.44676681907250165,
      "grad_norm": 0.30469128489494324,
      "learning_rate": 4.965903258506806e-05,
      "loss": 0.2278,
      "step": 57
    },
    {
      "epoch": 0.45460483344219466,
      "grad_norm": 0.32766035199165344,
      "learning_rate": 4.962019382530521e-05,
      "loss": 0.2341,
      "step": 58
    },
    {
      "epoch": 0.46244284781188766,
      "grad_norm": 0.29133275151252747,
      "learning_rate": 4.957927758986888e-05,
      "loss": 0.2274,
      "step": 59
    },
    {
      "epoch": 0.47028086218158066,
      "grad_norm": 0.36449989676475525,
      "learning_rate": 4.953628733131045e-05,
      "loss": 0.2332,
      "step": 60
    },
    {
      "epoch": 0.47811887655127366,
      "grad_norm": 0.2951260805130005,
      "learning_rate": 4.949122667718935e-05,
      "loss": 0.2408,
      "step": 61
    },
    {
      "epoch": 0.48595689092096667,
      "grad_norm": 0.35234174132347107,
      "learning_rate": 4.944409942976699e-05,
      "loss": 0.2372,
      "step": 62
    },
    {
      "epoch": 0.4937949052906597,
      "grad_norm": 0.33611589670181274,
      "learning_rate": 4.9394909565685894e-05,
      "loss": 0.2377,
      "step": 63
    },
    {
      "epoch": 0.5016329196603527,
      "grad_norm": 0.32448795437812805,
      "learning_rate": 4.93436612356342e-05,
      "loss": 0.2374,
      "step": 64
    },
    {
      "epoch": 0.5094709340300457,
      "grad_norm": 0.3616539537906647,
      "learning_rate": 4.929035876399535e-05,
      "loss": 0.2347,
      "step": 65
    },
    {
      "epoch": 0.5173089483997387,
      "grad_norm": 0.3518874943256378,
      "learning_rate": 4.923500664848326e-05,
      "loss": 0.2368,
      "step": 66
    },
    {
      "epoch": 0.5251469627694317,
      "grad_norm": 0.28374969959259033,
      "learning_rate": 4.917760955976277e-05,
      "loss": 0.2278,
      "step": 67
    },
    {
      "epoch": 0.5329849771391247,
      "grad_norm": 0.3445357382297516,
      "learning_rate": 4.9118172341055516e-05,
      "loss": 0.2251,
      "step": 68
    },
    {
      "epoch": 0.5408229915088177,
      "grad_norm": 0.31107786297798157,
      "learning_rate": 4.905670000773126e-05,
      "loss": 0.2288,
      "step": 69
    },
    {
      "epoch": 0.5486610058785107,
      "grad_norm": 0.29442402720451355,
      "learning_rate": 4.899319774688473e-05,
      "loss": 0.2373,
      "step": 70
    },
    {
      "epoch": 0.5564990202482037,
      "grad_norm": 0.33253952860832214,
      "learning_rate": 4.892767091689786e-05,
      "loss": 0.2363,
      "step": 71
    },
    {
      "epoch": 0.5643370346178967,
      "grad_norm": 0.28732171654701233,
      "learning_rate": 4.886012504698769e-05,
      "loss": 0.2336,
      "step": 72
    },
    {
      "epoch": 0.5721750489875899,
      "grad_norm": 0.3209660053253174,
      "learning_rate": 4.87905658367398e-05,
      "loss": 0.2444,
      "step": 73
    },
    {
      "epoch": 0.5800130633572829,
      "grad_norm": 0.3354484438896179,
      "learning_rate": 4.871899915562736e-05,
      "loss": 0.2286,
      "step": 74
    },
    {
      "epoch": 0.5878510777269759,
      "grad_norm": 0.34029415249824524,
      "learning_rate": 4.864543104251587e-05,
      "loss": 0.2375,
      "step": 75
    },
    {
      "epoch": 0.5956890920966689,
      "grad_norm": 0.305865615606308,
      "learning_rate": 4.856986770515358e-05,
      "loss": 0.2397,
      "step": 76
    },
    {
      "epoch": 0.6035271064663619,
      "grad_norm": 0.31168901920318604,
      "learning_rate": 4.849231551964771e-05,
      "loss": 0.2407,
      "step": 77
    },
    {
      "epoch": 0.6113651208360549,
      "grad_norm": 0.3203716576099396,
      "learning_rate": 4.841278102992637e-05,
      "loss": 0.2305,
      "step": 78
    },
    {
      "epoch": 0.6192031352057479,
      "grad_norm": 0.301705539226532,
      "learning_rate": 4.833127094718643e-05,
      "loss": 0.2228,
      "step": 79
    },
    {
      "epoch": 0.6270411495754409,
      "grad_norm": 0.31093651056289673,
      "learning_rate": 4.82477921493272e-05,
      "loss": 0.2282,
      "step": 80
    },
    {
      "epoch": 0.6348791639451339,
      "grad_norm": 0.30084165930747986,
      "learning_rate": 4.8162351680370044e-05,
      "loss": 0.2252,
      "step": 81
    },
    {
      "epoch": 0.6427171783148269,
      "grad_norm": 0.2620241641998291,
      "learning_rate": 4.8074956749864075e-05,
      "loss": 0.2401,
      "step": 82
    },
    {
      "epoch": 0.6505551926845199,
      "grad_norm": 0.2853231728076935,
      "learning_rate": 4.79856147322777e-05,
      "loss": 0.2346,
      "step": 83
    },
    {
      "epoch": 0.6583932070542129,
      "grad_norm": 0.28441861271858215,
      "learning_rate": 4.789433316637644e-05,
      "loss": 0.2393,
      "step": 84
    },
    {
      "epoch": 0.6662312214239059,
      "grad_norm": 0.2583891749382019,
      "learning_rate": 4.7801119754586766e-05,
      "loss": 0.2328,
      "step": 85
    },
    {
      "epoch": 0.674069235793599,
      "grad_norm": 0.3212043344974518,
      "learning_rate": 4.7705982362346164e-05,
      "loss": 0.2336,
      "step": 86
    },
    {
      "epoch": 0.681907250163292,
      "grad_norm": 0.29511553049087524,
      "learning_rate": 4.760892901743944e-05,
      "loss": 0.223,
      "step": 87
    },
    {
      "epoch": 0.689745264532985,
      "grad_norm": 0.2569979429244995,
      "learning_rate": 4.750996790932134e-05,
      "loss": 0.227,
      "step": 88
    },
    {
      "epoch": 0.697583278902678,
      "grad_norm": 0.3018832504749298,
      "learning_rate": 4.7409107388425504e-05,
      "loss": 0.2276,
      "step": 89
    },
    {
      "epoch": 0.705421293272371,
      "grad_norm": 0.3047080636024475,
      "learning_rate": 4.730635596545985e-05,
      "loss": 0.2246,
      "step": 90
    },
    {
      "epoch": 0.713259307642064,
      "grad_norm": 0.2749479413032532,
      "learning_rate": 4.7201722310688445e-05,
      "loss": 0.2259,
      "step": 91
    },
    {
      "epoch": 0.721097322011757,
      "grad_norm": 0.27786287665367126,
      "learning_rate": 4.709521525319986e-05,
      "loss": 0.2294,
      "step": 92
    },
    {
      "epoch": 0.72893533638145,
      "grad_norm": 0.26435744762420654,
      "learning_rate": 4.698684378016222e-05,
      "loss": 0.2241,
      "step": 93
    },
    {
      "epoch": 0.736773350751143,
      "grad_norm": 0.26848137378692627,
      "learning_rate": 4.6876617036064844e-05,
      "loss": 0.2278,
      "step": 94
    },
    {
      "epoch": 0.744611365120836,
      "grad_norm": 0.2701055407524109,
      "learning_rate": 4.676454432194656e-05,
      "loss": 0.2369,
      "step": 95
    },
    {
      "epoch": 0.752449379490529,
      "grad_norm": 0.2705919146537781,
      "learning_rate": 4.665063509461097e-05,
      "loss": 0.2262,
      "step": 96
    },
    {
      "epoch": 0.760287393860222,
      "grad_norm": 0.2773014307022095,
      "learning_rate": 4.6534898965828405e-05,
      "loss": 0.2355,
      "step": 97
    },
    {
      "epoch": 0.768125408229915,
      "grad_norm": 0.23928780853748322,
      "learning_rate": 4.6417345701524915e-05,
      "loss": 0.2273,
      "step": 98
    },
    {
      "epoch": 0.7759634225996082,
      "grad_norm": 0.26342830061912537,
      "learning_rate": 4.629798522095818e-05,
      "loss": 0.2293,
      "step": 99
    },
    {
      "epoch": 0.7838014369693012,
      "grad_norm": 0.2792154550552368,
      "learning_rate": 4.617682759588055e-05,
      "loss": 0.2331,
      "step": 100
    },
    {
      "epoch": 0.7916394513389942,
      "grad_norm": 0.2701910436153412,
      "learning_rate": 4.6053883049689145e-05,
      "loss": 0.2278,
      "step": 101
    },
    {
      "epoch": 0.7994774657086872,
      "grad_norm": 0.2860325276851654,
      "learning_rate": 4.592916195656322e-05,
      "loss": 0.2342,
      "step": 102
    },
    {
      "epoch": 0.8073154800783802,
      "grad_norm": 0.3034144639968872,
      "learning_rate": 4.580267484058876e-05,
      "loss": 0.2351,
      "step": 103
    },
    {
      "epoch": 0.8151534944480732,
      "grad_norm": 0.28156647086143494,
      "learning_rate": 4.5674432374870455e-05,
      "loss": 0.2322,
      "step": 104
    },
    {
      "epoch": 0.8229915088177662,
      "grad_norm": 0.2784689962863922,
      "learning_rate": 4.554444538063113e-05,
      "loss": 0.2361,
      "step": 105
    },
    {
      "epoch": 0.8308295231874592,
      "grad_norm": 0.2945846617221832,
      "learning_rate": 4.5412724826298576e-05,
      "loss": 0.2302,
      "step": 106
    },
    {
      "epoch": 0.8386675375571522,
      "grad_norm": 0.25669312477111816,
      "learning_rate": 4.5279281826580056e-05,
      "loss": 0.2327,
      "step": 107
    },
    {
      "epoch": 0.8465055519268452,
      "grad_norm": 0.2950827181339264,
      "learning_rate": 4.514412764152446e-05,
      "loss": 0.2332,
      "step": 108
    },
    {
      "epoch": 0.8543435662965382,
      "grad_norm": 0.2892044186592102,
      "learning_rate": 4.5007273675572104e-05,
      "loss": 0.2286,
      "step": 109
    },
    {
      "epoch": 0.8621815806662312,
      "grad_norm": 0.27470967173576355,
      "learning_rate": 4.48687314765925e-05,
      "loss": 0.2364,
      "step": 110
    },
    {
      "epoch": 0.8700195950359242,
      "grad_norm": 0.27829453349113464,
      "learning_rate": 4.4728512734909844e-05,
      "loss": 0.2304,
      "step": 111
    },
    {
      "epoch": 0.8778576094056172,
      "grad_norm": 0.29203999042510986,
      "learning_rate": 4.4586629282316654e-05,
      "loss": 0.2363,
      "step": 112
    },
    {
      "epoch": 0.8856956237753103,
      "grad_norm": 0.24676302075386047,
      "learning_rate": 4.444309309107535e-05,
      "loss": 0.2215,
      "step": 113
    },
    {
      "epoch": 0.8935336381450033,
      "grad_norm": 0.27994513511657715,
      "learning_rate": 4.4297916272908024e-05,
      "loss": 0.232,
      "step": 114
    },
    {
      "epoch": 0.9013716525146963,
      "grad_norm": 0.2659918963909149,
      "learning_rate": 4.415111107797445e-05,
      "loss": 0.2323,
      "step": 115
    },
    {
      "epoch": 0.9092096668843893,
      "grad_norm": 0.2813761234283447,
      "learning_rate": 4.400268989383841e-05,
      "loss": 0.2294,
      "step": 116
    },
    {
      "epoch": 0.9170476812540823,
      "grad_norm": 0.27070480585098267,
      "learning_rate": 4.385266524442241e-05,
      "loss": 0.2254,
      "step": 117
    },
    {
      "epoch": 0.9248856956237753,
      "grad_norm": 0.2846162021160126,
      "learning_rate": 4.370104978895089e-05,
      "loss": 0.2324,
      "step": 118
    },
    {
      "epoch": 0.9327237099934683,
      "grad_norm": 0.257036954164505,
      "learning_rate": 4.3547856320882044e-05,
      "loss": 0.23,
      "step": 119
    },
    {
      "epoch": 0.9405617243631613,
      "grad_norm": 0.28918588161468506,
      "learning_rate": 4.3393097766828293e-05,
      "loss": 0.2234,
      "step": 120
    },
    {
      "epoch": 0.9483997387328543,
      "grad_norm": 0.23960325121879578,
      "learning_rate": 4.3236787185465525e-05,
      "loss": 0.2222,
      "step": 121
    },
    {
      "epoch": 0.9562377531025473,
      "grad_norm": 0.26537278294563293,
      "learning_rate": 4.307893776643117e-05,
      "loss": 0.2285,
      "step": 122
    },
    {
      "epoch": 0.9640757674722403,
      "grad_norm": 0.23799960315227509,
      "learning_rate": 4.2919562829211283e-05,
      "loss": 0.2214,
      "step": 123
    },
    {
      "epoch": 0.9719137818419333,
      "grad_norm": 0.2599596083164215,
      "learning_rate": 4.27586758220166e-05,
      "loss": 0.2246,
      "step": 124
    },
    {
      "epoch": 0.9797517962116263,
      "grad_norm": 0.25664016604423523,
      "learning_rate": 4.259629032064779e-05,
      "loss": 0.2223,
      "step": 125
    },
    {
      "epoch": 0.9875898105813194,
      "grad_norm": 0.24727745354175568,
      "learning_rate": 4.2432420027349886e-05,
      "loss": 0.2225,
      "step": 126
    },
    {
      "epoch": 0.9954278249510125,
      "grad_norm": 0.2626395523548126,
      "learning_rate": 4.226707876965611e-05,
      "loss": 0.2333,
      "step": 127
    },
    {
      "epoch": 1.0052253429131286,
      "grad_norm": 0.4331790506839752,
      "learning_rate": 4.210028049922108e-05,
      "loss": 0.3128,
      "step": 128
    },
    {
      "epoch": 1.0130633572828216,
      "grad_norm": 0.31224802136421204,
      "learning_rate": 4.193203929064353e-05,
      "loss": 0.1265,
      "step": 129
    },
    {
      "epoch": 1.0209013716525146,
      "grad_norm": 0.34126603603363037,
      "learning_rate": 4.176236934027873e-05,
      "loss": 0.1188,
      "step": 130
    },
    {
      "epoch": 1.0287393860222076,
      "grad_norm": 0.3540596067905426,
      "learning_rate": 4.159128496504053e-05,
      "loss": 0.1149,
      "step": 131
    },
    {
      "epoch": 1.0365774003919008,
      "grad_norm": 0.2591104209423065,
      "learning_rate": 4.141880060119336e-05,
      "loss": 0.1109,
      "step": 132
    },
    {
      "epoch": 1.0444154147615938,
      "grad_norm": 0.33954110741615295,
      "learning_rate": 4.1244930803134e-05,
      "loss": 0.1307,
      "step": 133
    },
    {
      "epoch": 1.0522534291312868,
      "grad_norm": 0.2748555839061737,
      "learning_rate": 4.1069690242163484e-05,
      "loss": 0.1246,
      "step": 134
    },
    {
      "epoch": 1.0600914435009798,
      "grad_norm": 0.29179254174232483,
      "learning_rate": 4.089309370524921e-05,
      "loss": 0.1144,
      "step": 135
    },
    {
      "epoch": 1.0679294578706728,
      "grad_norm": 0.3250279724597931,
      "learning_rate": 4.071515609377705e-05,
      "loss": 0.1363,
      "step": 136
    },
    {
      "epoch": 1.0757674722403658,
      "grad_norm": 0.24014687538146973,
      "learning_rate": 4.053589242229412e-05,
      "loss": 0.1044,
      "step": 137
    },
    {
      "epoch": 1.0836054866100588,
      "grad_norm": 0.2841418981552124,
      "learning_rate": 4.03553178172417e-05,
      "loss": 0.1151,
      "step": 138
    },
    {
      "epoch": 1.0914435009797518,
      "grad_norm": 0.25916630029678345,
      "learning_rate": 4.0173447515678916e-05,
      "loss": 0.1178,
      "step": 139
    },
    {
      "epoch": 1.0992815153494448,
      "grad_norm": 0.24618621170520782,
      "learning_rate": 3.999029686399704e-05,
      "loss": 0.1078,
      "step": 140
    },
    {
      "epoch": 1.1071195297191379,
      "grad_norm": 0.3014475703239441,
      "learning_rate": 3.9805881316624506e-05,
      "loss": 0.1277,
      "step": 141
    },
    {
      "epoch": 1.1149575440888309,
      "grad_norm": 0.2629724442958832,
      "learning_rate": 3.962021643472284e-05,
      "loss": 0.1168,
      "step": 142
    },
    {
      "epoch": 1.1227955584585239,
      "grad_norm": 0.26206281781196594,
      "learning_rate": 3.9433317884873664e-05,
      "loss": 0.117,
      "step": 143
    },
    {
      "epoch": 1.1306335728282169,
      "grad_norm": 0.2575000524520874,
      "learning_rate": 3.9245201437756654e-05,
      "loss": 0.1107,
      "step": 144
    },
    {
      "epoch": 1.1384715871979099,
      "grad_norm": 0.2927393913269043,
      "learning_rate": 3.905588296681886e-05,
      "loss": 0.1216,
      "step": 145
    },
    {
      "epoch": 1.1463096015676029,
      "grad_norm": 0.23872609436511993,
      "learning_rate": 3.8865378446935216e-05,
      "loss": 0.1067,
      "step": 146
    },
    {
      "epoch": 1.1541476159372959,
      "grad_norm": 0.2771260738372803,
      "learning_rate": 3.867370395306068e-05,
      "loss": 0.1118,
      "step": 147
    },
    {
      "epoch": 1.1619856303069889,
      "grad_norm": 0.2423640787601471,
      "learning_rate": 3.848087565887365e-05,
      "loss": 0.1179,
      "step": 148
    },
    {
      "epoch": 1.1698236446766819,
      "grad_norm": 0.2449909895658493,
      "learning_rate": 3.82869098354114e-05,
      "loss": 0.1247,
      "step": 149
    },
    {
      "epoch": 1.1776616590463749,
      "grad_norm": 0.2508186101913452,
      "learning_rate": 3.8091822849696954e-05,
      "loss": 0.1186,
      "step": 150
    },
    {
      "epoch": 1.1854996734160679,
      "grad_norm": 0.22804874181747437,
      "learning_rate": 3.7895631163358105e-05,
      "loss": 0.1214,
      "step": 151
    },
    {
      "epoch": 1.1933376877857609,
      "grad_norm": 0.23902134597301483,
      "learning_rate": 3.769835133123836e-05,
      "loss": 0.1118,
      "step": 152
    },
    {
      "epoch": 1.2011757021554539,
      "grad_norm": 0.22123728692531586,
      "learning_rate": 3.7500000000000003e-05,
      "loss": 0.1081,
      "step": 153
    },
    {
      "epoch": 1.2090137165251469,
      "grad_norm": 0.22215275466442108,
      "learning_rate": 3.7300593906719464e-05,
      "loss": 0.1115,
      "step": 154
    },
    {
      "epoch": 1.21685173089484,
      "grad_norm": 0.21011732518672943,
      "learning_rate": 3.7100149877474974e-05,
      "loss": 0.1105,
      "step": 155
    },
    {
      "epoch": 1.224689745264533,
      "grad_norm": 0.25005996227264404,
      "learning_rate": 3.689868482592684e-05,
      "loss": 0.1143,
      "step": 156
    },
    {
      "epoch": 1.232527759634226,
      "grad_norm": 0.22056043148040771,
      "learning_rate": 3.66962157518902e-05,
      "loss": 0.1113,
      "step": 157
    },
    {
      "epoch": 1.240365774003919,
      "grad_norm": 0.226328045129776,
      "learning_rate": 3.6492759739900564e-05,
      "loss": 0.1153,
      "step": 158
    },
    {
      "epoch": 1.248203788373612,
      "grad_norm": 0.21589796245098114,
      "learning_rate": 3.628833395777224e-05,
      "loss": 0.1034,
      "step": 159
    },
    {
      "epoch": 1.256041802743305,
      "grad_norm": 0.2250795215368271,
      "learning_rate": 3.608295565514965e-05,
      "loss": 0.116,
      "step": 160
    },
    {
      "epoch": 1.263879817112998,
      "grad_norm": 0.21993276476860046,
      "learning_rate": 3.587664216205183e-05,
      "loss": 0.1217,
      "step": 161
    },
    {
      "epoch": 1.2717178314826911,
      "grad_norm": 0.21394269168376923,
      "learning_rate": 3.5669410887410095e-05,
      "loss": 0.1119,
      "step": 162
    },
    {
      "epoch": 1.2795558458523841,
      "grad_norm": 0.20264342427253723,
      "learning_rate": 3.546127931759903e-05,
      "loss": 0.1076,
      "step": 163
    },
    {
      "epoch": 1.2873938602220771,
      "grad_norm": 0.23118674755096436,
      "learning_rate": 3.5252265014961006e-05,
      "loss": 0.1189,
      "step": 164
    },
    {
      "epoch": 1.2952318745917701,
      "grad_norm": 0.215042844414711,
      "learning_rate": 3.504238561632424e-05,
      "loss": 0.1096,
      "step": 165
    },
    {
      "epoch": 1.3030698889614631,
      "grad_norm": 0.21117202937602997,
      "learning_rate": 3.483165883151458e-05,
      "loss": 0.1201,
      "step": 166
    },
    {
      "epoch": 1.3109079033311561,
      "grad_norm": 0.21316753327846527,
      "learning_rate": 3.4620102441861143e-05,
      "loss": 0.1072,
      "step": 167
    },
    {
      "epoch": 1.3187459177008491,
      "grad_norm": 0.21038144826889038,
      "learning_rate": 3.4407734298695895e-05,
      "loss": 0.1149,
      "step": 168
    },
    {
      "epoch": 1.3265839320705421,
      "grad_norm": 0.2167028933763504,
      "learning_rate": 3.4194572321847336e-05,
      "loss": 0.1075,
      "step": 169
    },
    {
      "epoch": 1.3344219464402352,
      "grad_norm": 0.21729543805122375,
      "learning_rate": 3.398063449812844e-05,
      "loss": 0.1056,
      "step": 170
    },
    {
      "epoch": 1.3422599608099282,
      "grad_norm": 0.20175161957740784,
      "learning_rate": 3.376593887981887e-05,
      "loss": 0.1053,
      "step": 171
    },
    {
      "epoch": 1.3500979751796212,
      "grad_norm": 0.21248085796833038,
      "learning_rate": 3.355050358314172e-05,
      "loss": 0.1059,
      "step": 172
    },
    {
      "epoch": 1.3579359895493142,
      "grad_norm": 0.21043576300144196,
      "learning_rate": 3.333434678673489e-05,
      "loss": 0.1051,
      "step": 173
    },
    {
      "epoch": 1.3657740039190072,
      "grad_norm": 0.21573218703269958,
      "learning_rate": 3.311748673011709e-05,
      "loss": 0.1168,
      "step": 174
    },
    {
      "epoch": 1.3736120182887002,
      "grad_norm": 0.20607827603816986,
      "learning_rate": 3.289994171214882e-05,
      "loss": 0.1071,
      "step": 175
    },
    {
      "epoch": 1.3814500326583932,
      "grad_norm": 0.20888157188892365,
      "learning_rate": 3.268173008948826e-05,
      "loss": 0.1066,
      "step": 176
    },
    {
      "epoch": 1.3892880470280862,
      "grad_norm": 0.20718783140182495,
      "learning_rate": 3.246287027504237e-05,
      "loss": 0.1034,
      "step": 177
    },
    {
      "epoch": 1.3971260613977792,
      "grad_norm": 0.21539223194122314,
      "learning_rate": 3.224338073641312e-05,
      "loss": 0.1284,
      "step": 178
    },
    {
      "epoch": 1.4049640757674722,
      "grad_norm": 0.2096419632434845,
      "learning_rate": 3.202327999433924e-05,
      "loss": 0.108,
      "step": 179
    },
    {
      "epoch": 1.4128020901371652,
      "grad_norm": 0.19958484172821045,
      "learning_rate": 3.180258662113338e-05,
      "loss": 0.1027,
      "step": 180
    },
    {
      "epoch": 1.4206401045068582,
      "grad_norm": 0.20901361107826233,
      "learning_rate": 3.158131923911498e-05,
      "loss": 0.1025,
      "step": 181
    },
    {
      "epoch": 1.4284781188765514,
      "grad_norm": 0.22095659375190735,
      "learning_rate": 3.135949651903891e-05,
      "loss": 0.1082,
      "step": 182
    },
    {
      "epoch": 1.4363161332462444,
      "grad_norm": 0.20527766644954681,
      "learning_rate": 3.1137137178519985e-05,
      "loss": 0.1114,
      "step": 183
    },
    {
      "epoch": 1.4441541476159374,
      "grad_norm": 0.21945808827877045,
      "learning_rate": 3.091425998045356e-05,
      "loss": 0.1178,
      "step": 184
    },
    {
      "epoch": 1.4519921619856304,
      "grad_norm": 0.20016823709011078,
      "learning_rate": 3.069088373143234e-05,
      "loss": 0.1078,
      "step": 185
    },
    {
      "epoch": 1.4598301763553234,
      "grad_norm": 0.20746341347694397,
      "learning_rate": 3.04670272801594e-05,
      "loss": 0.1134,
      "step": 186
    },
    {
      "epoch": 1.4676681907250164,
      "grad_norm": 0.23339882493019104,
      "learning_rate": 3.0242709515857758e-05,
      "loss": 0.1176,
      "step": 187
    },
    {
      "epoch": 1.4755062050947094,
      "grad_norm": 0.21998368203639984,
      "learning_rate": 3.001794936667648e-05,
      "loss": 0.118,
      "step": 188
    },
    {
      "epoch": 1.4833442194644024,
      "grad_norm": 0.2124100923538208,
      "learning_rate": 2.9792765798093465e-05,
      "loss": 0.1028,
      "step": 189
    },
    {
      "epoch": 1.4911822338340954,
      "grad_norm": 0.22203968465328217,
      "learning_rate": 2.9567177811315178e-05,
      "loss": 0.1127,
      "step": 190
    },
    {
      "epoch": 1.4990202482037884,
      "grad_norm": 0.2147323042154312,
      "learning_rate": 2.9341204441673266e-05,
      "loss": 0.108,
      "step": 191
    },
    {
      "epoch": 1.5068582625734814,
      "grad_norm": 0.22974567115306854,
      "learning_rate": 2.9114864757018352e-05,
      "loss": 0.1063,
      "step": 192
    },
    {
      "epoch": 1.5146962769431744,
      "grad_norm": 0.21040025353431702,
      "learning_rate": 2.8888177856111083e-05,
      "loss": 0.1073,
      "step": 193
    },
    {
      "epoch": 1.5225342913128674,
      "grad_norm": 0.20959196984767914,
      "learning_rate": 2.8661162867010543e-05,
      "loss": 0.0989,
      "step": 194
    },
    {
      "epoch": 1.5303723056825604,
      "grad_norm": 0.2233189344406128,
      "learning_rate": 2.8433838945460205e-05,
      "loss": 0.1153,
      "step": 195
    },
    {
      "epoch": 1.5382103200522534,
      "grad_norm": 0.21911266446113586,
      "learning_rate": 2.820622527327158e-05,
      "loss": 0.1052,
      "step": 196
    },
    {
      "epoch": 1.5460483344219464,
      "grad_norm": 0.22261589765548706,
      "learning_rate": 2.797834105670559e-05,
      "loss": 0.1109,
      "step": 197
    },
    {
      "epoch": 1.5538863487916394,
      "grad_norm": 0.19887444376945496,
      "learning_rate": 2.7750205524851986e-05,
      "loss": 0.1092,
      "step": 198
    },
    {
      "epoch": 1.5617243631613325,
      "grad_norm": 0.20779380202293396,
      "learning_rate": 2.752183792800671e-05,
      "loss": 0.1076,
      "step": 199
    },
    {
      "epoch": 1.5695623775310255,
      "grad_norm": 0.2083713710308075,
      "learning_rate": 2.729325753604759e-05,
      "loss": 0.1135,
      "step": 200
    },
    {
      "epoch": 1.5774003919007185,
      "grad_norm": 0.2003626525402069,
      "learning_rate": 2.7064483636808313e-05,
      "loss": 0.0968,
      "step": 201
    },
    {
      "epoch": 1.5852384062704115,
      "grad_norm": 0.20525719225406647,
      "learning_rate": 2.6835535534450884e-05,
      "loss": 0.1042,
      "step": 202
    },
    {
      "epoch": 1.5930764206401045,
      "grad_norm": 0.21359968185424805,
      "learning_rate": 2.6606432547836757e-05,
      "loss": 0.1126,
      "step": 203
    },
    {
      "epoch": 1.6009144350097975,
      "grad_norm": 0.2056104987859726,
      "learning_rate": 2.6377194008896637e-05,
      "loss": 0.1073,
      "step": 204
    },
    {
      "epoch": 1.6087524493794905,
      "grad_norm": 0.19534572958946228,
      "learning_rate": 2.6147839260999295e-05,
      "loss": 0.113,
      "step": 205
    },
    {
      "epoch": 1.6165904637491835,
      "grad_norm": 0.21906475722789764,
      "learning_rate": 2.5918387657319316e-05,
      "loss": 0.1098,
      "step": 206
    },
    {
      "epoch": 1.6244284781188765,
      "grad_norm": 0.19928069412708282,
      "learning_rate": 2.5688858559204053e-05,
      "loss": 0.1059,
      "step": 207
    },
    {
      "epoch": 1.6322664924885695,
      "grad_norm": 0.19612881541252136,
      "learning_rate": 2.5459271334539934e-05,
      "loss": 0.0951,
      "step": 208
    },
    {
      "epoch": 1.6401045068582625,
      "grad_norm": 0.2001960426568985,
      "learning_rate": 2.5229645356118163e-05,
      "loss": 0.1039,
      "step": 209
    },
    {
      "epoch": 1.6479425212279555,
      "grad_norm": 0.20145660638809204,
      "learning_rate": 2.5e-05,
      "loss": 0.1049,
      "step": 210
    },
    {
      "epoch": 1.6557805355976485,
      "grad_norm": 0.18295256793498993,
      "learning_rate": 2.4770354643881843e-05,
      "loss": 0.0985,
      "step": 211
    },
    {
      "epoch": 1.6636185499673415,
      "grad_norm": 0.2013918161392212,
      "learning_rate": 2.4540728665460065e-05,
      "loss": 0.1065,
      "step": 212
    },
    {
      "epoch": 1.6714565643370345,
      "grad_norm": 0.20308274030685425,
      "learning_rate": 2.4311141440795953e-05,
      "loss": 0.1205,
      "step": 213
    },
    {
      "epoch": 1.6792945787067275,
      "grad_norm": 0.19719456136226654,
      "learning_rate": 2.4081612342680694e-05,
      "loss": 0.1027,
      "step": 214
    },
    {
      "epoch": 1.6871325930764205,
      "grad_norm": 0.19175922870635986,
      "learning_rate": 2.3852160739000707e-05,
      "loss": 0.1,
      "step": 215
    },
    {
      "epoch": 1.6949706074461135,
      "grad_norm": 0.20407816767692566,
      "learning_rate": 2.3622805991103362e-05,
      "loss": 0.102,
      "step": 216
    },
    {
      "epoch": 1.7028086218158065,
      "grad_norm": 0.20707584917545319,
      "learning_rate": 2.3393567452163252e-05,
      "loss": 0.1139,
      "step": 217
    },
    {
      "epoch": 1.7106466361854997,
      "grad_norm": 0.18942616879940033,
      "learning_rate": 2.3164464465549118e-05,
      "loss": 0.091,
      "step": 218
    },
    {
      "epoch": 1.7184846505551927,
      "grad_norm": 0.2033768594264984,
      "learning_rate": 2.2935516363191693e-05,
      "loss": 0.1093,
      "step": 219
    },
    {
      "epoch": 1.7263226649248857,
      "grad_norm": 0.2145160436630249,
      "learning_rate": 2.270674246395241e-05,
      "loss": 0.1083,
      "step": 220
    },
    {
      "epoch": 1.7341606792945787,
      "grad_norm": 0.19836893677711487,
      "learning_rate": 2.2478162071993298e-05,
      "loss": 0.1011,
      "step": 221
    },
    {
      "epoch": 1.7419986936642717,
      "grad_norm": 0.20721815526485443,
      "learning_rate": 2.224979447514802e-05,
      "loss": 0.0975,
      "step": 222
    },
    {
      "epoch": 1.7498367080339647,
      "grad_norm": 0.20113082230091095,
      "learning_rate": 2.202165894329441e-05,
      "loss": 0.1086,
      "step": 223
    },
    {
      "epoch": 1.7576747224036577,
      "grad_norm": 0.19542041420936584,
      "learning_rate": 2.179377472672842e-05,
      "loss": 0.1029,
      "step": 224
    },
    {
      "epoch": 1.7655127367733507,
      "grad_norm": 0.19994117319583893,
      "learning_rate": 2.1566161054539798e-05,
      "loss": 0.1071,
      "step": 225
    },
    {
      "epoch": 1.7733507511430437,
      "grad_norm": 0.20020049810409546,
      "learning_rate": 2.1338837132989466e-05,
      "loss": 0.0995,
      "step": 226
    },
    {
      "epoch": 1.7811887655127367,
      "grad_norm": 0.2069658637046814,
      "learning_rate": 2.111182214388893e-05,
      "loss": 0.1077,
      "step": 227
    },
    {
      "epoch": 1.7890267798824298,
      "grad_norm": 0.21685247123241425,
      "learning_rate": 2.088513524298165e-05,
      "loss": 0.1084,
      "step": 228
    },
    {
      "epoch": 1.7968647942521228,
      "grad_norm": 0.19922901690006256,
      "learning_rate": 2.0658795558326743e-05,
      "loss": 0.0999,
      "step": 229
    },
    {
      "epoch": 1.8047028086218158,
      "grad_norm": 0.20437341928482056,
      "learning_rate": 2.043282218868483e-05,
      "loss": 0.1076,
      "step": 230
    },
    {
      "epoch": 1.812540822991509,
      "grad_norm": 0.19708654284477234,
      "learning_rate": 2.0207234201906547e-05,
      "loss": 0.0971,
      "step": 231
    },
    {
      "epoch": 1.820378837361202,
      "grad_norm": 0.21140620112419128,
      "learning_rate": 1.9982050633323522e-05,
      "loss": 0.1046,
      "step": 232
    },
    {
      "epoch": 1.828216851730895,
      "grad_norm": 0.19234000146389008,
      "learning_rate": 1.9757290484142244e-05,
      "loss": 0.0957,
      "step": 233
    },
    {
      "epoch": 1.836054866100588,
      "grad_norm": 0.2157324254512787,
      "learning_rate": 1.9532972719840607e-05,
      "loss": 0.1082,
      "step": 234
    },
    {
      "epoch": 1.843892880470281,
      "grad_norm": 0.19539855420589447,
      "learning_rate": 1.9309116268567674e-05,
      "loss": 0.1014,
      "step": 235
    },
    {
      "epoch": 1.851730894839974,
      "grad_norm": 0.1897420883178711,
      "learning_rate": 1.908574001954644e-05,
      "loss": 0.094,
      "step": 236
    },
    {
      "epoch": 1.859568909209667,
      "grad_norm": 0.20616742968559265,
      "learning_rate": 1.8862862821480025e-05,
      "loss": 0.1118,
      "step": 237
    },
    {
      "epoch": 1.86740692357936,
      "grad_norm": 0.1957971155643463,
      "learning_rate": 1.864050348096109e-05,
      "loss": 0.0948,
      "step": 238
    },
    {
      "epoch": 1.875244937949053,
      "grad_norm": 0.18928657472133636,
      "learning_rate": 1.8418680760885027e-05,
      "loss": 0.0963,
      "step": 239
    },
    {
      "epoch": 1.883082952318746,
      "grad_norm": 0.20447275042533875,
      "learning_rate": 1.819741337886662e-05,
      "loss": 0.096,
      "step": 240
    },
    {
      "epoch": 1.890920966688439,
      "grad_norm": 0.19518423080444336,
      "learning_rate": 1.797672000566077e-05,
      "loss": 0.1042,
      "step": 241
    },
    {
      "epoch": 1.898758981058132,
      "grad_norm": 0.18361036479473114,
      "learning_rate": 1.775661926358689e-05,
      "loss": 0.0975,
      "step": 242
    },
    {
      "epoch": 1.906596995427825,
      "grad_norm": 0.19658760726451874,
      "learning_rate": 1.7537129724957642e-05,
      "loss": 0.1063,
      "step": 243
    },
    {
      "epoch": 1.914435009797518,
      "grad_norm": 0.194893017411232,
      "learning_rate": 1.7318269910511736e-05,
      "loss": 0.1007,
      "step": 244
    },
    {
      "epoch": 1.922273024167211,
      "grad_norm": 0.19551898539066315,
      "learning_rate": 1.710005828785119e-05,
      "loss": 0.0989,
      "step": 245
    },
    {
      "epoch": 1.930111038536904,
      "grad_norm": 0.19419115781784058,
      "learning_rate": 1.6882513269882917e-05,
      "loss": 0.0894,
      "step": 246
    },
    {
      "epoch": 1.937949052906597,
      "grad_norm": 0.19363057613372803,
      "learning_rate": 1.666565321326512e-05,
      "loss": 0.0954,
      "step": 247
    },
    {
      "epoch": 1.94578706727629,
      "grad_norm": 0.18952466547489166,
      "learning_rate": 1.6449496416858284e-05,
      "loss": 0.0906,
      "step": 248
    },
    {
      "epoch": 1.953625081645983,
      "grad_norm": 0.19003801047801971,
      "learning_rate": 1.6234061120181142e-05,
      "loss": 0.0982,
      "step": 249
    },
    {
      "epoch": 1.961463096015676,
      "grad_norm": 0.18380698561668396,
      "learning_rate": 1.601936550187157e-05,
      "loss": 0.099,
      "step": 250
    },
    {
      "epoch": 1.969301110385369,
      "grad_norm": 0.1997416615486145,
      "learning_rate": 1.5805427678152677e-05,
      "loss": 0.1027,
      "step": 251
    },
    {
      "epoch": 1.977139124755062,
      "grad_norm": 0.18836817145347595,
      "learning_rate": 1.5592265701304114e-05,
      "loss": 0.0983,
      "step": 252
    },
    {
      "epoch": 1.984977139124755,
      "grad_norm": 0.2013559639453888,
      "learning_rate": 1.5379897558138862e-05,
      "loss": 0.102,
      "step": 253
    },
    {
      "epoch": 1.992815153494448,
      "grad_norm": 0.19445432722568512,
      "learning_rate": 1.5168341168485423e-05,
      "loss": 0.0953,
      "step": 254
    },
    {
      "epoch": 2.002612671456564,
      "grad_norm": 0.3252091705799103,
      "learning_rate": 1.495761438367577e-05,
      "loss": 0.1459,
      "step": 255
    },
    {
      "epoch": 2.010450685826257,
      "grad_norm": 0.20789092779159546,
      "learning_rate": 1.4747734985039e-05,
      "loss": 0.0344,
      "step": 256
    },
    {
      "epoch": 2.01828870019595,
      "grad_norm": 0.17120763659477234,
      "learning_rate": 1.4538720682400969e-05,
      "loss": 0.0347,
      "step": 257
    },
    {
      "epoch": 2.026126714565643,
      "grad_norm": 0.14296689629554749,
      "learning_rate": 1.433058911258991e-05,
      "loss": 0.0296,
      "step": 258
    },
    {
      "epoch": 2.033964728935336,
      "grad_norm": 0.18135866522789001,
      "learning_rate": 1.4123357837948175e-05,
      "loss": 0.0332,
      "step": 259
    },
    {
      "epoch": 2.041802743305029,
      "grad_norm": 0.20270270109176636,
      "learning_rate": 1.3917044344850355e-05,
      "loss": 0.0322,
      "step": 260
    },
    {
      "epoch": 2.049640757674722,
      "grad_norm": 0.1781613826751709,
      "learning_rate": 1.3711666042227772e-05,
      "loss": 0.0309,
      "step": 261
    },
    {
      "epoch": 2.057478772044415,
      "grad_norm": 0.17559246718883514,
      "learning_rate": 1.350724026009944e-05,
      "loss": 0.0333,
      "step": 262
    },
    {
      "epoch": 2.065316786414108,
      "grad_norm": 0.14222192764282227,
      "learning_rate": 1.330378424810981e-05,
      "loss": 0.0309,
      "step": 263
    },
    {
      "epoch": 2.0731548007838017,
      "grad_norm": 0.15568028390407562,
      "learning_rate": 1.3101315174073162e-05,
      "loss": 0.0324,
      "step": 264
    },
    {
      "epoch": 2.0809928151534947,
      "grad_norm": 0.15495392680168152,
      "learning_rate": 1.2899850122525037e-05,
      "loss": 0.0329,
      "step": 265
    },
    {
      "epoch": 2.0888308295231877,
      "grad_norm": 0.16096574068069458,
      "learning_rate": 1.2699406093280547e-05,
      "loss": 0.0317,
      "step": 266
    },
    {
      "epoch": 2.0966688438928807,
      "grad_norm": 0.13694337010383606,
      "learning_rate": 1.2500000000000006e-05,
      "loss": 0.0282,
      "step": 267
    },
    {
      "epoch": 2.1045068582625737,
      "grad_norm": 0.13896653056144714,
      "learning_rate": 1.2301648668761647e-05,
      "loss": 0.0289,
      "step": 268
    },
    {
      "epoch": 2.1123448726322667,
      "grad_norm": 0.15309906005859375,
      "learning_rate": 1.2104368836641908e-05,
      "loss": 0.0321,
      "step": 269
    },
    {
      "epoch": 2.1201828870019597,
      "grad_norm": 0.16871266067028046,
      "learning_rate": 1.1908177150303055e-05,
      "loss": 0.0335,
      "step": 270
    },
    {
      "epoch": 2.1280209013716527,
      "grad_norm": 0.15097489953041077,
      "learning_rate": 1.1713090164588607e-05,
      "loss": 0.0302,
      "step": 271
    },
    {
      "epoch": 2.1358589157413457,
      "grad_norm": 0.1583205759525299,
      "learning_rate": 1.1519124341126346e-05,
      "loss": 0.0337,
      "step": 272
    },
    {
      "epoch": 2.1436969301110387,
      "grad_norm": 0.1436772346496582,
      "learning_rate": 1.1326296046939333e-05,
      "loss": 0.0296,
      "step": 273
    },
    {
      "epoch": 2.1515349444807317,
      "grad_norm": 0.1444602757692337,
      "learning_rate": 1.113462155306478e-05,
      "loss": 0.0296,
      "step": 274
    },
    {
      "epoch": 2.1593729588504247,
      "grad_norm": 0.13352134823799133,
      "learning_rate": 1.0944117033181151e-05,
      "loss": 0.0287,
      "step": 275
    },
    {
      "epoch": 2.1672109732201177,
      "grad_norm": 0.1418055146932602,
      "learning_rate": 1.0754798562243345e-05,
      "loss": 0.0294,
      "step": 276
    },
    {
      "epoch": 2.1750489875898107,
      "grad_norm": 0.12928378582000732,
      "learning_rate": 1.0566682115126344e-05,
      "loss": 0.0274,
      "step": 277
    },
    {
      "epoch": 2.1828870019595037,
      "grad_norm": 0.13986484706401825,
      "learning_rate": 1.037978356527716e-05,
      "loss": 0.0328,
      "step": 278
    },
    {
      "epoch": 2.1907250163291967,
      "grad_norm": 0.13403278589248657,
      "learning_rate": 1.0194118683375503e-05,
      "loss": 0.0296,
      "step": 279
    },
    {
      "epoch": 2.1985630306988897,
      "grad_norm": 0.14188171923160553,
      "learning_rate": 1.0009703136002957e-05,
      "loss": 0.0299,
      "step": 280
    },
    {
      "epoch": 2.2064010450685827,
      "grad_norm": 0.13912995159626007,
      "learning_rate": 9.826552484321087e-06,
      "loss": 0.03,
      "step": 281
    },
    {
      "epoch": 2.2142390594382757,
      "grad_norm": 0.14268967509269714,
      "learning_rate": 9.644682182758306e-06,
      "loss": 0.0293,
      "step": 282
    },
    {
      "epoch": 2.2220770738079687,
      "grad_norm": 0.14812886714935303,
      "learning_rate": 9.464107577705886e-06,
      "loss": 0.0269,
      "step": 283
    },
    {
      "epoch": 2.2299150881776617,
      "grad_norm": 0.1425481140613556,
      "learning_rate": 9.284843906222948e-06,
      "loss": 0.0274,
      "step": 284
    },
    {
      "epoch": 2.2377531025473547,
      "grad_norm": 0.13893434405326843,
      "learning_rate": 9.106906294750805e-06,
      "loss": 0.0292,
      "step": 285
    },
    {
      "epoch": 2.2455911169170477,
      "grad_norm": 0.13851140439510345,
      "learning_rate": 8.930309757836517e-06,
      "loss": 0.0321,
      "step": 286
    },
    {
      "epoch": 2.2534291312867407,
      "grad_norm": 0.13696281611919403,
      "learning_rate": 8.755069196866014e-06,
      "loss": 0.0286,
      "step": 287
    },
    {
      "epoch": 2.2612671456564337,
      "grad_norm": 0.13719762861728668,
      "learning_rate": 8.581199398806641e-06,
      "loss": 0.0314,
      "step": 288
    },
    {
      "epoch": 2.2691051600261267,
      "grad_norm": 0.1380627155303955,
      "learning_rate": 8.40871503495947e-06,
      "loss": 0.0309,
      "step": 289
    },
    {
      "epoch": 2.2769431743958197,
      "grad_norm": 0.13105949759483337,
      "learning_rate": 8.237630659721277e-06,
      "loss": 0.0259,
      "step": 290
    },
    {
      "epoch": 2.2847811887655127,
      "grad_norm": 0.1306370496749878,
      "learning_rate": 8.067960709356478e-06,
      "loss": 0.0264,
      "step": 291
    },
    {
      "epoch": 2.2926192031352057,
      "grad_norm": 0.13892598450183868,
      "learning_rate": 7.899719500778923e-06,
      "loss": 0.0308,
      "step": 292
    },
    {
      "epoch": 2.3004572175048987,
      "grad_norm": 0.13053514063358307,
      "learning_rate": 7.732921230343892e-06,
      "loss": 0.0276,
      "step": 293
    },
    {
      "epoch": 2.3082952318745917,
      "grad_norm": 0.1372438371181488,
      "learning_rate": 7.5675799726501155e-06,
      "loss": 0.0271,
      "step": 294
    },
    {
      "epoch": 2.3161332462442847,
      "grad_norm": 0.1358174979686737,
      "learning_rate": 7.403709679352217e-06,
      "loss": 0.027,
      "step": 295
    },
    {
      "epoch": 2.3239712606139777,
      "grad_norm": 0.13224904239177704,
      "learning_rate": 7.2413241779834e-06,
      "loss": 0.027,
      "step": 296
    },
    {
      "epoch": 2.3318092749836707,
      "grad_norm": 0.13056500256061554,
      "learning_rate": 7.080437170788723e-06,
      "loss": 0.0282,
      "step": 297
    },
    {
      "epoch": 2.3396472893533637,
      "grad_norm": 0.13113614916801453,
      "learning_rate": 6.921062233568831e-06,
      "loss": 0.028,
      "step": 298
    },
    {
      "epoch": 2.3474853037230567,
      "grad_norm": 0.13239531219005585,
      "learning_rate": 6.763212814534484e-06,
      "loss": 0.0272,
      "step": 299
    },
    {
      "epoch": 2.3553233180927498,
      "grad_norm": 0.12943539023399353,
      "learning_rate": 6.606902233171711e-06,
      "loss": 0.0247,
      "step": 300
    },
    {
      "epoch": 2.3631613324624428,
      "grad_norm": 0.12735775113105774,
      "learning_rate": 6.452143679117964e-06,
      "loss": 0.0263,
      "step": 301
    },
    {
      "epoch": 2.3709993468321358,
      "grad_norm": 0.13229192793369293,
      "learning_rate": 6.298950211049115e-06,
      "loss": 0.0283,
      "step": 302
    },
    {
      "epoch": 2.3788373612018288,
      "grad_norm": 0.12901271879673004,
      "learning_rate": 6.147334755577596e-06,
      "loss": 0.0258,
      "step": 303
    },
    {
      "epoch": 2.3866753755715218,
      "grad_norm": 0.13257119059562683,
      "learning_rate": 5.997310106161589e-06,
      "loss": 0.0279,
      "step": 304
    },
    {
      "epoch": 2.3945133899412148,
      "grad_norm": 0.12502893805503845,
      "learning_rate": 5.848888922025553e-06,
      "loss": 0.0259,
      "step": 305
    },
    {
      "epoch": 2.4023514043109078,
      "grad_norm": 0.13395585119724274,
      "learning_rate": 5.702083727091978e-06,
      "loss": 0.0286,
      "step": 306
    },
    {
      "epoch": 2.4101894186806008,
      "grad_norm": 0.12500248849391937,
      "learning_rate": 5.556906908924655e-06,
      "loss": 0.0266,
      "step": 307
    },
    {
      "epoch": 2.4180274330502938,
      "grad_norm": 0.13586992025375366,
      "learning_rate": 5.413370717683347e-06,
      "loss": 0.0302,
      "step": 308
    },
    {
      "epoch": 2.4258654474199868,
      "grad_norm": 0.13480009138584137,
      "learning_rate": 5.271487265090163e-06,
      "loss": 0.0281,
      "step": 309
    },
    {
      "epoch": 2.43370346178968,
      "grad_norm": 0.14151740074157715,
      "learning_rate": 5.131268523407509e-06,
      "loss": 0.0291,
      "step": 310
    },
    {
      "epoch": 2.441541476159373,
      "grad_norm": 0.13139696419239044,
      "learning_rate": 4.992726324427901e-06,
      "loss": 0.0283,
      "step": 311
    },
    {
      "epoch": 2.449379490529066,
      "grad_norm": 0.12287548929452896,
      "learning_rate": 4.855872358475547e-06,
      "loss": 0.0243,
      "step": 312
    },
    {
      "epoch": 2.4572175048987592,
      "grad_norm": 0.12606504559516907,
      "learning_rate": 4.720718173419947e-06,
      "loss": 0.0271,
      "step": 313
    },
    {
      "epoch": 2.465055519268452,
      "grad_norm": 0.1324881911277771,
      "learning_rate": 4.587275173701428e-06,
      "loss": 0.0281,
      "step": 314
    },
    {
      "epoch": 2.4728935336381452,
      "grad_norm": 0.13066086173057556,
      "learning_rate": 4.4555546193688735e-06,
      "loss": 0.029,
      "step": 315
    },
    {
      "epoch": 2.480731548007838,
      "grad_norm": 0.12259192019701004,
      "learning_rate": 4.3255676251295456e-06,
      "loss": 0.0248,
      "step": 316
    },
    {
      "epoch": 2.4885695623775312,
      "grad_norm": 0.13243158161640167,
      "learning_rate": 4.19732515941125e-06,
      "loss": 0.0282,
      "step": 317
    },
    {
      "epoch": 2.496407576747224,
      "grad_norm": 0.13366110622882843,
      "learning_rate": 4.070838043436786e-06,
      "loss": 0.0295,
      "step": 318
    },
    {
      "epoch": 2.5042455911169172,
      "grad_norm": 0.1202232763171196,
      "learning_rate": 3.94611695031086e-06,
      "loss": 0.0241,
      "step": 319
    },
    {
      "epoch": 2.51208360548661,
      "grad_norm": 0.13794013857841492,
      "learning_rate": 3.8231724041194515e-06,
      "loss": 0.0284,
      "step": 320
    },
    {
      "epoch": 2.5199216198563033,
      "grad_norm": 0.13122816383838654,
      "learning_rate": 3.7020147790418263e-06,
      "loss": 0.0261,
      "step": 321
    },
    {
      "epoch": 2.527759634225996,
      "grad_norm": 0.12636157870292664,
      "learning_rate": 3.582654298475091e-06,
      "loss": 0.024,
      "step": 322
    },
    {
      "epoch": 2.5355976485956893,
      "grad_norm": 0.12592478096485138,
      "learning_rate": 3.4651010341716028e-06,
      "loss": 0.0261,
      "step": 323
    },
    {
      "epoch": 2.5434356629653823,
      "grad_norm": 0.1321692168712616,
      "learning_rate": 3.3493649053890326e-06,
      "loss": 0.0255,
      "step": 324
    },
    {
      "epoch": 2.5512736773350753,
      "grad_norm": 0.12444434314966202,
      "learning_rate": 3.2354556780534424e-06,
      "loss": 0.0243,
      "step": 325
    },
    {
      "epoch": 2.5591116917047683,
      "grad_norm": 0.1310444325208664,
      "learning_rate": 3.1233829639351563e-06,
      "loss": 0.0264,
      "step": 326
    },
    {
      "epoch": 2.5669497060744613,
      "grad_norm": 0.13162964582443237,
      "learning_rate": 3.013156219837776e-06,
      "loss": 0.0266,
      "step": 327
    },
    {
      "epoch": 2.5747877204441543,
      "grad_norm": 0.12017495930194855,
      "learning_rate": 2.9047847468001404e-06,
      "loss": 0.0248,
      "step": 328
    },
    {
      "epoch": 2.5826257348138473,
      "grad_norm": 0.13093486428260803,
      "learning_rate": 2.7982776893115627e-06,
      "loss": 0.0279,
      "step": 329
    },
    {
      "epoch": 2.5904637491835403,
      "grad_norm": 0.12869121134281158,
      "learning_rate": 2.6936440345401493e-06,
      "loss": 0.0252,
      "step": 330
    },
    {
      "epoch": 2.5983017635532333,
      "grad_norm": 0.1348036676645279,
      "learning_rate": 2.5908926115744997e-06,
      "loss": 0.0281,
      "step": 331
    },
    {
      "epoch": 2.6061397779229263,
      "grad_norm": 0.12896090745925903,
      "learning_rate": 2.4900320906786593e-06,
      "loss": 0.0267,
      "step": 332
    },
    {
      "epoch": 2.6139777922926193,
      "grad_norm": 0.1246756836771965,
      "learning_rate": 2.391070982560564e-06,
      "loss": 0.0256,
      "step": 333
    },
    {
      "epoch": 2.6218158066623123,
      "grad_norm": 0.12264394015073776,
      "learning_rate": 2.2940176376538445e-06,
      "loss": 0.0235,
      "step": 334
    },
    {
      "epoch": 2.6296538210320053,
      "grad_norm": 0.12561777234077454,
      "learning_rate": 2.1988802454132403e-06,
      "loss": 0.025,
      "step": 335
    },
    {
      "epoch": 2.6374918354016983,
      "grad_norm": 0.11878997832536697,
      "learning_rate": 2.1056668336235622e-06,
      "loss": 0.0247,
      "step": 336
    },
    {
      "epoch": 2.6453298497713913,
      "grad_norm": 0.12836302816867828,
      "learning_rate": 2.0143852677223075e-06,
      "loss": 0.0285,
      "step": 337
    },
    {
      "epoch": 2.6531678641410843,
      "grad_norm": 0.12017780542373657,
      "learning_rate": 1.9250432501359354e-06,
      "loss": 0.025,
      "step": 338
    },
    {
      "epoch": 2.6610058785107773,
      "grad_norm": 0.11886154115200043,
      "learning_rate": 1.837648319629956e-06,
      "loss": 0.0244,
      "step": 339
    },
    {
      "epoch": 2.6688438928804703,
| "grad_norm": 0.13172321021556854, | |
| "learning_rate": 1.7522078506728074e-06, | |
| "loss": 0.0269, | |
| "step": 340 | |
| }, | |
| { | |
| "epoch": 2.6766819072501633, | |
| "grad_norm": 0.12933161854743958, | |
| "learning_rate": 1.6687290528135723e-06, | |
| "loss": 0.027, | |
| "step": 341 | |
| }, | |
| { | |
| "epoch": 2.6845199216198563, | |
| "grad_norm": 0.13184285163879395, | |
| "learning_rate": 1.5872189700736339e-06, | |
| "loss": 0.0266, | |
| "step": 342 | |
| }, | |
| { | |
| "epoch": 2.6923579359895493, | |
| "grad_norm": 0.12135521322488785, | |
| "learning_rate": 1.5076844803522922e-06, | |
| "loss": 0.0236, | |
| "step": 343 | |
| }, | |
| { | |
| "epoch": 2.7001959503592423, | |
| "grad_norm": 0.12548145651817322, | |
| "learning_rate": 1.4301322948464147e-06, | |
| "loss": 0.0259, | |
| "step": 344 | |
| }, | |
| { | |
| "epoch": 2.7080339647289353, | |
| "grad_norm": 0.12587113678455353, | |
| "learning_rate": 1.3545689574841342e-06, | |
| "loss": 0.0282, | |
| "step": 345 | |
| }, | |
| { | |
| "epoch": 2.7158719790986283, | |
| "grad_norm": 0.12320797145366669, | |
| "learning_rate": 1.2810008443726456e-06, | |
| "loss": 0.0251, | |
| "step": 346 | |
| }, | |
| { | |
| "epoch": 2.7237099934683213, | |
| "grad_norm": 0.12896938621997833, | |
| "learning_rate": 1.2094341632602064e-06, | |
| "loss": 0.0258, | |
| "step": 347 | |
| }, | |
| { | |
| "epoch": 2.7315480078380143, | |
| "grad_norm": 0.12183412909507751, | |
| "learning_rate": 1.1398749530123127e-06, | |
| "loss": 0.0243, | |
| "step": 348 | |
| }, | |
| { | |
| "epoch": 2.7393860222077073, | |
| "grad_norm": 0.12513582408428192, | |
| "learning_rate": 1.0723290831021471e-06, | |
| "loss": 0.0254, | |
| "step": 349 | |
| }, | |
| { | |
| "epoch": 2.7472240365774003, | |
| "grad_norm": 0.13293574750423431, | |
| "learning_rate": 1.006802253115277e-06, | |
| "loss": 0.0287, | |
| "step": 350 | |
| }, | |
| { | |
| "epoch": 2.7550620509470933, | |
| "grad_norm": 0.11619393527507782, | |
| "learning_rate": 9.432999922687396e-07, | |
| "loss": 0.0229, | |
| "step": 351 | |
| }, | |
| { | |
| "epoch": 2.7629000653167863, | |
| "grad_norm": 0.12378672510385513, | |
| "learning_rate": 8.818276589444896e-07, | |
| "loss": 0.0247, | |
| "step": 352 | |
| }, | |
| { | |
| "epoch": 2.7707380796864793, | |
| "grad_norm": 0.1200721487402916, | |
| "learning_rate": 8.223904402372334e-07, | |
| "loss": 0.0244, | |
| "step": 353 | |
| }, | |
| { | |
| "epoch": 2.7785760940561723, | |
| "grad_norm": 0.11517145484685898, | |
| "learning_rate": 7.649933515167407e-07, | |
| "loss": 0.0228, | |
| "step": 354 | |
| }, | |
| { | |
| "epoch": 2.7864141084258653, | |
| "grad_norm": 0.12590059638023376, | |
| "learning_rate": 7.096412360046545e-07, | |
| "loss": 0.0257, | |
| "step": 355 | |
| }, | |
| { | |
| "epoch": 2.7942521227955583, | |
| "grad_norm": 0.12404926866292953, | |
| "learning_rate": 6.563387643658076e-07, | |
| "loss": 0.0252, | |
| "step": 356 | |
| }, | |
| { | |
| "epoch": 2.8020901371652513, | |
| "grad_norm": 0.12552441656589508, | |
| "learning_rate": 6.050904343141095e-07, | |
| "loss": 0.026, | |
| "step": 357 | |
| }, | |
| { | |
| "epoch": 2.8099281515349444, | |
| "grad_norm": 0.12259657680988312, | |
| "learning_rate": 5.5590057023302e-07, | |
| "loss": 0.0254, | |
| "step": 358 | |
| }, | |
| { | |
| "epoch": 2.8177661659046374, | |
| "grad_norm": 0.11798401176929474, | |
| "learning_rate": 5.087733228106517e-07, | |
| "loss": 0.024, | |
| "step": 359 | |
| }, | |
| { | |
| "epoch": 2.8256041802743304, | |
| "grad_norm": 0.12366917729377747, | |
| "learning_rate": 4.637126686895532e-07, | |
| "loss": 0.0257, | |
| "step": 360 | |
| }, | |
| { | |
| "epoch": 2.8334421946440234, | |
| "grad_norm": 0.1320623904466629, | |
| "learning_rate": 4.207224101311247e-07, | |
| "loss": 0.0277, | |
| "step": 361 | |
| }, | |
| { | |
| "epoch": 2.8412802090137164, | |
| "grad_norm": 0.11438736319541931, | |
| "learning_rate": 3.7980617469479953e-07, | |
| "loss": 0.0214, | |
| "step": 362 | |
| }, | |
| { | |
| "epoch": 2.8491182233834094, | |
| "grad_norm": 0.11790072917938232, | |
| "learning_rate": 3.4096741493194197e-07, | |
| "loss": 0.0238, | |
| "step": 363 | |
| }, | |
| { | |
| "epoch": 2.856956237753103, | |
| "grad_norm": 0.12108506262302399, | |
| "learning_rate": 3.0420940809451624e-07, | |
| "loss": 0.0241, | |
| "step": 364 | |
| }, | |
| { | |
| "epoch": 2.8647942521227954, | |
| "grad_norm": 0.12375527620315552, | |
| "learning_rate": 2.6953525585855234e-07, | |
| "loss": 0.0251, | |
| "step": 365 | |
| }, | |
| { | |
| "epoch": 2.872632266492489, | |
| "grad_norm": 0.12097339332103729, | |
| "learning_rate": 2.3694788406241896e-07, | |
| "loss": 0.025, | |
| "step": 366 | |
| }, | |
| { | |
| "epoch": 2.8804702808621814, | |
| "grad_norm": 0.12990431487560272, | |
| "learning_rate": 2.064500424599436e-07, | |
| "loss": 0.0257, | |
| "step": 367 | |
| }, | |
| { | |
| "epoch": 2.888308295231875, | |
| "grad_norm": 0.11965355277061462, | |
| "learning_rate": 1.7804430448837839e-07, | |
| "loss": 0.0267, | |
| "step": 368 | |
| }, | |
| { | |
| "epoch": 2.8961463096015674, | |
| "grad_norm": 0.12657780945301056, | |
| "learning_rate": 1.517330670512629e-07, | |
| "loss": 0.0257, | |
| "step": 369 | |
| }, | |
| { | |
| "epoch": 2.903984323971261, | |
| "grad_norm": 0.12143310904502869, | |
| "learning_rate": 1.27518550316158e-07, | |
| "loss": 0.0242, | |
| "step": 370 | |
| }, | |
| { | |
| "epoch": 2.9118223383409534, | |
| "grad_norm": 0.16820454597473145, | |
| "learning_rate": 1.0540279752731253e-07, | |
| "loss": 0.0241, | |
| "step": 371 | |
| }, | |
| { | |
| "epoch": 2.919660352710647, | |
| "grad_norm": 0.12241283059120178, | |
| "learning_rate": 8.538767483325383e-08, | |
| "loss": 0.0235, | |
| "step": 372 | |
| }, | |
| { | |
| "epoch": 2.9274983670803394, | |
| "grad_norm": 0.13118112087249756, | |
| "learning_rate": 6.747487112931661e-08, | |
| "loss": 0.0281, | |
| "step": 373 | |
| }, | |
| { | |
| "epoch": 2.935336381450033, | |
| "grad_norm": 0.11441458761692047, | |
| "learning_rate": 5.166589791513465e-08, | |
| "loss": 0.023, | |
| "step": 374 | |
| }, | |
| { | |
| "epoch": 2.9431743958197254, | |
| "grad_norm": 0.12388182431459427, | |
| "learning_rate": 3.796208916709565e-08, | |
| "loss": 0.0266, | |
| "step": 375 | |
| }, | |
| { | |
| "epoch": 2.951012410189419, | |
| "grad_norm": 0.12555594742298126, | |
| "learning_rate": 2.636460122578399e-08, | |
| "loss": 0.0236, | |
| "step": 376 | |
| }, | |
| { | |
| "epoch": 2.958850424559112, | |
| "grad_norm": 0.1175137609243393, | |
| "learning_rate": 1.6874412698408836e-08, | |
| "loss": 0.0221, | |
| "step": 377 | |
| }, | |
| { | |
| "epoch": 2.966688438928805, | |
| "grad_norm": 0.12019700556993484, | |
| "learning_rate": 9.492324376214612e-09, | |
| "loss": 0.0237, | |
| "step": 378 | |
| }, | |
| { | |
| "epoch": 2.974526453298498, | |
| "grad_norm": 0.12358499318361282, | |
| "learning_rate": 4.218959166932268e-09, | |
| "loss": 0.0248, | |
| "step": 379 | |
| }, | |
| { | |
| "epoch": 2.982364467668191, | |
| "grad_norm": 0.13017946481704712, | |
| "learning_rate": 1.0547620421907934e-09, | |
| "loss": 0.0284, | |
| "step": 380 | |
| }, | |
| { | |
| "epoch": 2.990202482037884, | |
| "grad_norm": 0.11907056719064713, | |
| "learning_rate": 0.0, | |
| "loss": 0.0251, | |
| "step": 381 | |
| } | |
| ], | |
| "logging_steps": 1, | |
| "max_steps": 381, | |
| "num_input_tokens_seen": 0, | |
| "num_train_epochs": 3, | |
| "save_steps": 100, | |
| "stateful_callbacks": { | |
| "TrainerControl": { | |
| "args": { | |
| "should_epoch_stop": false, | |
| "should_evaluate": false, | |
| "should_log": false, | |
| "should_save": true, | |
| "should_training_stop": true | |
| }, | |
| "attributes": {} | |
| } | |
| }, | |
| "total_flos": 1405397234876416.0, | |
| "train_batch_size": 1, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |