{
  "best_global_step": null,
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.8905852417302799,
  "eval_steps": 500,
  "global_step": 3500,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.002544529262086514,
      "grad_norm": 8.495231628417969,
      "learning_rate": 9.160305343511451e-07,
      "loss": 0.1152,
      "step": 10
    },
    {
      "epoch": 0.005089058524173028,
      "grad_norm": 9.448420524597168,
      "learning_rate": 1.933842239185751e-06,
      "loss": 0.1589,
      "step": 20
    },
    {
      "epoch": 0.007633587786259542,
      "grad_norm": 12.052129745483398,
      "learning_rate": 2.951653944020356e-06,
      "loss": 0.1132,
      "step": 30
    },
    {
      "epoch": 0.010178117048346057,
      "grad_norm": 13.677345275878906,
      "learning_rate": 3.969465648854962e-06,
      "loss": 0.1104,
      "step": 40
    },
    {
      "epoch": 0.01272264631043257,
      "grad_norm": 9.209020614624023,
      "learning_rate": 4.987277353689568e-06,
      "loss": 0.0461,
      "step": 50
    },
    {
      "epoch": 0.015267175572519083,
      "grad_norm": 14.143465995788574,
      "learning_rate": 6.005089058524174e-06,
      "loss": 0.0482,
      "step": 60
    },
    {
      "epoch": 0.017811704834605598,
      "grad_norm": 1.9570101499557495,
      "learning_rate": 7.022900763358779e-06,
      "loss": 0.0331,
      "step": 70
    },
    {
      "epoch": 0.020356234096692113,
      "grad_norm": 6.45846700668335,
      "learning_rate": 8.040712468193384e-06,
      "loss": 0.0358,
      "step": 80
    },
    {
      "epoch": 0.022900763358778626,
      "grad_norm": 6.0744309425354,
      "learning_rate": 9.058524173027991e-06,
      "loss": 0.026,
      "step": 90
    },
    {
      "epoch": 0.02544529262086514,
      "grad_norm": 8.33021068572998,
      "learning_rate": 1.0076335877862595e-05,
      "loss": 0.0373,
      "step": 100
    },
    {
      "epoch": 0.027989821882951654,
      "grad_norm": 6.4917778968811035,
      "learning_rate": 1.1094147582697202e-05,
      "loss": 0.0186,
      "step": 110
    },
    {
      "epoch": 0.030534351145038167,
      "grad_norm": 4.850451469421387,
      "learning_rate": 1.2111959287531807e-05,
      "loss": 0.0191,
      "step": 120
    },
    {
      "epoch": 0.03307888040712468,
      "grad_norm": 3.528228521347046,
      "learning_rate": 1.3129770992366414e-05,
      "loss": 0.0244,
      "step": 130
    },
    {
      "epoch": 0.035623409669211195,
      "grad_norm": 0.09570707380771637,
      "learning_rate": 1.4147582697201019e-05,
      "loss": 0.0154,
      "step": 140
    },
    {
      "epoch": 0.03816793893129771,
      "grad_norm": 8.72260856628418,
      "learning_rate": 1.5165394402035624e-05,
      "loss": 0.0251,
      "step": 150
    },
    {
      "epoch": 0.04071246819338423,
      "grad_norm": 5.22550106048584,
      "learning_rate": 1.618320610687023e-05,
      "loss": 0.0181,
      "step": 160
    },
    {
      "epoch": 0.043256997455470736,
      "grad_norm": 7.037172317504883,
      "learning_rate": 1.7201017811704836e-05,
      "loss": 0.007,
      "step": 170
    },
    {
      "epoch": 0.04580152671755725,
      "grad_norm": 0.9109981060028076,
      "learning_rate": 1.8218829516539443e-05,
      "loss": 0.0177,
      "step": 180
    },
    {
      "epoch": 0.04834605597964377,
      "grad_norm": 6.822402477264404,
      "learning_rate": 1.923664122137405e-05,
      "loss": 0.0079,
      "step": 190
    },
    {
      "epoch": 0.05089058524173028,
      "grad_norm": 2.628634452819824,
      "learning_rate": 2.0254452926208653e-05,
      "loss": 0.0142,
      "step": 200
    },
    {
      "epoch": 0.05343511450381679,
      "grad_norm": 0.7327078580856323,
      "learning_rate": 2.127226463104326e-05,
      "loss": 0.0207,
      "step": 210
    },
    {
      "epoch": 0.05597964376590331,
      "grad_norm": 7.423736095428467,
      "learning_rate": 2.2290076335877867e-05,
      "loss": 0.0127,
      "step": 220
    },
    {
      "epoch": 0.058524173027989825,
      "grad_norm": 0.46404168009757996,
      "learning_rate": 2.330788804071247e-05,
      "loss": 0.0158,
      "step": 230
    },
    {
      "epoch": 0.061068702290076333,
      "grad_norm": 0.023200908675789833,
      "learning_rate": 2.4325699745547078e-05,
      "loss": 0.0178,
      "step": 240
    },
    {
      "epoch": 0.06361323155216285,
      "grad_norm": 3.8594086170196533,
      "learning_rate": 2.5343511450381678e-05,
      "loss": 0.0069,
      "step": 250
    },
    {
      "epoch": 0.06615776081424936,
      "grad_norm": 0.4054301679134369,
      "learning_rate": 2.6361323155216285e-05,
      "loss": 0.0223,
      "step": 260
    },
    {
      "epoch": 0.06870229007633588,
      "grad_norm": 2.2085139751434326,
      "learning_rate": 2.737913486005089e-05,
      "loss": 0.0362,
      "step": 270
    },
    {
      "epoch": 0.07124681933842239,
      "grad_norm": 0.5911164283752441,
      "learning_rate": 2.8396946564885498e-05,
      "loss": 0.0187,
      "step": 280
    },
    {
      "epoch": 0.0737913486005089,
      "grad_norm": 4.2948102951049805,
      "learning_rate": 2.9414758269720102e-05,
      "loss": 0.0107,
      "step": 290
    },
    {
      "epoch": 0.07633587786259542,
      "grad_norm": 0.2595175504684448,
      "learning_rate": 3.043256997455471e-05,
      "loss": 0.0085,
      "step": 300
    },
    {
      "epoch": 0.07888040712468193,
      "grad_norm": 5.613988876342773,
      "learning_rate": 3.145038167938931e-05,
      "loss": 0.0281,
      "step": 310
    },
    {
      "epoch": 0.08142493638676845,
      "grad_norm": 2.6039955615997314,
      "learning_rate": 3.246819338422392e-05,
      "loss": 0.0213,
      "step": 320
    },
    {
      "epoch": 0.08396946564885496,
      "grad_norm": 0.2858663499355316,
      "learning_rate": 3.3486005089058526e-05,
      "loss": 0.0141,
      "step": 330
    },
    {
      "epoch": 0.08651399491094147,
      "grad_norm": 0.19223034381866455,
      "learning_rate": 3.450381679389313e-05,
      "loss": 0.0224,
      "step": 340
    },
    {
      "epoch": 0.089058524173028,
      "grad_norm": 5.314587116241455,
      "learning_rate": 3.552162849872774e-05,
      "loss": 0.0129,
      "step": 350
    },
    {
      "epoch": 0.0916030534351145,
      "grad_norm": 4.426294326782227,
      "learning_rate": 3.653944020356235e-05,
      "loss": 0.0224,
      "step": 360
    },
    {
      "epoch": 0.09414758269720101,
      "grad_norm": 5.395482063293457,
      "learning_rate": 3.755725190839695e-05,
      "loss": 0.01,
      "step": 370
    },
    {
      "epoch": 0.09669211195928754,
      "grad_norm": 0.7296554446220398,
      "learning_rate": 3.8575063613231554e-05,
      "loss": 0.0192,
      "step": 380
    },
    {
      "epoch": 0.09923664122137404,
      "grad_norm": 0.2524012625217438,
      "learning_rate": 3.959287531806616e-05,
      "loss": 0.0178,
      "step": 390
    },
    {
      "epoch": 0.10178117048346055,
      "grad_norm": 5.55770206451416,
      "learning_rate": 3.993214588634436e-05,
      "loss": 0.0229,
      "step": 400
    },
    {
      "epoch": 0.10432569974554708,
      "grad_norm": 4.481260299682617,
      "learning_rate": 3.9819055696918295e-05,
      "loss": 0.0138,
      "step": 410
    },
    {
      "epoch": 0.10687022900763359,
      "grad_norm": 2.183645725250244,
      "learning_rate": 3.970596550749223e-05,
      "loss": 0.0201,
      "step": 420
    },
    {
      "epoch": 0.10941475826972011,
      "grad_norm": 3.418135404586792,
      "learning_rate": 3.959287531806616e-05,
      "loss": 0.0225,
      "step": 430
    },
    {
      "epoch": 0.11195928753180662,
      "grad_norm": 0.1539747565984726,
      "learning_rate": 3.947978512864009e-05,
      "loss": 0.0043,
      "step": 440
    },
    {
      "epoch": 0.11450381679389313,
      "grad_norm": 3.93865966796875,
      "learning_rate": 3.9366694939214026e-05,
      "loss": 0.0088,
      "step": 450
    },
    {
      "epoch": 0.11704834605597965,
      "grad_norm": 2.0105717182159424,
      "learning_rate": 3.925360474978796e-05,
      "loss": 0.0101,
      "step": 460
    },
    {
      "epoch": 0.11959287531806616,
      "grad_norm": 1.3878592252731323,
      "learning_rate": 3.914051456036189e-05,
      "loss": 0.0086,
      "step": 470
    },
    {
      "epoch": 0.12213740458015267,
      "grad_norm": 0.09708156436681747,
      "learning_rate": 3.902742437093582e-05,
      "loss": 0.0119,
      "step": 480
    },
    {
      "epoch": 0.12468193384223919,
      "grad_norm": 0.4155742824077606,
      "learning_rate": 3.8914334181509756e-05,
      "loss": 0.0139,
      "step": 490
    },
    {
      "epoch": 0.1272264631043257,
      "grad_norm": 0.6312187910079956,
      "learning_rate": 3.880124399208369e-05,
      "loss": 0.0185,
      "step": 500
    },
    {
      "epoch": 0.1272264631043257,
      "eval_loss": 0.0426296703517437,
      "eval_runtime": 100.8006,
      "eval_samples_per_second": 79.186,
      "eval_steps_per_second": 0.625,
      "step": 500
    },
    {
      "epoch": 0.1297709923664122,
      "grad_norm": 8.71293830871582,
      "learning_rate": 3.868815380265762e-05,
      "loss": 0.0236,
      "step": 510
    },
    {
      "epoch": 0.13231552162849872,
      "grad_norm": 0.6096652150154114,
      "learning_rate": 3.8575063613231554e-05,
      "loss": 0.01,
      "step": 520
    },
    {
      "epoch": 0.13486005089058525,
      "grad_norm": 0.9275781512260437,
      "learning_rate": 3.8461973423805486e-05,
      "loss": 0.0091,
      "step": 530
    },
    {
      "epoch": 0.13740458015267176,
      "grad_norm": 0.3549460768699646,
      "learning_rate": 3.834888323437942e-05,
      "loss": 0.0092,
      "step": 540
    },
    {
      "epoch": 0.13994910941475827,
      "grad_norm": 3.617748498916626,
      "learning_rate": 3.823579304495336e-05,
      "loss": 0.0117,
      "step": 550
    },
    {
      "epoch": 0.14249363867684478,
      "grad_norm": 3.950561761856079,
      "learning_rate": 3.8122702855527284e-05,
      "loss": 0.0134,
      "step": 560
    },
    {
      "epoch": 0.1450381679389313,
      "grad_norm": 1.9877593517303467,
      "learning_rate": 3.8009612666101216e-05,
      "loss": 0.0143,
      "step": 570
    },
    {
      "epoch": 0.1475826972010178,
      "grad_norm": 1.1295750141143799,
      "learning_rate": 3.789652247667515e-05,
      "loss": 0.0035,
      "step": 580
    },
    {
      "epoch": 0.15012722646310434,
      "grad_norm": 0.07594585418701172,
      "learning_rate": 3.778343228724908e-05,
      "loss": 0.011,
      "step": 590
    },
    {
      "epoch": 0.15267175572519084,
      "grad_norm": 0.28959858417510986,
      "learning_rate": 3.767034209782302e-05,
      "loss": 0.0034,
      "step": 600
    },
    {
      "epoch": 0.15521628498727735,
      "grad_norm": 0.31216809153556824,
      "learning_rate": 3.755725190839695e-05,
      "loss": 0.0145,
      "step": 610
    },
    {
      "epoch": 0.15776081424936386,
      "grad_norm": 0.048563506454229355,
      "learning_rate": 3.744416171897088e-05,
      "loss": 0.0136,
      "step": 620
    },
    {
      "epoch": 0.16030534351145037,
      "grad_norm": 0.18490168452262878,
      "learning_rate": 3.733107152954481e-05,
      "loss": 0.0157,
      "step": 630
    },
    {
      "epoch": 0.1628498727735369,
      "grad_norm": 4.179055690765381,
      "learning_rate": 3.721798134011875e-05,
      "loss": 0.0113,
      "step": 640
    },
    {
      "epoch": 0.16539440203562342,
      "grad_norm": 0.7711288332939148,
      "learning_rate": 3.7104891150692684e-05,
      "loss": 0.004,
      "step": 650
    },
    {
      "epoch": 0.16793893129770993,
      "grad_norm": 2.4299933910369873,
      "learning_rate": 3.699180096126661e-05,
      "loss": 0.0093,
      "step": 660
    },
    {
      "epoch": 0.17048346055979643,
      "grad_norm": 1.621607780456543,
      "learning_rate": 3.687871077184054e-05,
      "loss": 0.0071,
      "step": 670
    },
    {
      "epoch": 0.17302798982188294,
      "grad_norm": 0.06137412041425705,
      "learning_rate": 3.676562058241448e-05,
      "loss": 0.0216,
      "step": 680
    },
    {
      "epoch": 0.17557251908396945,
      "grad_norm": 1.6078226566314697,
      "learning_rate": 3.6652530392988414e-05,
      "loss": 0.0242,
      "step": 690
    },
    {
      "epoch": 0.178117048346056,
      "grad_norm": 2.2799558639526367,
      "learning_rate": 3.653944020356235e-05,
      "loss": 0.0113,
      "step": 700
    },
    {
      "epoch": 0.1806615776081425,
      "grad_norm": 2.2463083267211914,
      "learning_rate": 3.642635001413627e-05,
      "loss": 0.0054,
      "step": 710
    },
    {
      "epoch": 0.183206106870229,
      "grad_norm": 0.4119090437889099,
      "learning_rate": 3.6313259824710205e-05,
      "loss": 0.0107,
      "step": 720
    },
    {
      "epoch": 0.18575063613231552,
      "grad_norm": 3.735358476638794,
      "learning_rate": 3.6200169635284144e-05,
      "loss": 0.0096,
      "step": 730
    },
    {
      "epoch": 0.18829516539440203,
      "grad_norm": 2.4470431804656982,
      "learning_rate": 3.608707944585808e-05,
      "loss": 0.0089,
      "step": 740
    },
    {
      "epoch": 0.19083969465648856,
      "grad_norm": 0.0336996465921402,
      "learning_rate": 3.597398925643201e-05,
      "loss": 0.0111,
      "step": 750
    },
    {
      "epoch": 0.19338422391857507,
      "grad_norm": 0.6565234661102295,
      "learning_rate": 3.586089906700594e-05,
      "loss": 0.0083,
      "step": 760
    },
    {
      "epoch": 0.19592875318066158,
      "grad_norm": 9.512853622436523,
      "learning_rate": 3.5747808877579875e-05,
      "loss": 0.0051,
      "step": 770
    },
    {
      "epoch": 0.1984732824427481,
      "grad_norm": 0.5155350565910339,
      "learning_rate": 3.563471868815381e-05,
      "loss": 0.008,
      "step": 780
    },
    {
      "epoch": 0.2010178117048346,
      "grad_norm": 4.279949188232422,
      "learning_rate": 3.552162849872774e-05,
      "loss": 0.0081,
      "step": 790
    },
    {
      "epoch": 0.2035623409669211,
      "grad_norm": 2.585608959197998,
      "learning_rate": 3.540853830930167e-05,
      "loss": 0.018,
      "step": 800
    },
    {
      "epoch": 0.20610687022900764,
      "grad_norm": 1.0371241569519043,
      "learning_rate": 3.5295448119875605e-05,
      "loss": 0.0134,
      "step": 810
    },
    {
      "epoch": 0.20865139949109415,
      "grad_norm": 0.7093348503112793,
      "learning_rate": 3.518235793044954e-05,
      "loss": 0.0035,
      "step": 820
    },
    {
      "epoch": 0.21119592875318066,
      "grad_norm": 0.48215287923812866,
      "learning_rate": 3.506926774102347e-05,
      "loss": 0.0099,
      "step": 830
    },
    {
      "epoch": 0.21374045801526717,
      "grad_norm": 3.264940023422241,
      "learning_rate": 3.49561775515974e-05,
      "loss": 0.0152,
      "step": 840
    },
    {
      "epoch": 0.21628498727735368,
      "grad_norm": 0.03547951951622963,
      "learning_rate": 3.4843087362171335e-05,
      "loss": 0.0078,
      "step": 850
    },
    {
      "epoch": 0.21882951653944022,
      "grad_norm": 2.6558687686920166,
      "learning_rate": 3.472999717274527e-05,
      "loss": 0.0134,
      "step": 860
    },
    {
      "epoch": 0.22137404580152673,
      "grad_norm": 0.41831302642822266,
      "learning_rate": 3.46169069833192e-05,
      "loss": 0.0045,
      "step": 870
    },
    {
      "epoch": 0.22391857506361323,
      "grad_norm": 0.04895065724849701,
      "learning_rate": 3.450381679389313e-05,
      "loss": 0.0093,
      "step": 880
    },
    {
      "epoch": 0.22646310432569974,
      "grad_norm": 4.139157772064209,
      "learning_rate": 3.4390726604467065e-05,
      "loss": 0.0122,
      "step": 890
    },
    {
      "epoch": 0.22900763358778625,
      "grad_norm": 0.1418493539094925,
      "learning_rate": 3.4277636415041e-05,
      "loss": 0.0147,
      "step": 900
    },
    {
      "epoch": 0.23155216284987276,
      "grad_norm": 0.19460724294185638,
      "learning_rate": 3.416454622561493e-05,
      "loss": 0.0128,
      "step": 910
    },
    {
      "epoch": 0.2340966921119593,
      "grad_norm": 0.18406054377555847,
      "learning_rate": 3.405145603618886e-05,
      "loss": 0.0115,
      "step": 920
    },
    {
      "epoch": 0.2366412213740458,
      "grad_norm": 0.16981343924999237,
      "learning_rate": 3.3938365846762796e-05,
      "loss": 0.0082,
      "step": 930
    },
    {
      "epoch": 0.23918575063613232,
      "grad_norm": 0.009775158949196339,
      "learning_rate": 3.382527565733673e-05,
      "loss": 0.0027,
      "step": 940
    },
    {
      "epoch": 0.24173027989821882,
      "grad_norm": 0.1106642484664917,
      "learning_rate": 3.371218546791066e-05,
      "loss": 0.0139,
      "step": 950
    },
    {
      "epoch": 0.24427480916030533,
      "grad_norm": 0.10489284992218018,
      "learning_rate": 3.3599095278484593e-05,
      "loss": 0.002,
      "step": 960
    },
    {
      "epoch": 0.24681933842239187,
      "grad_norm": 0.26976943016052246,
      "learning_rate": 3.3486005089058526e-05,
      "loss": 0.0075,
      "step": 970
    },
    {
      "epoch": 0.24936386768447838,
      "grad_norm": 0.06751968711614609,
      "learning_rate": 3.337291489963246e-05,
      "loss": 0.015,
      "step": 980
    },
    {
      "epoch": 0.25190839694656486,
      "grad_norm": 0.07933365553617477,
      "learning_rate": 3.325982471020639e-05,
      "loss": 0.0049,
      "step": 990
    },
    {
      "epoch": 0.2544529262086514,
      "grad_norm": 1.002841830253601,
      "learning_rate": 3.3146734520780324e-05,
      "loss": 0.0082,
      "step": 1000
    },
    {
      "epoch": 0.2544529262086514,
      "eval_loss": 0.04029834270477295,
      "eval_runtime": 100.7792,
      "eval_samples_per_second": 79.203,
      "eval_steps_per_second": 0.625,
      "step": 1000
    },
    {
      "epoch": 0.25699745547073793,
      "grad_norm": 0.11880303919315338,
      "learning_rate": 3.3033644331354256e-05,
      "loss": 0.0071,
      "step": 1010
    },
    {
      "epoch": 0.2595419847328244,
      "grad_norm": 2.1375386714935303,
      "learning_rate": 3.292055414192819e-05,
      "loss": 0.0043,
      "step": 1020
    },
    {
      "epoch": 0.26208651399491095,
      "grad_norm": 1.9007747173309326,
      "learning_rate": 3.280746395250212e-05,
      "loss": 0.0056,
      "step": 1030
    },
    {
      "epoch": 0.26463104325699743,
      "grad_norm": 1.3233751058578491,
      "learning_rate": 3.2694373763076054e-05,
      "loss": 0.0122,
      "step": 1040
    },
    {
      "epoch": 0.26717557251908397,
      "grad_norm": 1.3246195316314697,
      "learning_rate": 3.2581283573649987e-05,
      "loss": 0.0082,
      "step": 1050
    },
    {
      "epoch": 0.2697201017811705,
      "grad_norm": 3.8455071449279785,
      "learning_rate": 3.246819338422392e-05,
      "loss": 0.0048,
      "step": 1060
    },
    {
      "epoch": 0.272264631043257,
      "grad_norm": 1.4055747985839844,
      "learning_rate": 3.235510319479785e-05,
      "loss": 0.008,
      "step": 1070
    },
    {
      "epoch": 0.2748091603053435,
      "grad_norm": 1.1376632452011108,
      "learning_rate": 3.224201300537179e-05,
      "loss": 0.0088,
      "step": 1080
    },
    {
      "epoch": 0.27735368956743,
      "grad_norm": 1.465824007987976,
      "learning_rate": 3.212892281594572e-05,
      "loss": 0.0082,
      "step": 1090
    },
    {
      "epoch": 0.27989821882951654,
      "grad_norm": 0.5838302373886108,
      "learning_rate": 3.201583262651965e-05,
      "loss": 0.0128,
      "step": 1100
    },
    {
      "epoch": 0.2824427480916031,
      "grad_norm": 0.008899681270122528,
      "learning_rate": 3.190274243709358e-05,
      "loss": 0.0061,
      "step": 1110
    },
    {
      "epoch": 0.28498727735368956,
      "grad_norm": 2.9608395099639893,
      "learning_rate": 3.178965224766752e-05,
      "loss": 0.0217,
      "step": 1120
    },
    {
      "epoch": 0.2875318066157761,
      "grad_norm": 0.12024850398302078,
      "learning_rate": 3.1676562058241454e-05,
      "loss": 0.0072,
      "step": 1130
    },
    {
      "epoch": 0.2900763358778626,
      "grad_norm": 0.09325462579727173,
      "learning_rate": 3.156347186881538e-05,
      "loss": 0.0079,
      "step": 1140
    },
    {
      "epoch": 0.2926208651399491,
      "grad_norm": 0.047860078513622284,
      "learning_rate": 3.145038167938931e-05,
      "loss": 0.0111,
      "step": 1150
    },
    {
      "epoch": 0.2951653944020356,
      "grad_norm": 0.14491482079029083,
      "learning_rate": 3.1337291489963245e-05,
      "loss": 0.0079,
      "step": 1160
    },
    {
      "epoch": 0.29770992366412213,
      "grad_norm": 5.476013660430908,
      "learning_rate": 3.1224201300537184e-05,
      "loss": 0.0161,
      "step": 1170
    },
    {
      "epoch": 0.30025445292620867,
      "grad_norm": 0.5506300926208496,
      "learning_rate": 3.111111111111112e-05,
      "loss": 0.0032,
      "step": 1180
    },
    {
      "epoch": 0.30279898218829515,
      "grad_norm": 0.5853263735771179,
      "learning_rate": 3.099802092168504e-05,
      "loss": 0.0107,
      "step": 1190
    },
    {
      "epoch": 0.3053435114503817,
      "grad_norm": 0.47357645630836487,
      "learning_rate": 3.0884930732258975e-05,
      "loss": 0.0046,
      "step": 1200
    },
    {
      "epoch": 0.30788804071246817,
      "grad_norm": 0.004211845807731152,
      "learning_rate": 3.0771840542832914e-05,
      "loss": 0.0099,
      "step": 1210
    },
    {
      "epoch": 0.3104325699745547,
      "grad_norm": 0.029812565073370934,
      "learning_rate": 3.065875035340685e-05,
      "loss": 0.0094,
      "step": 1220
    },
    {
      "epoch": 0.31297709923664124,
      "grad_norm": 2.437438488006592,
      "learning_rate": 3.054566016398078e-05,
      "loss": 0.014,
      "step": 1230
    },
    {
      "epoch": 0.3155216284987277,
      "grad_norm": 0.3200075626373291,
      "learning_rate": 3.043256997455471e-05,
      "loss": 0.0082,
      "step": 1240
    },
    {
      "epoch": 0.31806615776081426,
      "grad_norm": 0.023235173895955086,
      "learning_rate": 3.031947978512864e-05,
      "loss": 0.013,
      "step": 1250
    },
    {
      "epoch": 0.32061068702290074,
      "grad_norm": 0.042968690395355225,
      "learning_rate": 3.0206389595702577e-05,
      "loss": 0.0222,
      "step": 1260
    },
    {
      "epoch": 0.3231552162849873,
      "grad_norm": 0.07905975729227066,
      "learning_rate": 3.009329940627651e-05,
      "loss": 0.004,
      "step": 1270
    },
    {
      "epoch": 0.3256997455470738,
      "grad_norm": 0.47882354259490967,
      "learning_rate": 2.998020921685044e-05,
      "loss": 0.0042,
      "step": 1280
    },
    {
      "epoch": 0.3282442748091603,
      "grad_norm": 1.036494255065918,
      "learning_rate": 2.986711902742437e-05,
      "loss": 0.0032,
      "step": 1290
    },
    {
      "epoch": 0.33078880407124683,
      "grad_norm": 2.536672830581665,
      "learning_rate": 2.9754028837998307e-05,
      "loss": 0.0109,
      "step": 1300
    },
    {
      "epoch": 0.3333333333333333,
      "grad_norm": 2.7068772315979004,
      "learning_rate": 2.964093864857224e-05,
      "loss": 0.012,
      "step": 1310
    },
    {
      "epoch": 0.33587786259541985,
      "grad_norm": 0.03697170689702034,
      "learning_rate": 2.9527848459146173e-05,
      "loss": 0.01,
      "step": 1320
    },
    {
      "epoch": 0.3384223918575064,
      "grad_norm": 0.17284952104091644,
      "learning_rate": 2.9414758269720102e-05,
      "loss": 0.003,
      "step": 1330
    },
    {
      "epoch": 0.34096692111959287,
      "grad_norm": 3.8745925426483154,
      "learning_rate": 2.9301668080294038e-05,
      "loss": 0.0156,
      "step": 1340
    },
    {
      "epoch": 0.3435114503816794,
      "grad_norm": 0.44607672095298767,
      "learning_rate": 2.918857789086797e-05,
      "loss": 0.0115,
      "step": 1350
    },
    {
      "epoch": 0.3460559796437659,
      "grad_norm": 6.223470687866211,
      "learning_rate": 2.9075487701441903e-05,
      "loss": 0.0149,
      "step": 1360
    },
    {
      "epoch": 0.3486005089058524,
      "grad_norm": 0.10045173764228821,
      "learning_rate": 2.8962397512015835e-05,
      "loss": 0.0063,
      "step": 1370
    },
    {
      "epoch": 0.3511450381679389,
      "grad_norm": 2.6154091358184814,
      "learning_rate": 2.8849307322589765e-05,
      "loss": 0.0099,
      "step": 1380
    },
    {
      "epoch": 0.35368956743002544,
      "grad_norm": 1.6079035997390747,
      "learning_rate": 2.87362171331637e-05,
      "loss": 0.0079,
      "step": 1390
    },
    {
      "epoch": 0.356234096692112,
      "grad_norm": 0.3847286105155945,
      "learning_rate": 2.8623126943737633e-05,
      "loss": 0.0094,
      "step": 1400
    },
    {
      "epoch": 0.35877862595419846,
      "grad_norm": 0.762495219707489,
      "learning_rate": 2.8510036754311566e-05,
      "loss": 0.0138,
      "step": 1410
    },
    {
      "epoch": 0.361323155216285,
      "grad_norm": 2.905803680419922,
      "learning_rate": 2.8396946564885498e-05,
      "loss": 0.0126,
      "step": 1420
    },
    {
      "epoch": 0.3638676844783715,
      "grad_norm": 0.20091155171394348,
      "learning_rate": 2.8283856375459434e-05,
      "loss": 0.005,
      "step": 1430
    },
    {
      "epoch": 0.366412213740458,
      "grad_norm": 0.2232949584722519,
      "learning_rate": 2.8170766186033363e-05,
      "loss": 0.0157,
      "step": 1440
    },
    {
      "epoch": 0.36895674300254455,
      "grad_norm": 0.11827141046524048,
      "learning_rate": 2.8057675996607296e-05,
      "loss": 0.0076,
      "step": 1450
    },
    {
      "epoch": 0.37150127226463103,
      "grad_norm": 0.0741715356707573,
      "learning_rate": 2.794458580718123e-05,
      "loss": 0.0117,
      "step": 1460
    },
    {
      "epoch": 0.37404580152671757,
      "grad_norm": 1.2536979913711548,
      "learning_rate": 2.783149561775516e-05,
      "loss": 0.0017,
      "step": 1470
    },
    {
      "epoch": 0.37659033078880405,
      "grad_norm": 1.9891741275787354,
      "learning_rate": 2.7718405428329097e-05,
      "loss": 0.0109,
      "step": 1480
    },
    {
      "epoch": 0.3791348600508906,
      "grad_norm": 1.0660837888717651,
      "learning_rate": 2.7605315238903026e-05,
      "loss": 0.007,
      "step": 1490
    },
    {
      "epoch": 0.3816793893129771,
      "grad_norm": 0.0798741951584816,
      "learning_rate": 2.749222504947696e-05,
      "loss": 0.0139,
      "step": 1500
    },
    {
      "epoch": 0.3816793893129771,
      "eval_loss": 0.0349714532494545,
      "eval_runtime": 100.7459,
      "eval_samples_per_second": 79.229,
      "eval_steps_per_second": 0.625,
      "step": 1500
    },
    {
      "epoch": 0.3842239185750636,
      "grad_norm": 3.056962013244629,
      "learning_rate": 2.737913486005089e-05,
      "loss": 0.019,
      "step": 1510
    },
    {
      "epoch": 0.38676844783715014,
      "grad_norm": 0.41486862301826477,
      "learning_rate": 2.7266044670624827e-05,
      "loss": 0.0027,
      "step": 1520
    },
    {
      "epoch": 0.3893129770992366,
      "grad_norm": 1.0251950025558472,
      "learning_rate": 2.715295448119876e-05,
      "loss": 0.0078,
      "step": 1530
    },
    {
      "epoch": 0.39185750636132316,
      "grad_norm": 0.019924340769648552,
      "learning_rate": 2.703986429177269e-05,
      "loss": 0.0031,
      "step": 1540
    },
    {
      "epoch": 0.3944020356234097,
      "grad_norm": 3.1143898963928223,
      "learning_rate": 2.692677410234662e-05,
      "loss": 0.0054,
      "step": 1550
    },
    {
      "epoch": 0.3969465648854962,
      "grad_norm": 3.4814693927764893,
      "learning_rate": 2.6813683912920558e-05,
      "loss": 0.0169,
      "step": 1560
    },
    {
      "epoch": 0.3994910941475827,
      "grad_norm": 0.34978416562080383,
      "learning_rate": 2.670059372349449e-05,
      "loss": 0.0017,
      "step": 1570
    },
    {
      "epoch": 0.4020356234096692,
      "grad_norm": 0.17419882118701935,
      "learning_rate": 2.6587503534068423e-05,
      "loss": 0.0017,
      "step": 1580
    },
    {
      "epoch": 0.40458015267175573,
      "grad_norm": 6.330338001251221,
      "learning_rate": 2.6474413344642352e-05,
      "loss": 0.0081,
      "step": 1590
    },
    {
      "epoch": 0.4071246819338422,
      "grad_norm": 0.014109604060649872,
      "learning_rate": 2.6361323155216285e-05,
      "loss": 0.0059,
      "step": 1600
    },
    {
      "epoch": 0.40966921119592875,
      "grad_norm": 0.06557715684175491,
      "learning_rate": 2.624823296579022e-05,
      "loss": 0.0025,
      "step": 1610
    },
    {
      "epoch": 0.4122137404580153,
      "grad_norm": 0.06417052447795868,
      "learning_rate": 2.6135142776364153e-05,
      "loss": 0.001,
      "step": 1620
    },
    {
      "epoch": 0.41475826972010177,
      "grad_norm": 2.6139402389526367,
      "learning_rate": 2.6022052586938086e-05,
      "loss": 0.0049,
      "step": 1630
    },
    {
      "epoch": 0.4173027989821883,
      "grad_norm": 2.0611274242401123,
      "learning_rate": 2.5908962397512015e-05,
      "loss": 0.0062,
      "step": 1640
    },
    {
      "epoch": 0.4198473282442748,
      "grad_norm": 0.0023517871741205454,
      "learning_rate": 2.5795872208085954e-05,
      "loss": 0.0013,
      "step": 1650
    },
    {
      "epoch": 0.4223918575063613,
      "grad_norm": 0.15409255027770996,
      "learning_rate": 2.5682782018659883e-05,
      "loss": 0.0131,
      "step": 1660
    },
    {
      "epoch": 0.42493638676844786,
      "grad_norm": 0.07905858010053635,
      "learning_rate": 2.5569691829233816e-05,
      "loss": 0.0037,
      "step": 1670
    },
    {
      "epoch": 0.42748091603053434,
      "grad_norm": 0.3997408151626587,
      "learning_rate": 2.545660163980775e-05,
      "loss": 0.0064,
      "step": 1680
    },
    {
      "epoch": 0.4300254452926209,
      "grad_norm": 0.14963960647583008,
      "learning_rate": 2.5343511450381678e-05,
      "loss": 0.0107,
      "step": 1690
    },
    {
      "epoch": 0.43256997455470736,
      "grad_norm": 0.06408097594976425,
      "learning_rate": 2.5230421260955617e-05,
      "loss": 0.0072,
      "step": 1700
    },
    {
      "epoch": 0.4351145038167939,
      "grad_norm": 3.4469339847564697,
      "learning_rate": 2.5117331071529546e-05,
      "loss": 0.008,
      "step": 1710
    },
    {
      "epoch": 0.43765903307888043,
      "grad_norm": 0.004595658276230097,
      "learning_rate": 2.500424088210348e-05,
      "loss": 0.0081,
      "step": 1720
    },
    {
      "epoch": 0.4402035623409669,
      "grad_norm": 4.713712215423584,
      "learning_rate": 2.489115069267741e-05,
      "loss": 0.0107,
      "step": 1730
    },
    {
      "epoch": 0.44274809160305345,
      "grad_norm": 0.08359724283218384,
      "learning_rate": 2.4778060503251347e-05,
      "loss": 0.0018,
      "step": 1740
    },
    {
      "epoch": 0.44529262086513993,
      "grad_norm": 2.5329370498657227,
      "learning_rate": 2.466497031382528e-05,
      "loss": 0.0086,
      "step": 1750
    },
    {
      "epoch": 0.44783715012722647,
      "grad_norm": 0.017571361735463142,
      "learning_rate": 2.455188012439921e-05,
      "loss": 0.0065,
      "step": 1760
    },
    {
      "epoch": 0.45038167938931295,
      "grad_norm": 0.40600821375846863,
      "learning_rate": 2.443878993497314e-05,
      "loss": 0.0062,
      "step": 1770
    },
    {
      "epoch": 0.4529262086513995,
      "grad_norm": 3.7990636825561523,
      "learning_rate": 2.4325699745547078e-05,
      "loss": 0.0134,
      "step": 1780
    },
    {
      "epoch": 0.455470737913486,
      "grad_norm": 0.09965948760509491,
      "learning_rate": 2.421260955612101e-05,
      "loss": 0.0068,
      "step": 1790
    },
    {
      "epoch": 0.4580152671755725,
      "grad_norm": 2.918208360671997,
      "learning_rate": 2.4099519366694943e-05,
      "loss": 0.0093,
      "step": 1800
    },
    {
      "epoch": 0.46055979643765904,
      "grad_norm": 0.011430525220930576,
      "learning_rate": 2.3986429177268872e-05,
      "loss": 0.0082,
      "step": 1810
    },
    {
      "epoch": 0.4631043256997455,
      "grad_norm": 0.15496379137039185,
      "learning_rate": 2.3873338987842804e-05,
      "loss": 0.0096,
      "step": 1820
    },
    {
      "epoch": 0.46564885496183206,
      "grad_norm": 0.02492368035018444,
      "learning_rate": 2.376024879841674e-05,
      "loss": 0.0078,
      "step": 1830
    },
    {
      "epoch": 0.4681933842239186,
      "grad_norm": 0.05971188098192215,
      "learning_rate": 2.3647158608990673e-05,
      "loss": 0.0089,
      "step": 1840
    },
    {
      "epoch": 0.4707379134860051,
      "grad_norm": 0.4808364808559418,
      "learning_rate": 2.3534068419564605e-05,
      "loss": 0.0047,
      "step": 1850
    },
    {
      "epoch": 0.4732824427480916,
      "grad_norm": 2.379042863845825,
      "learning_rate": 2.3420978230138535e-05,
      "loss": 0.0118,
      "step": 1860
    },
    {
      "epoch": 0.4758269720101781,
      "grad_norm": 0.024242276325821877,
      "learning_rate": 2.330788804071247e-05,
      "loss": 0.009,
      "step": 1870
    },
    {
      "epoch": 0.47837150127226463,
      "grad_norm": 1.4494438171386719,
      "learning_rate": 2.3194797851286403e-05,
      "loss": 0.0035,
      "step": 1880
    },
    {
      "epoch": 0.48091603053435117,
      "grad_norm": 4.915983200073242,
      "learning_rate": 2.3081707661860336e-05,
      "loss": 0.0145,
      "step": 1890
    },
    {
      "epoch": 0.48346055979643765,
      "grad_norm": 2.4147846698760986,
      "learning_rate": 2.296861747243427e-05,
      "loss": 0.0078,
      "step": 1900
    },
    {
      "epoch": 0.4860050890585242,
      "grad_norm": 0.41452011466026306,
      "learning_rate": 2.2855527283008204e-05,
      "loss": 0.0036,
      "step": 1910
    },
    {
      "epoch": 0.48854961832061067,
      "grad_norm": 0.6090364456176758,
      "learning_rate": 2.2742437093582133e-05,
      "loss": 0.0054,
      "step": 1920
    },
    {
      "epoch": 0.4910941475826972,
      "grad_norm": 2.908536434173584,
      "learning_rate": 2.2629346904156066e-05,
      "loss": 0.0055,
      "step": 1930
    },
    {
      "epoch": 0.49363867684478374,
      "grad_norm": 3.4067792892456055,
      "learning_rate": 2.251625671473e-05,
      "loss": 0.0063,
      "step": 1940
    },
    {
      "epoch": 0.4961832061068702,
      "grad_norm": 0.12075275182723999,
      "learning_rate": 2.240316652530393e-05,
      "loss": 0.0075,
      "step": 1950
    },
    {
      "epoch": 0.49872773536895676,
      "grad_norm": 0.10985302925109863,
      "learning_rate": 2.2290076335877867e-05,
      "loss": 0.0032,
      "step": 1960
    },
    {
      "epoch": 0.5012722646310432,
      "grad_norm": 1.9272608757019043,
      "learning_rate": 2.2176986146451796e-05,
      "loss": 0.0069,
      "step": 1970
    },
    {
      "epoch": 0.5038167938931297,
      "grad_norm": 3.202061891555786,
      "learning_rate": 2.206389595702573e-05,
      "loss": 0.0073,
      "step": 1980
    },
    {
      "epoch": 0.5063613231552163,
      "grad_norm": 1.0483746528625488,
      "learning_rate": 2.195080576759966e-05,
      "loss": 0.0031,
      "step": 1990
    },
    {
      "epoch": 0.5089058524173028,
      "grad_norm": 1.31486976146698,
      "learning_rate": 2.1837715578173597e-05,
      "loss": 0.0025,
      "step": 2000
    },
    {
      "epoch": 0.5089058524173028,
      "eval_loss": 0.034609079360961914,
      "eval_runtime": 100.7933,
      "eval_samples_per_second": 79.192,
      "eval_steps_per_second": 0.625,
      "step": 2000
    },
    {
      "epoch": 0.5114503816793893,
      "grad_norm": 0.05615166202187538,
      "learning_rate": 2.172462538874753e-05,
      "loss": 0.0114,
      "step": 2010
    },
    {
      "epoch": 0.5139949109414759,
      "grad_norm": 1.326418161392212,
      "learning_rate": 2.161153519932146e-05,
      "loss": 0.0053,
      "step": 2020
    },
    {
      "epoch": 0.5165394402035624,
      "grad_norm": 1.4809695482254028,
      "learning_rate": 2.1498445009895392e-05,
      "loss": 0.0085,
      "step": 2030
    },
    {
      "epoch": 0.5190839694656488,
      "grad_norm": 2.975813388824463,
      "learning_rate": 2.1385354820469324e-05,
      "loss": 0.0101,
      "step": 2040
    },
    {
      "epoch": 0.5216284987277354,
      "grad_norm": 0.8962512612342834,
      "learning_rate": 2.127226463104326e-05,
      "loss": 0.0104,
      "step": 2050
    },
    {
      "epoch": 0.5241730279898219,
      "grad_norm": 3.6851797103881836,
      "learning_rate": 2.1159174441617193e-05,
      "loss": 0.0074,
      "step": 2060
    },
    {
      "epoch": 0.5267175572519084,
      "grad_norm": 2.4362735748291016,
      "learning_rate": 2.1046084252191122e-05,
      "loss": 0.0119,
      "step": 2070
    },
    {
      "epoch": 0.5292620865139949,
      "grad_norm": 3.4005730152130127,
      "learning_rate": 2.0932994062765055e-05,
      "loss": 0.0179,
      "step": 2080
    },
    {
      "epoch": 0.5318066157760815,
      "grad_norm": 0.020965535193681717,
      "learning_rate": 2.081990387333899e-05,
      "loss": 0.0108,
      "step": 2090
    },
    {
      "epoch": 0.5343511450381679,
      "grad_norm": 3.1642138957977295,
      "learning_rate": 2.0706813683912923e-05,
      "loss": 0.0089,
      "step": 2100
    },
    {
      "epoch": 0.5368956743002544,
      "grad_norm": 1.4003890752792358,
      "learning_rate": 2.0593723494486856e-05,
      "loss": 0.0086,
      "step": 2110
    },
    {
      "epoch": 0.539440203562341,
      "grad_norm": 3.2447030544281006,
      "learning_rate": 2.0480633305060785e-05,
      "loss": 0.0092,
      "step": 2120
    },
    {
      "epoch": 0.5419847328244275,
      "grad_norm": 0.20196814835071564,
      "learning_rate": 2.0367543115634724e-05,
      "loss": 0.0091,
      "step": 2130
    },
    {
      "epoch": 0.544529262086514,
      "grad_norm": 0.09086956083774567,
      "learning_rate": 2.0254452926208653e-05,
      "loss": 0.0115,
      "step": 2140
    },
    {
      "epoch": 0.5470737913486005,
      "grad_norm": 0.8166348934173584,
      "learning_rate": 2.0141362736782586e-05,
      "loss": 0.0138,
      "step": 2150
    },
    {
      "epoch": 0.549618320610687,
      "grad_norm": 1.8422399759292603,
      "learning_rate": 2.002827254735652e-05,
      "loss": 0.0041,
      "step": 2160
    },
    {
      "epoch": 0.5521628498727735,
      "grad_norm": 1.3676835298538208,
      "learning_rate": 1.991518235793045e-05,
      "loss": 0.0051,
      "step": 2170
    },
    {
      "epoch": 0.55470737913486,
      "grad_norm": 3.4715235233306885,
      "learning_rate": 1.9802092168504384e-05,
      "loss": 0.0101,
      "step": 2180
    },
    {
      "epoch": 0.5572519083969466,
      "grad_norm": 0.4825442433357239,
      "learning_rate": 1.9689001979078316e-05,
      "loss": 0.0065,
      "step": 2190
    },
    {
      "epoch": 0.5597964376590331,
      "grad_norm": 0.03331977128982544,
      "learning_rate": 1.957591178965225e-05,
      "loss": 0.0136,
      "step": 2200
    },
    {
      "epoch": 0.5623409669211196,
      "grad_norm": 0.6727198362350464,
      "learning_rate": 1.946282160022618e-05,
      "loss": 0.0023,
      "step": 2210
    },
    {
      "epoch": 0.5648854961832062,
      "grad_norm": 2.1888349056243896,
      "learning_rate": 1.9349731410800114e-05,
      "loss": 0.0095,
      "step": 2220
    },
    {
      "epoch": 0.5674300254452926,
      "grad_norm": 4.135168075561523,
      "learning_rate": 1.923664122137405e-05,
      "loss": 0.0179,
      "step": 2230
    },
    {
      "epoch": 0.5699745547073791,
      "grad_norm": 1.3800098896026611,
      "learning_rate": 1.912355103194798e-05,
      "loss": 0.0045,
      "step": 2240
    },
    {
      "epoch": 0.5725190839694656,
      "grad_norm": 0.2383573353290558,
      "learning_rate": 1.901046084252191e-05,
      "loss": 0.0154,
      "step": 2250
    },
    {
      "epoch": 0.5750636132315522,
      "grad_norm": 0.2643699645996094,
      "learning_rate": 1.8897370653095844e-05,
      "loss": 0.0065,
      "step": 2260
    },
    {
      "epoch": 0.5776081424936387,
      "grad_norm": 0.7938392162322998,
      "learning_rate": 1.8784280463669777e-05,
      "loss": 0.0074,
      "step": 2270
    },
    {
      "epoch": 0.5801526717557252,
      "grad_norm": 1.8504903316497803,
      "learning_rate": 1.8671190274243713e-05,
      "loss": 0.0093,
      "step": 2280
    },
    {
      "epoch": 0.5826972010178118,
      "grad_norm": 3.9438815116882324,
      "learning_rate": 1.8558100084817642e-05,
      "loss": 0.0065,
      "step": 2290
    },
    {
      "epoch": 0.5852417302798982,
      "grad_norm": 0.024695103988051414,
      "learning_rate": 1.8445009895391578e-05,
      "loss": 0.0007,
      "step": 2300
    },
    {
      "epoch": 0.5877862595419847,
      "grad_norm": 0.5728102326393127,
      "learning_rate": 1.8331919705965507e-05,
      "loss": 0.0014,
      "step": 2310
    },
    {
      "epoch": 0.5903307888040712,
      "grad_norm": 0.0501931831240654,
      "learning_rate": 1.8218829516539443e-05,
      "loss": 0.006,
      "step": 2320
    },
    {
      "epoch": 0.5928753180661578,
      "grad_norm": 1.13953697681427,
      "learning_rate": 1.8105739327113376e-05,
      "loss": 0.006,
      "step": 2330
    },
    {
      "epoch": 0.5954198473282443,
      "grad_norm": 2.1232895851135254,
      "learning_rate": 1.7992649137687308e-05,
      "loss": 0.0073,
      "step": 2340
    },
    {
      "epoch": 0.5979643765903307,
      "grad_norm": 0.37371209263801575,
      "learning_rate": 1.787955894826124e-05,
      "loss": 0.008,
      "step": 2350
    },
    {
      "epoch": 0.6005089058524173,
      "grad_norm": 2.0648906230926514,
      "learning_rate": 1.776646875883517e-05,
      "loss": 0.0063,
      "step": 2360
    },
    {
      "epoch": 0.6030534351145038,
      "grad_norm": 2.0410525798797607,
      "learning_rate": 1.7653378569409106e-05,
      "loss": 0.0035,
      "step": 2370
    },
    {
      "epoch": 0.6055979643765903,
      "grad_norm": 0.1869073212146759,
      "learning_rate": 1.754028837998304e-05,
      "loss": 0.0088,
      "step": 2380
    },
    {
      "epoch": 0.6081424936386769,
      "grad_norm": 2.5563478469848633,
      "learning_rate": 1.742719819055697e-05,
      "loss": 0.0092,
      "step": 2390
    },
    {
      "epoch": 0.6106870229007634,
      "grad_norm": 2.4307727813720703,
      "learning_rate": 1.7314108001130904e-05,
      "loss": 0.0101,
      "step": 2400
    },
    {
      "epoch": 0.6132315521628499,
      "grad_norm": 3.2351479530334473,
      "learning_rate": 1.7201017811704836e-05,
      "loss": 0.0104,
      "step": 2410
    },
    {
      "epoch": 0.6157760814249363,
      "grad_norm": 3.1805145740509033,
      "learning_rate": 1.708792762227877e-05,
      "loss": 0.0129,
      "step": 2420
    },
    {
      "epoch": 0.6183206106870229,
      "grad_norm": 1.25412118434906,
      "learning_rate": 1.69748374328527e-05,
      "loss": 0.0024,
      "step": 2430
    },
    {
      "epoch": 0.6208651399491094,
      "grad_norm": 0.02675499953329563,
      "learning_rate": 1.6861747243426634e-05,
      "loss": 0.0066,
      "step": 2440
    },
    {
      "epoch": 0.6234096692111959,
      "grad_norm": 5.5407819747924805,
      "learning_rate": 1.6748657054000566e-05,
      "loss": 0.007,
      "step": 2450
    },
    {
      "epoch": 0.6259541984732825,
      "grad_norm": 3.5257675647735596,
      "learning_rate": 1.66355668645745e-05,
      "loss": 0.0139,
      "step": 2460
    },
    {
      "epoch": 0.628498727735369,
      "grad_norm": 1.163690209388733,
      "learning_rate": 1.652247667514843e-05,
      "loss": 0.0041,
      "step": 2470
    },
    {
      "epoch": 0.6310432569974554,
      "grad_norm": 1.5052272081375122,
      "learning_rate": 1.6409386485722364e-05,
      "loss": 0.0162,
      "step": 2480
    },
    {
      "epoch": 0.6335877862595419,
      "grad_norm": 0.1098732277750969,
      "learning_rate": 1.6296296296296297e-05,
      "loss": 0.0077,
      "step": 2490
    },
    {
      "epoch": 0.6361323155216285,
      "grad_norm": 2.6149792671203613,
      "learning_rate": 1.618320610687023e-05,
      "loss": 0.0042,
      "step": 2500
    },
    {
      "epoch": 0.6361323155216285,
      "eval_loss": 0.030483221635222435,
      "eval_runtime": 100.851,
      "eval_samples_per_second": 79.146,
      "eval_steps_per_second": 0.625,
      "step": 2500
    },
    {
      "epoch": 0.638676844783715,
      "grad_norm": 0.5812219977378845,
      "learning_rate": 1.6070115917444162e-05,
      "loss": 0.0107,
      "step": 2510
    },
    {
      "epoch": 0.6412213740458015,
      "grad_norm": 2.341726303100586,
      "learning_rate": 1.5957025728018098e-05,
      "loss": 0.0097,
      "step": 2520
    },
    {
      "epoch": 0.6437659033078881,
      "grad_norm": 1.1993039846420288,
      "learning_rate": 1.5843935538592027e-05,
      "loss": 0.0088,
      "step": 2530
    },
    {
      "epoch": 0.6463104325699746,
      "grad_norm": 0.019058074802160263,
      "learning_rate": 1.5730845349165963e-05,
      "loss": 0.0049,
      "step": 2540
    },
    {
      "epoch": 0.648854961832061,
      "grad_norm": 1.7715319395065308,
      "learning_rate": 1.5617755159739892e-05,
      "loss": 0.0077,
      "step": 2550
    },
    {
      "epoch": 0.6513994910941476,
      "grad_norm": 2.300363302230835,
      "learning_rate": 1.5504664970313828e-05,
      "loss": 0.0289,
      "step": 2560
    },
    {
      "epoch": 0.6539440203562341,
      "grad_norm": 3.3478307723999023,
      "learning_rate": 1.539157478088776e-05,
      "loss": 0.0101,
      "step": 2570
    },
    {
      "epoch": 0.6564885496183206,
      "grad_norm": 0.5511828660964966,
      "learning_rate": 1.5278484591461693e-05,
      "loss": 0.013,
      "step": 2580
    },
    {
      "epoch": 0.6590330788804071,
      "grad_norm": 3.244690418243408,
      "learning_rate": 1.5165394402035624e-05,
      "loss": 0.0129,
      "step": 2590
    },
    {
      "epoch": 0.6615776081424937,
      "grad_norm": 0.08320210874080658,
      "learning_rate": 1.5052304212609557e-05,
      "loss": 0.004,
      "step": 2600
    },
    {
      "epoch": 0.6641221374045801,
      "grad_norm": 0.06972193717956543,
      "learning_rate": 1.493921402318349e-05,
      "loss": 0.0077,
      "step": 2610
    },
    {
      "epoch": 0.6666666666666666,
      "grad_norm": 0.010952036827802658,
      "learning_rate": 1.4826123833757422e-05,
      "loss": 0.0045,
      "step": 2620
    },
    {
      "epoch": 0.6692111959287532,
      "grad_norm": 0.1785265952348709,
      "learning_rate": 1.4713033644331356e-05,
      "loss": 0.0011,
      "step": 2630
    },
    {
      "epoch": 0.6717557251908397,
      "grad_norm": 3.3081789016723633,
      "learning_rate": 1.4599943454905287e-05,
      "loss": 0.0092,
      "step": 2640
    },
    {
      "epoch": 0.6743002544529262,
      "grad_norm": 0.011730634607374668,
      "learning_rate": 1.4486853265479221e-05,
      "loss": 0.0019,
      "step": 2650
    },
    {
      "epoch": 0.6768447837150128,
      "grad_norm": 1.933732271194458,
      "learning_rate": 1.4373763076053154e-05,
      "loss": 0.0026,
      "step": 2660
    },
    {
      "epoch": 0.6793893129770993,
      "grad_norm": 2.0810701847076416,
      "learning_rate": 1.4260672886627088e-05,
      "loss": 0.0104,
      "step": 2670
    },
    {
      "epoch": 0.6819338422391857,
      "grad_norm": 4.2147369384765625,
      "learning_rate": 1.4147582697201019e-05,
      "loss": 0.0257,
      "step": 2680
    },
    {
      "epoch": 0.6844783715012722,
      "grad_norm": 6.2209014892578125,
      "learning_rate": 1.4034492507774953e-05,
      "loss": 0.0112,
      "step": 2690
    },
    {
      "epoch": 0.6870229007633588,
      "grad_norm": 4.587314128875732,
      "learning_rate": 1.3921402318348884e-05,
      "loss": 0.007,
      "step": 2700
    },
    {
      "epoch": 0.6895674300254453,
      "grad_norm": 5.7180256843566895,
      "learning_rate": 1.3808312128922817e-05,
      "loss": 0.0107,
      "step": 2710
    },
    {
      "epoch": 0.6921119592875318,
      "grad_norm": 1.0686004161834717,
      "learning_rate": 1.369522193949675e-05,
      "loss": 0.0136,
      "step": 2720
    },
    {
      "epoch": 0.6946564885496184,
      "grad_norm": 3.1578478813171387,
      "learning_rate": 1.3582131750070682e-05,
      "loss": 0.0101,
      "step": 2730
    },
    {
      "epoch": 0.6972010178117048,
      "grad_norm": 1.4626826047897339,
      "learning_rate": 1.3469041560644616e-05,
      "loss": 0.0051,
      "step": 2740
    },
    {
      "epoch": 0.6997455470737913,
      "grad_norm": 1.409559726715088,
      "learning_rate": 1.3355951371218547e-05,
      "loss": 0.0048,
      "step": 2750
    },
    {
      "epoch": 0.7022900763358778,
      "grad_norm": 1.5923247337341309,
      "learning_rate": 1.3242861181792481e-05,
      "loss": 0.0052,
      "step": 2760
    },
    {
      "epoch": 0.7048346055979644,
      "grad_norm": 0.01753048412501812,
      "learning_rate": 1.3129770992366414e-05,
      "loss": 0.0051,
      "step": 2770
    },
    {
      "epoch": 0.7073791348600509,
      "grad_norm": 0.022448958829045296,
      "learning_rate": 1.3016680802940346e-05,
      "loss": 0.0008,
      "step": 2780
    },
    {
      "epoch": 0.7099236641221374,
      "grad_norm": 0.010822678916156292,
      "learning_rate": 1.2903590613514279e-05,
      "loss": 0.003,
      "step": 2790
    },
    {
      "epoch": 0.712468193384224,
      "grad_norm": 1.4215025901794434,
      "learning_rate": 1.2790500424088213e-05,
      "loss": 0.0061,
      "step": 2800
    },
    {
      "epoch": 0.7150127226463104,
      "grad_norm": 0.11338794231414795,
      "learning_rate": 1.2677410234662144e-05,
      "loss": 0.0045,
      "step": 2810
    },
    {
      "epoch": 0.7175572519083969,
      "grad_norm": 2.464766263961792,
      "learning_rate": 1.2564320045236076e-05,
      "loss": 0.0045,
      "step": 2820
    },
    {
      "epoch": 0.7201017811704835,
      "grad_norm": 0.09698779135942459,
      "learning_rate": 1.2451229855810009e-05,
      "loss": 0.0037,
      "step": 2830
    },
    {
      "epoch": 0.72264631043257,
      "grad_norm": 0.04683073237538338,
      "learning_rate": 1.2338139666383942e-05,
      "loss": 0.0174,
      "step": 2840
    },
    {
      "epoch": 0.7251908396946565,
      "grad_norm": 0.013533544726669788,
      "learning_rate": 1.2225049476957876e-05,
      "loss": 0.0051,
      "step": 2850
    },
    {
      "epoch": 0.727735368956743,
      "grad_norm": 1.7438645362854004,
      "learning_rate": 1.2111959287531807e-05,
      "loss": 0.0161,
      "step": 2860
    },
    {
      "epoch": 0.7302798982188295,
      "grad_norm": 1.247225046157837,
      "learning_rate": 1.1998869098105741e-05,
      "loss": 0.0006,
      "step": 2870
    },
    {
      "epoch": 0.732824427480916,
      "grad_norm": 5.181498050689697,
      "learning_rate": 1.1885778908679672e-05,
      "loss": 0.0059,
      "step": 2880
    },
    {
      "epoch": 0.7353689567430025,
      "grad_norm": 2.543138027191162,
      "learning_rate": 1.1772688719253606e-05,
      "loss": 0.015,
      "step": 2890
    },
    {
      "epoch": 0.7379134860050891,
      "grad_norm": 0.5215864777565002,
      "learning_rate": 1.1659598529827539e-05,
      "loss": 0.0144,
      "step": 2900
    },
    {
      "epoch": 0.7404580152671756,
      "grad_norm": 0.017565440386533737,
      "learning_rate": 1.1546508340401473e-05,
      "loss": 0.0213,
      "step": 2910
    },
    {
      "epoch": 0.7430025445292621,
      "grad_norm": 0.2629789113998413,
      "learning_rate": 1.1433418150975404e-05,
      "loss": 0.0051,
      "step": 2920
    },
    {
      "epoch": 0.7455470737913485,
      "grad_norm": 0.07063543051481247,
      "learning_rate": 1.1320327961549336e-05,
      "loss": 0.0024,
      "step": 2930
    },
    {
      "epoch": 0.7480916030534351,
      "grad_norm": 4.683466911315918,
      "learning_rate": 1.1207237772123269e-05,
      "loss": 0.0079,
      "step": 2940
    },
    {
      "epoch": 0.7506361323155216,
      "grad_norm": 1.2442190647125244,
      "learning_rate": 1.1094147582697202e-05,
      "loss": 0.0129,
      "step": 2950
    },
    {
      "epoch": 0.7531806615776081,
      "grad_norm": 1.3911083936691284,
      "learning_rate": 1.0981057393271136e-05,
      "loss": 0.0057,
      "step": 2960
    },
    {
      "epoch": 0.7557251908396947,
      "grad_norm": 2.5614712238311768,
      "learning_rate": 1.0867967203845067e-05,
      "loss": 0.0069,
      "step": 2970
    },
    {
      "epoch": 0.7582697201017812,
      "grad_norm": 0.22300787270069122,
      "learning_rate": 1.0754877014419001e-05,
      "loss": 0.0075,
      "step": 2980
    },
    {
      "epoch": 0.7608142493638677,
      "grad_norm": 0.11393741518259048,
      "learning_rate": 1.0641786824992932e-05,
      "loss": 0.0042,
      "step": 2990
    },
    {
      "epoch": 0.7633587786259542,
      "grad_norm": 3.6102774143218994,
      "learning_rate": 1.0528696635566866e-05,
      "loss": 0.0065,
      "step": 3000
    },
    {
      "epoch": 0.7633587786259542,
      "eval_loss": 0.028713775798678398,
      "eval_runtime": 100.7408,
      "eval_samples_per_second": 79.233,
      "eval_steps_per_second": 0.625,
      "step": 3000
    },
    {
      "epoch": 0.7659033078880407,
      "grad_norm": 0.09737461805343628,
      "learning_rate": 1.0415606446140799e-05,
      "loss": 0.0064,
      "step": 3010
    },
    {
      "epoch": 0.7684478371501272,
      "grad_norm": 0.17835134267807007,
      "learning_rate": 1.0302516256714731e-05,
      "loss": 0.0043,
      "step": 3020
    },
    {
      "epoch": 0.7709923664122137,
      "grad_norm": 7.2621588706970215,
      "learning_rate": 1.0189426067288664e-05,
      "loss": 0.0051,
      "step": 3030
    },
    {
      "epoch": 0.7735368956743003,
      "grad_norm": 1.3994181156158447,
      "learning_rate": 1.0076335877862595e-05,
      "loss": 0.0052,
      "step": 3040
    },
    {
      "epoch": 0.7760814249363868,
      "grad_norm": 0.3551107943058014,
      "learning_rate": 9.963245688436529e-06,
      "loss": 0.0028,
      "step": 3050
    },
    {
      "epoch": 0.7786259541984732,
      "grad_norm": 3.8419830799102783,
      "learning_rate": 9.850155499010461e-06,
      "loss": 0.0092,
      "step": 3060
    },
    {
      "epoch": 0.7811704834605598,
      "grad_norm": 0.03152012452483177,
      "learning_rate": 9.737065309584394e-06,
      "loss": 0.0166,
      "step": 3070
    },
    {
      "epoch": 0.7837150127226463,
      "grad_norm": 1.2947672605514526,
      "learning_rate": 9.623975120158328e-06,
      "loss": 0.0044,
      "step": 3080
    },
    {
      "epoch": 0.7862595419847328,
      "grad_norm": 0.0874599888920784,
      "learning_rate": 9.51088493073226e-06,
      "loss": 0.0054,
      "step": 3090
    },
    {
      "epoch": 0.7888040712468194,
      "grad_norm": 0.06683657318353653,
      "learning_rate": 9.397794741306192e-06,
      "loss": 0.0064,
      "step": 3100
    },
    {
      "epoch": 0.7913486005089059,
      "grad_norm": 0.09484909474849701,
      "learning_rate": 9.284704551880124e-06,
      "loss": 0.0036,
      "step": 3110
    },
    {
      "epoch": 0.7938931297709924,
      "grad_norm": 0.599434494972229,
      "learning_rate": 9.171614362454057e-06,
      "loss": 0.004,
      "step": 3120
    },
    {
      "epoch": 0.7964376590330788,
      "grad_norm": 0.015714434906840324,
      "learning_rate": 9.058524173027991e-06,
      "loss": 0.0114,
      "step": 3130
    },
    {
      "epoch": 0.7989821882951654,
      "grad_norm": 0.23739086091518402,
      "learning_rate": 8.945433983601924e-06,
      "loss": 0.0048,
      "step": 3140
    },
    {
      "epoch": 0.8015267175572519,
      "grad_norm": 0.10904980450868607,
      "learning_rate": 8.832343794175856e-06,
      "loss": 0.0052,
      "step": 3150
    },
    {
      "epoch": 0.8040712468193384,
      "grad_norm": 3.4536166191101074,
      "learning_rate": 8.719253604749789e-06,
      "loss": 0.0165,
      "step": 3160
    },
    {
      "epoch": 0.806615776081425,
      "grad_norm": 0.13291485607624054,
      "learning_rate": 8.606163415323721e-06,
      "loss": 0.0119,
      "step": 3170
    },
    {
      "epoch": 0.8091603053435115,
      "grad_norm": 1.6434555053710938,
      "learning_rate": 8.493073225897654e-06,
      "loss": 0.009,
      "step": 3180
    },
    {
      "epoch": 0.811704834605598,
      "grad_norm": 0.05683699622750282,
      "learning_rate": 8.379983036471587e-06,
      "loss": 0.0131,
      "step": 3190
    },
    {
      "epoch": 0.8142493638676844,
      "grad_norm": 3.6685760021209717,
      "learning_rate": 8.266892847045519e-06,
      "loss": 0.0277,
      "step": 3200
    },
    {
      "epoch": 0.816793893129771,
      "grad_norm": 0.07328160107135773,
      "learning_rate": 8.153802657619452e-06,
      "loss": 0.002,
      "step": 3210
    },
    {
      "epoch": 0.8193384223918575,
      "grad_norm": 4.1098480224609375,
      "learning_rate": 8.040712468193384e-06,
      "loss": 0.0046,
      "step": 3220
    },
    {
      "epoch": 0.821882951653944,
      "grad_norm": 0.4604232907295227,
      "learning_rate": 7.927622278767317e-06,
      "loss": 0.0146,
      "step": 3230
    },
    {
      "epoch": 0.8244274809160306,
      "grad_norm": 0.6996018886566162,
      "learning_rate": 7.81453208934125e-06,
      "loss": 0.0025,
      "step": 3240
    },
    {
      "epoch": 0.8269720101781171,
      "grad_norm": 0.20250293612480164,
      "learning_rate": 7.701441899915184e-06,
      "loss": 0.0046,
      "step": 3250
    },
    {
      "epoch": 0.8295165394402035,
      "grad_norm": 1.6295372247695923,
      "learning_rate": 7.588351710489115e-06,
      "loss": 0.0018,
      "step": 3260
    },
    {
      "epoch": 0.8320610687022901,
      "grad_norm": 2.616621732711792,
      "learning_rate": 7.475261521063049e-06,
      "loss": 0.0074,
      "step": 3270
    },
    {
      "epoch": 0.8346055979643766,
      "grad_norm": 3.2906081676483154,
      "learning_rate": 7.362171331636981e-06,
      "loss": 0.0143,
      "step": 3280
    },
    {
      "epoch": 0.8371501272264631,
      "grad_norm": 1.3064147233963013,
      "learning_rate": 7.249081142210914e-06,
      "loss": 0.0056,
      "step": 3290
    },
    {
      "epoch": 0.8396946564885496,
      "grad_norm": 3.039973020553589,
| "learning_rate": 7.135990952784847e-06, | |
| "loss": 0.0095, | |
| "step": 3300 | |
| }, | |
| { | |
| "epoch": 0.8422391857506362, | |
| "grad_norm": 0.09989529848098755, | |
| "learning_rate": 7.022900763358779e-06, | |
| "loss": 0.0014, | |
| "step": 3310 | |
| }, | |
| { | |
| "epoch": 0.8447837150127226, | |
| "grad_norm": 0.2573828399181366, | |
| "learning_rate": 6.909810573932712e-06, | |
| "loss": 0.0061, | |
| "step": 3320 | |
| }, | |
| { | |
| "epoch": 0.8473282442748091, | |
| "grad_norm": 0.2958613932132721, | |
| "learning_rate": 6.796720384506644e-06, | |
| "loss": 0.004, | |
| "step": 3330 | |
| }, | |
| { | |
| "epoch": 0.8498727735368957, | |
| "grad_norm": 0.24500970542430878, | |
| "learning_rate": 6.683630195080577e-06, | |
| "loss": 0.0071, | |
| "step": 3340 | |
| }, | |
| { | |
| "epoch": 0.8524173027989822, | |
| "grad_norm": 1.2140284776687622, | |
| "learning_rate": 6.57054000565451e-06, | |
| "loss": 0.0036, | |
| "step": 3350 | |
| }, | |
| { | |
| "epoch": 0.8549618320610687, | |
| "grad_norm": 0.9599927067756653, | |
| "learning_rate": 6.457449816228443e-06, | |
| "loss": 0.0149, | |
| "step": 3360 | |
| }, | |
| { | |
| "epoch": 0.8575063613231552, | |
| "grad_norm": 0.27447447180747986, | |
| "learning_rate": 6.344359626802375e-06, | |
| "loss": 0.0074, | |
| "step": 3370 | |
| }, | |
| { | |
| "epoch": 0.8600508905852418, | |
| "grad_norm": 0.7029807567596436, | |
| "learning_rate": 6.231269437376308e-06, | |
| "loss": 0.007, | |
| "step": 3380 | |
| }, | |
| { | |
| "epoch": 0.8625954198473282, | |
| "grad_norm": 0.054117076098918915, | |
| "learning_rate": 6.118179247950241e-06, | |
| "loss": 0.0059, | |
| "step": 3390 | |
| }, | |
| { | |
| "epoch": 0.8651399491094147, | |
| "grad_norm": 0.4443982243537903, | |
| "learning_rate": 6.005089058524174e-06, | |
| "loss": 0.006, | |
| "step": 3400 | |
| }, | |
| { | |
| "epoch": 0.8676844783715013, | |
| "grad_norm": 3.201941967010498, | |
| "learning_rate": 5.8919988690981064e-06, | |
| "loss": 0.0118, | |
| "step": 3410 | |
| }, | |
| { | |
| "epoch": 0.8702290076335878, | |
| "grad_norm": 0.044088855385780334, | |
| "learning_rate": 5.778908679672038e-06, | |
| "loss": 0.0032, | |
| "step": 3420 | |
| }, | |
| { | |
| "epoch": 0.8727735368956743, | |
| "grad_norm": 1.8481858968734741, | |
| "learning_rate": 5.6658184902459716e-06, | |
| "loss": 0.0029, | |
| "step": 3430 | |
| }, | |
| { | |
| "epoch": 0.8753180661577609, | |
| "grad_norm": 0.014196817763149738, | |
| "learning_rate": 5.552728300819904e-06, | |
| "loss": 0.0062, | |
| "step": 3440 | |
| }, | |
| { | |
| "epoch": 0.8778625954198473, | |
| "grad_norm": 0.021374225616455078, | |
| "learning_rate": 5.439638111393837e-06, | |
| "loss": 0.0087, | |
| "step": 3450 | |
| }, | |
| { | |
| "epoch": 0.8804071246819338, | |
| "grad_norm": 0.0040234895423054695, | |
| "learning_rate": 5.326547921967769e-06, | |
| "loss": 0.0059, | |
| "step": 3460 | |
| }, | |
| { | |
| "epoch": 0.8829516539440203, | |
| "grad_norm": 0.07329881191253662, | |
| "learning_rate": 5.213457732541703e-06, | |
| "loss": 0.0023, | |
| "step": 3470 | |
| }, | |
| { | |
| "epoch": 0.8854961832061069, | |
| "grad_norm": 1.4617124795913696, | |
| "learning_rate": 5.100367543115635e-06, | |
| "loss": 0.0055, | |
| "step": 3480 | |
| }, | |
| { | |
| "epoch": 0.8880407124681934, | |
| "grad_norm": 2.9887943267822266, | |
| "learning_rate": 4.987277353689568e-06, | |
| "loss": 0.0106, | |
| "step": 3490 | |
| }, | |
| { | |
| "epoch": 0.8905852417302799, | |
| "grad_norm": 0.0617634542286396, | |
| "learning_rate": 4.8741871642635e-06, | |
| "loss": 0.0216, | |
| "step": 3500 | |
| }, | |
| { | |
| "epoch": 0.8905852417302799, | |
| "eval_loss": 0.02892346866428852, | |
| "eval_runtime": 101.0102, | |
| "eval_samples_per_second": 79.022, | |
| "eval_steps_per_second": 0.624, | |
| "step": 3500 | |
| } | |
| ], | |
| "logging_steps": 10, | |
| "max_steps": 3930, | |
| "num_input_tokens_seen": 0, | |
| "num_train_epochs": 1, | |
| "save_steps": 500, | |
| "stateful_callbacks": { | |
| "TrainerControl": { | |
| "args": { | |
| "should_epoch_stop": false, | |
| "should_evaluate": false, | |
| "should_log": false, | |
| "should_save": true, | |
| "should_training_stop": false | |
| }, | |
| "attributes": {} | |
| } | |
| }, | |
| "total_flos": 1.9923556583082885e+18, | |
| "train_batch_size": 128, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |