{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 6.438194280311983,
  "eval_steps": 500,
  "global_step": 3405,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.018908059560387616,
      "grad_norm": 0.025965163484215736,
      "learning_rate": 4.0000000000000003e-07,
      "loss": 0.8123,
      "step": 10
    },
    {
      "epoch": 0.03781611912077523,
      "grad_norm": 0.027516305446624756,
      "learning_rate": 8.000000000000001e-07,
      "loss": 0.8007,
      "step": 20
    },
    {
      "epoch": 0.05672417868116285,
      "grad_norm": 0.02769283391535282,
      "learning_rate": 1.2000000000000002e-06,
      "loss": 0.8213,
      "step": 30
    },
    {
      "epoch": 0.07563223824155046,
      "grad_norm": 0.025368299335241318,
      "learning_rate": 1.6000000000000001e-06,
      "loss": 0.7853,
      "step": 40
    },
    {
      "epoch": 0.09454029780193808,
      "grad_norm": 0.026271438226103783,
      "learning_rate": 2.0000000000000003e-06,
      "loss": 0.8131,
      "step": 50
    },
    {
      "epoch": 0.1134483573623257,
      "grad_norm": 0.027166498824954033,
      "learning_rate": 2.4000000000000003e-06,
      "loss": 0.7867,
      "step": 60
    },
    {
      "epoch": 0.1323564169227133,
      "grad_norm": 0.028786830604076385,
      "learning_rate": 2.8000000000000003e-06,
      "loss": 0.8183,
      "step": 70
    },
    {
      "epoch": 0.15126447648310093,
      "grad_norm": 0.026403894647955894,
      "learning_rate": 3.2000000000000003e-06,
      "loss": 0.8384,
      "step": 80
    },
    {
      "epoch": 0.17017253604348853,
      "grad_norm": 0.028935490176081657,
      "learning_rate": 3.6000000000000003e-06,
      "loss": 0.8062,
      "step": 90
    },
    {
      "epoch": 0.18908059560387616,
      "grad_norm": 0.031438667327165604,
      "learning_rate": 4.000000000000001e-06,
      "loss": 0.8085,
      "step": 100
    },
    {
      "epoch": 0.20798865516426376,
      "grad_norm": 0.031011823564767838,
      "learning_rate": 4.4e-06,
      "loss": 0.8057,
      "step": 110
    },
    {
      "epoch": 0.2268967147246514,
      "grad_norm": 0.03556632995605469,
      "learning_rate": 4.800000000000001e-06,
      "loss": 0.7454,
      "step": 120
    },
    {
      "epoch": 0.245804774285039,
      "grad_norm": 0.0362481027841568,
      "learning_rate": 5.2e-06,
      "loss": 0.8343,
      "step": 130
    },
    {
      "epoch": 0.2647128338454266,
      "grad_norm": 0.032787468284368515,
      "learning_rate": 5.600000000000001e-06,
      "loss": 0.7836,
      "step": 140
    },
    {
      "epoch": 0.2836208934058142,
      "grad_norm": 0.04355374351143837,
      "learning_rate": 6e-06,
      "loss": 0.7873,
      "step": 150
    },
    {
      "epoch": 0.30252895296620186,
      "grad_norm": 0.035903844982385635,
      "learning_rate": 6.4000000000000006e-06,
      "loss": 0.7888,
      "step": 160
    },
    {
      "epoch": 0.32143701252658946,
      "grad_norm": 0.04153973609209061,
      "learning_rate": 6.800000000000001e-06,
      "loss": 0.7646,
      "step": 170
    },
    {
      "epoch": 0.34034507208697706,
      "grad_norm": 0.04311086982488632,
      "learning_rate": 7.2000000000000005e-06,
      "loss": 0.7963,
      "step": 180
    },
    {
      "epoch": 0.35925313164736467,
      "grad_norm": 0.047484926879405975,
      "learning_rate": 7.600000000000001e-06,
      "loss": 0.7631,
      "step": 190
    },
    {
      "epoch": 0.3781611912077523,
      "grad_norm": 0.05766135826706886,
      "learning_rate": 8.000000000000001e-06,
      "loss": 0.7649,
      "step": 200
    },
    {
      "epoch": 0.3970692507681399,
      "grad_norm": 0.044645749032497406,
      "learning_rate": 8.400000000000001e-06,
      "loss": 0.7443,
      "step": 210
    },
    {
      "epoch": 0.4159773103285275,
      "grad_norm": 0.05479070544242859,
      "learning_rate": 8.8e-06,
      "loss": 0.7184,
      "step": 220
    },
    {
      "epoch": 0.43488536988891513,
      "grad_norm": 0.045226652175188065,
      "learning_rate": 9.200000000000002e-06,
      "loss": 0.7487,
      "step": 230
    },
    {
      "epoch": 0.4537934294493028,
      "grad_norm": 0.041623398661613464,
      "learning_rate": 9.600000000000001e-06,
      "loss": 0.6997,
      "step": 240
    },
    {
      "epoch": 0.4727014890096904,
      "grad_norm": 0.03361015394330025,
      "learning_rate": 1e-05,
      "loss": 0.712,
      "step": 250
    },
    {
      "epoch": 0.491609548570078,
      "grad_norm": 0.03581312671303749,
      "learning_rate": 9.999752122311021e-06,
      "loss": 0.7277,
      "step": 260
    },
    {
      "epoch": 0.5105176081304656,
      "grad_norm": 0.040930092334747314,
      "learning_rate": 9.99900851382142e-06,
      "loss": 0.7027,
      "step": 270
    },
    {
      "epoch": 0.5294256676908532,
      "grad_norm": 0.033486612141132355,
      "learning_rate": 9.99776924826078e-06,
      "loss": 0.7078,
      "step": 280
    },
    {
      "epoch": 0.5483337272512409,
      "grad_norm": 0.04255792871117592,
      "learning_rate": 9.996034448503617e-06,
      "loss": 0.7065,
      "step": 290
    },
    {
      "epoch": 0.5672417868116284,
      "grad_norm": 0.03878919407725334,
      "learning_rate": 9.993804286557189e-06,
      "loss": 0.7006,
      "step": 300
    },
    {
      "epoch": 0.5861498463720161,
      "grad_norm": 0.03808160126209259,
      "learning_rate": 9.991078983544453e-06,
      "loss": 0.6788,
      "step": 310
    },
    {
      "epoch": 0.6050579059324037,
      "grad_norm": 0.03979503735899925,
      "learning_rate": 9.987858809682133e-06,
      "loss": 0.7208,
      "step": 320
    },
    {
      "epoch": 0.6239659654927913,
      "grad_norm": 0.035877715796232224,
      "learning_rate": 9.984144084253932e-06,
      "loss": 0.6906,
      "step": 330
    },
    {
      "epoch": 0.6428740250531789,
      "grad_norm": 0.03366890922188759,
      "learning_rate": 9.979935175578873e-06,
      "loss": 0.7089,
      "step": 340
    },
    {
      "epoch": 0.6617820846135666,
      "grad_norm": 0.03546404838562012,
      "learning_rate": 9.975232500974777e-06,
      "loss": 0.6748,
      "step": 350
    },
    {
      "epoch": 0.6806901441739541,
      "grad_norm": 0.037561580538749695,
      "learning_rate": 9.970036526716889e-06,
      "loss": 0.6739,
      "step": 360
    },
    {
      "epoch": 0.6995982037343418,
      "grad_norm": 0.034097544848918915,
      "learning_rate": 9.964347767991645e-06,
      "loss": 0.6683,
      "step": 370
    },
    {
      "epoch": 0.7185062632947293,
      "grad_norm": 0.034007199108600616,
      "learning_rate": 9.958166788845593e-06,
      "loss": 0.7276,
      "step": 380
    },
    {
      "epoch": 0.737414322855117,
      "grad_norm": 0.0377151183784008,
      "learning_rate": 9.95149420212946e-06,
      "loss": 0.6728,
      "step": 390
    },
    {
      "epoch": 0.7563223824155046,
      "grad_norm": 0.03274886682629585,
      "learning_rate": 9.944330669437402e-06,
      "loss": 0.7146,
      "step": 400
    },
    {
      "epoch": 0.7752304419758922,
      "grad_norm": 0.035580553114414215,
      "learning_rate": 9.936676901041386e-06,
      "loss": 0.7084,
      "step": 410
    },
    {
      "epoch": 0.7941385015362799,
      "grad_norm": 0.03300866112112999,
      "learning_rate": 9.92853365582078e-06,
      "loss": 0.7168,
      "step": 420
    },
    {
      "epoch": 0.8130465610966675,
      "grad_norm": 0.03483871743083,
      "learning_rate": 9.91990174118711e-06,
      "loss": 0.7041,
      "step": 430
    },
    {
      "epoch": 0.831954620657055,
      "grad_norm": 0.02913292683660984,
      "learning_rate": 9.910782013003995e-06,
      "loss": 0.6911,
      "step": 440
    },
    {
      "epoch": 0.8508626802174427,
      "grad_norm": 0.03175769001245499,
      "learning_rate": 9.90117537550229e-06,
      "loss": 0.6849,
      "step": 450
    },
    {
      "epoch": 0.8697707397778303,
      "grad_norm": 0.031419094651937485,
      "learning_rate": 9.89108278119044e-06,
      "loss": 0.7276,
      "step": 460
    },
    {
      "epoch": 0.8886787993382179,
      "grad_norm": 0.03397994861006737,
      "learning_rate": 9.880505230760027e-06,
      "loss": 0.7295,
      "step": 470
    },
    {
      "epoch": 0.9075868588986056,
      "grad_norm": 0.03156342729926109,
      "learning_rate": 9.869443772986552e-06,
      "loss": 0.6624,
      "step": 480
    },
    {
      "epoch": 0.9264949184589931,
      "grad_norm": 0.03708454221487045,
      "learning_rate": 9.85789950462545e-06,
      "loss": 0.7073,
      "step": 490
    },
    {
      "epoch": 0.9454029780193808,
      "grad_norm": 0.035610634833574295,
      "learning_rate": 9.845873570303346e-06,
      "loss": 0.7135,
      "step": 500
    },
    {
      "epoch": 0.9643110375797683,
      "grad_norm": 0.03249524533748627,
      "learning_rate": 9.833367162404564e-06,
      "loss": 0.6935,
      "step": 510
    },
    {
      "epoch": 0.983219097140156,
      "grad_norm": 0.03497915342450142,
      "learning_rate": 9.8203815209529e-06,
      "loss": 0.7198,
      "step": 520
    },
    {
      "epoch": 1.0021271567005436,
      "grad_norm": 0.03658578172326088,
      "learning_rate": 9.806917933488668e-06,
      "loss": 0.7324,
      "step": 530
    },
    {
      "epoch": 1.0210352162609313,
      "grad_norm": 0.03637828305363655,
      "learning_rate": 9.792977734941049e-06,
      "loss": 0.7391,
      "step": 540
    },
    {
      "epoch": 1.0399432758213187,
      "grad_norm": 0.030937302857637405,
      "learning_rate": 9.778562307495722e-06,
      "loss": 0.6518,
      "step": 550
    },
    {
      "epoch": 1.0588513353817064,
      "grad_norm": 0.034040458500385284,
      "learning_rate": 9.763673080457822e-06,
      "loss": 0.6846,
      "step": 560
    },
    {
      "epoch": 1.077759394942094,
      "grad_norm": 0.034276511520147324,
      "learning_rate": 9.74831153011023e-06,
      "loss": 0.6396,
      "step": 570
    },
    {
      "epoch": 1.0966674545024817,
      "grad_norm": 0.036644868552684784,
      "learning_rate": 9.732479179567178e-06,
      "loss": 0.6882,
      "step": 580
    },
    {
      "epoch": 1.1155755140628694,
      "grad_norm": 0.03399999067187309,
      "learning_rate": 9.716177598623258e-06,
      "loss": 0.6809,
      "step": 590
    },
    {
      "epoch": 1.1344835736232568,
      "grad_norm": 0.03530950844287872,
      "learning_rate": 9.69940840359775e-06,
      "loss": 0.7231,
      "step": 600
    },
    {
      "epoch": 1.1533916331836445,
      "grad_norm": 0.03427257388830185,
      "learning_rate": 9.68217325717438e-06,
      "loss": 0.6532,
      "step": 610
    },
    {
      "epoch": 1.1722996927440321,
      "grad_norm": 0.03930140659213066,
      "learning_rate": 9.664473868236452e-06,
      "loss": 0.6725,
      "step": 620
    },
    {
      "epoch": 1.1912077523044198,
      "grad_norm": 0.033676933497190475,
      "learning_rate": 9.646311991697421e-06,
      "loss": 0.6813,
      "step": 630
    },
    {
      "epoch": 1.2101158118648074,
      "grad_norm": 0.03723222389817238,
      "learning_rate": 9.627689428326875e-06,
      "loss": 0.7048,
      "step": 640
    },
    {
      "epoch": 1.2290238714251949,
      "grad_norm": 0.04367990419268608,
      "learning_rate": 9.608608024572004e-06,
      "loss": 0.6879,
      "step": 650
    },
    {
      "epoch": 1.2479319309855825,
      "grad_norm": 0.03997201845049858,
      "learning_rate": 9.589069672374516e-06,
      "loss": 0.7123,
      "step": 660
    },
    {
      "epoch": 1.2668399905459702,
      "grad_norm": 0.03593291714787483,
      "learning_rate": 9.569076308983046e-06,
      "loss": 0.7099,
      "step": 670
    },
    {
      "epoch": 1.2857480501063578,
      "grad_norm": 0.03724157065153122,
      "learning_rate": 9.548629916761077e-06,
      "loss": 0.6893,
      "step": 680
    },
    {
      "epoch": 1.3046561096667455,
      "grad_norm": 0.038895316421985626,
      "learning_rate": 9.527732522990391e-06,
      "loss": 0.7025,
      "step": 690
    },
    {
      "epoch": 1.323564169227133,
      "grad_norm": 0.034872233867645264,
      "learning_rate": 9.50638619967006e-06,
      "loss": 0.7254,
      "step": 700
    },
    {
      "epoch": 1.3424722287875208,
      "grad_norm": 0.040150392800569534,
      "learning_rate": 9.484593063310998e-06,
      "loss": 0.7077,
      "step": 710
    },
    {
      "epoch": 1.3613802883479083,
      "grad_norm": 0.03619126230478287,
      "learning_rate": 9.462355274726117e-06,
      "loss": 0.6835,
      "step": 720
    },
    {
      "epoch": 1.380288347908296,
      "grad_norm": 0.03600538149476051,
      "learning_rate": 9.439675038816071e-06,
      "loss": 0.6701,
      "step": 730
    },
    {
      "epoch": 1.3991964074686836,
      "grad_norm": 0.03670930117368698,
      "learning_rate": 9.41655460435065e-06,
      "loss": 0.6516,
      "step": 740
    },
    {
      "epoch": 1.418104467029071,
      "grad_norm": 0.04134395346045494,
      "learning_rate": 9.392996263745796e-06,
      "loss": 0.6648,
      "step": 750
    },
    {
      "epoch": 1.4370125265894589,
      "grad_norm": 0.036546383053064346,
      "learning_rate": 9.36900235283632e-06,
      "loss": 0.6653,
      "step": 760
    },
    {
      "epoch": 1.4559205861498463,
      "grad_norm": 0.039511166512966156,
      "learning_rate": 9.344575250644297e-06,
      "loss": 0.7179,
      "step": 770
    },
    {
      "epoch": 1.474828645710234,
      "grad_norm": 0.04085073992609978,
      "learning_rate": 9.31971737914318e-06,
      "loss": 0.6504,
      "step": 780
    },
    {
      "epoch": 1.4937367052706216,
      "grad_norm": 0.03965315595269203,
      "learning_rate": 9.29443120301767e-06,
      "loss": 0.6915,
      "step": 790
    },
    {
      "epoch": 1.512644764831009,
      "grad_norm": 0.04195081442594528,
      "learning_rate": 9.268719229419325e-06,
      "loss": 0.6647,
      "step": 800
    },
    {
      "epoch": 1.531552824391397,
      "grad_norm": 0.040395915508270264,
      "learning_rate": 9.242584007717983e-06,
      "loss": 0.6891,
      "step": 810
    },
    {
      "epoch": 1.5504608839517844,
      "grad_norm": 0.040031347423791885,
      "learning_rate": 9.216028129248986e-06,
      "loss": 0.683,
      "step": 820
    },
    {
      "epoch": 1.569368943512172,
      "grad_norm": 0.042245492339134216,
      "learning_rate": 9.189054227056247e-06,
      "loss": 0.6932,
      "step": 830
    },
    {
      "epoch": 1.5882770030725597,
      "grad_norm": 0.041085727512836456,
      "learning_rate": 9.161664975631184e-06,
      "loss": 0.662,
      "step": 840
    },
    {
      "epoch": 1.6071850626329471,
      "grad_norm": 0.04155590385198593,
      "learning_rate": 9.133863090647533e-06,
      "loss": 0.7057,
      "step": 850
    },
    {
      "epoch": 1.626093122193335,
      "grad_norm": 0.03857599198818207,
      "learning_rate": 9.105651328692093e-06,
      "loss": 0.7096,
      "step": 860
    },
    {
      "epoch": 1.6450011817537225,
      "grad_norm": 0.04026035964488983,
      "learning_rate": 9.077032486991409e-06,
      "loss": 0.7072,
      "step": 870
    },
    {
      "epoch": 1.66390924131411,
      "grad_norm": 0.04304495453834534,
      "learning_rate": 9.048009403134416e-06,
      "loss": 0.6984,
      "step": 880
    },
    {
      "epoch": 1.6828173008744978,
      "grad_norm": 0.04076143354177475,
      "learning_rate": 9.018584954791096e-06,
      "loss": 0.6906,
      "step": 890
    },
    {
      "epoch": 1.7017253604348852,
      "grad_norm": 0.04462475702166557,
      "learning_rate": 8.988762059427151e-06,
      "loss": 0.6823,
      "step": 900
    },
    {
      "epoch": 1.720633419995273,
      "grad_norm": 0.0413421094417572,
      "learning_rate": 8.958543674014733e-06,
      "loss": 0.6567,
      "step": 910
    },
    {
      "epoch": 1.7395414795556605,
      "grad_norm": 0.043752122670412064,
      "learning_rate": 8.927932794739258e-06,
      "loss": 0.7111,
      "step": 920
    },
    {
      "epoch": 1.7584495391160482,
      "grad_norm": 0.04320874437689781,
      "learning_rate": 8.896932456702332e-06,
      "loss": 0.6818,
      "step": 930
    },
    {
      "epoch": 1.7773575986764358,
      "grad_norm": 0.04405179247260094,
      "learning_rate": 8.865545733620816e-06,
      "loss": 0.6954,
      "step": 940
    },
    {
      "epoch": 1.7962656582368235,
      "grad_norm": 0.04215887188911438,
      "learning_rate": 8.83377573752206e-06,
      "loss": 0.6932,
      "step": 950
    },
    {
      "epoch": 1.8151737177972112,
      "grad_norm": 0.041994575411081314,
      "learning_rate": 8.80162561843535e-06,
      "loss": 0.7062,
      "step": 960
    },
    {
      "epoch": 1.8340817773575986,
      "grad_norm": 0.044879619032144547,
      "learning_rate": 8.769098564079575e-06,
      "loss": 0.6746,
      "step": 970
    },
    {
      "epoch": 1.8529898369179862,
      "grad_norm": 0.044902119785547256,
      "learning_rate": 8.736197799547158e-06,
      "loss": 0.6808,
      "step": 980
    },
    {
      "epoch": 1.871897896478374,
      "grad_norm": 0.041419774293899536,
      "learning_rate": 8.702926586984295e-06,
      "loss": 0.6933,
      "step": 990
    },
    {
      "epoch": 1.8908059560387616,
      "grad_norm": 0.044605787843465805,
      "learning_rate": 8.669288225267491e-06,
      "loss": 0.7086,
      "step": 1000
    },
    {
      "epoch": 1.9097140155991492,
      "grad_norm": 0.046282704919576645,
      "learning_rate": 8.635286049676497e-06,
      "loss": 0.6531,
      "step": 1010
    },
    {
      "epoch": 1.9286220751595367,
      "grad_norm": 0.043578825891017914,
      "learning_rate": 8.60092343156359e-06,
      "loss": 0.7319,
      "step": 1020
    },
    {
      "epoch": 1.9475301347199245,
      "grad_norm": 0.04184921458363533,
      "learning_rate": 8.566203778019323e-06,
      "loss": 0.6714,
      "step": 1030
    },
    {
      "epoch": 1.966438194280312,
      "grad_norm": 0.044262003153562546,
      "learning_rate": 8.531130531534683e-06,
      "loss": 0.6804,
      "step": 1040
    },
    {
      "epoch": 1.9853462538406996,
      "grad_norm": 0.04731186479330063,
      "learning_rate": 8.495707169659786e-06,
      "loss": 0.7039,
      "step": 1050
    },
    {
      "epoch": 2.0042543134010873,
      "grad_norm": 0.04587468504905701,
      "learning_rate": 8.459937204659064e-06,
      "loss": 0.7089,
      "step": 1060
    },
    {
      "epoch": 2.0231623729614747,
      "grad_norm": 0.0443338043987751,
      "learning_rate": 8.423824183163016e-06,
      "loss": 0.7337,
      "step": 1070
    },
    {
      "epoch": 2.0420704325218626,
      "grad_norm": 0.0441860668361187,
      "learning_rate": 8.387371685816573e-06,
      "loss": 0.6772,
      "step": 1080
    },
    {
      "epoch": 2.06097849208225,
      "grad_norm": 0.0456668958067894,
      "learning_rate": 8.35058332692405e-06,
      "loss": 0.7313,
      "step": 1090
    },
    {
      "epoch": 2.0798865516426375,
      "grad_norm": 0.04839623346924782,
      "learning_rate": 8.3134627540908e-06,
      "loss": 0.7097,
      "step": 1100
    },
    {
      "epoch": 2.0987946112030254,
      "grad_norm": 0.04605531319975853,
      "learning_rate": 8.276013647861551e-06,
      "loss": 0.6692,
      "step": 1110
    },
    {
      "epoch": 2.117702670763413,
      "grad_norm": 0.04938478022813797,
      "learning_rate": 8.23823972135546e-06,
      "loss": 0.7164,
      "step": 1120
    },
    {
      "epoch": 2.1366107303238007,
      "grad_norm": 0.04710887372493744,
      "learning_rate": 8.200144719897974e-06,
      "loss": 0.6791,
      "step": 1130
    },
    {
      "epoch": 2.155518789884188,
      "grad_norm": 0.04783915355801582,
      "learning_rate": 8.16173242064946e-06,
      "loss": 0.6915,
      "step": 1140
    },
    {
      "epoch": 2.1744268494445755,
      "grad_norm": 0.04555421695113182,
      "learning_rate": 8.123006632230702e-06,
      "loss": 0.679,
      "step": 1150
    },
    {
      "epoch": 2.1933349090049634,
      "grad_norm": 0.05009933188557625,
      "learning_rate": 8.08397119434528e-06,
      "loss": 0.6971,
      "step": 1160
    },
    {
      "epoch": 2.212242968565351,
      "grad_norm": 0.05265818163752556,
      "learning_rate": 8.044629977398846e-06,
      "loss": 0.6619,
      "step": 1170
    },
    {
      "epoch": 2.2311510281257387,
      "grad_norm": 0.049943771213293076,
      "learning_rate": 8.00498688211537e-06,
      "loss": 0.6774,
      "step": 1180
    },
    {
      "epoch": 2.250059087686126,
      "grad_norm": 0.046229682862758636,
      "learning_rate": 7.965045839150393e-06,
      "loss": 0.6598,
      "step": 1190
    },
    {
      "epoch": 2.2689671472465136,
      "grad_norm": 0.04708926007151604,
      "learning_rate": 7.924810808701286e-06,
      "loss": 0.6866,
      "step": 1200
    },
    {
      "epoch": 2.2878752068069015,
      "grad_norm": 0.04711465537548065,
      "learning_rate": 7.884285780114592e-06,
      "loss": 0.6921,
      "step": 1210
    },
    {
      "epoch": 2.306783266367289,
      "grad_norm": 0.04712436720728874,
      "learning_rate": 7.843474771490486e-06,
      "loss": 0.678,
      "step": 1220
    },
    {
      "epoch": 2.325691325927677,
      "grad_norm": 0.05399713292717934,
      "learning_rate": 7.802381829284366e-06,
      "loss": 0.6913,
      "step": 1230
    },
    {
      "epoch": 2.3445993854880642,
      "grad_norm": 0.05280677229166031,
      "learning_rate": 7.761011027905655e-06,
      "loss": 0.6885,
      "step": 1240
    },
    {
      "epoch": 2.3635074450484517,
      "grad_norm": 0.050864093005657196,
      "learning_rate": 7.719366469313807e-06,
      "loss": 0.669,
      "step": 1250
    },
    {
      "epoch": 2.3824155046088396,
      "grad_norm": 0.049628086388111115,
      "learning_rate": 7.677452282611595e-06,
      "loss": 0.672,
      "step": 1260
    },
    {
      "epoch": 2.401323564169227,
      "grad_norm": 0.04994028061628342,
      "learning_rate": 7.635272623635717e-06,
      "loss": 0.6958,
      "step": 1270
    },
    {
      "epoch": 2.420231623729615,
      "grad_norm": 0.0501830168068409,
      "learning_rate": 7.592831674544728e-06,
      "loss": 0.685,
      "step": 1280
    },
    {
      "epoch": 2.4391396832900023,
      "grad_norm": 0.0533694364130497,
      "learning_rate": 7.550133643404377e-06,
      "loss": 0.662,
      "step": 1290
    },
    {
      "epoch": 2.4580477428503897,
      "grad_norm": 0.05292650684714317,
      "learning_rate": 7.507182763770382e-06,
      "loss": 0.6927,
      "step": 1300
    },
    {
      "epoch": 2.4769558024107776,
      "grad_norm": 0.05379761382937431,
      "learning_rate": 7.463983294268651e-06,
      "loss": 0.6999,
      "step": 1310
    },
    {
      "epoch": 2.495863861971165,
      "grad_norm": 0.05280940234661102,
      "learning_rate": 7.420539518173053e-06,
      "loss": 0.6753,
      "step": 1320
    },
    {
      "epoch": 2.514771921531553,
      "grad_norm": 0.05738574638962746,
      "learning_rate": 7.376855742980718e-06,
      "loss": 0.6765,
      "step": 1330
    },
    {
      "epoch": 2.5336799810919404,
      "grad_norm": 0.0549960657954216,
      "learning_rate": 7.332936299984936e-06,
      "loss": 0.6672,
      "step": 1340
    },
    {
      "epoch": 2.552588040652328,
      "grad_norm": 0.05606316402554512,
      "learning_rate": 7.2887855438457245e-06,
      "loss": 0.7149,
      "step": 1350
    },
    {
      "epoch": 2.5714961002127157,
      "grad_norm": 0.05418732389807701,
      "learning_rate": 7.244407852158042e-06,
      "loss": 0.6798,
      "step": 1360
    },
    {
      "epoch": 2.590404159773103,
      "grad_norm": 0.056678082793951035,
      "learning_rate": 7.19980762501775e-06,
      "loss": 0.6821,
      "step": 1370
    },
    {
      "epoch": 2.609312219333491,
      "grad_norm": 0.06135575473308563,
      "learning_rate": 7.154989284585342e-06,
      "loss": 0.6999,
      "step": 1380
    },
    {
      "epoch": 2.6282202788938784,
      "grad_norm": 0.053315985947847366,
      "learning_rate": 7.109957274647477e-06,
      "loss": 0.6951,
      "step": 1390
    },
    {
      "epoch": 2.647128338454266,
      "grad_norm": 0.057439010590314865,
      "learning_rate": 7.06471606017638e-06,
      "loss": 0.7058,
      "step": 1400
    },
    {
      "epoch": 2.6660363980146538,
      "grad_norm": 0.05061256140470505,
      "learning_rate": 7.019270126887123e-06,
      "loss": 0.6472,
      "step": 1410
    },
    {
      "epoch": 2.6849444575750416,
      "grad_norm": 0.05650395527482033,
      "learning_rate": 6.973623980792875e-06,
      "loss": 0.7099,
      "step": 1420
    },
    {
      "epoch": 2.703852517135429,
      "grad_norm": 0.05364992842078209,
      "learning_rate": 6.927782147758118e-06,
      "loss": 0.6958,
      "step": 1430
    },
    {
      "epoch": 2.7227605766958165,
      "grad_norm": 0.05512566491961479,
      "learning_rate": 6.881749173049902e-06,
      "loss": 0.6266,
      "step": 1440
    },
    {
      "epoch": 2.741668636256204,
      "grad_norm": 0.054573893547058105,
      "learning_rate": 6.835529620887184e-06,
      "loss": 0.6358,
      "step": 1450
    },
    {
      "epoch": 2.760576695816592,
      "grad_norm": 0.05702576786279678,
      "learning_rate": 6.789128073988276e-06,
      "loss": 0.6709,
      "step": 1460
    },
    {
      "epoch": 2.7794847553769797,
      "grad_norm": 0.06671806424856186,
      "learning_rate": 6.742549133116459e-06,
      "loss": 0.6921,
      "step": 1470
    },
    {
      "epoch": 2.798392814937367,
      "grad_norm": 0.05824644863605499,
      "learning_rate": 6.695797416623821e-06,
      "loss": 0.6382,
      "step": 1480
    },
    {
      "epoch": 2.8173008744977546,
      "grad_norm": 0.05811014026403427,
      "learning_rate": 6.64887755999334e-06,
      "loss": 0.6728,
      "step": 1490
    },
    {
      "epoch": 2.836208934058142,
      "grad_norm": 0.05731780454516411,
      "learning_rate": 6.601794215379266e-06,
      "loss": 0.6531,
      "step": 1500
    },
    {
      "epoch": 2.85511699361853,
      "grad_norm": 0.05633535236120224,
      "learning_rate": 6.5545520511458605e-06,
      "loss": 0.7032,
      "step": 1510
    },
    {
      "epoch": 2.8740250531789178,
      "grad_norm": 0.06355008482933044,
      "learning_rate": 6.507155751404518e-06,
      "loss": 0.6529,
      "step": 1520
    },
    {
      "epoch": 2.892933112739305,
      "grad_norm": 0.06318964809179306,
      "learning_rate": 6.45961001554934e-06,
      "loss": 0.678,
      "step": 1530
    },
    {
      "epoch": 2.9118411722996926,
      "grad_norm": 0.0571432039141655,
      "learning_rate": 6.411919557791176e-06,
      "loss": 0.6691,
      "step": 1540
    },
    {
      "epoch": 2.9307492318600805,
      "grad_norm": 0.0572994127869606,
      "learning_rate": 6.364089106690209e-06,
      "loss": 0.6973,
      "step": 1550
    },
    {
      "epoch": 2.949657291420468,
      "grad_norm": 0.060109131038188934,
      "learning_rate": 6.316123404687108e-06,
      "loss": 0.6711,
      "step": 1560
    },
    {
      "epoch": 2.968565350980856,
      "grad_norm": 0.0652967318892479,
      "learning_rate": 6.268027207632822e-06,
      "loss": 0.6829,
      "step": 1570
    },
    {
      "epoch": 2.9874734105412433,
      "grad_norm": 0.06120815873146057,
      "learning_rate": 6.2198052843170184e-06,
      "loss": 0.6615,
      "step": 1580
    },
    {
      "epoch": 3.0063814701016307,
      "grad_norm": 0.05858151242136955,
      "learning_rate": 6.171462415995263e-06,
      "loss": 0.6505,
      "step": 1590
    },
    {
      "epoch": 3.0252895296620186,
      "grad_norm": 0.06770987808704376,
      "learning_rate": 6.123003395914946e-06,
      "loss": 0.6791,
      "step": 1600
    },
    {
      "epoch": 3.044197589222406,
      "grad_norm": 0.07135272771120071,
      "learning_rate": 6.074433028840029e-06,
      "loss": 0.6923,
      "step": 1610
    },
    {
      "epoch": 3.0631056487827935,
      "grad_norm": 0.062477704137563705,
      "learning_rate": 6.0257561305746524e-06,
      "loss": 0.6795,
      "step": 1620
    },
    {
      "epoch": 3.0820137083431813,
      "grad_norm": 0.06046826392412186,
      "learning_rate": 5.976977527485634e-06,
      "loss": 0.6415,
      "step": 1630
    },
    {
      "epoch": 3.1009217679035688,
      "grad_norm": 0.06548671424388885,
      "learning_rate": 5.928102056023935e-06,
      "loss": 0.6811,
      "step": 1640
    },
    {
      "epoch": 3.1198298274639567,
      "grad_norm": 0.06585259735584259,
      "learning_rate": 5.879134562245124e-06,
      "loss": 0.7001,
      "step": 1650
    },
    {
      "epoch": 3.138737887024344,
      "grad_norm": 0.060643814504146576,
      "learning_rate": 5.830079901328876e-06,
      "loss": 0.6876,
      "step": 1660
    },
    {
      "epoch": 3.157645946584732,
      "grad_norm": 0.06475692987442017,
      "learning_rate": 5.780942937097584e-06,
      "loss": 0.6451,
      "step": 1670
    },
    {
      "epoch": 3.1765540061451194,
      "grad_norm": 0.06038076803088188,
      "learning_rate": 5.7317285415341015e-06,
      "loss": 0.7299,
      "step": 1680
    },
    {
      "epoch": 3.195462065705507,
      "grad_norm": 0.0708499401807785,
      "learning_rate": 5.682441594298684e-06,
      "loss": 0.6758,
      "step": 1690
    },
    {
      "epoch": 3.2143701252658947,
      "grad_norm": 0.059092409908771515,
      "learning_rate": 5.633086982245166e-06,
      "loss": 0.6753,
      "step": 1700
    },
    {
      "epoch": 3.233278184826282,
      "grad_norm": 0.07227720320224762,
      "learning_rate": 5.583669598936414e-06,
      "loss": 0.7004,
      "step": 1710
    },
    {
      "epoch": 3.25218624438667,
      "grad_norm": 0.06542421877384186,
      "learning_rate": 5.5341943441591364e-06,
      "loss": 0.6322,
      "step": 1720
    },
    {
      "epoch": 3.2710943039470575,
      "grad_norm": 0.06533703953027725,
      "learning_rate": 5.48466612343806e-06,
      "loss": 0.6379,
      "step": 1730
    },
    {
      "epoch": 3.290002363507445,
      "grad_norm": 0.0733456239104271,
      "learning_rate": 5.435089847549541e-06,
      "loss": 0.6609,
      "step": 1740
    },
    {
      "epoch": 3.308910423067833,
      "grad_norm": 0.06628765165805817,
      "learning_rate": 5.38547043203466e-06,
      "loss": 0.6694,
      "step": 1750
    },
    {
      "epoch": 3.32781848262822,
      "grad_norm": 0.07368200272321701,
      "learning_rate": 5.3358127967118355e-06,
      "loss": 0.6572,
      "step": 1760
    },
    {
      "epoch": 3.346726542188608,
      "grad_norm": 0.06234520301222801,
      "learning_rate": 5.286121865189016e-06,
      "loss": 0.7065,
      "step": 1770
    },
    {
      "epoch": 3.3656346017489955,
      "grad_norm": 0.06914972513914108,
      "learning_rate": 5.236402564375514e-06,
      "loss": 0.7108,
      "step": 1780
    },
    {
      "epoch": 3.384542661309383,
      "grad_norm": 0.06980318576097488,
      "learning_rate": 5.186659823993482e-06,
      "loss": 0.7008,
      "step": 1790
    },
    {
      "epoch": 3.403450720869771,
      "grad_norm": 0.06499587744474411,
      "learning_rate": 5.136898576089131e-06,
      "loss": 0.6951,
      "step": 1800
    },
    {
      "epoch": 3.4223587804301583,
      "grad_norm": 0.0658080130815506,
      "learning_rate": 5.087123754543715e-06,
      "loss": 0.6703,
      "step": 1810
    },
    {
      "epoch": 3.441266839990546,
      "grad_norm": 0.06298273801803589,
      "learning_rate": 5.037340294584323e-06,
      "loss": 0.6709,
      "step": 1820
    },
    {
      "epoch": 3.4601748995509336,
      "grad_norm": 0.07536780089139938,
      "learning_rate": 4.9875531322945635e-06,
      "loss": 0.6573,
      "step": 1830
    },
    {
      "epoch": 3.479082959111321,
      "grad_norm": 0.07163431495428085,
      "learning_rate": 4.937767204125122e-06,
      "loss": 0.6739,
      "step": 1840
    },
    {
      "epoch": 3.497991018671709,
      "grad_norm": 0.06597751379013062,
      "learning_rate": 4.88798744640433e-06,
      "loss": 0.648,
      "step": 1850
    },
    {
      "epoch": 3.5168990782320964,
      "grad_norm": 0.061648089438676834,
      "learning_rate": 4.838218794848706e-06,
      "loss": 0.6848,
      "step": 1860
    },
    {
      "epoch": 3.5358071377924842,
      "grad_norm": 0.06730767339468002,
      "learning_rate": 4.788466184073586e-06,
      "loss": 0.6555,
      "step": 1870
    },
    {
      "epoch": 3.5547151973528717,
      "grad_norm": 0.070393867790699,
      "learning_rate": 4.738734547103836e-06,
      "loss": 0.6613,
      "step": 1880
    },
    {
      "epoch": 3.573623256913259,
      "grad_norm": 0.07426159828901291,
      "learning_rate": 4.6890288148847564e-06,
      "loss": 0.6941,
      "step": 1890
    },
    {
      "epoch": 3.592531316473647,
      "grad_norm": 0.0717974454164505,
      "learning_rate": 4.639353915793159e-06,
      "loss": 0.733,
      "step": 1900
    },
    {
      "epoch": 3.6114393760340344,
      "grad_norm": 0.07027947902679443,
      "learning_rate": 4.589714775148719e-06,
      "loss": 0.6731,
      "step": 1910
    },
    {
      "epoch": 3.6303474355944223,
      "grad_norm": 0.06726668030023575,
      "learning_rate": 4.540116314725622e-06,
      "loss": 0.6837,
      "step": 1920
    },
    {
      "epoch": 3.6492554951548097,
      "grad_norm": 0.062310460954904556,
      "learning_rate": 4.49056345226457e-06,
      "loss": 0.7209,
      "step": 1930
    },
    {
      "epoch": 3.668163554715197,
      "grad_norm": 0.07572998106479645,
      "learning_rate": 4.441061100985169e-06,
      "loss": 0.6864,
      "step": 1940
    },
    {
      "epoch": 3.687071614275585,
      "grad_norm": 0.07206368446350098,
      "learning_rate": 4.391614169098795e-06,
      "loss": 0.6827,
      "step": 1950
    },
    {
      "epoch": 3.7059796738359725,
      "grad_norm": 0.07162526249885559,
      "learning_rate": 4.342227559321932e-06,
      "loss": 0.6844,
      "step": 1960
    },
    {
      "epoch": 3.7248877333963604,
      "grad_norm": 0.07335004955530167,
      "learning_rate": 4.292906168390056e-06,
      "loss": 0.7004,
      "step": 1970
    },
    {
      "epoch": 3.743795792956748,
      "grad_norm": 0.07719966024160385,
      "learning_rate": 4.24365488657213e-06,
      "loss": 0.6674,
      "step": 1980
    },
    {
      "epoch": 3.7627038525171352,
      "grad_norm": 0.06994116306304932,
      "learning_rate": 4.194478597185716e-06,
      "loss": 0.6682,
      "step": 1990
    },
    {
      "epoch": 3.781611912077523,
      "grad_norm": 0.071551114320755,
      "learning_rate": 4.145382176112804e-06,
      "loss": 0.6676,
      "step": 2000
    },
    {
      "epoch": 3.8005199716379106,
      "grad_norm": 0.07608342170715332,
      "learning_rate": 4.0963704913163525e-06,
      "loss": 0.6799,
      "step": 2010
    },
    {
      "epoch": 3.8194280311982984,
      "grad_norm": 0.06601590663194656,
      "learning_rate": 4.047448402357622e-06,
      "loss": 0.675,
      "step": 2020
    },
    {
      "epoch": 3.838336090758686,
      "grad_norm": 0.07468881458044052,
      "learning_rate": 3.9986207599143566e-06,
      "loss": 0.6544,
      "step": 2030
    },
    {
      "epoch": 3.8572441503190733,
      "grad_norm": 0.06805694103240967,
      "learning_rate": 3.9498924052998195e-06,
      "loss": 0.6856,
      "step": 2040
    },
    {
      "epoch": 3.876152209879461,
      "grad_norm": 0.08336406946182251,
      "learning_rate": 3.901268169982784e-06,
      "loss": 0.6535,
      "step": 2050
    },
    {
      "epoch": 3.8950602694398486,
      "grad_norm": 0.06868647038936615,
      "learning_rate": 3.852752875108483e-06,
      "loss": 0.6657,
      "step": 2060
    },
    {
      "epoch": 3.9139683290002365,
      "grad_norm": 0.07650279253721237,
      "learning_rate": 3.804351331020584e-06,
      "loss": 0.6587,
      "step": 2070
    },
    {
      "epoch": 3.932876388560624,
      "grad_norm": 0.07571345567703247,
      "learning_rate": 3.7560683367842456e-06,
      "loss": 0.6694,
      "step": 2080
    },
    {
      "epoch": 3.9517844481210114,
      "grad_norm": 0.06946905702352524,
      "learning_rate": 3.707908679710276e-06,
      "loss": 0.6646,
      "step": 2090
    },
    {
      "epoch": 3.9706925076813993,
      "grad_norm": 0.07537796348333359,
      "learning_rate": 3.659877134880477e-06,
      "loss": 0.6716,
      "step": 2100
    },
    {
      "epoch": 3.9896005672417867,
      "grad_norm": 0.07623406499624252,
      "learning_rate": 3.6119784646741807e-06,
      "loss": 0.6503,
      "step": 2110
    },
    {
      "epoch": 4.008508626802175,
      "grad_norm": 0.07248073071241379,
      "learning_rate": 3.5642174182960554e-06,
      "loss": 0.6953,
      "step": 2120
    },
    {
      "epoch": 4.027416686362562,
      "grad_norm": 0.07673463970422745,
      "learning_rate": 3.5165987313052225e-06,
      "loss": 0.7001,
      "step": 2130
    },
    {
      "epoch": 4.046324745922949,
      "grad_norm": 0.06918472051620483,
      "learning_rate": 3.469127125145717e-06,
      "loss": 0.6945,
      "step": 2140
    },
    {
      "epoch": 4.065232805483337,
      "grad_norm": 0.06996827572584152,
      "learning_rate": 3.4218073066783464e-06,
      "loss": 0.6454,
      "step": 2150
    },
    {
      "epoch": 4.084140865043725,
      "grad_norm": 0.08209812641143799,
      "learning_rate": 3.374643967714011e-06,
      "loss": 0.6974,
      "step": 2160
    },
    {
      "epoch": 4.103048924604113,
      "grad_norm": 0.07365689426660538,
      "learning_rate": 3.327641784548495e-06,
      "loss": 0.6876,
      "step": 2170
    },
    {
      "epoch": 4.1219569841645,
      "grad_norm": 0.07676127552986145,
      "learning_rate": 3.2808054174988165e-06,
      "loss": 0.6322,
      "step": 2180
    },
    {
      "epoch": 4.1408650437248875,
      "grad_norm": 0.08051763474941254,
      "learning_rate": 3.2341395104411467e-06,
      "loss": 0.6707,
      "step": 2190
    },
    {
      "epoch": 4.159773103285275,
      "grad_norm": 0.08313941955566406,
      "learning_rate": 3.18764869035036e-06,
      "loss": 0.661,
      "step": 2200
    },
    {
      "epoch": 4.178681162845663,
      "grad_norm": 0.0765809416770935,
      "learning_rate": 3.141337566841277e-06,
      "loss": 0.6962,
      "step": 2210
    },
    {
      "epoch": 4.197589222406051,
      "grad_norm": 0.10051856189966202,
      "learning_rate": 3.095210731711603e-06,
      "loss": 0.6339,
      "step": 2220
    },
    {
      "epoch": 4.216497281966438,
      "grad_norm": 0.07644385099411011,
      "learning_rate": 3.0492727584866554e-06,
      "loss": 0.707,
      "step": 2230
    },
    {
      "epoch": 4.235405341526826,
      "grad_norm": 0.07614172250032425,
      "learning_rate": 3.003528201965893e-06,
      "loss": 0.6511,
      "step": 2240
    },
    {
      "epoch": 4.254313401087213,
      "grad_norm": 0.07704110443592072,
      "learning_rate": 2.957981597771292e-06,
      "loss": 0.7068,
      "step": 2250
    },
    {
      "epoch": 4.273221460647601,
      "grad_norm": 0.07838738709688187,
      "learning_rate": 2.9126374618976526e-06,
      "loss": 0.6293,
      "step": 2260
    },
    {
      "epoch": 4.292129520207989,
      "grad_norm": 0.07882346957921982,
      "learning_rate": 2.8675002902648148e-06,
      "loss": 0.6677,
      "step": 2270
    },
    {
      "epoch": 4.311037579768376,
      "grad_norm": 0.07263699918985367,
      "learning_rate": 2.8225745582718964e-06,
      "loss": 0.6794,
      "step": 2280
    },
    {
      "epoch": 4.329945639328764,
      "grad_norm": 0.0822502002120018,
      "learning_rate": 2.7778647203535474e-06,
      "loss": 0.665,
      "step": 2290
    },
    {
      "epoch": 4.348853698889151,
      "grad_norm": 0.07821501791477203,
      "learning_rate": 2.7333752095382883e-06,
      "loss": 0.6642,
      "step": 2300
    },
    {
      "epoch": 4.367761758449539,
      "grad_norm": 0.08269735425710678,
      "learning_rate": 2.6891104370089644e-06,
      "loss": 0.7082,
      "step": 2310
    },
    {
      "epoch": 4.386669818009927,
      "grad_norm": 0.07760092616081238,
      "learning_rate": 2.645074791665386e-06,
      "loss": 0.6985,
      "step": 2320
    },
    {
      "epoch": 4.405577877570314,
      "grad_norm": 0.09205616265535355,
      "learning_rate": 2.601272639689152e-06,
      "loss": 0.6436,
      "step": 2330
    },
    {
      "epoch": 4.424485937130702,
      "grad_norm": 0.07168934494256973,
      "learning_rate": 2.557708324110747e-06,
      "loss": 0.6426,
      "step": 2340
    },
    {
      "epoch": 4.443393996691089,
      "grad_norm": 0.07921484857797623,
      "learning_rate": 2.5143861643789153e-06,
      "loss": 0.6513,
      "step": 2350
    },
    {
      "epoch": 4.4623020562514775,
      "grad_norm": 0.07425461709499359,
      "learning_rate": 2.4713104559323896e-06,
      "loss": 0.6513,
      "step": 2360
    },
    {
      "epoch": 4.481210115811865,
      "grad_norm": 0.08021389693021774,
      "learning_rate": 2.4284854697739974e-06,
      "loss": 0.6922,
      "step": 2370
    },
    {
      "epoch": 4.500118175372252,
      "grad_norm": 0.08110744506120682,
      "learning_rate": 2.385915452047177e-06,
      "loss": 0.6894,
      "step": 2380
    },
    {
      "epoch": 4.51902623493264,
      "grad_norm": 0.08530419319868088,
      "learning_rate": 2.343604623614974e-06,
      "loss": 0.6671,
      "step": 2390
    },
    {
      "epoch": 4.537934294493027,
      "grad_norm": 0.07330188900232315,
      "learning_rate": 2.3015571796415397e-06,
      "loss": 0.6694,
      "step": 2400
    },
    {
      "epoch": 4.5568423540534155,
      "grad_norm": 0.07557007670402527,
      "learning_rate": 2.2597772891761655e-06,
      "loss": 0.6843,
      "step": 2410
    },
    {
      "epoch": 4.575750413613803,
      "grad_norm": 0.07595443725585938,
      "learning_rate": 2.2182690947399306e-06,
      "loss": 0.6942,
      "step": 2420
    },
    {
      "epoch": 4.59465847317419,
      "grad_norm": 0.09174559265375137,
      "learning_rate": 2.17703671191496e-06,
      "loss": 0.7176,
      "step": 2430
    },
    {
      "epoch": 4.613566532734578,
      "grad_norm": 0.077406145632267,
      "learning_rate": 2.1360842289363616e-06,
      "loss": 0.7041,
      "step": 2440
    },
    {
      "epoch": 4.632474592294965,
      "grad_norm": 0.07479016482830048,
      "learning_rate": 2.0954157062868667e-06,
      "loss": 0.6791,
      "step": 2450
    },
    {
      "epoch": 4.651382651855354,
      "grad_norm": 0.08275721967220306,
      "learning_rate": 2.0550351762942427e-06,
      "loss": 0.6529,
      "step": 2460
    },
    {
      "epoch": 4.670290711415741,
      "grad_norm": 0.08451598882675171,
      "learning_rate": 2.0149466427314684e-06,
      "loss": 0.6607,
      "step": 2470
    },
    {
      "epoch": 4.6891987709761285,
      "grad_norm": 0.07672012597322464,
      "learning_rate": 1.9751540804197667e-06,
      "loss": 0.6645,
      "step": 2480
    },
    {
      "epoch": 4.708106830536516,
      "grad_norm": 0.07269056886434555,
      "learning_rate": 1.935661434834491e-06,
      "loss": 0.6861,
      "step": 2490
    },
    {
      "epoch": 4.727014890096903,
      "grad_norm": 0.07922536134719849,
      "learning_rate": 1.89647262171393e-06,
      "loss": 0.6497,
      "step": 2500
    },
    {
      "epoch": 4.745922949657292,
      "grad_norm": 0.07756451517343521,
      "learning_rate": 1.8575915266710565e-06,
      "loss": 0.6736,
      "step": 2510
    },
    {
      "epoch": 4.764831009217679,
      "grad_norm": 0.08501828461885452,
      "learning_rate": 1.8190220048082613e-06,
      "loss": 0.6626,
      "step": 2520
    },
    {
      "epoch": 4.7837390687780665,
      "grad_norm": 0.07419081032276154,
      "learning_rate": 1.7807678803351236e-06,
      "loss": 0.6612,
      "step": 2530
    },
    {
      "epoch": 4.802647128338454,
      "grad_norm": 0.08240891993045807,
      "learning_rate": 1.742832946189233e-06,
      "loss": 0.6822,
      "step": 2540
    },
    {
      "epoch": 4.821555187898841,
      "grad_norm": 0.08003178238868713,
      "learning_rate": 1.7052209636601086e-06,
      "loss": 0.6583,
      "step": 2550
    },
    {
      "epoch": 4.84046324745923,
      "grad_norm": 0.08147577196359634,
      "learning_rate": 1.6679356620162767e-06,
      "loss": 0.6486,
      "step": 2560
    },
    {
      "epoch": 4.859371307019617,
      "grad_norm": 0.08541478961706161,
      "learning_rate": 1.6309807381354958e-06,
      "loss": 0.6622,
      "step": 2570
    },
    {
      "epoch": 4.878279366580005,
      "grad_norm": 0.0798458531498909,
      "learning_rate": 1.594359856138219e-06,
      "loss": 0.636,
      "step": 2580
    },
    {
      "epoch": 4.897187426140392,
      "grad_norm": 0.07943376153707504,
      "learning_rate": 1.5580766470242853e-06,
      "loss": 0.6839,
      "step": 2590
    },
    {
      "epoch": 4.9160954857007795,
      "grad_norm": 0.07922222465276718,
      "learning_rate": 1.5221347083129045e-06,
      "loss": 0.6999,
      "step": 2600
    },
    {
      "epoch": 4.935003545261168,
      "grad_norm": 0.07585081458091736,
      "learning_rate": 1.4865376036859597e-06,
      "loss": 0.6508,
      "step": 2610
    },
    {
      "epoch": 4.953911604821555,
      "grad_norm": 0.09331846982240677,
      "learning_rate": 1.45128886263466e-06,
      "loss": 0.6792,
      "step": 2620
    },
    {
      "epoch": 4.972819664381943,
      "grad_norm": 0.08158135414123535,
      "learning_rate": 1.4163919801095955e-06,
      "loss": 0.6796,
      "step": 2630
    },
    {
      "epoch": 4.99172772394233,
      "grad_norm": 0.07948708534240723,
      "learning_rate": 1.381850416174203e-06,
      "loss": 0.6901,
      "step": 2640
    },
    {
      "epoch": 5.010635783502718,
      "grad_norm": 0.08439305424690247,
      "learning_rate": 1.3476675956617008e-06,
      "loss": 0.6418,
      "step": 2650
    },
    {
      "epoch": 5.029543843063106,
      "grad_norm": 0.07965263724327087,
      "learning_rate": 1.313846907835507e-06,
      "loss": 0.6565,
      "step": 2660
    },
    {
      "epoch": 5.048451902623493,
      "grad_norm": 0.08111811429262161,
      "learning_rate": 1.2803917060531994e-06,
      "loss": 0.6952,
      "step": 2670
    },
    {
      "epoch": 5.067359962183881,
      "grad_norm": 0.08763491362333298,
      "learning_rate": 1.2473053074340159e-06,
      "loss": 0.7075,
      "step": 2680
    },
    {
      "epoch": 5.086268021744268,
      "grad_norm": 0.07912111282348633,
      "learning_rate": 1.2145909925299687e-06,
      "loss": 0.6754,
      "step": 2690
    },
    {
      "epoch": 5.1051760813046565,
      "grad_norm": 0.08513174206018448,
      "learning_rate": 1.1822520050005677e-06,
      "loss": 0.6667,
      "step": 2700
    },
    {
      "epoch": 5.124084140865044,
      "grad_norm": 0.07933773100376129,
      "learning_rate": 1.1502915512912095e-06,
      "loss": 0.6672,
      "step": 2710
    },
    {
      "epoch": 5.142992200425431,
      "grad_norm": 0.08284608274698257,
      "learning_rate": 1.118712800315258e-06,
      "loss": 0.6442,
      "step": 2720
    },
    {
      "epoch": 5.161900259985819,
      "grad_norm": 0.0854770690202713,
      "learning_rate": 1.0875188831398354e-06,
      "loss": 0.6357,
      "step": 2730
    },
    {
      "epoch": 5.180808319546206,
      "grad_norm": 0.07782963663339615,
      "learning_rate": 1.0567128926753827e-06,
      "loss": 0.6778,
      "step": 2740
    },
    {
      "epoch": 5.199716379106595,
      "grad_norm": 0.07680613547563553,
      "learning_rate": 1.0262978833689907e-06,
      "loss": 0.6519,
      "step": 2750
    },
    {
      "epoch": 5.218624438666982,
      "grad_norm": 0.07989546656608582,
      "learning_rate": 9.962768709015436e-07,
      "loss": 0.6956,
      "step": 2760
    },
    {
      "epoch": 5.237532498227369,
      "grad_norm": 0.08639136701822281,
      "learning_rate": 9.666528318887198e-07,
      "loss": 0.6724,
      "step": 2770
    },
    {
      "epoch": 5.256440557787757,
      "grad_norm": 0.08070726692676544,
      "learning_rate": 9.374287035858492e-07,
      "loss": 0.6969,
      "step": 2780
    },
    {
      "epoch": 5.275348617348144,
      "grad_norm": 0.08380179107189178,
      "learning_rate": 9.086073835966869e-07,
      "loss": 0.6846,
      "step": 2790
    },
    {
      "epoch": 5.294256676908533,
      "grad_norm": 0.07772370427846909,
      "learning_rate": 8.801917295861101e-07,
      "loss": 0.7061,
      "step": 2800
    },
    {
      "epoch": 5.31316473646892,
      "grad_norm": 0.07815873622894287,
      "learning_rate": 8.521845589967775e-07,
      "loss": 0.6832,
      "step": 2810
    },
    {
      "epoch": 5.3320727960293075,
      "grad_norm": 0.08794362843036652,
      "learning_rate": 8.24588648769778e-07,
      "loss": 0.6796,
      "step": 2820
    },
    {
      "epoch": 5.350980855589695,
      "grad_norm": 0.08358251303434372,
      "learning_rate": 7.974067350692921e-07,
      "loss": 0.6958,
      "step": 2830
    },
    {
      "epoch": 5.369888915150082,
      "grad_norm": 0.08284702897071838,
      "learning_rate": 7.706415130112993e-07,
      "loss": 0.6696,
      "step": 2840
    },
    {
      "epoch": 5.388796974710471,
      "grad_norm": 0.08031268417835236,
      "learning_rate": 7.44295636396356e-07,
      "loss": 0.6463,
      "step": 2850
    },
    {
      "epoch": 5.407705034270858,
      "grad_norm": 0.0891246348619461,
      "learning_rate": 7.183717174464649e-07,
      "loss": 0.6802,
      "step": 2860
    },
    {
      "epoch": 5.426613093831246,
      "grad_norm": 0.08383399248123169,
      "learning_rate": 6.928723265460735e-07,
      "loss": 0.6742,
      "step": 2870
    },
    {
      "epoch": 5.445521153391633,
      "grad_norm": 0.08535037934780121,
      "learning_rate": 6.677999919872185e-07,
      "loss": 0.6898,
      "step": 2880
    },
    {
      "epoch": 5.46442921295202,
      "grad_norm": 0.08179079741239548,
      "learning_rate": 6.431571997188363e-07,
      "loss": 0.6393,
      "step": 2890
    },
    {
      "epoch": 5.483337272512409,
      "grad_norm": 0.0862477719783783,
      "learning_rate": 6.189463931002871e-07,
      "loss": 0.6808,
      "step": 2900
    },
    {
      "epoch": 5.502245332072796,
      "grad_norm": 0.08993732929229736,
      "learning_rate": 5.951699726590881e-07,
      "loss": 0.6702,
      "step": 2910
    },
    {
      "epoch": 5.521153391633184,
      "grad_norm": 0.08879116177558899,
      "learning_rate": 5.718302958528999e-07,
      "loss": 0.6776,
      "step": 2920
    },
    {
      "epoch": 5.540061451193571,
      "grad_norm": 0.08478083461523056,
      "learning_rate": 5.489296768357827e-07,
      "loss": 0.666,
      "step": 2930
    },
    {
      "epoch": 5.5589695107539585,
      "grad_norm": 0.09191175550222397,
      "learning_rate": 5.264703862287418e-07,
      "loss": 0.6819,
      "step": 2940
    },
    {
      "epoch": 5.577877570314347,
      "grad_norm": 0.09217491000890732,
      "learning_rate": 5.044546508945997e-07,
      "loss": 0.6555,
      "step": 2950
    },
    {
      "epoch": 5.596785629874734,
      "grad_norm": 0.08284752815961838,
      "learning_rate": 4.828846537171933e-07,
      "loss": 0.6863,
      "step": 2960
    },
    {
      "epoch": 5.615693689435122,
      "grad_norm": 0.09084407240152359,
      "learning_rate": 4.617625333849435e-07,
      "loss": 0.6511,
      "step": 2970
    },
    {
      "epoch": 5.634601748995509,
      "grad_norm": 0.08455785363912582,
      "learning_rate": 4.4109038417880003e-07,
      "loss": 0.6841,
      "step": 2980
    },
    {
      "epoch": 5.653509808555897,
      "grad_norm": 0.086379773914814,
      "learning_rate": 4.208702557645933e-07,
      "loss": 0.6792,
      "step": 2990
    },
    {
      "epoch": 5.672417868116285,
      "grad_norm": 0.08563714474439621,
      "learning_rate": 4.0110415298980187e-07,
      "loss": 0.6813,
      "step": 3000
    },
    {
      "epoch": 5.691325927676672,
      "grad_norm": 0.08284977078437805,
      "learning_rate": 3.817940356847766e-07,
      "loss": 0.6625,
      "step": 3010
    },
    {
      "epoch": 5.71023398723706,
      "grad_norm": 0.08987167477607727,
      "learning_rate": 3.6294181846841856e-07,
      "loss": 0.6621,
      "step": 3020
    },
    {
      "epoch": 5.729142046797447,
      "grad_norm": 0.08199549466371536,
      "learning_rate": 3.445493705583419e-07,
      "loss": 0.6684,
      "step": 3030
    },
    {
      "epoch": 5.748050106357835,
      "grad_norm": 0.08346036076545715,
      "learning_rate": 3.266185155855406e-07,
      "loss": 0.6584,
      "step": 3040
    },
    {
      "epoch": 5.766958165918223,
      "grad_norm": 0.08736700564622879,
      "learning_rate": 3.0915103141356984e-07,
      "loss": 0.653,
      "step": 3050
    },
    {
      "epoch": 5.78586622547861,
      "grad_norm": 0.09791529923677444,
      "learning_rate": 2.92148649962275e-07,
      "loss": 0.6633,
      "step": 3060
    },
    {
      "epoch": 5.804774285038998,
      "grad_norm": 0.07101596146821976,
      "learning_rate": 2.756130570360621e-07,
      "loss": 0.6742,
      "step": 3070
    },
    {
      "epoch": 5.823682344599385,
      "grad_norm": 0.08588287234306335,
      "learning_rate": 2.595458921567573e-07,
      "loss": 0.6596,
      "step": 3080
    },
    {
      "epoch": 5.842590404159774,
      "grad_norm": 0.08986763656139374,
      "learning_rate": 2.4394874840104e-07,
      "loss": 0.6637,
      "step": 3090
    },
    {
      "epoch": 5.861498463720161,
      "grad_norm": 0.08732476830482483,
      "learning_rate": 2.2882317224248886e-07,
      "loss": 0.6658,
      "step": 3100
    },
    {
      "epoch": 5.8804065232805485,
      "grad_norm": 0.0841900184750557,
      "learning_rate": 2.1417066339824934e-07,
      "loss": 0.643,
      "step": 3110
    },
    {
      "epoch": 5.899314582840936,
      "grad_norm": 0.08480223268270493,
      "learning_rate": 1.9999267468033323e-07,
      "loss": 0.6721,
      "step": 3120
    },
    {
      "epoch": 5.918222642401323,
      "grad_norm": 0.08658278733491898,
      "learning_rate": 1.8629061185157228e-07,
      "loss": 0.6757,
      "step": 3130
    },
    {
      "epoch": 5.937130701961712,
      "grad_norm": 0.08789494633674622,
      "learning_rate": 1.730658334862334e-07,
      "loss": 0.668,
      "step": 3140
    },
    {
      "epoch": 5.956038761522099,
      "grad_norm": 0.08130628615617752,
      "learning_rate": 1.603196508353161e-07,
      "loss": 0.6859,
      "step": 3150
    },
    {
      "epoch": 5.9749468210824865,
      "grad_norm": 0.08904296159744263,
      "learning_rate": 1.4805332769654013e-07,
      "loss": 0.6657,
      "step": 3160
    },
    {
      "epoch": 5.993854880642874,
      "grad_norm": 0.0906049981713295,
      "learning_rate": 1.362680802890376e-07,
      "loss": 0.6263,
      "step": 3170
    },
    {
      "epoch": 6.012762940203261,
      "grad_norm": 0.07836922258138657,
      "learning_rate": 1.2496507713276484e-07,
      "loss": 0.6531,
      "step": 3180
    },
    {
      "epoch": 6.031670999763649,
      "grad_norm": 0.0807194784283638,
      "learning_rate": 1.1414543893264351e-07,
      "loss": 0.6953,
      "step": 3190
    },
    {
      "epoch": 6.050579059324037,
      "grad_norm": 0.08561025559902191,
      "learning_rate": 1.0381023846743831e-07,
      "loss": 0.6435,
      "step": 3200
    },
    {
      "epoch": 6.069487118884425,
      "grad_norm": 0.07596372067928314,
      "learning_rate": 9.3960500483391e-08,
      "loss": 0.6553,
      "step": 3210
    },
    {
      "epoch": 6.088395178444812,
      "grad_norm": 0.08634670078754425,
      "learning_rate": 8.459720159261719e-08,
      "loss": 0.6475,
      "step": 3220
    },
    {
      "epoch": 6.1073032380051995,
      "grad_norm": 0.10798340290784836,
      "learning_rate": 7.572127017627328e-08,
      "loss": 0.6866,
      "step": 3230
    },
    {
      "epoch": 6.126211297565587,
      "grad_norm": 0.0855603814125061,
      "learning_rate": 6.73335862925062e-08,
      "loss": 0.7054,
      "step": 3240
    },
    {
      "epoch": 6.145119357125975,
      "grad_norm": 0.08026163280010223,
      "learning_rate": 5.9434981589195363e-08,
      "loss": 0.6163,
      "step": 3250
    },
    {
      "epoch": 6.164027416686363,
      "grad_norm": 0.09041197597980499,
      "learning_rate": 5.2026239221491524e-08,
      "loss": 0.6661,
      "step": 3260
    },
    {
      "epoch": 6.18293547624675,
      "grad_norm": 0.07854273915290833,
      "learning_rate": 4.510809377416936e-08,
      "loss": 0.7043,
      "step": 3270
    },
    {
      "epoch": 6.2018435358071375,
      "grad_norm": 0.08152135461568832,
      "learning_rate": 3.8681231188791857e-08,
      "loss": 0.6621,
      "step": 3280
    },
    {
      "epoch": 6.220751595367525,
      "grad_norm": 0.08624988794326782,
      "learning_rate": 3.27462886956964e-08,
      "loss": 0.7024,
      "step": 3290
    },
    {
      "epoch": 6.239659654927913,
      "grad_norm": 0.08582387864589691,
      "learning_rate": 2.730385475081532e-08,
      "loss": 0.6288,
      "step": 3300
    },
    {
      "epoch": 6.258567714488301,
      "grad_norm": 0.09809162467718124,
      "learning_rate": 2.2354468977327516e-08,
      "loss": 0.6968,
      "step": 3310
    },
    {
      "epoch": 6.277475774048688,
      "grad_norm": 0.08818063884973526,
      "learning_rate": 1.7898622112156316e-08,
      "loss": 0.666,
      "step": 3320
    },
    {
      "epoch": 6.296383833609076,
      "grad_norm": 0.08366916328668594,
      "learning_rate": 1.3936755957311143e-08,
      "loss": 0.6828,
      "step": 3330
    },
    {
      "epoch": 6.315291893169464,
      "grad_norm": 0.08646037429571152,
      "learning_rate": 1.0469263336082003e-08,
      "loss": 0.6642,
      "step": 3340
    },
    {
      "epoch": 6.334199952729851,
      "grad_norm": 0.08840810507535934,
      "learning_rate": 7.496488054092866e-09,
      "loss": 0.6554,
      "step": 3350
    },
    {
      "epoch": 6.353108012290239,
      "grad_norm": 0.08178407698869705,
      "learning_rate": 5.01872486520949e-09,
      "loss": 0.6952,
      "step": 3360
    },
    {
      "epoch": 6.372016071850626,
      "grad_norm": 0.08657605946063995,
      "learning_rate": 3.0362194423172452e-09,
      "loss": 0.6672,
      "step": 3370
    },
    {
      "epoch": 6.390924131411014,
      "grad_norm": 0.08829477429389954,
      "learning_rate": 1.5491683529617051e-09,
      "loss": 0.6847,
      "step": 3380
    },
    {
      "epoch": 6.409832190971402,
      "grad_norm": 0.07726303488016129,
      "learning_rate": 5.577190398575738e-10,
      "loss": 0.6313,
      "step": 3390
    },
    {
      "epoch": 6.428740250531789,
      "grad_norm": 0.10468406975269318,
      "learning_rate": 6.196980627093396e-11,
      "loss": 0.6979,
      "step": 3400
    }
  ],
  "logging_steps": 10,
  "max_steps": 3405,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 7,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 5.668270529052672e+17,
  "train_batch_size": 1,
  "trial_name": null,
  "trial_params": null
}