{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.9974145360528583,
  "eval_steps": 100,
  "global_step": 870,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.02298190175237001,
      "grad_norm": 17.473215136625974,
      "learning_rate": 3.703703703703703e-07,
      "loss": 1.4133,
      "step": 10
    },
    {
      "epoch": 0.04596380350474002,
      "grad_norm": 14.901168774762104,
      "learning_rate": 7.407407407407406e-07,
      "loss": 1.3532,
      "step": 20
    },
    {
      "epoch": 0.06894570525711002,
      "grad_norm": 20.27126148400169,
      "learning_rate": 9.999687519737637e-07,
      "loss": 1.0492,
      "step": 30
    },
    {
      "epoch": 0.09192760700948004,
      "grad_norm": 4.636768648212712,
      "learning_rate": 9.994133401546208e-07,
      "loss": 0.4967,
      "step": 40
    },
    {
      "epoch": 0.11490950876185005,
      "grad_norm": 0.9099176924012666,
      "learning_rate": 9.981644155508344e-07,
      "loss": 0.3076,
      "step": 50
    },
    {
      "epoch": 0.13789141051422005,
      "grad_norm": 0.9678246586532336,
      "learning_rate": 9.962237124876827e-07,
      "loss": 0.2864,
      "step": 60
    },
    {
      "epoch": 0.16087331226659007,
      "grad_norm": 0.9659318174214611,
      "learning_rate": 9.935939259319937e-07,
      "loss": 0.2787,
      "step": 70
    },
    {
      "epoch": 0.18385521401896007,
      "grad_norm": 0.805167386524419,
      "learning_rate": 9.902787077497684e-07,
      "loss": 0.2702,
      "step": 80
    },
    {
      "epoch": 0.20683711577133007,
      "grad_norm": 0.9470260811178999,
      "learning_rate": 9.86282661634998e-07,
      "loss": 0.2655,
      "step": 90
    },
    {
      "epoch": 0.2298190175237001,
      "grad_norm": 0.8090766933333231,
      "learning_rate": 9.816113367167225e-07,
      "loss": 0.2676,
      "step": 100
    },
    {
      "epoch": 0.25280091927607007,
      "grad_norm": 0.7368007334104237,
      "learning_rate": 9.76271219853204e-07,
      "loss": 0.2576,
      "step": 110
    },
    {
      "epoch": 0.2757828210284401,
      "grad_norm": 0.8981477534949046,
      "learning_rate": 9.70269726623921e-07,
      "loss": 0.2575,
      "step": 120
    },
    {
      "epoch": 0.2987647227808101,
      "grad_norm": 0.8279384079792477,
      "learning_rate": 9.636151910318863e-07,
      "loss": 0.2567,
      "step": 130
    },
    {
      "epoch": 0.32174662453318015,
      "grad_norm": 0.7524359854054471,
      "learning_rate": 9.563168539305926e-07,
      "loss": 0.2522,
      "step": 140
    },
    {
      "epoch": 0.3447285262855501,
      "grad_norm": 0.9062422906574367,
      "learning_rate": 9.483848501916577e-07,
      "loss": 0.2532,
      "step": 150
    },
    {
      "epoch": 0.36771042803792015,
      "grad_norm": 0.8033233708041935,
      "learning_rate": 9.398301946309833e-07,
      "loss": 0.2493,
      "step": 160
    },
    {
      "epoch": 0.39069232979029017,
      "grad_norm": 0.7450917369382344,
      "learning_rate": 9.306647667129779e-07,
      "loss": 0.2538,
      "step": 170
    },
    {
      "epoch": 0.41367423154266014,
      "grad_norm": 0.9250714135548187,
      "learning_rate": 9.209012940540805e-07,
      "loss": 0.2561,
      "step": 180
    },
    {
      "epoch": 0.43665613329503017,
      "grad_norm": 0.9058620445402442,
      "learning_rate": 9.105533347484926e-07,
      "loss": 0.2504,
      "step": 190
    },
    {
      "epoch": 0.4596380350474002,
      "grad_norm": 0.8785859807305939,
      "learning_rate": 8.996352585406653e-07,
      "loss": 0.2584,
      "step": 200
    },
    {
      "epoch": 0.48261993679977017,
      "grad_norm": 0.7116360370849345,
      "learning_rate": 8.881622268706824e-07,
      "loss": 0.2493,
      "step": 210
    },
    {
      "epoch": 0.5056018385521401,
      "grad_norm": 1.0284687042792997,
      "learning_rate": 8.761501718202527e-07,
      "loss": 0.2536,
      "step": 220
    },
    {
      "epoch": 0.5285837403045102,
      "grad_norm": 0.8893106047180209,
      "learning_rate": 8.636157739885461e-07,
      "loss": 0.247,
      "step": 230
    },
    {
      "epoch": 0.5515656420568802,
      "grad_norm": 0.8004232352394812,
      "learning_rate": 8.505764393285983e-07,
      "loss": 0.2553,
      "step": 240
    },
    {
      "epoch": 0.5745475438092502,
      "grad_norm": 0.759365345423453,
      "learning_rate": 8.370502749764485e-07,
      "loss": 0.2429,
      "step": 250
    },
    {
      "epoch": 0.5975294455616202,
      "grad_norm": 0.7308397489232427,
      "learning_rate": 8.230560641065758e-07,
      "loss": 0.2527,
      "step": 260
    },
    {
      "epoch": 0.6205113473139903,
      "grad_norm": 0.8096275403043853,
      "learning_rate": 8.086132398485523e-07,
      "loss": 0.246,
      "step": 270
    },
    {
      "epoch": 0.6434932490663603,
      "grad_norm": 0.6917369274707355,
      "learning_rate": 7.937418583011324e-07,
      "loss": 0.2475,
      "step": 280
    },
    {
      "epoch": 0.6664751508187302,
      "grad_norm": 0.7690495541865903,
      "learning_rate": 7.784625706812521e-07,
      "loss": 0.2435,
      "step": 290
    },
    {
      "epoch": 0.6894570525711002,
      "grad_norm": 0.8546741074565729,
      "learning_rate": 7.627965946466166e-07,
      "loss": 0.2359,
      "step": 300
    },
    {
      "epoch": 0.7124389543234703,
      "grad_norm": 0.6943293008079208,
      "learning_rate": 7.467656848316945e-07,
      "loss": 0.2411,
      "step": 310
    },
    {
      "epoch": 0.7354208560758403,
      "grad_norm": 0.6740313335607717,
      "learning_rate": 7.303921026380388e-07,
      "loss": 0.2421,
      "step": 320
    },
    {
      "epoch": 0.7584027578282103,
      "grad_norm": 0.7401309143105904,
      "learning_rate": 7.136985853208823e-07,
      "loss": 0.2403,
      "step": 330
    },
    {
      "epoch": 0.7813846595805803,
      "grad_norm": 0.8972697133537101,
      "learning_rate": 6.967083144149364e-07,
      "loss": 0.2427,
      "step": 340
    },
    {
      "epoch": 0.8043665613329503,
      "grad_norm": 0.8034283234751667,
      "learning_rate": 6.794448835432393e-07,
      "loss": 0.2462,
      "step": 350
    },
    {
      "epoch": 0.8273484630853203,
      "grad_norm": 0.8789698751068616,
      "learning_rate": 6.619322656537552e-07,
      "loss": 0.251,
      "step": 360
    },
    {
      "epoch": 0.8503303648376903,
      "grad_norm": 0.9764465079161455,
      "learning_rate": 6.441947797292236e-07,
      "loss": 0.2453,
      "step": 370
    },
    {
      "epoch": 0.8733122665900603,
      "grad_norm": 1.1399876406432237,
      "learning_rate": 6.262570570164836e-07,
      "loss": 0.2527,
      "step": 380
    },
    {
      "epoch": 0.8962941683424304,
      "grad_norm": 0.68445267974101,
      "learning_rate": 6.081440068221722e-07,
      "loss": 0.247,
      "step": 390
    },
    {
      "epoch": 0.9192760700948004,
      "grad_norm": 0.8957092040404943,
      "learning_rate": 5.898807819222944e-07,
      "loss": 0.2585,
      "step": 400
    },
    {
      "epoch": 0.9422579718471703,
      "grad_norm": 0.8770324868068373,
      "learning_rate": 5.714927436336963e-07,
      "loss": 0.2463,
      "step": 410
    },
    {
      "epoch": 0.9652398735995403,
      "grad_norm": 0.9677676134901866,
      "learning_rate": 5.530054265959485e-07,
      "loss": 0.2389,
      "step": 420
    },
    {
      "epoch": 0.9882217753519104,
      "grad_norm": 0.8462887454752388,
      "learning_rate": 5.344445033125437e-07,
      "loss": 0.2454,
      "step": 430
    },
    {
      "epoch": 1.009192760700948,
      "grad_norm": 0.945055581666683,
      "learning_rate": 5.15835748500649e-07,
      "loss": 0.2213,
      "step": 440
    },
    {
      "epoch": 1.032174662453318,
      "grad_norm": 0.8021937472797979,
      "learning_rate": 4.972050032989174e-07,
      "loss": 0.2397,
      "step": 450
    },
    {
      "epoch": 1.055156564205688,
      "grad_norm": 0.752181939596644,
      "learning_rate": 4.785781393830657e-07,
      "loss": 0.2424,
      "step": 460
    },
    {
      "epoch": 1.078138465958058,
      "grad_norm": 0.8486783739435911,
      "learning_rate": 4.5998102303904327e-07,
      "loss": 0.247,
      "step": 470
    },
    {
      "epoch": 1.101120367710428,
      "grad_norm": 0.8362567515559186,
      "learning_rate": 4.4143947924368765e-07,
      "loss": 0.244,
      "step": 480
    },
    {
      "epoch": 1.124102269462798,
      "grad_norm": 0.811906457503492,
      "learning_rate": 4.2297925580274513e-07,
      "loss": 0.2399,
      "step": 490
    },
    {
      "epoch": 1.147084171215168,
      "grad_norm": 0.777970615999953,
      "learning_rate": 4.0462598759605194e-07,
      "loss": 0.2426,
      "step": 500
    },
    {
      "epoch": 1.170066072967538,
      "grad_norm": 0.8530171512659619,
      "learning_rate": 3.8640516097953404e-07,
      "loss": 0.2472,
      "step": 510
    },
    {
      "epoch": 1.193047974719908,
      "grad_norm": 0.8981370161868097,
      "learning_rate": 3.683420783934537e-07,
      "loss": 0.2442,
      "step": 520
    },
    {
      "epoch": 1.2160298764722781,
      "grad_norm": 0.9690978990653164,
      "learning_rate": 3.50461823226051e-07,
      "loss": 0.2391,
      "step": 530
    },
    {
      "epoch": 1.2390117782246481,
      "grad_norm": 0.6701870042781312,
      "learning_rate": 3.3278922498137454e-07,
      "loss": 0.2433,
      "step": 540
    },
    {
      "epoch": 1.2619936799770182,
      "grad_norm": 0.9229238547578185,
      "learning_rate": 3.153488247996686e-07,
      "loss": 0.2445,
      "step": 550
    },
    {
      "epoch": 1.284975581729388,
      "grad_norm": 0.9247475568229516,
      "learning_rate": 2.981648413781984e-07,
      "loss": 0.2374,
      "step": 560
    },
    {
      "epoch": 1.3079574834817582,
      "grad_norm": 0.9627262851467593,
      "learning_rate": 2.8126113733983646e-07,
      "loss": 0.2448,
      "step": 570
    },
    {
      "epoch": 1.330939385234128,
      "grad_norm": 0.9120263302438554,
      "learning_rate": 2.6466118609611595e-07,
      "loss": 0.2404,
      "step": 580
    },
    {
      "epoch": 1.353921286986498,
      "grad_norm": 0.8397564783689743,
      "learning_rate": 2.4838803925076145e-07,
      "loss": 0.243,
      "step": 590
    },
    {
      "epoch": 1.376903188738868,
      "grad_norm": 0.9304678269053254,
      "learning_rate": 2.3246429458896632e-07,
      "loss": 0.2452,
      "step": 600
    },
    {
      "epoch": 1.399885090491238,
      "grad_norm": 1.0020243730326204,
      "learning_rate": 2.1691206469686806e-07,
      "loss": 0.2422,
      "step": 610
    },
    {
      "epoch": 1.4228669922436081,
      "grad_norm": 0.9391802799011835,
      "learning_rate": 2.0175294625479694e-07,
      "loss": 0.2456,
      "step": 620
    },
    {
      "epoch": 1.4458488939959782,
      "grad_norm": 0.8231683928715978,
      "learning_rate": 1.8700799004693917e-07,
      "loss": 0.2397,
      "step": 630
    },
    {
      "epoch": 1.4688307957483482,
      "grad_norm": 0.8520636087166905,
      "learning_rate": 1.7269767172906352e-07,
      "loss": 0.2441,
      "step": 640
    },
    {
      "epoch": 1.4918126975007182,
      "grad_norm": 0.9752243817047155,
      "learning_rate": 1.5884186339490007e-07,
      "loss": 0.2401,
      "step": 650
    },
    {
      "epoch": 1.5147945992530882,
      "grad_norm": 0.7665318624135639,
      "learning_rate": 1.4545980598066088e-07,
      "loss": 0.2439,
      "step": 660
    },
    {
      "epoch": 1.537776501005458,
      "grad_norm": 0.7876208993507292,
      "learning_rate": 1.325700825460192e-07,
      "loss": 0.241,
      "step": 670
    },
    {
      "epoch": 1.5607584027578283,
      "grad_norm": 0.793689651615786,
      "learning_rate": 1.201905924686518e-07,
      "loss": 0.2439,
      "step": 680
    },
    {
      "epoch": 1.583740304510198,
      "grad_norm": 0.8116081679288228,
      "learning_rate": 1.0833852658818165e-07,
      "loss": 0.2453,
      "step": 690
    },
    {
      "epoch": 1.6067222062625683,
      "grad_norm": 0.7757866512002836,
      "learning_rate": 9.703034333403315e-08,
      "loss": 0.2401,
      "step": 700
    },
    {
      "epoch": 1.6297041080149381,
      "grad_norm": 0.722479725366503,
      "learning_rate": 8.628174587035341e-08,
      "loss": 0.2337,
      "step": 710
    },
    {
      "epoch": 1.6526860097673084,
      "grad_norm": 0.8093501442802761,
      "learning_rate": 7.610766028973709e-08,
      "loss": 0.2359,
      "step": 720
    },
    {
      "epoch": 1.6756679115196782,
      "grad_norm": 0.7148511605688279,
      "learning_rate": 6.65222148860341e-08,
      "loss": 0.2372,
      "step": 730
    },
    {
      "epoch": 1.6986498132720482,
      "grad_norm": 0.7422092393121354,
      "learning_rate": 5.7538720535024675e-08,
      "loss": 0.2449,
      "step": 740
    },
    {
      "epoch": 1.7216317150244183,
      "grad_norm": 0.7434466170466291,
      "learning_rate": 4.916965221020752e-08,
      "loss": 0.2395,
      "step": 750
    },
    {
      "epoch": 1.7446136167767883,
      "grad_norm": 0.7850559068635626,
      "learning_rate": 4.142663165936577e-08,
      "loss": 0.2436,
      "step": 760
    },
    {
      "epoch": 1.7675955185291583,
      "grad_norm": 0.7936937510399781,
      "learning_rate": 3.4320411265970126e-08,
      "loss": 0.2449,
      "step": 770
    },
    {
      "epoch": 1.7905774202815283,
      "grad_norm": 0.8234052802867052,
      "learning_rate": 2.7860859117828982e-08,
      "loss": 0.2367,
      "step": 780
    },
    {
      "epoch": 1.8135593220338984,
      "grad_norm": 0.7210582583571304,
      "learning_rate": 2.2056945303719654e-08,
      "loss": 0.241,
      "step": 790
    },
    {
      "epoch": 1.8365412237862682,
      "grad_norm": 0.7831307857864389,
      "learning_rate": 1.6916729457030876e-08,
      "loss": 0.2493,
      "step": 800
    },
    {
      "epoch": 1.8595231255386384,
      "grad_norm": 0.6495236499007406,
      "learning_rate": 1.2447349563713182e-08,
      "loss": 0.2364,
      "step": 810
    },
    {
      "epoch": 1.8825050272910082,
      "grad_norm": 0.8108447079621942,
      "learning_rate": 8.655012050079568e-09,
      "loss": 0.2418,
      "step": 820
    },
    {
      "epoch": 1.9054869290433785,
      "grad_norm": 0.8061530065813931,
      "learning_rate": 5.5449831642214174e-09,
      "loss": 0.2466,
      "step": 830
    },
    {
      "epoch": 1.9284688307957483,
      "grad_norm": 0.6679927143166136,
      "learning_rate": 3.1215816630071335e-09,
      "loss": 0.2349,
      "step": 840
    },
    {
      "epoch": 1.9514507325481185,
      "grad_norm": 0.8406010249602488,
      "learning_rate": 1.3881728148191773e-09,
      "loss": 0.2448,
      "step": 850
    },
    {
      "epoch": 1.9744326343004883,
      "grad_norm": 0.7452328846254022,
      "learning_rate": 3.471637263576799e-10,
      "loss": 0.2369,
      "step": 860
    },
    {
      "epoch": 1.9974145360528583,
      "grad_norm": 0.9360407578833112,
      "learning_rate": 0.0,
      "loss": 0.243,
      "step": 870
    }
  ],
  "logging_steps": 10,
  "max_steps": 870,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 2,
  "save_steps": 1000,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 264326257573888.0,
  "train_batch_size": 1,
  "trial_name": null,
  "trial_params": null
}