{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.07256367462448299,
  "eval_steps": 500,
  "global_step": 1000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0007256367462448298,
      "grad_norm": 45.75,
      "learning_rate": 5e-06,
      "loss": 1.9641,
      "step": 10
    },
    {
      "epoch": 0.0014512734924896596,
      "grad_norm": 46.5,
      "learning_rate": 1e-05,
      "loss": 1.7641,
      "step": 20
    },
    {
      "epoch": 0.0021769102387344894,
      "grad_norm": 35.75,
      "learning_rate": 1.5e-05,
      "loss": 1.4723,
      "step": 30
    },
    {
      "epoch": 0.0029025469849793192,
      "grad_norm": 31.0,
      "learning_rate": 2e-05,
      "loss": 1.0985,
      "step": 40
    },
    {
      "epoch": 0.003628183731224149,
      "grad_norm": 28.875,
      "learning_rate": 2.5e-05,
      "loss": 1.0381,
      "step": 50
    },
    {
      "epoch": 0.004353820477468979,
      "grad_norm": 26.625,
      "learning_rate": 3e-05,
      "loss": 0.6977,
      "step": 60
    },
    {
      "epoch": 0.005079457223713809,
      "grad_norm": 23.75,
      "learning_rate": 3.5e-05,
      "loss": 0.6392,
      "step": 70
    },
    {
      "epoch": 0.0058050939699586385,
      "grad_norm": 26.5,
      "learning_rate": 4e-05,
      "loss": 0.7459,
      "step": 80
    },
    {
      "epoch": 0.006530730716203468,
      "grad_norm": 29.875,
      "learning_rate": 4.5e-05,
      "loss": 0.6155,
      "step": 90
    },
    {
      "epoch": 0.007256367462448298,
      "grad_norm": 21.5,
      "learning_rate": 5e-05,
      "loss": 0.3941,
      "step": 100
    },
    {
      "epoch": 0.007982004208693128,
      "grad_norm": 17.0,
      "learning_rate": 4.9999934086574596e-05,
      "loss": 0.4513,
      "step": 110
    },
    {
      "epoch": 0.008707640954937958,
      "grad_norm": 23.875,
      "learning_rate": 4.9999736346645943e-05,
      "loss": 0.5292,
      "step": 120
    },
    {
      "epoch": 0.009433277701182788,
      "grad_norm": 17.25,
      "learning_rate": 4.999940678125673e-05,
      "loss": 0.3869,
      "step": 130
    },
    {
      "epoch": 0.010158914447427617,
      "grad_norm": 24.75,
      "learning_rate": 4.9998945392144796e-05,
      "loss": 0.4121,
      "step": 140
    },
    {
      "epoch": 0.010884551193672447,
      "grad_norm": 16.625,
      "learning_rate": 4.999835218174307e-05,
      "loss": 0.4199,
      "step": 150
    },
    {
      "epoch": 0.011610187939917277,
      "grad_norm": 11.8125,
      "learning_rate": 4.99976271531796e-05,
      "loss": 0.4694,
      "step": 160
    },
    {
      "epoch": 0.012335824686162107,
      "grad_norm": 15.9375,
      "learning_rate": 4.9996770310277506e-05,
      "loss": 0.3736,
      "step": 170
    },
    {
      "epoch": 0.013061461432406937,
      "grad_norm": 18.0,
      "learning_rate": 4.9995781657555e-05,
      "loss": 0.4224,
      "step": 180
    },
    {
      "epoch": 0.013787098178651766,
      "grad_norm": 23.0,
      "learning_rate": 4.99946612002253e-05,
      "loss": 0.4307,
      "step": 190
    },
    {
      "epoch": 0.014512734924896596,
      "grad_norm": 19.375,
      "learning_rate": 4.9993408944196676e-05,
      "loss": 0.3449,
      "step": 200
    },
    {
      "epoch": 0.015238371671141426,
      "grad_norm": 11.0,
      "learning_rate": 4.9992024896072364e-05,
      "loss": 0.3243,
      "step": 210
    },
    {
      "epoch": 0.015964008417386256,
      "grad_norm": 17.875,
      "learning_rate": 4.999050906315055e-05,
      "loss": 0.3002,
      "step": 220
    },
    {
      "epoch": 0.016689645163631087,
      "grad_norm": 6.875,
      "learning_rate": 4.998886145342434e-05,
      "loss": 0.3882,
      "step": 230
    },
    {
      "epoch": 0.017415281909875915,
      "grad_norm": 12.125,
      "learning_rate": 4.9987082075581684e-05,
      "loss": 0.4211,
      "step": 240
    },
    {
      "epoch": 0.018140918656120747,
      "grad_norm": 16.875,
      "learning_rate": 4.9985170939005386e-05,
      "loss": 0.2967,
      "step": 250
    },
    {
      "epoch": 0.018866555402365575,
      "grad_norm": 26.125,
      "learning_rate": 4.998312805377302e-05,
      "loss": 0.3283,
      "step": 260
    },
    {
      "epoch": 0.019592192148610407,
      "grad_norm": 18.0,
      "learning_rate": 4.998095343065685e-05,
      "loss": 0.3455,
      "step": 270
    },
    {
      "epoch": 0.020317828894855235,
      "grad_norm": 15.4375,
      "learning_rate": 4.997864708112384e-05,
      "loss": 0.2891,
      "step": 280
    },
    {
      "epoch": 0.021043465641100066,
      "grad_norm": 30.25,
      "learning_rate": 4.997620901733554e-05,
      "loss": 0.3645,
      "step": 290
    },
    {
      "epoch": 0.021769102387344894,
      "grad_norm": 9.875,
      "learning_rate": 4.997363925214803e-05,
      "loss": 0.3047,
      "step": 300
    },
    {
      "epoch": 0.022494739133589726,
      "grad_norm": 18.625,
      "learning_rate": 4.9970937799111896e-05,
      "loss": 0.2943,
      "step": 310
    },
    {
      "epoch": 0.023220375879834554,
      "grad_norm": 30.625,
      "learning_rate": 4.996810467247207e-05,
      "loss": 0.3315,
      "step": 320
    },
    {
      "epoch": 0.023946012626079385,
      "grad_norm": 14.3125,
      "learning_rate": 4.9965139887167856e-05,
      "loss": 0.2473,
      "step": 330
    },
    {
      "epoch": 0.024671649372324213,
      "grad_norm": 21.5,
      "learning_rate": 4.996204345883278e-05,
      "loss": 0.3173,
      "step": 340
    },
    {
      "epoch": 0.025397286118569045,
      "grad_norm": 16.125,
      "learning_rate": 4.9958815403794546e-05,
      "loss": 0.3813,
      "step": 350
    },
    {
      "epoch": 0.026122922864813873,
      "grad_norm": 15.0625,
      "learning_rate": 4.995545573907492e-05,
      "loss": 0.3366,
      "step": 360
    },
    {
      "epoch": 0.026848559611058705,
      "grad_norm": 19.5,
      "learning_rate": 4.995196448238966e-05,
      "loss": 0.3188,
      "step": 370
    },
    {
      "epoch": 0.027574196357303533,
      "grad_norm": 20.125,
      "learning_rate": 4.9948341652148436e-05,
      "loss": 0.2776,
      "step": 380
    },
    {
      "epoch": 0.028299833103548364,
      "grad_norm": 17.25,
      "learning_rate": 4.994458726745468e-05,
      "loss": 0.3574,
      "step": 390
    },
    {
      "epoch": 0.029025469849793192,
      "grad_norm": 5.375,
      "learning_rate": 4.9940701348105554e-05,
      "loss": 0.2566,
      "step": 400
    },
    {
      "epoch": 0.029751106596038024,
      "grad_norm": 31.0,
      "learning_rate": 4.99366839145918e-05,
      "loss": 0.275,
      "step": 410
    },
    {
      "epoch": 0.030476743342282852,
      "grad_norm": 21.75,
      "learning_rate": 4.993253498809762e-05,
      "loss": 0.2928,
      "step": 420
    },
    {
      "epoch": 0.031202380088527683,
      "grad_norm": 21.25,
      "learning_rate": 4.9928254590500646e-05,
      "loss": 0.3466,
      "step": 430
    },
    {
      "epoch": 0.03192801683477251,
      "grad_norm": 15.25,
      "learning_rate": 4.9923842744371707e-05,
      "loss": 0.2911,
      "step": 440
    },
    {
      "epoch": 0.03265365358101734,
      "grad_norm": 8.9375,
      "learning_rate": 4.99192994729748e-05,
      "loss": 0.2153,
      "step": 450
    },
    {
      "epoch": 0.033379290327262175,
      "grad_norm": 20.5,
      "learning_rate": 4.991462480026693e-05,
      "loss": 0.383,
      "step": 460
    },
    {
      "epoch": 0.034104927073507,
      "grad_norm": 6.46875,
      "learning_rate": 4.9909818750898e-05,
      "loss": 0.3099,
      "step": 470
    },
    {
      "epoch": 0.03483056381975183,
      "grad_norm": 34.0,
      "learning_rate": 4.990488135021065e-05,
      "loss": 0.2897,
      "step": 480
    },
    {
      "epoch": 0.03555620056599666,
      "grad_norm": 7.78125,
      "learning_rate": 4.989981262424017e-05,
      "loss": 0.3098,
      "step": 490
    },
    {
      "epoch": 0.036281837312241494,
      "grad_norm": 41.0,
      "learning_rate": 4.989461259971432e-05,
      "loss": 0.2428,
      "step": 500
    },
    {
      "epoch": 0.03700747405848632,
      "grad_norm": 17.375,
      "learning_rate": 4.988928130405323e-05,
      "loss": 0.2485,
      "step": 510
    },
    {
      "epoch": 0.03773311080473115,
      "grad_norm": 15.625,
      "learning_rate": 4.9883818765369194e-05,
      "loss": 0.2694,
      "step": 520
    },
    {
      "epoch": 0.03845874755097598,
      "grad_norm": 17.75,
      "learning_rate": 4.98782250124666e-05,
      "loss": 0.2752,
      "step": 530
    },
    {
      "epoch": 0.03918438429722081,
      "grad_norm": 9.8125,
      "learning_rate": 4.987250007484172e-05,
      "loss": 0.2687,
      "step": 540
    },
    {
      "epoch": 0.03991002104346564,
      "grad_norm": 3.6875,
      "learning_rate": 4.986664398268256e-05,
      "loss": 0.268,
      "step": 550
    },
    {
      "epoch": 0.04063565778971047,
      "grad_norm": 24.125,
      "learning_rate": 4.986065676686874e-05,
      "loss": 0.2602,
      "step": 560
    },
    {
      "epoch": 0.0413612945359553,
      "grad_norm": 23.125,
      "learning_rate": 4.98545384589713e-05,
      "loss": 0.2731,
      "step": 570
    },
    {
      "epoch": 0.04208693128220013,
      "grad_norm": 13.5625,
      "learning_rate": 4.984828909125251e-05,
      "loss": 0.2537,
      "step": 580
    },
    {
      "epoch": 0.042812568028444964,
      "grad_norm": 6.15625,
      "learning_rate": 4.9841908696665764e-05,
      "loss": 0.1723,
      "step": 590
    },
    {
      "epoch": 0.04353820477468979,
      "grad_norm": 5.03125,
      "learning_rate": 4.9835397308855344e-05,
      "loss": 0.2414,
      "step": 600
    },
    {
      "epoch": 0.04426384152093462,
      "grad_norm": 24.0,
      "learning_rate": 4.9828754962156286e-05,
      "loss": 0.2811,
      "step": 610
    },
    {
      "epoch": 0.04498947826717945,
      "grad_norm": 25.875,
      "learning_rate": 4.9821981691594175e-05,
      "loss": 0.2949,
      "step": 620
    },
    {
      "epoch": 0.04571511501342428,
      "grad_norm": 12.875,
      "learning_rate": 4.981507753288497e-05,
      "loss": 0.2662,
      "step": 630
    },
    {
      "epoch": 0.04644075175966911,
      "grad_norm": 19.375,
      "learning_rate": 4.9808042522434814e-05,
      "loss": 0.2659,
      "step": 640
    },
    {
      "epoch": 0.04716638850591394,
      "grad_norm": 22.25,
      "learning_rate": 4.9800876697339824e-05,
      "loss": 0.3333,
      "step": 650
    },
    {
      "epoch": 0.04789202525215877,
      "grad_norm": 18.875,
      "learning_rate": 4.979358009538594e-05,
      "loss": 0.3197,
      "step": 660
    },
    {
      "epoch": 0.0486176619984036,
      "grad_norm": 24.0,
      "learning_rate": 4.978615275504869e-05,
      "loss": 0.2385,
      "step": 670
    },
    {
      "epoch": 0.04934329874464843,
      "grad_norm": 19.5,
      "learning_rate": 4.977859471549297e-05,
      "loss": 0.2673,
      "step": 680
    },
    {
      "epoch": 0.05006893549089326,
      "grad_norm": 11.1875,
      "learning_rate": 4.977090601657289e-05,
      "loss": 0.2107,
      "step": 690
    },
    {
      "epoch": 0.05079457223713809,
      "grad_norm": 9.8125,
      "learning_rate": 4.976308669883153e-05,
      "loss": 0.2825,
      "step": 700
    },
    {
      "epoch": 0.05152020898338292,
      "grad_norm": 12.6875,
      "learning_rate": 4.975513680350073e-05,
      "loss": 0.2604,
      "step": 710
    },
    {
      "epoch": 0.052245845729627746,
      "grad_norm": 19.375,
      "learning_rate": 4.974705637250089e-05,
      "loss": 0.209,
      "step": 720
    },
    {
      "epoch": 0.05297148247587258,
      "grad_norm": 20.625,
      "learning_rate": 4.97388454484407e-05,
      "loss": 0.255,
      "step": 730
    },
    {
      "epoch": 0.05369711922211741,
      "grad_norm": 17.25,
      "learning_rate": 4.973050407461698e-05,
      "loss": 0.2526,
      "step": 740
    },
    {
      "epoch": 0.05442275596836224,
      "grad_norm": 3.453125,
      "learning_rate": 4.972203229501441e-05,
      "loss": 0.2028,
      "step": 750
    },
    {
      "epoch": 0.055148392714607065,
      "grad_norm": 18.625,
      "learning_rate": 4.971343015430532e-05,
      "loss": 0.2395,
      "step": 760
    },
    {
      "epoch": 0.0558740294608519,
      "grad_norm": 27.5,
      "learning_rate": 4.970469769784941e-05,
      "loss": 0.218,
      "step": 770
    },
    {
      "epoch": 0.05659966620709673,
      "grad_norm": 24.375,
      "learning_rate": 4.96958349716936e-05,
      "loss": 0.2264,
      "step": 780
    },
    {
      "epoch": 0.05732530295334156,
      "grad_norm": 27.125,
      "learning_rate": 4.968684202257169e-05,
      "loss": 0.3467,
      "step": 790
    },
    {
      "epoch": 0.058050939699586385,
      "grad_norm": 18.0,
      "learning_rate": 4.967771889790416e-05,
      "loss": 0.2894,
      "step": 800
    },
    {
      "epoch": 0.058776576445831216,
      "grad_norm": 8.375,
      "learning_rate": 4.966846564579792e-05,
      "loss": 0.1818,
      "step": 810
    },
    {
      "epoch": 0.05950221319207605,
      "grad_norm": 13.75,
      "learning_rate": 4.965908231504607e-05,
      "loss": 0.3376,
      "step": 820
    },
    {
      "epoch": 0.06022784993832088,
      "grad_norm": 53.5,
      "learning_rate": 4.964956895512759e-05,
      "loss": 0.2843,
      "step": 830
    },
    {
      "epoch": 0.060953486684565704,
      "grad_norm": 12.3125,
      "learning_rate": 4.963992561620714e-05,
      "loss": 0.2396,
      "step": 840
    },
    {
      "epoch": 0.061679123430810535,
      "grad_norm": 16.0,
      "learning_rate": 4.963015234913475e-05,
      "loss": 0.255,
      "step": 850
    },
    {
      "epoch": 0.06240476017705537,
      "grad_norm": 6.8125,
      "learning_rate": 4.96202492054456e-05,
      "loss": 0.2316,
      "step": 860
    },
    {
      "epoch": 0.06313039692330019,
      "grad_norm": 22.875,
      "learning_rate": 4.9610216237359684e-05,
      "loss": 0.3816,
      "step": 870
    },
    {
      "epoch": 0.06385603366954502,
      "grad_norm": 23.875,
      "learning_rate": 4.960005349778159e-05,
      "loss": 0.2203,
      "step": 880
    },
    {
      "epoch": 0.06458167041578985,
      "grad_norm": 38.0,
      "learning_rate": 4.95897610403002e-05,
      "loss": 0.3093,
      "step": 890
    },
    {
      "epoch": 0.06530730716203469,
      "grad_norm": 19.75,
      "learning_rate": 4.95793389191884e-05,
      "loss": 0.2255,
      "step": 900
    },
    {
      "epoch": 0.06603294390827952,
      "grad_norm": 16.5,
      "learning_rate": 4.95687871894028e-05,
      "loss": 0.1937,
      "step": 910
    },
    {
      "epoch": 0.06675858065452435,
      "grad_norm": 10.0,
      "learning_rate": 4.9558105906583466e-05,
      "loss": 0.2242,
      "step": 920
    },
    {
      "epoch": 0.06748421740076918,
      "grad_norm": 21.0,
      "learning_rate": 4.9547295127053586e-05,
      "loss": 0.3581,
      "step": 930
    },
    {
      "epoch": 0.068209854147014,
      "grad_norm": 17.125,
      "learning_rate": 4.95363549078192e-05,
      "loss": 0.2132,
      "step": 940
    },
    {
      "epoch": 0.06893549089325883,
      "grad_norm": 17.25,
      "learning_rate": 4.952528530656889e-05,
      "loss": 0.2659,
      "step": 950
    },
    {
      "epoch": 0.06966112763950366,
      "grad_norm": 19.5,
      "learning_rate": 4.9514086381673496e-05,
      "loss": 0.2169,
      "step": 960
    },
    {
      "epoch": 0.0703867643857485,
      "grad_norm": 20.625,
      "learning_rate": 4.9502758192185774e-05,
      "loss": 0.3276,
      "step": 970
    },
    {
      "epoch": 0.07111240113199332,
      "grad_norm": 23.125,
      "learning_rate": 4.949130079784009e-05,
      "loss": 0.2512,
      "step": 980
    },
    {
      "epoch": 0.07183803787823816,
      "grad_norm": 18.125,
      "learning_rate": 4.9479714259052143e-05,
      "loss": 0.2827,
      "step": 990
    },
    {
      "epoch": 0.07256367462448299,
      "grad_norm": 18.375,
      "learning_rate": 4.946799863691862e-05,
      "loss": 0.206,
      "step": 1000
    }
  ],
  "logging_steps": 10,
  "max_steps": 13781,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 500,
  "total_flos": 0.0,
  "train_batch_size": 5,
  "trial_name": null,
  "trial_params": null
}