{
  "best_global_step": 876,
  "best_metric": 0.9277850539806721,
  "best_model_checkpoint": "./saved_best/sst2_BERT-20000/full_text/checkpoint-876",
  "epoch": 3.0,
  "eval_steps": 500,
  "global_step": 1314,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0228310502283105,
      "grad_norm": 127786.96875,
      "learning_rate": 1.9863013698630137e-05,
      "loss": 0.7128,
      "step": 10
    },
    {
      "epoch": 0.045662100456621,
      "grad_norm": 288305.125,
      "learning_rate": 1.971080669710807e-05,
      "loss": 0.6404,
      "step": 20
    },
    {
      "epoch": 0.0684931506849315,
      "grad_norm": 372462.9375,
      "learning_rate": 1.9558599695585997e-05,
      "loss": 0.5327,
      "step": 30
    },
    {
      "epoch": 0.091324200913242,
      "grad_norm": 602445.375,
      "learning_rate": 1.940639269406393e-05,
      "loss": 0.432,
      "step": 40
    },
    {
      "epoch": 0.1141552511415525,
      "grad_norm": 358026.8125,
      "learning_rate": 1.9254185692541858e-05,
      "loss": 0.3758,
      "step": 50
    },
    {
      "epoch": 0.136986301369863,
      "grad_norm": 402071.21875,
      "learning_rate": 1.910197869101979e-05,
      "loss": 0.3234,
      "step": 60
    },
    {
      "epoch": 0.1598173515981735,
      "grad_norm": 352748.53125,
      "learning_rate": 1.8949771689497718e-05,
      "loss": 0.3151,
      "step": 70
    },
    {
      "epoch": 0.182648401826484,
      "grad_norm": 905835.5625,
      "learning_rate": 1.879756468797565e-05,
      "loss": 0.3166,
      "step": 80
    },
    {
      "epoch": 0.2054794520547945,
      "grad_norm": 466547.9375,
      "learning_rate": 1.8645357686453578e-05,
      "loss": 0.3419,
      "step": 90
    },
    {
      "epoch": 0.228310502283105,
      "grad_norm": 423154.84375,
      "learning_rate": 1.849315068493151e-05,
      "loss": 0.3481,
      "step": 100
    },
    {
      "epoch": 0.2511415525114155,
      "grad_norm": 297082.96875,
      "learning_rate": 1.8340943683409438e-05,
      "loss": 0.3115,
      "step": 110
    },
    {
      "epoch": 0.273972602739726,
      "grad_norm": 369912.21875,
      "learning_rate": 1.818873668188737e-05,
      "loss": 0.316,
      "step": 120
    },
    {
      "epoch": 0.2968036529680365,
      "grad_norm": 197489.21875,
      "learning_rate": 1.80365296803653e-05,
      "loss": 0.295,
      "step": 130
    },
    {
      "epoch": 0.319634703196347,
      "grad_norm": 557747.25,
      "learning_rate": 1.788432267884323e-05,
      "loss": 0.2646,
      "step": 140
    },
    {
      "epoch": 0.3424657534246575,
      "grad_norm": 1081537.125,
      "learning_rate": 1.773211567732116e-05,
      "loss": 0.2154,
      "step": 150
    },
    {
      "epoch": 0.365296803652968,
      "grad_norm": 468382.625,
      "learning_rate": 1.757990867579909e-05,
      "loss": 0.3291,
      "step": 160
    },
    {
      "epoch": 0.3881278538812785,
      "grad_norm": 1200675.625,
      "learning_rate": 1.742770167427702e-05,
      "loss": 0.3015,
      "step": 170
    },
    {
      "epoch": 0.410958904109589,
      "grad_norm": 290634.03125,
      "learning_rate": 1.727549467275495e-05,
      "loss": 0.2645,
      "step": 180
    },
    {
      "epoch": 0.4337899543378995,
      "grad_norm": 785782.75,
      "learning_rate": 1.712328767123288e-05,
      "loss": 0.3038,
      "step": 190
    },
    {
      "epoch": 0.45662100456621,
      "grad_norm": 454525.84375,
      "learning_rate": 1.6971080669710807e-05,
      "loss": 0.2706,
      "step": 200
    },
    {
      "epoch": 0.4794520547945205,
      "grad_norm": 366387.75,
      "learning_rate": 1.681887366818874e-05,
      "loss": 0.2778,
      "step": 210
    },
    {
      "epoch": 0.502283105022831,
      "grad_norm": 213485.28125,
      "learning_rate": 1.6666666666666667e-05,
      "loss": 0.2431,
      "step": 220
    },
    {
      "epoch": 0.5251141552511416,
      "grad_norm": 434280.5625,
      "learning_rate": 1.65144596651446e-05,
      "loss": 0.2712,
      "step": 230
    },
    {
      "epoch": 0.547945205479452,
      "grad_norm": 444368.0625,
      "learning_rate": 1.6362252663622528e-05,
      "loss": 0.2257,
      "step": 240
    },
    {
      "epoch": 0.5707762557077626,
      "grad_norm": 325988.9375,
      "learning_rate": 1.6210045662100456e-05,
      "loss": 0.2046,
      "step": 250
    },
    {
      "epoch": 0.593607305936073,
      "grad_norm": 376345.3125,
      "learning_rate": 1.6057838660578388e-05,
      "loss": 0.2685,
      "step": 260
    },
    {
      "epoch": 0.6164383561643836,
      "grad_norm": 596176.125,
      "learning_rate": 1.5905631659056316e-05,
      "loss": 0.2729,
      "step": 270
    },
    {
      "epoch": 0.639269406392694,
      "grad_norm": 650678.25,
      "learning_rate": 1.5753424657534248e-05,
      "loss": 0.2352,
      "step": 280
    },
    {
      "epoch": 0.6621004566210046,
      "grad_norm": 688517.5,
      "learning_rate": 1.5601217656012176e-05,
      "loss": 0.2395,
      "step": 290
    },
    {
      "epoch": 0.684931506849315,
      "grad_norm": 550269.5625,
      "learning_rate": 1.5449010654490108e-05,
      "loss": 0.2338,
      "step": 300
    },
    {
      "epoch": 0.7077625570776256,
      "grad_norm": 382038.0625,
      "learning_rate": 1.5296803652968037e-05,
      "loss": 0.2819,
      "step": 310
    },
    {
      "epoch": 0.730593607305936,
      "grad_norm": 413848.71875,
      "learning_rate": 1.5144596651445968e-05,
      "loss": 0.182,
      "step": 320
    },
    {
      "epoch": 0.7534246575342466,
      "grad_norm": 833068.375,
      "learning_rate": 1.4992389649923897e-05,
      "loss": 0.237,
      "step": 330
    },
    {
      "epoch": 0.776255707762557,
      "grad_norm": 249349.609375,
      "learning_rate": 1.4840182648401829e-05,
      "loss": 0.2295,
      "step": 340
    },
    {
      "epoch": 0.7990867579908676,
      "grad_norm": 195666.703125,
      "learning_rate": 1.4687975646879757e-05,
      "loss": 0.1964,
      "step": 350
    },
    {
      "epoch": 0.821917808219178,
      "grad_norm": 526547.625,
      "learning_rate": 1.4535768645357689e-05,
      "loss": 0.3156,
      "step": 360
    },
    {
      "epoch": 0.8447488584474886,
      "grad_norm": 733883.0,
      "learning_rate": 1.4383561643835617e-05,
      "loss": 0.2027,
      "step": 370
    },
    {
      "epoch": 0.867579908675799,
      "grad_norm": 299320.78125,
      "learning_rate": 1.4231354642313549e-05,
      "loss": 0.2713,
      "step": 380
    },
    {
      "epoch": 0.8904109589041096,
      "grad_norm": 440281.78125,
      "learning_rate": 1.4079147640791477e-05,
      "loss": 0.2047,
      "step": 390
    },
    {
      "epoch": 0.91324200913242,
      "grad_norm": 462590.125,
      "learning_rate": 1.3926940639269409e-05,
      "loss": 0.2395,
      "step": 400
    },
    {
      "epoch": 0.9360730593607306,
      "grad_norm": 1140073.75,
      "learning_rate": 1.3774733637747338e-05,
      "loss": 0.216,
      "step": 410
    },
    {
      "epoch": 0.958904109589041,
      "grad_norm": 197822.625,
      "learning_rate": 1.3622526636225268e-05,
      "loss": 0.2449,
      "step": 420
    },
    {
      "epoch": 0.9817351598173516,
      "grad_norm": 638955.5625,
      "learning_rate": 1.3470319634703198e-05,
      "loss": 0.253,
      "step": 430
    },
    {
      "epoch": 1.0,
      "eval_accuracy": 0.9223333333333333,
      "eval_f1_macro": 0.9212113479103987,
      "eval_f1_weighted": 0.9221891676085987,
      "eval_loss": 0.2171599268913269,
      "eval_runtime": 14.7894,
      "eval_samples_per_second": 405.695,
      "eval_steps_per_second": 12.712,
      "step": 438
    },
    {
      "epoch": 1.004566210045662,
      "grad_norm": 654569.875,
      "learning_rate": 1.3318112633181128e-05,
      "loss": 0.18,
      "step": 440
    },
    {
      "epoch": 1.0273972602739727,
      "grad_norm": 182483.90625,
      "learning_rate": 1.3165905631659058e-05,
      "loss": 0.1107,
      "step": 450
    },
    {
      "epoch": 1.0502283105022832,
      "grad_norm": 185298.140625,
      "learning_rate": 1.3013698630136988e-05,
      "loss": 0.1458,
      "step": 460
    },
    {
      "epoch": 1.0730593607305936,
      "grad_norm": 21770.05078125,
      "learning_rate": 1.2861491628614916e-05,
      "loss": 0.1592,
      "step": 470
    },
    {
      "epoch": 1.095890410958904,
      "grad_norm": 851453.125,
      "learning_rate": 1.2709284627092848e-05,
      "loss": 0.1383,
      "step": 480
    },
    {
      "epoch": 1.1187214611872145,
      "grad_norm": 348289.34375,
      "learning_rate": 1.2557077625570777e-05,
      "loss": 0.1205,
      "step": 490
    },
    {
      "epoch": 1.1415525114155252,
      "grad_norm": 510975.78125,
      "learning_rate": 1.2404870624048708e-05,
      "loss": 0.1321,
      "step": 500
    },
    {
      "epoch": 1.1643835616438356,
      "grad_norm": 1570861.375,
      "learning_rate": 1.2252663622526637e-05,
      "loss": 0.221,
      "step": 510
    },
    {
      "epoch": 1.187214611872146,
      "grad_norm": 948502.5625,
      "learning_rate": 1.2100456621004569e-05,
      "loss": 0.1814,
      "step": 520
    },
    {
      "epoch": 1.2100456621004567,
      "grad_norm": 597450.25,
      "learning_rate": 1.1948249619482497e-05,
      "loss": 0.0911,
      "step": 530
    },
    {
      "epoch": 1.2328767123287672,
      "grad_norm": 397320.8125,
      "learning_rate": 1.1796042617960429e-05,
      "loss": 0.1649,
      "step": 540
    },
    {
      "epoch": 1.2557077625570776,
      "grad_norm": 513346.84375,
      "learning_rate": 1.1643835616438357e-05,
      "loss": 0.1953,
      "step": 550
    },
    {
      "epoch": 1.278538812785388,
      "grad_norm": 504690.0625,
      "learning_rate": 1.1491628614916289e-05,
      "loss": 0.14,
      "step": 560
    },
    {
      "epoch": 1.3013698630136985,
      "grad_norm": 1110943.625,
      "learning_rate": 1.1339421613394217e-05,
      "loss": 0.1191,
      "step": 570
    },
    {
      "epoch": 1.3242009132420092,
      "grad_norm": 200145.484375,
      "learning_rate": 1.1187214611872147e-05,
      "loss": 0.1339,
      "step": 580
    },
    {
      "epoch": 1.3470319634703196,
      "grad_norm": 221766.21875,
      "learning_rate": 1.1035007610350078e-05,
      "loss": 0.1427,
      "step": 590
    },
    {
      "epoch": 1.36986301369863,
      "grad_norm": 881168.3125,
      "learning_rate": 1.0882800608828008e-05,
      "loss": 0.1089,
      "step": 600
    },
    {
      "epoch": 1.3926940639269407,
      "grad_norm": 854495.5625,
      "learning_rate": 1.0730593607305938e-05,
      "loss": 0.1368,
      "step": 610
    },
    {
      "epoch": 1.4155251141552512,
      "grad_norm": 115748.078125,
      "learning_rate": 1.0578386605783868e-05,
      "loss": 0.15,
      "step": 620
    },
    {
      "epoch": 1.4383561643835616,
      "grad_norm": 334710.25,
      "learning_rate": 1.0426179604261796e-05,
      "loss": 0.2423,
      "step": 630
    },
    {
      "epoch": 1.461187214611872,
      "grad_norm": 972609.8125,
      "learning_rate": 1.0273972602739728e-05,
      "loss": 0.093,
      "step": 640
    },
    {
      "epoch": 1.4840182648401825,
      "grad_norm": 519533.71875,
      "learning_rate": 1.0121765601217656e-05,
      "loss": 0.217,
      "step": 650
    },
    {
      "epoch": 1.5068493150684932,
      "grad_norm": 156004.296875,
      "learning_rate": 9.969558599695586e-06,
      "loss": 0.1424,
      "step": 660
    },
    {
      "epoch": 1.5296803652968036,
      "grad_norm": 398576.53125,
      "learning_rate": 9.817351598173517e-06,
      "loss": 0.1404,
      "step": 670
    },
    {
      "epoch": 1.5525114155251143,
      "grad_norm": 648208.25,
      "learning_rate": 9.665144596651447e-06,
      "loss": 0.1277,
      "step": 680
    },
    {
      "epoch": 1.5753424657534247,
      "grad_norm": 71873.3046875,
      "learning_rate": 9.512937595129377e-06,
      "loss": 0.1247,
      "step": 690
    },
    {
      "epoch": 1.5981735159817352,
      "grad_norm": 349757.6875,
      "learning_rate": 9.360730593607307e-06,
      "loss": 0.1786,
      "step": 700
    },
    {
      "epoch": 1.6210045662100456,
      "grad_norm": 395071.59375,
      "learning_rate": 9.208523592085237e-06,
      "loss": 0.1687,
      "step": 710
    },
    {
      "epoch": 1.643835616438356,
      "grad_norm": 689678.75,
      "learning_rate": 9.056316590563167e-06,
      "loss": 0.1239,
      "step": 720
    },
    {
      "epoch": 1.6666666666666665,
      "grad_norm": 609217.75,
      "learning_rate": 8.904109589041097e-06,
      "loss": 0.1193,
      "step": 730
    },
    {
      "epoch": 1.6894977168949772,
      "grad_norm": 429054.15625,
      "learning_rate": 8.751902587519027e-06,
      "loss": 0.0972,
      "step": 740
    },
    {
      "epoch": 1.7123287671232876,
      "grad_norm": 658631.0,
      "learning_rate": 8.599695585996957e-06,
      "loss": 0.2026,
      "step": 750
    },
    {
      "epoch": 1.7351598173515983,
      "grad_norm": 696978.75,
      "learning_rate": 8.447488584474887e-06,
      "loss": 0.2093,
      "step": 760
    },
    {
      "epoch": 1.7579908675799087,
      "grad_norm": 1273698.5,
      "learning_rate": 8.295281582952816e-06,
      "loss": 0.131,
      "step": 770
    },
    {
      "epoch": 1.7808219178082192,
      "grad_norm": 381059.125,
      "learning_rate": 8.143074581430746e-06,
      "loss": 0.0753,
      "step": 780
    },
    {
      "epoch": 1.8036529680365296,
      "grad_norm": 760526.8125,
      "learning_rate": 7.990867579908676e-06,
      "loss": 0.1004,
      "step": 790
    },
    {
      "epoch": 1.82648401826484,
      "grad_norm": 197580.625,
      "learning_rate": 7.838660578386606e-06,
      "loss": 0.1652,
      "step": 800
    },
    {
      "epoch": 1.8493150684931505,
      "grad_norm": 1663549.75,
      "learning_rate": 7.686453576864536e-06,
      "loss": 0.1465,
      "step": 810
    },
    {
      "epoch": 1.8721461187214612,
      "grad_norm": 517745.75,
      "learning_rate": 7.534246575342466e-06,
      "loss": 0.1703,
      "step": 820
    },
    {
      "epoch": 1.8949771689497716,
      "grad_norm": 680767.5625,
      "learning_rate": 7.382039573820396e-06,
      "loss": 0.1623,
      "step": 830
    },
    {
      "epoch": 1.9178082191780823,
      "grad_norm": 697195.1875,
      "learning_rate": 7.2298325722983265e-06,
      "loss": 0.1733,
      "step": 840
    },
    {
      "epoch": 1.9406392694063928,
      "grad_norm": 263220.8125,
      "learning_rate": 7.077625570776257e-06,
      "loss": 0.116,
      "step": 850
    },
    {
      "epoch": 1.9634703196347032,
      "grad_norm": 336279.6875,
      "learning_rate": 6.925418569254187e-06,
      "loss": 0.1565,
      "step": 860
    },
    {
      "epoch": 1.9863013698630136,
      "grad_norm": 230299.015625,
      "learning_rate": 6.773211567732117e-06,
      "loss": 0.1184,
      "step": 870
    },
    {
      "epoch": 2.0,
      "eval_accuracy": 0.9285,
      "eval_f1_macro": 0.9277850539806721,
      "eval_f1_weighted": 0.92853233424208,
      "eval_loss": 0.2250971645116806,
      "eval_runtime": 15.009,
      "eval_samples_per_second": 399.76,
      "eval_steps_per_second": 12.526,
      "step": 876
    },
    {
      "epoch": 2.009132420091324,
      "grad_norm": 105916.703125,
      "learning_rate": 6.621004566210046e-06,
      "loss": 0.1208,
      "step": 880
    },
    {
      "epoch": 2.0319634703196345,
      "grad_norm": 231579.703125,
      "learning_rate": 6.468797564687976e-06,
      "loss": 0.0994,
      "step": 890
    },
    {
      "epoch": 2.0547945205479454,
      "grad_norm": 226497.375,
      "learning_rate": 6.316590563165906e-06,
      "loss": 0.0713,
      "step": 900
    },
    {
      "epoch": 2.077625570776256,
      "grad_norm": 275743.15625,
      "learning_rate": 6.164383561643836e-06,
      "loss": 0.0873,
      "step": 910
    },
    {
      "epoch": 2.1004566210045663,
      "grad_norm": 28719.689453125,
      "learning_rate": 6.012176560121766e-06,
      "loss": 0.1131,
      "step": 920
    },
    {
      "epoch": 2.1232876712328768,
      "grad_norm": 947971.3125,
      "learning_rate": 5.8599695585996965e-06,
      "loss": 0.0514,
      "step": 930
    },
    {
      "epoch": 2.146118721461187,
      "grad_norm": 768273.125,
      "learning_rate": 5.7077625570776266e-06,
      "loss": 0.0813,
      "step": 940
    },
    {
      "epoch": 2.1689497716894977,
      "grad_norm": 202898.15625,
      "learning_rate": 5.555555555555557e-06,
      "loss": 0.0813,
      "step": 950
    },
    {
      "epoch": 2.191780821917808,
      "grad_norm": 481656.03125,
      "learning_rate": 5.403348554033486e-06,
      "loss": 0.1004,
      "step": 960
    },
    {
      "epoch": 2.2146118721461185,
      "grad_norm": 250803.953125,
      "learning_rate": 5.251141552511416e-06,
      "loss": 0.1432,
      "step": 970
    },
    {
      "epoch": 2.237442922374429,
      "grad_norm": 12995.201171875,
      "learning_rate": 5.098934550989346e-06,
      "loss": 0.0464,
      "step": 980
    },
    {
      "epoch": 2.26027397260274,
      "grad_norm": 902957.0625,
      "learning_rate": 4.946727549467275e-06,
      "loss": 0.137,
      "step": 990
    },
    {
      "epoch": 2.2831050228310503,
      "grad_norm": 534876.0,
      "learning_rate": 4.7945205479452054e-06,
      "loss": 0.0922,
      "step": 1000
    },
    {
      "epoch": 2.3059360730593608,
      "grad_norm": 178206.484375,
      "learning_rate": 4.6423135464231355e-06,
      "loss": 0.0705,
      "step": 1010
    },
    {
      "epoch": 2.328767123287671,
      "grad_norm": 40234.16796875,
      "learning_rate": 4.490106544901066e-06,
      "loss": 0.0372,
      "step": 1020
    },
    {
      "epoch": 2.3515981735159817,
      "grad_norm": 251605.671875,
      "learning_rate": 4.337899543378996e-06,
      "loss": 0.0483,
      "step": 1030
    },
    {
      "epoch": 2.374429223744292,
      "grad_norm": 1325790.375,
      "learning_rate": 4.185692541856926e-06,
      "loss": 0.0806,
      "step": 1040
    },
    {
      "epoch": 2.3972602739726026,
      "grad_norm": 793383.375,
      "learning_rate": 4.033485540334856e-06,
      "loss": 0.123,
      "step": 1050
    },
    {
      "epoch": 2.4200913242009134,
      "grad_norm": 626307.3125,
      "learning_rate": 3.881278538812785e-06,
      "loss": 0.117,
      "step": 1060
    },
    {
      "epoch": 2.442922374429224,
      "grad_norm": 390102.96875,
      "learning_rate": 3.7290715372907157e-06,
      "loss": 0.1038,
      "step": 1070
    },
    {
      "epoch": 2.4657534246575343,
      "grad_norm": 965051.0625,
      "learning_rate": 3.5768645357686453e-06,
      "loss": 0.0869,
      "step": 1080
    },
    {
      "epoch": 2.4885844748858448,
      "grad_norm": 81258.328125,
      "learning_rate": 3.4246575342465754e-06,
      "loss": 0.0941,
      "step": 1090
    },
    {
      "epoch": 2.5114155251141552,
      "grad_norm": 707149.125,
      "learning_rate": 3.2724505327245055e-06,
      "loss": 0.0825,
      "step": 1100
    },
    {
      "epoch": 2.5342465753424657,
      "grad_norm": 10573.8984375,
      "learning_rate": 3.120243531202435e-06,
      "loss": 0.0806,
      "step": 1110
    },
    {
      "epoch": 2.557077625570776,
      "grad_norm": 2018027.5,
      "learning_rate": 2.9680365296803653e-06,
      "loss": 0.1719,
      "step": 1120
    },
    {
      "epoch": 2.5799086757990866,
      "grad_norm": 706195.4375,
      "learning_rate": 2.8158295281582954e-06,
      "loss": 0.0954,
      "step": 1130
    },
    {
      "epoch": 2.602739726027397,
      "grad_norm": 230678.828125,
      "learning_rate": 2.6636225266362255e-06,
      "loss": 0.103,
      "step": 1140
    },
    {
      "epoch": 2.625570776255708,
      "grad_norm": 455807.78125,
      "learning_rate": 2.511415525114155e-06,
      "loss": 0.0965,
      "step": 1150
    },
    {
      "epoch": 2.6484018264840183,
      "grad_norm": 38607.32421875,
      "learning_rate": 2.3592085235920852e-06,
      "loss": 0.0381,
      "step": 1160
    },
    {
      "epoch": 2.671232876712329,
      "grad_norm": 449129.09375,
      "learning_rate": 2.2070015220700153e-06,
      "loss": 0.0836,
      "step": 1170
    },
    {
      "epoch": 2.6940639269406392,
      "grad_norm": 334416.4375,
      "learning_rate": 2.0547945205479454e-06,
      "loss": 0.0511,
      "step": 1180
    },
    {
      "epoch": 2.7168949771689497,
      "grad_norm": 490780.5625,
      "learning_rate": 1.9025875190258753e-06,
      "loss": 0.1427,
      "step": 1190
    },
    {
      "epoch": 2.73972602739726,
      "grad_norm": 6038.24072265625,
      "learning_rate": 1.7503805175038052e-06,
      "loss": 0.1099,
      "step": 1200
    },
    {
      "epoch": 2.762557077625571,
      "grad_norm": 202150.234375,
      "learning_rate": 1.5981735159817353e-06,
      "loss": 0.119,
      "step": 1210
    },
    {
      "epoch": 2.7853881278538815,
      "grad_norm": 232674.578125,
      "learning_rate": 1.4459665144596652e-06,
      "loss": 0.181,
      "step": 1220
    },
    {
      "epoch": 2.808219178082192,
      "grad_norm": 1792436.625,
      "learning_rate": 1.2937595129375953e-06,
      "loss": 0.0718,
      "step": 1230
    },
    {
      "epoch": 2.8310502283105023,
      "grad_norm": 2906556.0,
      "learning_rate": 1.1415525114155251e-06,
      "loss": 0.097,
      "step": 1240
    },
    {
      "epoch": 2.853881278538813,
      "grad_norm": 82039.2734375,
      "learning_rate": 9.89345509893455e-07,
      "loss": 0.1275,
      "step": 1250
    },
    {
      "epoch": 2.8767123287671232,
      "grad_norm": 271356.25,
      "learning_rate": 8.371385083713851e-07,
      "loss": 0.1401,
      "step": 1260
    },
    {
      "epoch": 2.8995433789954337,
      "grad_norm": 9588.64453125,
      "learning_rate": 6.849315068493151e-07,
      "loss": 0.1627,
      "step": 1270
    },
    {
      "epoch": 2.922374429223744,
      "grad_norm": 1104557.125,
      "learning_rate": 5.327245053272451e-07,
      "loss": 0.0942,
      "step": 1280
    },
    {
      "epoch": 2.9452054794520546,
      "grad_norm": 31101.583984375,
      "learning_rate": 3.8051750380517503e-07,
      "loss": 0.0714,
      "step": 1290
    },
    {
      "epoch": 2.968036529680365,
      "grad_norm": 740889.8125,
      "learning_rate": 2.2831050228310502e-07,
      "loss": 0.1083,
      "step": 1300
    },
    {
      "epoch": 2.990867579908676,
      "grad_norm": 7040.435546875,
      "learning_rate": 7.6103500761035e-08,
      "loss": 0.1593,
      "step": 1310
    },
    {
      "epoch": 3.0,
      "eval_accuracy": 0.9261666666666667,
      "eval_f1_macro": 0.9252761556296953,
      "eval_f1_weighted": 0.9261245203427795,
      "eval_loss": 0.37234973907470703,
      "eval_runtime": 15.0862,
      "eval_samples_per_second": 397.714,
      "eval_steps_per_second": 12.462,
      "step": 1314
    },
    {
      "epoch": 3.0,
      "step": 1314,
      "total_flos": 1537621007523840.0,
      "train_loss": 0.18048278951118707,
      "train_runtime": 317.0714,
      "train_samples_per_second": 132.462,
      "train_steps_per_second": 4.144
    },
    {
      "epoch": 3.0,
      "eval_accuracy": 0.9285,
      "eval_f1_macro": 0.9277850539806721,
      "eval_f1_weighted": 0.92853233424208,
      "eval_loss": 0.2250971645116806,
      "eval_runtime": 14.8809,
      "eval_samples_per_second": 403.201,
      "eval_steps_per_second": 12.634,
      "step": 1314
    }
  ],
  "logging_steps": 10,
  "max_steps": 1314,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 3,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 1537621007523840.0,
  "train_batch_size": 32,
  "trial_name": null,
  "trial_params": null
}