{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 3.007712082262211,
  "eval_steps": 15000,
  "global_step": 2340,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.01,
      "learning_rate": 3.1914893617021277e-06,
      "loss": 0.8544,
      "step": 10
    },
    {
      "epoch": 0.03,
      "learning_rate": 6.3829787234042555e-06,
      "loss": 0.8342,
      "step": 20
    },
    {
      "epoch": 0.04,
      "learning_rate": 9.574468085106385e-06,
      "loss": 0.8016,
      "step": 30
    },
    {
      "epoch": 0.05,
      "learning_rate": 1.2765957446808511e-05,
      "loss": 0.7802,
      "step": 40
    },
    {
      "epoch": 0.06,
      "learning_rate": 1.5957446808510637e-05,
      "loss": 0.7127,
      "step": 50
    },
    {
      "epoch": 0.08,
      "learning_rate": 1.914893617021277e-05,
      "loss": 0.6504,
      "step": 60
    },
    {
      "epoch": 0.09,
      "learning_rate": 2.2340425531914894e-05,
      "loss": 0.6011,
      "step": 70
    },
    {
      "epoch": 0.1,
      "learning_rate": 2.5531914893617022e-05,
      "loss": 0.6859,
      "step": 80
    },
    {
      "epoch": 0.12,
      "learning_rate": 2.872340425531915e-05,
      "loss": 0.5766,
      "step": 90
    },
    {
      "epoch": 0.13,
      "learning_rate": 2.9999474091752356e-05,
      "loss": 0.6617,
      "step": 100
    },
    {
      "epoch": 0.14,
      "learning_rate": 2.9996260341563078e-05,
      "loss": 0.6804,
      "step": 110
    },
    {
      "epoch": 0.15,
      "learning_rate": 2.999012563764485e-05,
      "loss": 0.5635,
      "step": 120
    },
    {
      "epoch": 0.17,
      "learning_rate": 2.9981071174907998e-05,
      "loss": 0.5021,
      "step": 130
    },
    {
      "epoch": 0.18,
      "learning_rate": 2.9969098716969983e-05,
      "loss": 0.575,
      "step": 140
    },
    {
      "epoch": 0.19,
      "learning_rate": 2.995421059581191e-05,
      "loss": 0.6085,
      "step": 150
    },
    {
      "epoch": 0.21,
      "learning_rate": 2.993640971132428e-05,
      "loss": 0.6241,
      "step": 160
    },
    {
      "epoch": 0.22,
      "learning_rate": 2.991569953074218e-05,
      "loss": 0.6405,
      "step": 170
    },
    {
      "epoch": 0.23,
      "learning_rate": 2.9892084087969912e-05,
      "loss": 0.6318,
      "step": 180
    },
    {
      "epoch": 0.24,
      "learning_rate": 2.98655679827953e-05,
      "loss": 0.5592,
      "step": 190
    },
    {
      "epoch": 0.26,
      "learning_rate": 2.9836156379993726e-05,
      "loss": 0.5625,
      "step": 200
    },
    {
      "epoch": 0.27,
      "learning_rate": 2.980385500832215e-05,
      "loss": 0.5857,
      "step": 210
    },
    {
      "epoch": 0.28,
      "learning_rate": 2.9768670159403283e-05,
      "loss": 0.6439,
      "step": 220
    },
    {
      "epoch": 0.3,
      "learning_rate": 2.9730608686500093e-05,
      "loss": 0.4829,
      "step": 230
    },
    {
      "epoch": 0.31,
      "learning_rate": 2.9689678003180932e-05,
      "loss": 0.5554,
      "step": 240
    },
    {
      "epoch": 0.32,
      "learning_rate": 2.964588608187556e-05,
      "loss": 0.5177,
      "step": 250
    },
    {
      "epoch": 0.33,
      "learning_rate": 2.9599241452322243e-05,
      "loss": 0.5375,
      "step": 260
    },
    {
      "epoch": 0.35,
      "learning_rate": 2.9549753199906383e-05,
      "loss": 0.5527,
      "step": 270
    },
    {
      "epoch": 0.36,
      "learning_rate": 2.9497430963890838e-05,
      "loss": 0.5166,
      "step": 280
    },
    {
      "epoch": 0.37,
      "learning_rate": 2.9442284935538424e-05,
      "loss": 0.5353,
      "step": 290
    },
    {
      "epoch": 0.39,
      "learning_rate": 2.9384325856126867e-05,
      "loss": 0.531,
      "step": 300
    },
    {
      "epoch": 0.4,
      "learning_rate": 2.932356501485663e-05,
      "loss": 0.5583,
      "step": 310
    },
    {
      "epoch": 0.41,
      "learning_rate": 2.9260014246652e-05,
      "loss": 0.5355,
      "step": 320
    },
    {
      "epoch": 0.42,
      "learning_rate": 2.919368592985593e-05,
      "loss": 0.5351,
      "step": 330
    },
    {
      "epoch": 0.44,
      "learning_rate": 2.9124592983818963e-05,
      "loss": 0.5349,
      "step": 340
    },
    {
      "epoch": 0.45,
      "learning_rate": 2.9052748866382848e-05,
      "loss": 0.5094,
      "step": 350
    },
    {
      "epoch": 0.46,
      "learning_rate": 2.8978167571259202e-05,
      "loss": 0.538,
      "step": 360
    },
    {
      "epoch": 0.48,
      "learning_rate": 2.8900863625303867e-05,
      "loss": 0.5041,
      "step": 370
    },
    {
      "epoch": 0.49,
      "learning_rate": 2.8820852085687373e-05,
      "loss": 0.4906,
      "step": 380
    },
    {
      "epoch": 0.5,
      "learning_rate": 2.873814853696211e-05,
      "loss": 0.5246,
      "step": 390
    },
    {
      "epoch": 0.51,
      "learning_rate": 2.8652769088026798e-05,
      "loss": 0.5484,
      "step": 400
    },
    {
      "epoch": 0.53,
      "learning_rate": 2.856473036898882e-05,
      "loss": 0.4325,
      "step": 410
    },
    {
      "epoch": 0.54,
      "learning_rate": 2.847404952792501e-05,
      "loss": 0.4965,
      "step": 420
    },
    {
      "epoch": 0.55,
      "learning_rate": 2.8380744227541592e-05,
      "loss": 0.509,
      "step": 430
    },
    {
      "epoch": 0.57,
      "learning_rate": 2.8284832641733857e-05,
      "loss": 0.5052,
      "step": 440
    },
    {
      "epoch": 0.58,
      "learning_rate": 2.8186333452046256e-05,
      "loss": 0.5119,
      "step": 450
    },
    {
      "epoch": 0.59,
      "learning_rate": 2.808526584403364e-05,
      "loss": 0.4206,
      "step": 460
    },
    {
      "epoch": 0.6,
      "learning_rate": 2.7981649503524323e-05,
      "loss": 0.5834,
      "step": 470
    },
    {
      "epoch": 0.62,
      "learning_rate": 2.7875504612785717e-05,
      "loss": 0.4789,
      "step": 480
    },
    {
      "epoch": 0.63,
      "learning_rate": 2.7766851846593215e-05,
      "loss": 0.4196,
      "step": 490
    },
    {
      "epoch": 0.64,
      "learning_rate": 2.7655712368203213e-05,
      "loss": 0.4959,
      "step": 500
    },
    {
      "epoch": 0.66,
      "learning_rate": 2.7542107825230944e-05,
      "loss": 0.4784,
      "step": 510
    },
    {
      "epoch": 0.67,
      "learning_rate": 2.7426060345433984e-05,
      "loss": 0.52,
      "step": 520
    },
    {
      "epoch": 0.68,
      "learning_rate": 2.7307592532402248e-05,
      "loss": 0.517,
      "step": 530
    },
    {
      "epoch": 0.69,
      "learning_rate": 2.7186727461155252e-05,
      "loss": 0.4751,
      "step": 540
    },
    {
      "epoch": 0.71,
      "learning_rate": 2.706348867364763e-05,
      "loss": 0.4711,
      "step": 550
    },
    {
      "epoch": 0.72,
      "learning_rate": 2.6937900174183637e-05,
      "loss": 0.4961,
      "step": 560
    },
    {
      "epoch": 0.73,
      "learning_rate": 2.6809986424741623e-05,
      "loss": 0.5276,
      "step": 570
    },
    {
      "epoch": 0.75,
      "learning_rate": 2.6679772340209362e-05,
      "loss": 0.5341,
      "step": 580
    },
    {
      "epoch": 0.76,
      "learning_rate": 2.6547283283531153e-05,
      "loss": 0.5208,
      "step": 590
    },
    {
      "epoch": 0.77,
      "learning_rate": 2.6412545060767663e-05,
      "loss": 0.4345,
      "step": 600
    },
    {
      "epoch": 0.78,
      "learning_rate": 2.6275583916069456e-05,
      "loss": 0.4814,
      "step": 610
    },
    {
      "epoch": 0.8,
      "learning_rate": 2.6136426526565168e-05,
      "loss": 0.4222,
      "step": 620
    },
    {
      "epoch": 0.81,
      "learning_rate": 2.5995099997165395e-05,
      "loss": 0.5428,
      "step": 630
    },
    {
      "epoch": 0.82,
      "learning_rate": 2.585163185528322e-05,
      "loss": 0.4636,
      "step": 640
    },
    {
      "epoch": 0.84,
      "learning_rate": 2.5706050045472446e-05,
      "loss": 0.4903,
      "step": 650
    },
    {
      "epoch": 0.85,
      "learning_rate": 2.55583829239846e-05,
      "loss": 0.4702,
      "step": 660
    },
    {
      "epoch": 0.86,
      "learning_rate": 2.540865925324574e-05,
      "loss": 0.5231,
      "step": 670
    },
    {
      "epoch": 0.87,
      "learning_rate": 2.5256908196254126e-05,
      "loss": 0.5089,
      "step": 680
    },
    {
      "epoch": 0.89,
      "learning_rate": 2.510315931089991e-05,
      "loss": 0.4572,
      "step": 690
    },
    {
      "epoch": 0.9,
      "learning_rate": 2.494744254420787e-05,
      "loss": 0.471,
      "step": 700
    },
    {
      "epoch": 0.91,
      "learning_rate": 2.478978822650436e-05,
      "loss": 0.3929,
      "step": 710
    },
    {
      "epoch": 0.93,
      "learning_rate": 2.4630227065509627e-05,
      "loss": 0.476,
      "step": 720
    },
    {
      "epoch": 0.94,
      "learning_rate": 2.446879014035656e-05,
      "loss": 0.5348,
      "step": 730
    },
    {
      "epoch": 0.95,
      "learning_rate": 2.4305508895537186e-05,
      "loss": 0.4399,
      "step": 740
    },
    {
      "epoch": 0.96,
      "learning_rate": 2.4140415134777895e-05,
      "loss": 0.4443,
      "step": 750
    },
    {
      "epoch": 0.98,
      "learning_rate": 2.3973541014844783e-05,
      "loss": 0.5046,
      "step": 760
    },
    {
      "epoch": 0.99,
      "learning_rate": 2.380491903928019e-05,
      "loss": 0.4318,
      "step": 770
    },
    {
      "epoch": 1.0,
      "learning_rate": 2.3634582052071682e-05,
      "loss": 0.5244,
      "step": 780
    },
    {
      "epoch": 1.02,
      "learning_rate": 2.3462563231254764e-05,
      "loss": 0.5364,
      "step": 790
    },
    {
      "epoch": 1.03,
      "learning_rate": 2.328889608245048e-05,
      "loss": 0.5278,
      "step": 800
    },
    {
      "epoch": 1.04,
      "learning_rate": 2.3113614432339247e-05,
      "loss": 0.5021,
      "step": 810
    },
    {
      "epoch": 1.05,
      "learning_rate": 2.2936752422072105e-05,
      "loss": 0.4363,
      "step": 820
    },
    {
      "epoch": 1.07,
      "learning_rate": 2.275834450062077e-05,
      "loss": 0.4657,
      "step": 830
    },
    {
      "epoch": 1.08,
      "learning_rate": 2.2578425418067713e-05,
      "loss": 0.4355,
      "step": 840
    },
    {
      "epoch": 1.09,
      "learning_rate": 2.239703021883754e-05,
      "loss": 0.402,
      "step": 850
    },
    {
      "epoch": 1.11,
      "learning_rate": 2.221419423487113e-05,
      "loss": 0.5007,
      "step": 860
    },
    {
      "epoch": 1.12,
      "learning_rate": 2.202995307874367e-05,
      "loss": 0.4388,
      "step": 870
    },
    {
      "epoch": 1.13,
      "learning_rate": 2.1844342636728133e-05,
      "loss": 0.5182,
      "step": 880
    },
    {
      "epoch": 1.14,
      "learning_rate": 2.165739906180535e-05,
      "loss": 0.5118,
      "step": 890
    },
    {
      "epoch": 1.16,
      "learning_rate": 2.1469158766622195e-05,
      "loss": 0.3888,
      "step": 900
    },
    {
      "epoch": 1.17,
      "learning_rate": 2.127965841639918e-05,
      "loss": 0.3978,
      "step": 910
    },
    {
      "epoch": 1.18,
      "learning_rate": 2.1088934921788813e-05,
      "loss": 0.5071,
      "step": 920
    },
    {
      "epoch": 1.2,
      "learning_rate": 2.0897025431686234e-05,
      "loss": 0.4999,
      "step": 930
    },
    {
      "epoch": 1.21,
      "learning_rate": 2.0703967325993373e-05,
      "loss": 0.4734,
      "step": 940
    },
    {
      "epoch": 1.22,
      "learning_rate": 2.0509798208338107e-05,
      "loss": 0.5421,
      "step": 950
    },
    {
      "epoch": 1.23,
      "learning_rate": 2.0314555898749934e-05,
      "loss": 0.4867,
      "step": 960
    },
    {
      "epoch": 1.25,
      "learning_rate": 2.0118278426293376e-05,
      "loss": 0.4662,
      "step": 970
    },
    {
      "epoch": 1.26,
      "learning_rate": 1.9921004021660762e-05,
      "loss": 0.4709,
      "step": 980
    },
    {
      "epoch": 1.27,
      "learning_rate": 1.972277110972567e-05,
      "loss": 0.5178,
      "step": 990
    },
    {
      "epoch": 1.29,
      "learning_rate": 1.952361830205859e-05,
      "loss": 0.5188,
      "step": 1000
    },
    {
      "epoch": 1.3,
      "learning_rate": 1.932358438940622e-05,
      "loss": 0.412,
      "step": 1010
    },
    {
      "epoch": 1.31,
      "learning_rate": 1.9122708334135816e-05,
      "loss": 0.4236,
      "step": 1020
    },
    {
      "epoch": 1.32,
      "learning_rate": 1.8921029262646187e-05,
      "loss": 0.4594,
      "step": 1030
    },
    {
      "epoch": 1.34,
      "learning_rate": 1.8718586457746694e-05,
      "loss": 0.4521,
      "step": 1040
    },
    {
      "epoch": 1.35,
      "learning_rate": 1.8515419351005745e-05,
      "loss": 0.4427,
      "step": 1050
    },
    {
      "epoch": 1.36,
      "learning_rate": 1.8311567515070438e-05,
      "loss": 0.4509,
      "step": 1060
    },
    {
      "epoch": 1.38,
      "learning_rate": 1.8107070655958584e-05,
      "loss": 0.4548,
      "step": 1070
    },
    {
      "epoch": 1.39,
      "learning_rate": 1.7901968605324847e-05,
      "loss": 0.479,
      "step": 1080
    },
    {
      "epoch": 1.4,
      "learning_rate": 1.7696301312702383e-05,
      "loss": 0.4867,
      "step": 1090
    },
    {
      "epoch": 1.41,
      "learning_rate": 1.7490108837721493e-05,
      "loss": 0.4894,
      "step": 1100
    },
    {
      "epoch": 1.43,
      "learning_rate": 1.7283431342306884e-05,
      "loss": 0.4646,
      "step": 1110
    },
    {
      "epoch": 1.44,
      "learning_rate": 1.7076309082854963e-05,
      "loss": 0.4802,
      "step": 1120
    },
    {
      "epoch": 1.45,
      "learning_rate": 1.686878240239274e-05,
      "loss": 0.4287,
      "step": 1130
    },
    {
      "epoch": 1.47,
      "learning_rate": 1.666089172271988e-05,
      "loss": 0.4808,
      "step": 1140
    },
    {
      "epoch": 1.48,
      "learning_rate": 1.6452677536535375e-05,
      "loss": 0.4639,
      "step": 1150
    },
    {
      "epoch": 1.49,
      "learning_rate": 1.624418039955042e-05,
      "loss": 0.4263,
      "step": 1160
    },
    {
      "epoch": 1.5,
      "learning_rate": 1.6035440922589054e-05,
      "loss": 0.492,
      "step": 1170
    },
    {
      "epoch": 1.52,
      "learning_rate": 1.5826499763677982e-05,
      "loss": 0.487,
      "step": 1180
    },
    {
      "epoch": 1.53,
      "learning_rate": 1.561739762012729e-05,
      "loss": 0.3992,
      "step": 1190
    },
    {
      "epoch": 1.54,
      "learning_rate": 1.5408175220603464e-05,
      "loss": 0.4221,
      "step": 1200
    },
    {
      "epoch": 1.56,
      "learning_rate": 1.5198873317196304e-05,
      "loss": 0.4954,
      "step": 1210
    },
    {
      "epoch": 1.57,
      "learning_rate": 1.4989532677481273e-05,
      "loss": 0.4486,
      "step": 1220
    },
    {
      "epoch": 1.58,
      "learning_rate": 1.4780194076578861e-05,
      "loss": 0.4657,
      "step": 1230
    },
    {
      "epoch": 1.59,
      "learning_rate": 1.4570898289212423e-05,
      "loss": 0.3984,
      "step": 1240
    },
    {
      "epoch": 1.61,
      "learning_rate": 1.4361686081766154e-05,
      "loss": 0.512,
      "step": 1250
    },
    {
      "epoch": 1.62,
      "learning_rate": 1.4152598204344656e-05,
      "loss": 0.4178,
      "step": 1260
    },
    {
      "epoch": 1.63,
      "learning_rate": 1.3943675382835672e-05,
      "loss": 0.3971,
      "step": 1270
    },
    {
      "epoch": 1.65,
      "learning_rate": 1.3734958310977542e-05,
      "loss": 0.4343,
      "step": 1280
    },
    {
      "epoch": 1.66,
      "learning_rate": 1.3526487642432953e-05,
      "loss": 0.4675,
      "step": 1290
    },
    {
      "epoch": 1.67,
      "learning_rate": 1.331830398287044e-05,
      "loss": 0.469,
      "step": 1300
    },
    {
      "epoch": 1.68,
      "learning_rate": 1.3110447882055255e-05,
      "loss": 0.4457,
      "step": 1310
    },
    {
      "epoch": 1.7,
      "learning_rate": 1.290295982595117e-05,
      "loss": 0.4495,
      "step": 1320
    },
    {
      "epoch": 1.71,
      "learning_rate": 1.2695880228834631e-05,
      "loss": 0.4329,
      "step": 1330
    },
    {
      "epoch": 1.72,
      "learning_rate": 1.2489249425422943e-05,
      "loss": 0.4681,
      "step": 1340
    },
    {
      "epoch": 1.74,
      "learning_rate": 1.2283107663017894e-05,
      "loss": 0.4718,
      "step": 1350
    },
    {
      "epoch": 1.75,
      "learning_rate": 1.207749509366645e-05,
      "loss": 0.5212,
      "step": 1360
    },
    {
      "epoch": 1.76,
      "learning_rate": 1.1872451766340002e-05,
      "loss": 0.4652,
      "step": 1370
    },
    {
      "epoch": 1.77,
      "learning_rate": 1.1668017619133619e-05,
      "loss": 0.3881,
      "step": 1380
    },
    {
      "epoch": 1.79,
      "learning_rate": 1.1464232471487033e-05,
      "loss": 0.4384,
      "step": 1390
    },
    {
      "epoch": 1.8,
      "learning_rate": 1.126113601642859e-05,
      "loss": 0.4238,
      "step": 1400
    },
    {
      "epoch": 1.81,
      "learning_rate": 1.1058767812843915e-05,
      "loss": 0.4394,
      "step": 1410
    },
    {
      "epoch": 1.83,
      "learning_rate": 1.0857167277770699e-05,
      "loss": 0.4405,
      "step": 1420
    },
    {
      "epoch": 1.84,
      "learning_rate": 1.0656373678721051e-05,
      "loss": 0.4231,
      "step": 1430
    },
    {
      "epoch": 1.85,
      "learning_rate": 1.0456426126033047e-05,
      "loss": 0.4997,
      "step": 1440
    },
    {
      "epoch": 1.86,
      "learning_rate": 1.0257363565252867e-05,
      "loss": 0.4339,
      "step": 1450
    },
    {
      "epoch": 1.88,
      "learning_rate": 1.0059224769549022e-05,
      "loss": 0.4579,
      "step": 1460
    },
    {
      "epoch": 1.89,
      "learning_rate": 9.862048332160156e-06,
      "loss": 0.491,
      "step": 1470
    },
    {
      "epoch": 1.9,
      "learning_rate": 9.665872658877904e-06,
      "loss": 0.4215,
      "step": 1480
    },
    {
      "epoch": 1.92,
      "learning_rate": 9.470735960566274e-06,
      "loss": 0.3476,
      "step": 1490
    },
    {
      "epoch": 1.93,
      "learning_rate": 9.276676245718955e-06,
      "loss": 0.4651,
      "step": 1500
    },
    {
      "epoch": 1.94,
      "learning_rate": 9.083731313056076e-06,
      "loss": 0.4833,
      "step": 1510
    },
    {
      "epoch": 1.95,
      "learning_rate": 8.891938744161832e-06,
      "loss": 0.3936,
      "step": 1520
    },
    {
      "epoch": 1.97,
      "learning_rate": 8.701335896164365e-06,
      "loss": 0.4424,
      "step": 1530
    },
    {
      "epoch": 1.98,
      "learning_rate": 8.511959894459403e-06,
      "loss": 0.4624,
      "step": 1540
    },
    {
      "epoch": 1.99,
      "learning_rate": 8.32384762547901e-06,
      "loss": 0.3945,
      "step": 1550
    },
    {
      "epoch": 2.01,
      "learning_rate": 8.13703572950691e-06,
      "loss": 0.5025,
      "step": 1560
    },
    {
      "epoch": 2.02,
      "learning_rate": 7.951560593541705e-06,
      "loss": 0.5045,
      "step": 1570
    },
    {
      "epoch": 2.03,
      "learning_rate": 7.767458344209473e-06,
      "loss": 0.5225,
      "step": 1580
    },
    {
      "epoch": 2.04,
      "learning_rate": 7.584764840727073e-06,
      "loss": 0.4701,
      "step": 1590
    },
    {
      "epoch": 2.06,
      "learning_rate": 7.403515667917537e-06,
      "loss": 0.3575,
      "step": 1600
    },
    {
      "epoch": 2.07,
      "learning_rate": 7.223746129278879e-06,
      "loss": 0.447,
      "step": 1610
    },
    {
      "epoch": 2.08,
      "learning_rate": 7.045491240107726e-06,
      "loss": 0.4191,
      "step": 1620
    },
    {
      "epoch": 2.1,
      "learning_rate": 6.868785720679093e-06,
      "loss": 0.3947,
      "step": 1630
    },
    {
      "epoch": 2.11,
      "learning_rate": 6.6936639894836e-06,
      "loss": 0.4557,
      "step": 1640
    },
    {
      "epoch": 2.12,
      "learning_rate": 6.520160156523481e-06,
      "loss": 0.404,
      "step": 1650
    },
    {
      "epoch": 2.13,
      "learning_rate": 6.348308016668679e-06,
      "loss": 0.4951,
      "step": 1660
    },
    {
      "epoch": 2.15,
      "learning_rate": 6.178141043074322e-06,
      "loss": 0.4677,
      "step": 1670
    },
    {
      "epoch": 2.16,
      "learning_rate": 6.0096923806608595e-06,
      "loss": 0.3628,
      "step": 1680
    },
    {
      "epoch": 2.17,
      "learning_rate": 5.842994839658134e-06,
      "loss": 0.4757,
      "step": 1690
    },
    {
      "epoch": 2.19,
      "learning_rate": 5.6780808892146444e-06,
      "loss": 0.387,
      "step": 1700
    },
    {
      "epoch": 2.2,
      "learning_rate": 5.514982651073259e-06,
      "loss": 0.4685,
      "step": 1710
    },
    {
      "epoch": 2.21,
      "learning_rate": 5.3537318933145445e-06,
      "loss": 0.4819,
      "step": 1720
    },
    {
      "epoch": 2.22,
      "learning_rate": 5.19436002416905e-06,
      "loss": 0.4856,
      "step": 1730
    },
    {
      "epoch": 2.24,
      "learning_rate": 5.036898085899639e-06,
      "loss": 0.4924,
      "step": 1740
    },
    {
      "epoch": 2.25,
      "learning_rate": 4.881376748755111e-06,
      "loss": 0.4058,
      "step": 1750
    },
    {
      "epoch": 2.26,
      "learning_rate": 4.727826304996294e-06,
      "loss": 0.4738,
      "step": 1760
    },
    {
      "epoch": 2.28,
      "learning_rate": 4.576276662995751e-06,
      "loss": 0.519,
      "step": 1770
    },
    {
      "epoch": 2.29,
      "learning_rate": 4.426757341412246e-06,
      "loss": 0.4648,
      "step": 1780
    },
    {
      "epoch": 2.3,
      "learning_rate": 4.279297463441161e-06,
      "loss": 0.3798,
      "step": 1790
    },
    {
      "epoch": 2.31,
      "learning_rate": 4.133925751141913e-06,
      "loss": 0.4013,
      "step": 1800
    },
    {
      "epoch": 2.33,
      "learning_rate": 3.990670519843484e-06,
      "loss": 0.4445,
      "step": 1810
    },
    {
      "epoch": 2.34,
      "learning_rate": 3.849559672629211e-06,
      "loss": 0.4292,
      "step": 1820
    },
    {
      "epoch": 2.35,
      "learning_rate": 3.7106206949018525e-06,
      "loss": 0.4329,
      "step": 1830
    },
    {
      "epoch": 2.37,
      "learning_rate": 3.5738806490300157e-06,
      "loss": 0.4456,
      "step": 1840
    },
    {
      "epoch": 2.38,
      "learning_rate": 3.439366169076979e-06,
      "loss": 0.4048,
      "step": 1850
    },
    {
      "epoch": 2.39,
      "learning_rate": 3.307103455612942e-06,
      "loss": 0.4805,
      "step": 1860
    },
    {
      "epoch": 2.4,
      "learning_rate": 3.1771182706117107e-06,
      "loss": 0.4863,
      "step": 1870
    },
    {
      "epoch": 2.42,
      "learning_rate": 3.049435932432809e-06,
      "loss": 0.4468,
      "step": 1880
    },
    {
      "epoch": 2.43,
      "learning_rate": 2.9240813108900007e-06,
      "loss": 0.4431,
      "step": 1890
    },
    {
      "epoch": 2.44,
      "learning_rate": 2.801078822407166e-06,
      "loss": 0.4527,
      "step": 1900
    },
    {
      "epoch": 2.46,
      "learning_rate": 2.68045242526253e-06,
      "loss": 0.4331,
      "step": 1910
    },
    {
      "epoch": 2.47,
      "learning_rate": 2.562225614922054e-06,
      "loss": 0.4499,
      "step": 1920
    },
    {
      "epoch": 2.48,
      "learning_rate": 2.446421419463053e-06,
      "loss": 0.4549,
      "step": 1930
    },
    {
      "epoch": 2.49,
      "learning_rate": 2.333062395088795e-06,
      "loss": 0.4125,
      "step": 1940
    },
    {
      "epoch": 2.51,
      "learning_rate": 2.222170621735036e-06,
      "loss": 0.4973,
      "step": 1950
    },
    {
      "epoch": 2.52,
      "learning_rate": 2.1137676987693256e-06,
      "loss": 0.4298,
      "step": 1960
    },
    {
      "epoch": 2.53,
      "learning_rate": 2.0078747407838893e-06,
      "loss": 0.4095,
      "step": 1970
    },
    {
      "epoch": 2.54,
      "learning_rate": 1.90451237348297e-06,
      "loss": 0.3815,
      "step": 1980
    },
    {
      "epoch": 2.56,
      "learning_rate": 1.8037007296653558e-06,
      "loss": 0.4799,
      "step": 1990
    },
    {
      "epoch": 2.57,
      "learning_rate": 1.7054594453029748e-06,
      "loss": 0.4514,
      "step": 2000
    },
    {
      "epoch": 2.58,
      "learning_rate": 1.6098076557161873e-06,
      "loss": 0.4336,
      "step": 2010
    },
    {
      "epoch": 2.6,
      "learning_rate": 1.5167639918466492e-06,
      "loss": 0.433,
      "step": 2020
    },
    {
      "epoch": 2.61,
      "learning_rate": 1.426346576628394e-06,
      "loss": 0.4462,
      "step": 2030
    },
    {
      "epoch": 2.62,
      "learning_rate": 1.3385730214578711e-06,
      "loss": 0.4031,
      "step": 2040
    },
    {
      "epoch": 2.63,
      "learning_rate": 1.253460422763622e-06,
      "loss": 0.4046,
      "step": 2050
    },
    {
      "epoch": 2.65,
      "learning_rate": 1.1710253586762516e-06,
      "loss": 0.4105,
      "step": 2060
    },
    {
      "epoch": 2.66,
      "learning_rate": 1.0912838857993612e-06,
      "loss": 0.4747,
      "step": 2070
    },
    {
      "epoch": 2.67,
      "learning_rate": 1.0142515360820698e-06,
      "loss": 0.4577,
      "step": 2080
    },
    {
      "epoch": 2.69,
      "learning_rate": 9.399433137937074e-07,
      "loss": 0.4083,
      "step": 2090
    },
    {
      "epoch": 2.7,
      "learning_rate": 8.683736926013108e-07,
      "loss": 0.4248,
      "step": 2100
    },
    {
      "epoch": 2.71,
      "learning_rate": 7.995566127504644e-07,
      "loss": 0.4173,
      "step": 2110
    },
    {
      "epoch": 2.72,
      "learning_rate": 7.335054783500128e-07,
      "loss": 0.4909,
      "step": 2120
    },
    {
      "epoch": 2.74,
      "learning_rate": 6.702331547612523e-07,
      "loss": 0.4548,
      "step": 2130
    },
    {
      "epoch": 2.75,
      "learning_rate": 6.097519660920137e-07,
      "loss": 0.486,
      "step": 2140
    },
    {
      "epoch": 2.76,
      "learning_rate": 5.520736927961911e-07,
      "loss": 0.4523,
      "step": 2150
    },
    {
      "epoch": 2.78,
      "learning_rate": 4.972095693791673e-07,
      "loss": 0.3658,
      "step": 2160
    },
    {
      "epoch": 2.79,
      "learning_rate": 4.451702822095588e-07,
      "loss": 0.4309,
      "step": 2170
    },
    {
      "epoch": 2.8,
      "learning_rate": 3.959659674377414e-07,
      "loss": 0.407,
      "step": 2180
    },
    {
      "epoch": 2.81,
      "learning_rate": 3.496062090215424e-07,
      "loss": 0.4581,
      "step": 2190
    },
    {
      "epoch": 2.83,
      "learning_rate": 3.0610003685949073e-07,
      "loss": 0.4145,
      "step": 2200
    },
    {
      "epoch": 2.84,
      "learning_rate": 2.6545592503199046e-07,
      "loss": 0.44,
      "step": 2210
    },
    {
      "epoch": 2.85,
      "learning_rate": 2.2768179015073443e-07,
      "loss": 0.4814,
      "step": 2220
    },
    {
      "epoch": 2.87,
      "learning_rate": 1.9278498981672444e-07,
      "loss": 0.4276,
      "step": 2230
    },
    {
      "epoch": 2.88,
      "learning_rate": 1.607723211871709e-07,
      "loss": 0.4293,
      "step": 2240
    },
    {
      "epoch": 2.89,
      "learning_rate": 1.3165001965153854e-07,
      "loss": 0.4662,
      "step": 2250
    },
    {
      "epoch": 2.9,
      "learning_rate": 1.054237576170397e-07,
      "loss": 0.4045,
      "step": 2260
    },
    {
      "epoch": 2.92,
      "learning_rate": 8.209864340375795e-08,
      "loss": 0.4039,
      "step": 2270
    },
    {
      "epoch": 2.93,
      "learning_rate": 6.167922024965744e-08,
      "loss": 0.4408,
      "step": 2280
    },
    {
      "epoch": 2.94,
      "learning_rate": 4.416946542566247e-08,
      "loss": 0.4574,
      "step": 2290
    },
    {
      "epoch": 2.96,
      "learning_rate": 2.9572789460963756e-08,
      "loss": 0.4278,
      "step": 2300
    },
    {
      "epoch": 2.97,
      "learning_rate": 1.789203547871987e-08,
      "loss": 0.4004,
      "step": 2310
    },
    {
      "epoch": 2.98,
      "learning_rate": 9.129478642283528e-09,
      "loss": 0.4565,
      "step": 2320
    },
    {
      "epoch": 2.99,
      "learning_rate": 3.2868257120344247e-09,
      "loss": 0.4554,
      "step": 2330
    },
    {
      "epoch": 3.01,
      "learning_rate": 3.65214712955142e-10,
      "loss": 0.4397,
      "step": 2340
    }
  ],
  "logging_steps": 10,
  "max_steps": 2345,
  "num_train_epochs": 4,
  "save_steps": 260,
  "total_flos": 1.340869517825833e+18,
  "trial_name": null,
  "trial_params": null
}