{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.9666505558240696,
  "eval_steps": 500,
  "global_step": 2000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0,
      "grad_norm": 75.8236083984375,
      "learning_rate": 4.0000000000000003e-07,
      "loss": 3.7148,
      "step": 10
    },
    {
      "epoch": 0.01,
      "grad_norm": 52.23726272583008,
      "learning_rate": 9e-07,
      "loss": 3.2721,
      "step": 20
    },
    {
      "epoch": 0.01,
      "grad_norm": 35.391998291015625,
      "learning_rate": 1.4000000000000001e-06,
      "loss": 2.4262,
      "step": 30
    },
    {
      "epoch": 0.02,
      "grad_norm": 15.225934028625488,
      "learning_rate": 1.9e-06,
      "loss": 1.8828,
      "step": 40
    },
    {
      "epoch": 0.02,
      "grad_norm": 13.488654136657715,
      "learning_rate": 2.4000000000000003e-06,
      "loss": 1.5398,
      "step": 50
    },
    {
      "epoch": 0.03,
      "grad_norm": 8.773133277893066,
      "learning_rate": 2.8500000000000002e-06,
      "loss": 1.2772,
      "step": 60
    },
    {
      "epoch": 0.03,
      "grad_norm": 6.736795902252197,
      "learning_rate": 3.3500000000000005e-06,
      "loss": 1.1159,
      "step": 70
    },
    {
      "epoch": 0.04,
      "grad_norm": 2.9463019371032715,
      "learning_rate": 3.85e-06,
      "loss": 0.9883,
      "step": 80
    },
    {
      "epoch": 0.04,
      "grad_norm": 1.3851969242095947,
      "learning_rate": 4.35e-06,
      "loss": 0.902,
      "step": 90
    },
    {
      "epoch": 0.05,
      "grad_norm": 1.4642181396484375,
      "learning_rate": 4.85e-06,
      "loss": 0.8701,
      "step": 100
    },
    {
      "epoch": 0.05,
      "grad_norm": 0.6514687538146973,
      "learning_rate": 5.3500000000000004e-06,
      "loss": 0.8548,
      "step": 110
    },
    {
      "epoch": 0.06,
      "grad_norm": 0.4448617994785309,
      "learning_rate": 5.850000000000001e-06,
      "loss": 0.8038,
      "step": 120
    },
    {
      "epoch": 0.06,
      "grad_norm": 0.5425733327865601,
      "learning_rate": 6.35e-06,
      "loss": 0.825,
      "step": 130
    },
    {
      "epoch": 0.07,
      "grad_norm": 0.47352057695388794,
      "learning_rate": 6.8500000000000005e-06,
      "loss": 0.812,
      "step": 140
    },
    {
      "epoch": 0.07,
      "grad_norm": 0.40401744842529297,
      "learning_rate": 7.35e-06,
      "loss": 0.7912,
      "step": 150
    },
    {
      "epoch": 0.08,
      "grad_norm": 0.3112718164920807,
      "learning_rate": 7.850000000000001e-06,
      "loss": 0.8057,
      "step": 160
    },
    {
      "epoch": 0.08,
      "grad_norm": 0.45351892709732056,
      "learning_rate": 8.350000000000001e-06,
      "loss": 0.7912,
      "step": 170
    },
    {
      "epoch": 0.09,
      "grad_norm": 0.38080641627311707,
      "learning_rate": 8.85e-06,
      "loss": 0.7956,
      "step": 180
    },
    {
      "epoch": 0.09,
      "grad_norm": 0.22949431836605072,
      "learning_rate": 9.35e-06,
      "loss": 0.7785,
      "step": 190
    },
    {
      "epoch": 0.1,
      "grad_norm": 0.8331744074821472,
      "learning_rate": 9.85e-06,
      "loss": 0.7769,
      "step": 200
    },
    {
      "epoch": 0.1,
      "grad_norm": 0.30213871598243713,
      "learning_rate": 1.035e-05,
      "loss": 0.7797,
      "step": 210
    },
    {
      "epoch": 0.11,
      "grad_norm": 0.32015591859817505,
      "learning_rate": 1.0850000000000001e-05,
      "loss": 0.7941,
      "step": 220
    },
    {
      "epoch": 0.11,
      "grad_norm": 0.22447015345096588,
      "learning_rate": 1.1350000000000001e-05,
      "loss": 0.8014,
      "step": 230
    },
    {
      "epoch": 0.12,
      "grad_norm": 0.2546006143093109,
      "learning_rate": 1.185e-05,
      "loss": 0.771,
      "step": 240
    },
    {
      "epoch": 0.12,
      "grad_norm": 0.23833389580249786,
      "learning_rate": 1.235e-05,
      "loss": 0.7956,
      "step": 250
    },
    {
      "epoch": 0.13,
      "grad_norm": 0.1831669956445694,
      "learning_rate": 1.285e-05,
      "loss": 0.7833,
      "step": 260
    },
    {
      "epoch": 0.13,
      "grad_norm": 0.19860319793224335,
      "learning_rate": 1.3350000000000001e-05,
      "loss": 0.7956,
      "step": 270
    },
    {
      "epoch": 0.14,
      "grad_norm": 0.21220380067825317,
      "learning_rate": 1.3850000000000001e-05,
      "loss": 0.7942,
      "step": 280
    },
    {
      "epoch": 0.14,
      "grad_norm": 0.26025038957595825,
      "learning_rate": 1.435e-05,
      "loss": 0.7877,
      "step": 290
    },
    {
      "epoch": 0.14,
      "grad_norm": 0.2029470056295395,
      "learning_rate": 1.485e-05,
      "loss": 0.7637,
      "step": 300
    },
    {
      "epoch": 0.15,
      "grad_norm": 0.21639183163642883,
      "learning_rate": 1.535e-05,
      "loss": 0.7885,
      "step": 310
    },
    {
      "epoch": 0.15,
      "grad_norm": 0.23670387268066406,
      "learning_rate": 1.5850000000000002e-05,
      "loss": 0.7558,
      "step": 320
    },
    {
      "epoch": 0.16,
      "grad_norm": 0.2855335772037506,
      "learning_rate": 1.635e-05,
      "loss": 0.7854,
      "step": 330
    },
    {
      "epoch": 0.16,
      "grad_norm": 0.21515759825706482,
      "learning_rate": 1.6850000000000003e-05,
      "loss": 0.7577,
      "step": 340
    },
    {
      "epoch": 0.17,
      "grad_norm": 0.18981383740901947,
      "learning_rate": 1.7349999999999998e-05,
      "loss": 0.7802,
      "step": 350
    },
    {
      "epoch": 0.17,
      "grad_norm": 0.2674611508846283,
      "learning_rate": 1.785e-05,
      "loss": 0.7809,
      "step": 360
    },
    {
      "epoch": 0.18,
      "grad_norm": 0.24821774661540985,
      "learning_rate": 1.8350000000000002e-05,
      "loss": 0.7965,
      "step": 370
    },
    {
      "epoch": 0.18,
      "grad_norm": 0.3644929528236389,
      "learning_rate": 1.885e-05,
      "loss": 0.7957,
      "step": 380
    },
    {
      "epoch": 0.19,
      "grad_norm": 0.3568972647190094,
      "learning_rate": 1.9350000000000003e-05,
      "loss": 0.7882,
      "step": 390
    },
    {
      "epoch": 0.19,
      "grad_norm": 0.3236319422721863,
      "learning_rate": 1.985e-05,
      "loss": 0.7841,
      "step": 400
    },
    {
      "epoch": 0.2,
      "grad_norm": 0.24553723633289337,
      "learning_rate": 2.035e-05,
      "loss": 0.7824,
      "step": 410
    },
    {
      "epoch": 0.2,
      "grad_norm": 0.1855400949716568,
      "learning_rate": 2.085e-05,
      "loss": 0.7645,
      "step": 420
    },
    {
      "epoch": 0.21,
      "grad_norm": 0.26212435960769653,
      "learning_rate": 2.135e-05,
      "loss": 0.7824,
      "step": 430
    },
    {
      "epoch": 0.21,
      "grad_norm": 0.3167509138584137,
      "learning_rate": 2.1850000000000003e-05,
      "loss": 0.7908,
      "step": 440
    },
    {
      "epoch": 0.22,
      "grad_norm": 0.21845023334026337,
      "learning_rate": 2.235e-05,
      "loss": 0.7888,
      "step": 450
    },
    {
      "epoch": 0.22,
      "grad_norm": 0.30318892002105713,
      "learning_rate": 2.2850000000000003e-05,
      "loss": 0.7682,
      "step": 460
    },
    {
      "epoch": 0.23,
      "grad_norm": 0.28828638792037964,
      "learning_rate": 2.3350000000000002e-05,
      "loss": 0.7588,
      "step": 470
    },
    {
      "epoch": 0.23,
      "grad_norm": 0.2422240674495697,
      "learning_rate": 2.385e-05,
      "loss": 0.7787,
      "step": 480
    },
    {
      "epoch": 0.24,
      "grad_norm": 0.3579326272010803,
      "learning_rate": 2.435e-05,
      "loss": 0.7932,
      "step": 490
    },
    {
      "epoch": 0.24,
      "grad_norm": 0.3549540042877197,
      "learning_rate": 2.485e-05,
      "loss": 0.7634,
      "step": 500
    },
    {
      "epoch": 0.24,
      "eval_loss": 0.7744565606117249,
      "eval_runtime": 43.9626,
      "eval_samples_per_second": 45.493,
      "eval_steps_per_second": 0.364,
      "step": 500
    },
    {
      "epoch": 0.25,
      "grad_norm": 0.2931281328201294,
      "learning_rate": 2.5350000000000003e-05,
      "loss": 0.7763,
      "step": 510
    },
    {
      "epoch": 0.25,
      "grad_norm": 0.21228083968162537,
      "learning_rate": 2.585e-05,
      "loss": 0.7748,
      "step": 520
    },
    {
      "epoch": 0.26,
      "grad_norm": 0.26342302560806274,
      "learning_rate": 2.6350000000000004e-05,
      "loss": 0.789,
      "step": 530
    },
    {
      "epoch": 0.26,
      "grad_norm": 0.2840302288532257,
      "learning_rate": 2.6850000000000002e-05,
      "loss": 0.768,
      "step": 540
    },
    {
      "epoch": 0.27,
      "grad_norm": 0.273771196603775,
      "learning_rate": 2.7350000000000004e-05,
      "loss": 0.7707,
      "step": 550
    },
    {
      "epoch": 0.27,
      "grad_norm": 1.6022837162017822,
      "learning_rate": 2.7850000000000003e-05,
      "loss": 0.7705,
      "step": 560
    },
    {
      "epoch": 0.28,
      "grad_norm": 0.3082919716835022,
      "learning_rate": 2.8349999999999998e-05,
      "loss": 0.7849,
      "step": 570
    },
    {
      "epoch": 0.28,
      "grad_norm": 0.25341132283210754,
      "learning_rate": 2.885e-05,
      "loss": 0.7658,
      "step": 580
    },
    {
      "epoch": 0.29,
      "grad_norm": 0.2250708043575287,
      "learning_rate": 2.935e-05,
      "loss": 0.756,
      "step": 590
    },
    {
      "epoch": 0.29,
      "grad_norm": 0.23522531986236572,
      "learning_rate": 2.985e-05,
      "loss": 0.7692,
      "step": 600
    },
    {
      "epoch": 0.29,
      "grad_norm": 0.2448827624320984,
      "learning_rate": 3.035e-05,
      "loss": 0.7635,
      "step": 610
    },
    {
      "epoch": 0.3,
      "grad_norm": 0.19717586040496826,
      "learning_rate": 3.0850000000000004e-05,
      "loss": 0.7548,
      "step": 620
    },
    {
      "epoch": 0.3,
      "grad_norm": 0.24299098551273346,
      "learning_rate": 3.135e-05,
      "loss": 0.7673,
      "step": 630
    },
    {
      "epoch": 0.31,
      "grad_norm": 0.2555953562259674,
      "learning_rate": 3.185e-05,
      "loss": 0.7528,
      "step": 640
    },
    {
      "epoch": 0.31,
      "grad_norm": 0.2102127969264984,
      "learning_rate": 3.235e-05,
      "loss": 0.7678,
      "step": 650
    },
    {
      "epoch": 0.32,
      "grad_norm": 0.21767646074295044,
      "learning_rate": 3.2850000000000006e-05,
      "loss": 0.7564,
      "step": 660
    },
    {
      "epoch": 0.32,
      "grad_norm": 0.3582068085670471,
      "learning_rate": 3.3350000000000004e-05,
      "loss": 0.7645,
      "step": 670
    },
    {
      "epoch": 0.33,
      "grad_norm": 0.27804431319236755,
      "learning_rate": 3.385e-05,
      "loss": 0.7645,
      "step": 680
    },
    {
      "epoch": 0.33,
      "grad_norm": 0.2464047521352768,
      "learning_rate": 3.435e-05,
      "loss": 0.7325,
      "step": 690
    },
    {
      "epoch": 0.34,
      "grad_norm": 0.22323842346668243,
      "learning_rate": 3.485e-05,
      "loss": 0.7579,
      "step": 700
    },
    {
      "epoch": 0.34,
      "grad_norm": 0.28329703211784363,
      "learning_rate": 3.535e-05,
      "loss": 0.7399,
      "step": 710
    },
    {
      "epoch": 0.35,
      "grad_norm": 0.2471865713596344,
      "learning_rate": 3.585e-05,
      "loss": 0.765,
      "step": 720
    },
    {
      "epoch": 0.35,
      "grad_norm": 0.314908504486084,
      "learning_rate": 3.635e-05,
      "loss": 0.7529,
      "step": 730
    },
    {
      "epoch": 0.36,
      "grad_norm": 0.21466995775699615,
      "learning_rate": 3.685e-05,
      "loss": 0.7365,
      "step": 740
    },
    {
      "epoch": 0.36,
      "grad_norm": 0.2545580565929413,
      "learning_rate": 3.735e-05,
      "loss": 0.7458,
      "step": 750
    },
    {
      "epoch": 0.37,
      "grad_norm": 0.2651059329509735,
      "learning_rate": 3.7850000000000005e-05,
      "loss": 0.7578,
      "step": 760
    },
    {
      "epoch": 0.37,
      "grad_norm": 0.2408379763364792,
      "learning_rate": 3.8350000000000004e-05,
      "loss": 0.7283,
      "step": 770
    },
    {
      "epoch": 0.38,
      "grad_norm": 0.2575875520706177,
      "learning_rate": 3.885e-05,
      "loss": 0.7367,
      "step": 780
    },
    {
      "epoch": 0.38,
      "grad_norm": 0.28182050585746765,
      "learning_rate": 3.935e-05,
      "loss": 0.7784,
      "step": 790
    },
    {
      "epoch": 0.39,
      "grad_norm": 0.20579397678375244,
      "learning_rate": 3.9850000000000006e-05,
      "loss": 0.7533,
      "step": 800
    },
    {
      "epoch": 0.39,
      "grad_norm": 0.1906532198190689,
      "learning_rate": 4.0350000000000005e-05,
      "loss": 0.7544,
      "step": 810
    },
    {
      "epoch": 0.4,
      "grad_norm": 0.18311142921447754,
      "learning_rate": 4.085e-05,
      "loss": 0.75,
      "step": 820
    },
    {
      "epoch": 0.4,
      "grad_norm": 0.1910839080810547,
      "learning_rate": 4.135e-05,
      "loss": 0.7538,
      "step": 830
    },
    {
      "epoch": 0.41,
      "grad_norm": 0.2178005427122116,
      "learning_rate": 4.185e-05,
      "loss": 0.7456,
      "step": 840
    },
    {
      "epoch": 0.41,
      "grad_norm": 0.27642378211021423,
      "learning_rate": 4.235e-05,
      "loss": 0.744,
      "step": 850
    },
    {
      "epoch": 0.42,
      "grad_norm": 0.26685312390327454,
      "learning_rate": 4.285e-05,
      "loss": 0.7611,
      "step": 860
    },
    {
      "epoch": 0.42,
      "grad_norm": 0.2021845579147339,
      "learning_rate": 4.335e-05,
      "loss": 0.758,
      "step": 870
    },
    {
      "epoch": 0.43,
      "grad_norm": 0.20516808331012726,
      "learning_rate": 4.385e-05,
      "loss": 0.7404,
      "step": 880
    },
    {
      "epoch": 0.43,
      "grad_norm": 0.20619668066501617,
      "learning_rate": 4.435e-05,
      "loss": 0.7379,
      "step": 890
    },
    {
      "epoch": 0.43,
      "grad_norm": 0.22439967095851898,
      "learning_rate": 4.4850000000000006e-05,
      "loss": 0.7495,
      "step": 900
    },
    {
      "epoch": 0.44,
      "grad_norm": 0.2192295491695404,
      "learning_rate": 4.5350000000000005e-05,
      "loss": 0.7377,
      "step": 910
    },
    {
      "epoch": 0.44,
      "grad_norm": 0.26503369212150574,
      "learning_rate": 4.585e-05,
      "loss": 0.7481,
      "step": 920
    },
    {
      "epoch": 0.45,
      "grad_norm": 0.2249353975057602,
      "learning_rate": 4.635e-05,
      "loss": 0.742,
      "step": 930
    },
    {
      "epoch": 0.45,
      "grad_norm": 0.21413160860538483,
      "learning_rate": 4.685000000000001e-05,
      "loss": 0.7485,
      "step": 940
    },
    {
      "epoch": 0.46,
      "grad_norm": 0.20971478521823883,
      "learning_rate": 4.735e-05,
      "loss": 0.7339,
      "step": 950
    },
    {
      "epoch": 0.46,
      "grad_norm": 0.20295798778533936,
      "learning_rate": 4.785e-05,
      "loss": 0.722,
      "step": 960
    },
    {
      "epoch": 0.47,
      "grad_norm": 0.1957596242427826,
      "learning_rate": 4.835e-05,
      "loss": 0.7538,
      "step": 970
    },
    {
      "epoch": 0.47,
      "grad_norm": 0.18014560639858246,
      "learning_rate": 4.885e-05,
      "loss": 0.7251,
      "step": 980
    },
    {
      "epoch": 0.48,
      "grad_norm": 0.19334861636161804,
      "learning_rate": 4.935e-05,
      "loss": 0.7444,
      "step": 990
    },
    {
      "epoch": 0.48,
      "grad_norm": 0.1753440499305725,
      "learning_rate": 4.9850000000000006e-05,
      "loss": 0.7376,
      "step": 1000
    },
    {
      "epoch": 0.48,
      "eval_loss": 0.73988938331604,
      "eval_runtime": 42.9535,
      "eval_samples_per_second": 46.562,
      "eval_steps_per_second": 0.372,
      "step": 1000
    },
    {
      "epoch": 0.49,
      "grad_norm": 0.24455028772354126,
      "learning_rate": 4.99999307775404e-05,
      "loss": 0.7404,
      "step": 1010
    },
    {
      "epoch": 0.49,
      "grad_norm": 0.2149570733308792,
      "learning_rate": 4.999959172968145e-05,
      "loss": 0.7342,
      "step": 1020
    },
    {
      "epoch": 0.5,
      "grad_norm": 0.1931283175945282,
      "learning_rate": 4.999897014592085e-05,
      "loss": 0.735,
      "step": 1030
    },
    {
      "epoch": 0.5,
      "grad_norm": 0.2331906110048294,
      "learning_rate": 4.999806603328352e-05,
      "loss": 0.7145,
      "step": 1040
    },
    {
      "epoch": 0.51,
      "grad_norm": 0.21177324652671814,
      "learning_rate": 4.999687940198738e-05,
      "loss": 0.7308,
      "step": 1050
    },
    {
      "epoch": 0.51,
      "grad_norm": 0.2026621401309967,
      "learning_rate": 4.9995410265443286e-05,
      "loss": 0.7445,
      "step": 1060
    },
    {
      "epoch": 0.52,
      "grad_norm": 0.19811251759529114,
      "learning_rate": 4.999365864025487e-05,
      "loss": 0.738,
      "step": 1070
    },
    {
      "epoch": 0.52,
      "grad_norm": 0.1981566846370697,
      "learning_rate": 4.9991624546218334e-05,
      "loss": 0.7286,
      "step": 1080
    },
    {
      "epoch": 0.53,
      "grad_norm": 0.17122291028499603,
      "learning_rate": 4.9989308006322235e-05,
      "loss": 0.737,
      "step": 1090
    },
    {
      "epoch": 0.53,
      "grad_norm": 0.2182038575410843,
      "learning_rate": 4.9986709046747225e-05,
      "loss": 0.7354,
      "step": 1100
    },
    {
      "epoch": 0.54,
      "grad_norm": 0.1823730617761612,
      "learning_rate": 4.9983827696865764e-05,
      "loss": 0.725,
      "step": 1110
    },
    {
      "epoch": 0.54,
      "grad_norm": 0.2195768803358078,
      "learning_rate": 4.998066398924177e-05,
      "loss": 0.7173,
      "step": 1120
    },
    {
      "epoch": 0.55,
      "grad_norm": 0.18349726498126984,
      "learning_rate": 4.997721795963028e-05,
      "loss": 0.7159,
      "step": 1130
    },
    {
      "epoch": 0.55,
      "grad_norm": 0.2440386265516281,
      "learning_rate": 4.997348964697699e-05,
      "loss": 0.7171,
      "step": 1140
    },
    {
      "epoch": 0.56,
      "grad_norm": 0.20371629297733307,
      "learning_rate": 4.9969479093417894e-05,
      "loss": 0.7179,
      "step": 1150
    },
    {
      "epoch": 0.56,
      "grad_norm": 0.2982828617095947,
      "learning_rate": 4.9965186344278746e-05,
      "loss": 0.7222,
      "step": 1160
    },
    {
      "epoch": 0.57,
      "grad_norm": 0.2097097784280777,
      "learning_rate": 4.9960611448074555e-05,
      "loss": 0.7174,
      "step": 1170
    },
    {
      "epoch": 0.57,
      "grad_norm": 0.2264368236064911,
      "learning_rate": 4.995575445650907e-05,
      "loss": 0.7079,
      "step": 1180
    },
    {
      "epoch": 0.58,
      "grad_norm": 0.24737605452537537,
      "learning_rate": 4.995061542447418e-05,
      "loss": 0.7272,
      "step": 1190
    },
    {
      "epoch": 0.58,
      "grad_norm": 0.19530241191387177,
      "learning_rate": 4.9945194410049254e-05,
      "loss": 0.7216,
      "step": 1200
    },
    {
      "epoch": 0.58,
      "grad_norm": 0.2791181802749634,
      "learning_rate": 4.993949147450055e-05,
      "loss": 0.712,
      "step": 1210
    },
    {
      "epoch": 0.59,
      "grad_norm": 0.2095661610364914,
      "learning_rate": 4.993350668228049e-05,
      "loss": 0.7067,
      "step": 1220
    },
    {
      "epoch": 0.59,
      "grad_norm": 0.17129307985305786,
      "learning_rate": 4.9927240101026915e-05,
      "loss": 0.7087,
      "step": 1230
    },
    {
      "epoch": 0.6,
      "grad_norm": 0.2154540866613388,
      "learning_rate": 4.992069180156235e-05,
      "loss": 0.7411,
      "step": 1240
    },
    {
      "epoch": 0.6,
      "grad_norm": 0.22642803192138672,
      "learning_rate": 4.991386185789319e-05,
      "loss": 0.7266,
      "step": 1250
    },
    {
      "epoch": 0.61,
      "grad_norm": 0.19676081836223602,
      "learning_rate": 4.9906750347208866e-05,
      "loss": 0.7012,
      "step": 1260
    },
    {
      "epoch": 0.61,
      "grad_norm": 0.2095811814069748,
      "learning_rate": 4.989935734988098e-05,
      "loss": 0.7162,
      "step": 1270
    },
    {
      "epoch": 0.62,
      "grad_norm": 0.21455398201942444,
      "learning_rate": 4.9891682949462385e-05,
      "loss": 0.7177,
      "step": 1280
    },
    {
      "epoch": 0.62,
      "grad_norm": 0.20205985009670258,
      "learning_rate": 4.988372723268623e-05,
      "loss": 0.718,
      "step": 1290
    },
    {
      "epoch": 0.63,
      "grad_norm": 0.18780440092086792,
      "learning_rate": 4.987549028946502e-05,
      "loss": 0.7103,
      "step": 1300
    },
    {
      "epoch": 0.63,
      "grad_norm": 0.20362792909145355,
      "learning_rate": 4.986697221288956e-05,
      "loss": 0.7304,
      "step": 1310
    },
    {
      "epoch": 0.64,
      "grad_norm": 0.2820493280887604,
      "learning_rate": 4.985817309922792e-05,
      "loss": 0.7182,
      "step": 1320
    },
    {
      "epoch": 0.64,
      "grad_norm": 0.20787598192691803,
      "learning_rate": 4.984909304792435e-05,
      "loss": 0.7142,
      "step": 1330
    },
    {
      "epoch": 0.65,
      "grad_norm": 0.21624509990215302,
      "learning_rate": 4.983973216159813e-05,
      "loss": 0.7208,
      "step": 1340
    },
    {
      "epoch": 0.65,
      "grad_norm": 0.19343602657318115,
      "learning_rate": 4.983009054604246e-05,
      "loss": 0.7097,
      "step": 1350
    },
    {
      "epoch": 0.66,
      "grad_norm": 0.254402756690979,
      "learning_rate": 4.9820168310223215e-05,
      "loss": 0.7193,
      "step": 1360
    },
    {
      "epoch": 0.66,
      "grad_norm": 0.1940561830997467,
      "learning_rate": 4.980996556627774e-05,
      "loss": 0.6988,
      "step": 1370
    },
    {
      "epoch": 0.67,
      "grad_norm": 0.22593462467193604,
      "learning_rate": 4.9799482429513576e-05,
      "loss": 0.7054,
      "step": 1380
    },
    {
      "epoch": 0.67,
      "grad_norm": 0.22020135819911957,
      "learning_rate": 4.978871901840716e-05,
      "loss": 0.7221,
      "step": 1390
    },
    {
      "epoch": 0.68,
      "grad_norm": 0.22807644307613373,
      "learning_rate": 4.977767545460248e-05,
      "loss": 0.7213,
      "step": 1400
    },
    {
      "epoch": 0.68,
      "grad_norm": 0.1925562173128128,
      "learning_rate": 4.9766351862909703e-05,
      "loss": 0.7127,
      "step": 1410
    },
    {
      "epoch": 0.69,
      "grad_norm": 0.20845136046409607,
      "learning_rate": 4.9754748371303775e-05,
      "loss": 0.7207,
      "step": 1420
    },
    {
      "epoch": 0.69,
      "grad_norm": 0.18583528697490692,
      "learning_rate": 4.974286511092294e-05,
      "loss": 0.7098,
      "step": 1430
    },
    {
      "epoch": 0.7,
      "grad_norm": 0.2019319236278534,
      "learning_rate": 4.973070221606732e-05,
      "loss": 0.7029,
      "step": 1440
    },
    {
      "epoch": 0.7,
      "grad_norm": 0.1966458261013031,
      "learning_rate": 4.971825982419731e-05,
      "loss": 0.7241,
      "step": 1450
    },
    {
      "epoch": 0.71,
      "grad_norm": 0.20924623310565948,
      "learning_rate": 4.970553807593214e-05,
      "loss": 0.7126,
      "step": 1460
    },
    {
      "epoch": 0.71,
      "grad_norm": 0.20655803382396698,
      "learning_rate": 4.969253711504814e-05,
      "loss": 0.6903,
      "step": 1470
    },
    {
      "epoch": 0.72,
      "grad_norm": 0.2179209142923355,
      "learning_rate": 4.967925708847727e-05,
      "loss": 0.683,
      "step": 1480
    },
    {
      "epoch": 0.72,
      "grad_norm": 0.2198410928249359,
      "learning_rate": 4.966569814630534e-05,
      "loss": 0.7087,
      "step": 1490
    },
    {
      "epoch": 0.72,
      "grad_norm": 0.17970815300941467,
      "learning_rate": 4.9651860441770374e-05,
      "loss": 0.7006,
      "step": 1500
    },
    {
      "epoch": 0.72,
      "eval_loss": 0.7137542366981506,
      "eval_runtime": 43.0052,
      "eval_samples_per_second": 46.506,
      "eval_steps_per_second": 0.372,
      "step": 1500
    },
    {
      "epoch": 0.73,
      "grad_norm": 0.1931023746728897,
      "learning_rate": 4.963774413126086e-05,
      "loss": 0.7245,
      "step": 1510
    },
    {
      "epoch": 0.73,
      "grad_norm": 0.21812495589256287,
      "learning_rate": 4.9623349374313973e-05,
      "loss": 0.7119,
      "step": 1520
    },
    {
      "epoch": 0.74,
      "grad_norm": 0.20784322917461395,
      "learning_rate": 4.960867633361381e-05,
      "loss": 0.704,
      "step": 1530
    },
    {
      "epoch": 0.74,
      "grad_norm": 0.18697650730609894,
      "learning_rate": 4.9593725174989507e-05,
      "loss": 0.7129,
      "step": 1540
    },
    {
      "epoch": 0.75,
      "grad_norm": 0.25061744451522827,
      "learning_rate": 4.957849606741338e-05,
      "loss": 0.6816,
      "step": 1550
    },
    {
      "epoch": 0.75,
      "grad_norm": 0.1888163685798645,
      "learning_rate": 4.956298918299903e-05,
      "loss": 0.6905,
      "step": 1560
    },
    {
      "epoch": 0.76,
      "grad_norm": 0.1991235315799713,
      "learning_rate": 4.954720469699938e-05,
      "loss": 0.7258,
      "step": 1570
    },
    {
      "epoch": 0.76,
      "grad_norm": 0.18779252469539642,
      "learning_rate": 4.953114278780471e-05,
      "loss": 0.6918,
      "step": 1580
    },
    {
      "epoch": 0.77,
      "grad_norm": 0.21268677711486816,
      "learning_rate": 4.951480363694061e-05,
      "loss": 0.6995,
      "step": 1590
    },
    {
      "epoch": 0.77,
      "grad_norm": 0.20242677628993988,
      "learning_rate": 4.949818742906597e-05,
      "loss": 0.7132,
      "step": 1600
    },
    {
      "epoch": 0.78,
      "grad_norm": 0.2314455509185791,
      "learning_rate": 4.9481294351970874e-05,
      "loss": 0.6866,
      "step": 1610
    },
    {
      "epoch": 0.78,
      "grad_norm": 0.19638539850711823,
      "learning_rate": 4.9464124596574465e-05,
      "loss": 0.6993,
      "step": 1620
    },
    {
      "epoch": 0.79,
      "grad_norm": 0.185603529214859,
      "learning_rate": 4.944667835692281e-05,
      "loss": 0.6971,
      "step": 1630
    },
    {
      "epoch": 0.79,
      "grad_norm": 0.1594298928976059,
      "learning_rate": 4.942895583018669e-05,
      "loss": 0.7258,
      "step": 1640
    },
    {
      "epoch": 0.8,
      "grad_norm": 0.21844029426574707,
      "learning_rate": 4.9410957216659374e-05,
      "loss": 0.7022,
      "step": 1650
    },
    {
      "epoch": 0.8,
      "grad_norm": 0.19146642088890076,
      "learning_rate": 4.9392682719754364e-05,
      "loss": 0.6954,
      "step": 1660
    },
    {
      "epoch": 0.81,
      "grad_norm": 0.20112347602844238,
      "learning_rate": 4.9374132546003096e-05,
      "loss": 0.7017,
      "step": 1670
    },
    {
      "epoch": 0.81,
      "grad_norm": 0.22593769431114197,
      "learning_rate": 4.935530690505259e-05,
      "loss": 0.7397,
      "step": 1680
    },
    {
      "epoch": 0.82,
      "grad_norm": 0.2611504793167114,
      "learning_rate": 4.933620600966311e-05,
      "loss": 0.7048,
      "step": 1690
    },
    {
      "epoch": 0.82,
      "grad_norm": 0.23995471000671387,
      "learning_rate": 4.931683007570571e-05,
      "loss": 0.709,
      "step": 1700
    },
    {
      "epoch": 0.83,
      "grad_norm": 0.22278735041618347,
      "learning_rate": 4.9297179322159856e-05,
      "loss": 0.7116,
      "step": 1710
    },
    {
      "epoch": 0.83,
      "grad_norm": 0.2096502035856247,
      "learning_rate": 4.927725397111093e-05,
      "loss": 0.7061,
      "step": 1720
    },
    {
      "epoch": 0.84,
      "grad_norm": 0.22335517406463623,
      "learning_rate": 4.9257054247747644e-05,
      "loss": 0.6982,
      "step": 1730
    },
    {
      "epoch": 0.84,
      "grad_norm": 0.18597687780857086,
      "learning_rate": 4.923658038035965e-05,
      "loss": 0.6927,
      "step": 1740
    },
    {
      "epoch": 0.85,
      "grad_norm": 0.1971946507692337,
      "learning_rate": 4.9215832600334816e-05,
      "loss": 0.6768,
      "step": 1750
    },
    {
      "epoch": 0.85,
      "grad_norm": 0.2101408839225769,
      "learning_rate": 4.9194811142156674e-05,
      "loss": 0.6933,
      "step": 1760
    },
    {
      "epoch": 0.86,
      "grad_norm": 0.17352430522441864,
      "learning_rate": 4.917351624340178e-05,
      "loss": 0.6918,
      "step": 1770
    },
    {
      "epoch": 0.86,
      "grad_norm": 0.18568973243236542,
      "learning_rate": 4.915194814473699e-05,
      "loss": 0.6711,
      "step": 1780
    },
    {
      "epoch": 0.87,
      "grad_norm": 0.22991472482681274,
      "learning_rate": 4.913010708991679e-05,
      "loss": 0.6938,
      "step": 1790
    },
    {
      "epoch": 0.87,
      "grad_norm": 0.201872780919075,
      "learning_rate": 4.910799332578048e-05,
      "loss": 0.7051,
      "step": 1800
    },
    {
      "epoch": 0.87,
      "grad_norm": 0.2069864720106125,
      "learning_rate": 4.908560710224943e-05,
      "loss": 0.6983,
      "step": 1810
    },
    {
      "epoch": 0.88,
      "grad_norm": 0.2075955718755722,
      "learning_rate": 4.906294867232426e-05,
      "loss": 0.6996,
      "step": 1820
    },
    {
      "epoch": 0.88,
      "grad_norm": 0.19524317979812622,
      "learning_rate": 4.904001829208194e-05,
      "loss": 0.691,
      "step": 1830
    },
    {
      "epoch": 0.89,
      "grad_norm": 0.1967206597328186,
      "learning_rate": 4.901681622067293e-05,
      "loss": 0.6964,
      "step": 1840
    },
    {
      "epoch": 0.89,
      "grad_norm": 0.2195650041103363,
      "learning_rate": 4.899334272031825e-05,
      "loss": 0.6982,
      "step": 1850
    },
    {
      "epoch": 0.9,
      "grad_norm": 0.23072399199008942,
      "learning_rate": 4.8969598056306475e-05,
      "loss": 0.6838,
      "step": 1860
    },
    {
      "epoch": 0.9,
      "grad_norm": 0.1950792670249939,
      "learning_rate": 4.89455824969908e-05,
      "loss": 0.6984,
      "step": 1870
    },
    {
      "epoch": 0.91,
      "grad_norm": 0.2289438396692276,
      "learning_rate": 4.892129631378597e-05,
      "loss": 0.7012,
      "step": 1880
    },
    {
      "epoch": 0.91,
      "grad_norm": 0.22316494584083557,
      "learning_rate": 4.8896739781165215e-05,
      "loss": 0.6904,
      "step": 1890
    },
    {
      "epoch": 0.92,
      "grad_norm": 0.24066634476184845,
      "learning_rate": 4.8871913176657135e-05,
      "loss": 0.6974,
      "step": 1900
    },
    {
      "epoch": 0.92,
      "grad_norm": 0.18922199308872223,
      "learning_rate": 4.8846816780842606e-05,
      "loss": 0.686,
      "step": 1910
    },
    {
      "epoch": 0.93,
      "grad_norm": 0.2425553798675537,
      "learning_rate": 4.882145087735158e-05,
      "loss": 0.715,
      "step": 1920
    },
    {
      "epoch": 0.93,
      "grad_norm": 0.2267230898141861,
      "learning_rate": 4.879581575285988e-05,
      "loss": 0.698,
      "step": 1930
    },
    {
      "epoch": 0.94,
      "grad_norm": 0.22119645774364471,
      "learning_rate": 4.876991169708592e-05,
      "loss": 0.6942,
      "step": 1940
    },
    {
      "epoch": 0.94,
      "grad_norm": 0.22854161262512207,
      "learning_rate": 4.874373900278756e-05,
      "loss": 0.6912,
      "step": 1950
    },
    {
      "epoch": 0.95,
      "grad_norm": 0.1927136778831482,
      "learning_rate": 4.871729796575863e-05,
      "loss": 0.6829,
      "step": 1960
    },
    {
      "epoch": 0.95,
      "grad_norm": 0.2238394170999527,
      "learning_rate": 4.869058888482571e-05,
      "loss": 0.6831,
      "step": 1970
    },
    {
      "epoch": 0.96,
      "grad_norm": 0.17643775045871735,
      "learning_rate": 4.86636120618447e-05,
      "loss": 0.6875,
      "step": 1980
    },
    {
      "epoch": 0.96,
      "grad_norm": 0.2161882072687149,
      "learning_rate": 4.863636780169742e-05,
      "loss": 0.6893,
      "step": 1990
    },
    {
      "epoch": 0.97,
      "grad_norm": 0.20689886808395386,
      "learning_rate": 4.860885641228815e-05,
      "loss": 0.6721,
      "step": 2000
    },
    {
      "epoch": 0.97,
      "eval_loss": 0.701475203037262,
      "eval_runtime": 43.016,
      "eval_samples_per_second": 46.494,
      "eval_steps_per_second": 0.372,
      "step": 2000
    }
  ],
  "logging_steps": 10,
  "max_steps": 10345,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 5,
  "save_steps": 500,
  "total_flos": 5.061082117488771e+18,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null
}