Invalid JSON: Unexpected token 'I', ..."ad_norm": Infinity,
"... is not valid JSON
| { | |
| "best_metric": null, | |
| "best_model_checkpoint": null, | |
| "epoch": 4.833252779120348, | |
| "eval_steps": 500, | |
| "global_step": 10000, | |
| "is_hyper_param_search": false, | |
| "is_local_process_zero": true, | |
| "is_world_process_zero": true, | |
| "log_history": [ | |
| { | |
| "epoch": 0.0, | |
| "grad_norm": 75.8236083984375, | |
| "learning_rate": 4.0000000000000003e-07, | |
| "loss": 3.7148, | |
| "step": 10 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "grad_norm": 52.23726272583008, | |
| "learning_rate": 9e-07, | |
| "loss": 3.2721, | |
| "step": 20 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "grad_norm": 35.391998291015625, | |
| "learning_rate": 1.4000000000000001e-06, | |
| "loss": 2.4262, | |
| "step": 30 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "grad_norm": 15.225934028625488, | |
| "learning_rate": 1.9e-06, | |
| "loss": 1.8828, | |
| "step": 40 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "grad_norm": 13.488654136657715, | |
| "learning_rate": 2.4000000000000003e-06, | |
| "loss": 1.5398, | |
| "step": 50 | |
| }, | |
| { | |
| "epoch": 0.03, | |
| "grad_norm": 8.773133277893066, | |
| "learning_rate": 2.8500000000000002e-06, | |
| "loss": 1.2772, | |
| "step": 60 | |
| }, | |
| { | |
| "epoch": 0.03, | |
| "grad_norm": 6.736795902252197, | |
| "learning_rate": 3.3500000000000005e-06, | |
| "loss": 1.1159, | |
| "step": 70 | |
| }, | |
| { | |
| "epoch": 0.04, | |
| "grad_norm": 2.9463019371032715, | |
| "learning_rate": 3.85e-06, | |
| "loss": 0.9883, | |
| "step": 80 | |
| }, | |
| { | |
| "epoch": 0.04, | |
| "grad_norm": 1.3851969242095947, | |
| "learning_rate": 4.35e-06, | |
| "loss": 0.902, | |
| "step": 90 | |
| }, | |
| { | |
| "epoch": 0.05, | |
| "grad_norm": 1.4642181396484375, | |
| "learning_rate": 4.85e-06, | |
| "loss": 0.8701, | |
| "step": 100 | |
| }, | |
| { | |
| "epoch": 0.05, | |
| "grad_norm": 0.6514687538146973, | |
| "learning_rate": 5.3500000000000004e-06, | |
| "loss": 0.8548, | |
| "step": 110 | |
| }, | |
| { | |
| "epoch": 0.06, | |
| "grad_norm": 0.4448617994785309, | |
| "learning_rate": 5.850000000000001e-06, | |
| "loss": 0.8038, | |
| "step": 120 | |
| }, | |
| { | |
| "epoch": 0.06, | |
| "grad_norm": 0.5425733327865601, | |
| "learning_rate": 6.35e-06, | |
| "loss": 0.825, | |
| "step": 130 | |
| }, | |
| { | |
| "epoch": 0.07, | |
| "grad_norm": 0.47352057695388794, | |
| "learning_rate": 6.8500000000000005e-06, | |
| "loss": 0.812, | |
| "step": 140 | |
| }, | |
| { | |
| "epoch": 0.07, | |
| "grad_norm": 0.40401744842529297, | |
| "learning_rate": 7.35e-06, | |
| "loss": 0.7912, | |
| "step": 150 | |
| }, | |
| { | |
| "epoch": 0.08, | |
| "grad_norm": 0.3112718164920807, | |
| "learning_rate": 7.850000000000001e-06, | |
| "loss": 0.8057, | |
| "step": 160 | |
| }, | |
| { | |
| "epoch": 0.08, | |
| "grad_norm": 0.45351892709732056, | |
| "learning_rate": 8.350000000000001e-06, | |
| "loss": 0.7912, | |
| "step": 170 | |
| }, | |
| { | |
| "epoch": 0.09, | |
| "grad_norm": 0.38080641627311707, | |
| "learning_rate": 8.85e-06, | |
| "loss": 0.7956, | |
| "step": 180 | |
| }, | |
| { | |
| "epoch": 0.09, | |
| "grad_norm": 0.22949431836605072, | |
| "learning_rate": 9.35e-06, | |
| "loss": 0.7785, | |
| "step": 190 | |
| }, | |
| { | |
| "epoch": 0.1, | |
| "grad_norm": 0.8331744074821472, | |
| "learning_rate": 9.85e-06, | |
| "loss": 0.7769, | |
| "step": 200 | |
| }, | |
| { | |
| "epoch": 0.1, | |
| "grad_norm": 0.30213871598243713, | |
| "learning_rate": 1.035e-05, | |
| "loss": 0.7797, | |
| "step": 210 | |
| }, | |
| { | |
| "epoch": 0.11, | |
| "grad_norm": 0.32015591859817505, | |
| "learning_rate": 1.0850000000000001e-05, | |
| "loss": 0.7941, | |
| "step": 220 | |
| }, | |
| { | |
| "epoch": 0.11, | |
| "grad_norm": 0.22447015345096588, | |
| "learning_rate": 1.1350000000000001e-05, | |
| "loss": 0.8014, | |
| "step": 230 | |
| }, | |
| { | |
| "epoch": 0.12, | |
| "grad_norm": 0.2546006143093109, | |
| "learning_rate": 1.185e-05, | |
| "loss": 0.771, | |
| "step": 240 | |
| }, | |
| { | |
| "epoch": 0.12, | |
| "grad_norm": 0.23833389580249786, | |
| "learning_rate": 1.235e-05, | |
| "loss": 0.7956, | |
| "step": 250 | |
| }, | |
| { | |
| "epoch": 0.13, | |
| "grad_norm": 0.1831669956445694, | |
| "learning_rate": 1.285e-05, | |
| "loss": 0.7833, | |
| "step": 260 | |
| }, | |
| { | |
| "epoch": 0.13, | |
| "grad_norm": 0.19860319793224335, | |
| "learning_rate": 1.3350000000000001e-05, | |
| "loss": 0.7956, | |
| "step": 270 | |
| }, | |
| { | |
| "epoch": 0.14, | |
| "grad_norm": 0.21220380067825317, | |
| "learning_rate": 1.3850000000000001e-05, | |
| "loss": 0.7942, | |
| "step": 280 | |
| }, | |
| { | |
| "epoch": 0.14, | |
| "grad_norm": 0.26025038957595825, | |
| "learning_rate": 1.435e-05, | |
| "loss": 0.7877, | |
| "step": 290 | |
| }, | |
| { | |
| "epoch": 0.14, | |
| "grad_norm": 0.2029470056295395, | |
| "learning_rate": 1.485e-05, | |
| "loss": 0.7637, | |
| "step": 300 | |
| }, | |
| { | |
| "epoch": 0.15, | |
| "grad_norm": 0.21639183163642883, | |
| "learning_rate": 1.535e-05, | |
| "loss": 0.7885, | |
| "step": 310 | |
| }, | |
| { | |
| "epoch": 0.15, | |
| "grad_norm": 0.23670387268066406, | |
| "learning_rate": 1.5850000000000002e-05, | |
| "loss": 0.7558, | |
| "step": 320 | |
| }, | |
| { | |
| "epoch": 0.16, | |
| "grad_norm": 0.2855335772037506, | |
| "learning_rate": 1.635e-05, | |
| "loss": 0.7854, | |
| "step": 330 | |
| }, | |
| { | |
| "epoch": 0.16, | |
| "grad_norm": 0.21515759825706482, | |
| "learning_rate": 1.6850000000000003e-05, | |
| "loss": 0.7577, | |
| "step": 340 | |
| }, | |
| { | |
| "epoch": 0.17, | |
| "grad_norm": 0.18981383740901947, | |
| "learning_rate": 1.7349999999999998e-05, | |
| "loss": 0.7802, | |
| "step": 350 | |
| }, | |
| { | |
| "epoch": 0.17, | |
| "grad_norm": 0.2674611508846283, | |
| "learning_rate": 1.785e-05, | |
| "loss": 0.7809, | |
| "step": 360 | |
| }, | |
| { | |
| "epoch": 0.18, | |
| "grad_norm": 0.24821774661540985, | |
| "learning_rate": 1.8350000000000002e-05, | |
| "loss": 0.7965, | |
| "step": 370 | |
| }, | |
| { | |
| "epoch": 0.18, | |
| "grad_norm": 0.3644929528236389, | |
| "learning_rate": 1.885e-05, | |
| "loss": 0.7957, | |
| "step": 380 | |
| }, | |
| { | |
| "epoch": 0.19, | |
| "grad_norm": 0.3568972647190094, | |
| "learning_rate": 1.9350000000000003e-05, | |
| "loss": 0.7882, | |
| "step": 390 | |
| }, | |
| { | |
| "epoch": 0.19, | |
| "grad_norm": 0.3236319422721863, | |
| "learning_rate": 1.985e-05, | |
| "loss": 0.7841, | |
| "step": 400 | |
| }, | |
| { | |
| "epoch": 0.2, | |
| "grad_norm": 0.24553723633289337, | |
| "learning_rate": 2.035e-05, | |
| "loss": 0.7824, | |
| "step": 410 | |
| }, | |
| { | |
| "epoch": 0.2, | |
| "grad_norm": 0.1855400949716568, | |
| "learning_rate": 2.085e-05, | |
| "loss": 0.7645, | |
| "step": 420 | |
| }, | |
| { | |
| "epoch": 0.21, | |
| "grad_norm": 0.26212435960769653, | |
| "learning_rate": 2.135e-05, | |
| "loss": 0.7824, | |
| "step": 430 | |
| }, | |
| { | |
| "epoch": 0.21, | |
| "grad_norm": 0.3167509138584137, | |
| "learning_rate": 2.1850000000000003e-05, | |
| "loss": 0.7908, | |
| "step": 440 | |
| }, | |
| { | |
| "epoch": 0.22, | |
| "grad_norm": 0.21845023334026337, | |
| "learning_rate": 2.235e-05, | |
| "loss": 0.7888, | |
| "step": 450 | |
| }, | |
| { | |
| "epoch": 0.22, | |
| "grad_norm": 0.30318892002105713, | |
| "learning_rate": 2.2850000000000003e-05, | |
| "loss": 0.7682, | |
| "step": 460 | |
| }, | |
| { | |
| "epoch": 0.23, | |
| "grad_norm": 0.28828638792037964, | |
| "learning_rate": 2.3350000000000002e-05, | |
| "loss": 0.7588, | |
| "step": 470 | |
| }, | |
| { | |
| "epoch": 0.23, | |
| "grad_norm": 0.2422240674495697, | |
| "learning_rate": 2.385e-05, | |
| "loss": 0.7787, | |
| "step": 480 | |
| }, | |
| { | |
| "epoch": 0.24, | |
| "grad_norm": 0.3579326272010803, | |
| "learning_rate": 2.435e-05, | |
| "loss": 0.7932, | |
| "step": 490 | |
| }, | |
| { | |
| "epoch": 0.24, | |
| "grad_norm": 0.3549540042877197, | |
| "learning_rate": 2.485e-05, | |
| "loss": 0.7634, | |
| "step": 500 | |
| }, | |
| { | |
| "epoch": 0.24, | |
| "eval_loss": 0.7744565606117249, | |
| "eval_runtime": 43.9626, | |
| "eval_samples_per_second": 45.493, | |
| "eval_steps_per_second": 0.364, | |
| "step": 500 | |
| }, | |
| { | |
| "epoch": 0.25, | |
| "grad_norm": 0.2931281328201294, | |
| "learning_rate": 2.5350000000000003e-05, | |
| "loss": 0.7763, | |
| "step": 510 | |
| }, | |
| { | |
| "epoch": 0.25, | |
| "grad_norm": 0.21228083968162537, | |
| "learning_rate": 2.585e-05, | |
| "loss": 0.7748, | |
| "step": 520 | |
| }, | |
| { | |
| "epoch": 0.26, | |
| "grad_norm": 0.26342302560806274, | |
| "learning_rate": 2.6350000000000004e-05, | |
| "loss": 0.789, | |
| "step": 530 | |
| }, | |
| { | |
| "epoch": 0.26, | |
| "grad_norm": 0.2840302288532257, | |
| "learning_rate": 2.6850000000000002e-05, | |
| "loss": 0.768, | |
| "step": 540 | |
| }, | |
| { | |
| "epoch": 0.27, | |
| "grad_norm": 0.273771196603775, | |
| "learning_rate": 2.7350000000000004e-05, | |
| "loss": 0.7707, | |
| "step": 550 | |
| }, | |
| { | |
| "epoch": 0.27, | |
| "grad_norm": 1.6022837162017822, | |
| "learning_rate": 2.7850000000000003e-05, | |
| "loss": 0.7705, | |
| "step": 560 | |
| }, | |
| { | |
| "epoch": 0.28, | |
| "grad_norm": 0.3082919716835022, | |
| "learning_rate": 2.8349999999999998e-05, | |
| "loss": 0.7849, | |
| "step": 570 | |
| }, | |
| { | |
| "epoch": 0.28, | |
| "grad_norm": 0.25341132283210754, | |
| "learning_rate": 2.885e-05, | |
| "loss": 0.7658, | |
| "step": 580 | |
| }, | |
| { | |
| "epoch": 0.29, | |
| "grad_norm": 0.2250708043575287, | |
| "learning_rate": 2.935e-05, | |
| "loss": 0.756, | |
| "step": 590 | |
| }, | |
| { | |
| "epoch": 0.29, | |
| "grad_norm": 0.23522531986236572, | |
| "learning_rate": 2.985e-05, | |
| "loss": 0.7692, | |
| "step": 600 | |
| }, | |
| { | |
| "epoch": 0.29, | |
| "grad_norm": 0.2448827624320984, | |
| "learning_rate": 3.035e-05, | |
| "loss": 0.7635, | |
| "step": 610 | |
| }, | |
| { | |
| "epoch": 0.3, | |
| "grad_norm": 0.19717586040496826, | |
| "learning_rate": 3.0850000000000004e-05, | |
| "loss": 0.7548, | |
| "step": 620 | |
| }, | |
| { | |
| "epoch": 0.3, | |
| "grad_norm": 0.24299098551273346, | |
| "learning_rate": 3.135e-05, | |
| "loss": 0.7673, | |
| "step": 630 | |
| }, | |
| { | |
| "epoch": 0.31, | |
| "grad_norm": 0.2555953562259674, | |
| "learning_rate": 3.185e-05, | |
| "loss": 0.7528, | |
| "step": 640 | |
| }, | |
| { | |
| "epoch": 0.31, | |
| "grad_norm": 0.2102127969264984, | |
| "learning_rate": 3.235e-05, | |
| "loss": 0.7678, | |
| "step": 650 | |
| }, | |
| { | |
| "epoch": 0.32, | |
| "grad_norm": 0.21767646074295044, | |
| "learning_rate": 3.2850000000000006e-05, | |
| "loss": 0.7564, | |
| "step": 660 | |
| }, | |
| { | |
| "epoch": 0.32, | |
| "grad_norm": 0.3582068085670471, | |
| "learning_rate": 3.3350000000000004e-05, | |
| "loss": 0.7645, | |
| "step": 670 | |
| }, | |
| { | |
| "epoch": 0.33, | |
| "grad_norm": 0.27804431319236755, | |
| "learning_rate": 3.385e-05, | |
| "loss": 0.7645, | |
| "step": 680 | |
| }, | |
| { | |
| "epoch": 0.33, | |
| "grad_norm": 0.2464047521352768, | |
| "learning_rate": 3.435e-05, | |
| "loss": 0.7325, | |
| "step": 690 | |
| }, | |
| { | |
| "epoch": 0.34, | |
| "grad_norm": 0.22323842346668243, | |
| "learning_rate": 3.485e-05, | |
| "loss": 0.7579, | |
| "step": 700 | |
| }, | |
| { | |
| "epoch": 0.34, | |
| "grad_norm": 0.28329703211784363, | |
| "learning_rate": 3.535e-05, | |
| "loss": 0.7399, | |
| "step": 710 | |
| }, | |
| { | |
| "epoch": 0.35, | |
| "grad_norm": 0.2471865713596344, | |
| "learning_rate": 3.585e-05, | |
| "loss": 0.765, | |
| "step": 720 | |
| }, | |
| { | |
| "epoch": 0.35, | |
| "grad_norm": 0.314908504486084, | |
| "learning_rate": 3.635e-05, | |
| "loss": 0.7529, | |
| "step": 730 | |
| }, | |
| { | |
| "epoch": 0.36, | |
| "grad_norm": 0.21466995775699615, | |
| "learning_rate": 3.685e-05, | |
| "loss": 0.7365, | |
| "step": 740 | |
| }, | |
| { | |
| "epoch": 0.36, | |
| "grad_norm": 0.2545580565929413, | |
| "learning_rate": 3.735e-05, | |
| "loss": 0.7458, | |
| "step": 750 | |
| }, | |
| { | |
| "epoch": 0.37, | |
| "grad_norm": 0.2651059329509735, | |
| "learning_rate": 3.7850000000000005e-05, | |
| "loss": 0.7578, | |
| "step": 760 | |
| }, | |
| { | |
| "epoch": 0.37, | |
| "grad_norm": 0.2408379763364792, | |
| "learning_rate": 3.8350000000000004e-05, | |
| "loss": 0.7283, | |
| "step": 770 | |
| }, | |
| { | |
| "epoch": 0.38, | |
| "grad_norm": 0.2575875520706177, | |
| "learning_rate": 3.885e-05, | |
| "loss": 0.7367, | |
| "step": 780 | |
| }, | |
| { | |
| "epoch": 0.38, | |
| "grad_norm": 0.28182050585746765, | |
| "learning_rate": 3.935e-05, | |
| "loss": 0.7784, | |
| "step": 790 | |
| }, | |
| { | |
| "epoch": 0.39, | |
| "grad_norm": 0.20579397678375244, | |
| "learning_rate": 3.9850000000000006e-05, | |
| "loss": 0.7533, | |
| "step": 800 | |
| }, | |
| { | |
| "epoch": 0.39, | |
| "grad_norm": 0.1906532198190689, | |
| "learning_rate": 4.0350000000000005e-05, | |
| "loss": 0.7544, | |
| "step": 810 | |
| }, | |
| { | |
| "epoch": 0.4, | |
| "grad_norm": 0.18311142921447754, | |
| "learning_rate": 4.085e-05, | |
| "loss": 0.75, | |
| "step": 820 | |
| }, | |
| { | |
| "epoch": 0.4, | |
| "grad_norm": 0.1910839080810547, | |
| "learning_rate": 4.135e-05, | |
| "loss": 0.7538, | |
| "step": 830 | |
| }, | |
| { | |
| "epoch": 0.41, | |
| "grad_norm": 0.2178005427122116, | |
| "learning_rate": 4.185e-05, | |
| "loss": 0.7456, | |
| "step": 840 | |
| }, | |
| { | |
| "epoch": 0.41, | |
| "grad_norm": 0.27642378211021423, | |
| "learning_rate": 4.235e-05, | |
| "loss": 0.744, | |
| "step": 850 | |
| }, | |
| { | |
| "epoch": 0.42, | |
| "grad_norm": 0.26685312390327454, | |
| "learning_rate": 4.285e-05, | |
| "loss": 0.7611, | |
| "step": 860 | |
| }, | |
| { | |
| "epoch": 0.42, | |
| "grad_norm": 0.2021845579147339, | |
| "learning_rate": 4.335e-05, | |
| "loss": 0.758, | |
| "step": 870 | |
| }, | |
| { | |
| "epoch": 0.43, | |
| "grad_norm": 0.20516808331012726, | |
| "learning_rate": 4.385e-05, | |
| "loss": 0.7404, | |
| "step": 880 | |
| }, | |
| { | |
| "epoch": 0.43, | |
| "grad_norm": 0.20619668066501617, | |
| "learning_rate": 4.435e-05, | |
| "loss": 0.7379, | |
| "step": 890 | |
| }, | |
| { | |
| "epoch": 0.43, | |
| "grad_norm": 0.22439967095851898, | |
| "learning_rate": 4.4850000000000006e-05, | |
| "loss": 0.7495, | |
| "step": 900 | |
| }, | |
| { | |
| "epoch": 0.44, | |
| "grad_norm": 0.2192295491695404, | |
| "learning_rate": 4.5350000000000005e-05, | |
| "loss": 0.7377, | |
| "step": 910 | |
| }, | |
| { | |
| "epoch": 0.44, | |
| "grad_norm": 0.26503369212150574, | |
| "learning_rate": 4.585e-05, | |
| "loss": 0.7481, | |
| "step": 920 | |
| }, | |
| { | |
| "epoch": 0.45, | |
| "grad_norm": 0.2249353975057602, | |
| "learning_rate": 4.635e-05, | |
| "loss": 0.742, | |
| "step": 930 | |
| }, | |
| { | |
| "epoch": 0.45, | |
| "grad_norm": 0.21413160860538483, | |
| "learning_rate": 4.685000000000001e-05, | |
| "loss": 0.7485, | |
| "step": 940 | |
| }, | |
| { | |
| "epoch": 0.46, | |
| "grad_norm": 0.20971478521823883, | |
| "learning_rate": 4.735e-05, | |
| "loss": 0.7339, | |
| "step": 950 | |
| }, | |
| { | |
| "epoch": 0.46, | |
| "grad_norm": 0.20295798778533936, | |
| "learning_rate": 4.785e-05, | |
| "loss": 0.722, | |
| "step": 960 | |
| }, | |
| { | |
| "epoch": 0.47, | |
| "grad_norm": 0.1957596242427826, | |
| "learning_rate": 4.835e-05, | |
| "loss": 0.7538, | |
| "step": 970 | |
| }, | |
| { | |
| "epoch": 0.47, | |
| "grad_norm": 0.18014560639858246, | |
| "learning_rate": 4.885e-05, | |
| "loss": 0.7251, | |
| "step": 980 | |
| }, | |
| { | |
| "epoch": 0.48, | |
| "grad_norm": 0.19334861636161804, | |
| "learning_rate": 4.935e-05, | |
| "loss": 0.7444, | |
| "step": 990 | |
| }, | |
| { | |
| "epoch": 0.48, | |
| "grad_norm": 0.1753440499305725, | |
| "learning_rate": 4.9850000000000006e-05, | |
| "loss": 0.7376, | |
| "step": 1000 | |
| }, | |
| { | |
| "epoch": 0.48, | |
| "eval_loss": 0.73988938331604, | |
| "eval_runtime": 42.9535, | |
| "eval_samples_per_second": 46.562, | |
| "eval_steps_per_second": 0.372, | |
| "step": 1000 | |
| }, | |
| { | |
| "epoch": 0.49, | |
| "grad_norm": 0.24455028772354126, | |
| "learning_rate": 4.99999307775404e-05, | |
| "loss": 0.7404, | |
| "step": 1010 | |
| }, | |
| { | |
| "epoch": 0.49, | |
| "grad_norm": 0.2149570733308792, | |
| "learning_rate": 4.999959172968145e-05, | |
| "loss": 0.7342, | |
| "step": 1020 | |
| }, | |
| { | |
| "epoch": 0.5, | |
| "grad_norm": 0.1931283175945282, | |
| "learning_rate": 4.999897014592085e-05, | |
| "loss": 0.735, | |
| "step": 1030 | |
| }, | |
| { | |
| "epoch": 0.5, | |
| "grad_norm": 0.2331906110048294, | |
| "learning_rate": 4.999806603328352e-05, | |
| "loss": 0.7145, | |
| "step": 1040 | |
| }, | |
| { | |
| "epoch": 0.51, | |
| "grad_norm": 0.21177324652671814, | |
| "learning_rate": 4.999687940198738e-05, | |
| "loss": 0.7308, | |
| "step": 1050 | |
| }, | |
| { | |
| "epoch": 0.51, | |
| "grad_norm": 0.2026621401309967, | |
| "learning_rate": 4.9995410265443286e-05, | |
| "loss": 0.7445, | |
| "step": 1060 | |
| }, | |
| { | |
| "epoch": 0.52, | |
| "grad_norm": 0.19811251759529114, | |
| "learning_rate": 4.999365864025487e-05, | |
| "loss": 0.738, | |
| "step": 1070 | |
| }, | |
| { | |
| "epoch": 0.52, | |
| "grad_norm": 0.1981566846370697, | |
| "learning_rate": 4.9991624546218334e-05, | |
| "loss": 0.7286, | |
| "step": 1080 | |
| }, | |
| { | |
| "epoch": 0.53, | |
| "grad_norm": 0.17122291028499603, | |
| "learning_rate": 4.9989308006322235e-05, | |
| "loss": 0.737, | |
| "step": 1090 | |
| }, | |
| { | |
| "epoch": 0.53, | |
| "grad_norm": 0.2182038575410843, | |
| "learning_rate": 4.9986709046747225e-05, | |
| "loss": 0.7354, | |
| "step": 1100 | |
| }, | |
| { | |
| "epoch": 0.54, | |
| "grad_norm": 0.1823730617761612, | |
| "learning_rate": 4.9983827696865764e-05, | |
| "loss": 0.725, | |
| "step": 1110 | |
| }, | |
| { | |
| "epoch": 0.54, | |
| "grad_norm": 0.2195768803358078, | |
| "learning_rate": 4.998066398924177e-05, | |
| "loss": 0.7173, | |
| "step": 1120 | |
| }, | |
| { | |
| "epoch": 0.55, | |
| "grad_norm": 0.18349726498126984, | |
| "learning_rate": 4.997721795963028e-05, | |
| "loss": 0.7159, | |
| "step": 1130 | |
| }, | |
| { | |
| "epoch": 0.55, | |
| "grad_norm": 0.2440386265516281, | |
| "learning_rate": 4.997348964697699e-05, | |
| "loss": 0.7171, | |
| "step": 1140 | |
| }, | |
| { | |
| "epoch": 0.56, | |
| "grad_norm": 0.20371629297733307, | |
| "learning_rate": 4.9969479093417894e-05, | |
| "loss": 0.7179, | |
| "step": 1150 | |
| }, | |
| { | |
| "epoch": 0.56, | |
| "grad_norm": 0.2982828617095947, | |
| "learning_rate": 4.9965186344278746e-05, | |
| "loss": 0.7222, | |
| "step": 1160 | |
| }, | |
| { | |
| "epoch": 0.57, | |
| "grad_norm": 0.2097097784280777, | |
| "learning_rate": 4.9960611448074555e-05, | |
| "loss": 0.7174, | |
| "step": 1170 | |
| }, | |
| { | |
| "epoch": 0.57, | |
| "grad_norm": 0.2264368236064911, | |
| "learning_rate": 4.995575445650907e-05, | |
| "loss": 0.7079, | |
| "step": 1180 | |
| }, | |
| { | |
| "epoch": 0.58, | |
| "grad_norm": 0.24737605452537537, | |
| "learning_rate": 4.995061542447418e-05, | |
| "loss": 0.7272, | |
| "step": 1190 | |
| }, | |
| { | |
| "epoch": 0.58, | |
| "grad_norm": 0.19530241191387177, | |
| "learning_rate": 4.9945194410049254e-05, | |
| "loss": 0.7216, | |
| "step": 1200 | |
| }, | |
| { | |
| "epoch": 0.58, | |
| "grad_norm": 0.2791181802749634, | |
| "learning_rate": 4.993949147450055e-05, | |
| "loss": 0.712, | |
| "step": 1210 | |
| }, | |
| { | |
| "epoch": 0.59, | |
| "grad_norm": 0.2095661610364914, | |
| "learning_rate": 4.993350668228049e-05, | |
| "loss": 0.7067, | |
| "step": 1220 | |
| }, | |
| { | |
| "epoch": 0.59, | |
| "grad_norm": 0.17129307985305786, | |
| "learning_rate": 4.9927240101026915e-05, | |
| "loss": 0.7087, | |
| "step": 1230 | |
| }, | |
| { | |
| "epoch": 0.6, | |
| "grad_norm": 0.2154540866613388, | |
| "learning_rate": 4.992069180156235e-05, | |
| "loss": 0.7411, | |
| "step": 1240 | |
| }, | |
| { | |
| "epoch": 0.6, | |
| "grad_norm": 0.22642803192138672, | |
| "learning_rate": 4.991386185789319e-05, | |
| "loss": 0.7266, | |
| "step": 1250 | |
| }, | |
| { | |
| "epoch": 0.61, | |
| "grad_norm": 0.19676081836223602, | |
| "learning_rate": 4.9906750347208866e-05, | |
| "loss": 0.7012, | |
| "step": 1260 | |
| }, | |
| { | |
| "epoch": 0.61, | |
| "grad_norm": 0.2095811814069748, | |
| "learning_rate": 4.989935734988098e-05, | |
| "loss": 0.7162, | |
| "step": 1270 | |
| }, | |
| { | |
| "epoch": 0.62, | |
| "grad_norm": 0.21455398201942444, | |
| "learning_rate": 4.9891682949462385e-05, | |
| "loss": 0.7177, | |
| "step": 1280 | |
| }, | |
| { | |
| "epoch": 0.62, | |
| "grad_norm": 0.20205985009670258, | |
| "learning_rate": 4.988372723268623e-05, | |
| "loss": 0.718, | |
| "step": 1290 | |
| }, | |
| { | |
| "epoch": 0.63, | |
| "grad_norm": 0.18780440092086792, | |
| "learning_rate": 4.987549028946502e-05, | |
| "loss": 0.7103, | |
| "step": 1300 | |
| }, | |
| { | |
| "epoch": 0.63, | |
| "grad_norm": 0.20362792909145355, | |
| "learning_rate": 4.986697221288956e-05, | |
| "loss": 0.7304, | |
| "step": 1310 | |
| }, | |
| { | |
| "epoch": 0.64, | |
| "grad_norm": 0.2820493280887604, | |
| "learning_rate": 4.985817309922792e-05, | |
| "loss": 0.7182, | |
| "step": 1320 | |
| }, | |
| { | |
| "epoch": 0.64, | |
| "grad_norm": 0.20787598192691803, | |
| "learning_rate": 4.984909304792435e-05, | |
| "loss": 0.7142, | |
| "step": 1330 | |
| }, | |
| { | |
| "epoch": 0.65, | |
| "grad_norm": 0.21624509990215302, | |
| "learning_rate": 4.983973216159813e-05, | |
| "loss": 0.7208, | |
| "step": 1340 | |
| }, | |
| { | |
| "epoch": 0.65, | |
| "grad_norm": 0.19343602657318115, | |
| "learning_rate": 4.983009054604246e-05, | |
| "loss": 0.7097, | |
| "step": 1350 | |
| }, | |
| { | |
| "epoch": 0.66, | |
| "grad_norm": 0.254402756690979, | |
| "learning_rate": 4.9820168310223215e-05, | |
| "loss": 0.7193, | |
| "step": 1360 | |
| }, | |
| { | |
| "epoch": 0.66, | |
| "grad_norm": 0.1940561830997467, | |
| "learning_rate": 4.980996556627774e-05, | |
| "loss": 0.6988, | |
| "step": 1370 | |
| }, | |
| { | |
| "epoch": 0.67, | |
| "grad_norm": 0.22593462467193604, | |
| "learning_rate": 4.9799482429513576e-05, | |
| "loss": 0.7054, | |
| "step": 1380 | |
| }, | |
| { | |
| "epoch": 0.67, | |
| "grad_norm": 0.22020135819911957, | |
| "learning_rate": 4.978871901840716e-05, | |
| "loss": 0.7221, | |
| "step": 1390 | |
| }, | |
| { | |
| "epoch": 0.68, | |
| "grad_norm": 0.22807644307613373, | |
| "learning_rate": 4.977767545460248e-05, | |
| "loss": 0.7213, | |
| "step": 1400 | |
| }, | |
| { | |
| "epoch": 0.68, | |
| "grad_norm": 0.1925562173128128, | |
| "learning_rate": 4.9766351862909703e-05, | |
| "loss": 0.7127, | |
| "step": 1410 | |
| }, | |
| { | |
| "epoch": 0.69, | |
| "grad_norm": 0.20845136046409607, | |
| "learning_rate": 4.9754748371303775e-05, | |
| "loss": 0.7207, | |
| "step": 1420 | |
| }, | |
| { | |
| "epoch": 0.69, | |
| "grad_norm": 0.18583528697490692, | |
| "learning_rate": 4.974286511092294e-05, | |
| "loss": 0.7098, | |
| "step": 1430 | |
| }, | |
| { | |
| "epoch": 0.7, | |
| "grad_norm": 0.2019319236278534, | |
| "learning_rate": 4.973070221606732e-05, | |
| "loss": 0.7029, | |
| "step": 1440 | |
| }, | |
| { | |
| "epoch": 0.7, | |
| "grad_norm": 0.1966458261013031, | |
| "learning_rate": 4.971825982419731e-05, | |
| "loss": 0.7241, | |
| "step": 1450 | |
| }, | |
| { | |
| "epoch": 0.71, | |
| "grad_norm": 0.20924623310565948, | |
| "learning_rate": 4.970553807593214e-05, | |
| "loss": 0.7126, | |
| "step": 1460 | |
| }, | |
| { | |
| "epoch": 0.71, | |
| "grad_norm": 0.20655803382396698, | |
| "learning_rate": 4.969253711504814e-05, | |
| "loss": 0.6903, | |
| "step": 1470 | |
| }, | |
| { | |
| "epoch": 0.72, | |
| "grad_norm": 0.2179209142923355, | |
| "learning_rate": 4.967925708847727e-05, | |
| "loss": 0.683, | |
| "step": 1480 | |
| }, | |
| { | |
| "epoch": 0.72, | |
| "grad_norm": 0.2198410928249359, | |
| "learning_rate": 4.966569814630534e-05, | |
| "loss": 0.7087, | |
| "step": 1490 | |
| }, | |
| { | |
| "epoch": 0.72, | |
| "grad_norm": 0.17970815300941467, | |
| "learning_rate": 4.9651860441770374e-05, | |
| "loss": 0.7006, | |
| "step": 1500 | |
| }, | |
| { | |
| "epoch": 0.72, | |
| "eval_loss": 0.7137542366981506, | |
| "eval_runtime": 43.0052, | |
| "eval_samples_per_second": 46.506, | |
| "eval_steps_per_second": 0.372, | |
| "step": 1500 | |
| }, | |
| { | |
| "epoch": 0.73, | |
| "grad_norm": 0.1931023746728897, | |
| "learning_rate": 4.963774413126086e-05, | |
| "loss": 0.7245, | |
| "step": 1510 | |
| }, | |
| { | |
| "epoch": 0.73, | |
| "grad_norm": 0.21812495589256287, | |
| "learning_rate": 4.9623349374313973e-05, | |
| "loss": 0.7119, | |
| "step": 1520 | |
| }, | |
| { | |
| "epoch": 0.74, | |
| "grad_norm": 0.20784322917461395, | |
| "learning_rate": 4.960867633361381e-05, | |
| "loss": 0.704, | |
| "step": 1530 | |
| }, | |
| { | |
| "epoch": 0.74, | |
| "grad_norm": 0.18697650730609894, | |
| "learning_rate": 4.9593725174989507e-05, | |
| "loss": 0.7129, | |
| "step": 1540 | |
| }, | |
| { | |
| "epoch": 0.75, | |
| "grad_norm": 0.25061744451522827, | |
| "learning_rate": 4.957849606741338e-05, | |
| "loss": 0.6816, | |
| "step": 1550 | |
| }, | |
| { | |
| "epoch": 0.75, | |
| "grad_norm": 0.1888163685798645, | |
| "learning_rate": 4.956298918299903e-05, | |
| "loss": 0.6905, | |
| "step": 1560 | |
| }, | |
| { | |
| "epoch": 0.76, | |
| "grad_norm": 0.1991235315799713, | |
| "learning_rate": 4.954720469699938e-05, | |
| "loss": 0.7258, | |
| "step": 1570 | |
| }, | |
| { | |
| "epoch": 0.76, | |
| "grad_norm": 0.18779252469539642, | |
| "learning_rate": 4.953114278780471e-05, | |
| "loss": 0.6918, | |
| "step": 1580 | |
| }, | |
| { | |
| "epoch": 0.77, | |
| "grad_norm": 0.21268677711486816, | |
| "learning_rate": 4.951480363694061e-05, | |
| "loss": 0.6995, | |
| "step": 1590 | |
| }, | |
| { | |
| "epoch": 0.77, | |
| "grad_norm": 0.20242677628993988, | |
| "learning_rate": 4.949818742906597e-05, | |
| "loss": 0.7132, | |
| "step": 1600 | |
| }, | |
| { | |
| "epoch": 0.78, | |
| "grad_norm": 0.2314455509185791, | |
| "learning_rate": 4.9481294351970874e-05, | |
| "loss": 0.6866, | |
| "step": 1610 | |
| }, | |
| { | |
| "epoch": 0.78, | |
| "grad_norm": 0.19638539850711823, | |
| "learning_rate": 4.9464124596574465e-05, | |
| "loss": 0.6993, | |
| "step": 1620 | |
| }, | |
| { | |
| "epoch": 0.79, | |
| "grad_norm": 0.185603529214859, | |
| "learning_rate": 4.944667835692281e-05, | |
| "loss": 0.6971, | |
| "step": 1630 | |
| }, | |
| { | |
| "epoch": 0.79, | |
| "grad_norm": 0.1594298928976059, | |
| "learning_rate": 4.942895583018669e-05, | |
| "loss": 0.7258, | |
| "step": 1640 | |
| }, | |
| { | |
| "epoch": 0.8, | |
| "grad_norm": 0.21844029426574707, | |
| "learning_rate": 4.9410957216659374e-05, | |
| "loss": 0.7022, | |
| "step": 1650 | |
| }, | |
| { | |
| "epoch": 0.8, | |
| "grad_norm": 0.19146642088890076, | |
| "learning_rate": 4.9392682719754364e-05, | |
| "loss": 0.6954, | |
| "step": 1660 | |
| }, | |
| { | |
| "epoch": 0.81, | |
| "grad_norm": 0.20112347602844238, | |
| "learning_rate": 4.9374132546003096e-05, | |
| "loss": 0.7017, | |
| "step": 1670 | |
| }, | |
| { | |
| "epoch": 0.81, | |
| "grad_norm": 0.22593769431114197, | |
| "learning_rate": 4.935530690505259e-05, | |
| "loss": 0.7397, | |
| "step": 1680 | |
| }, | |
| { | |
| "epoch": 0.82, | |
| "grad_norm": 0.2611504793167114, | |
| "learning_rate": 4.933620600966311e-05, | |
| "loss": 0.7048, | |
| "step": 1690 | |
| }, | |
| { | |
| "epoch": 0.82, | |
| "grad_norm": 0.23995471000671387, | |
| "learning_rate": 4.931683007570571e-05, | |
| "loss": 0.709, | |
| "step": 1700 | |
| }, | |
| { | |
| "epoch": 0.83, | |
| "grad_norm": 0.22278735041618347, | |
| "learning_rate": 4.9297179322159856e-05, | |
| "loss": 0.7116, | |
| "step": 1710 | |
| }, | |
| { | |
| "epoch": 0.83, | |
| "grad_norm": 0.2096502035856247, | |
| "learning_rate": 4.927725397111093e-05, | |
| "loss": 0.7061, | |
| "step": 1720 | |
| }, | |
| { | |
| "epoch": 0.84, | |
| "grad_norm": 0.22335517406463623, | |
| "learning_rate": 4.9257054247747644e-05, | |
| "loss": 0.6982, | |
| "step": 1730 | |
| }, | |
| { | |
| "epoch": 0.84, | |
| "grad_norm": 0.18597687780857086, | |
| "learning_rate": 4.923658038035965e-05, | |
| "loss": 0.6927, | |
| "step": 1740 | |
| }, | |
| { | |
| "epoch": 0.85, | |
| "grad_norm": 0.1971946507692337, | |
| "learning_rate": 4.9215832600334816e-05, | |
| "loss": 0.6768, | |
| "step": 1750 | |
| }, | |
| { | |
| "epoch": 0.85, | |
| "grad_norm": 0.2101408839225769, | |
| "learning_rate": 4.9194811142156674e-05, | |
| "loss": 0.6933, | |
| "step": 1760 | |
| }, | |
| { | |
| "epoch": 0.86, | |
| "grad_norm": 0.17352430522441864, | |
| "learning_rate": 4.917351624340178e-05, | |
| "loss": 0.6918, | |
| "step": 1770 | |
| }, | |
| { | |
| "epoch": 0.86, | |
| "grad_norm": 0.18568973243236542, | |
| "learning_rate": 4.915194814473699e-05, | |
| "loss": 0.6711, | |
| "step": 1780 | |
| }, | |
| { | |
| "epoch": 0.87, | |
| "grad_norm": 0.22991472482681274, | |
| "learning_rate": 4.913010708991679e-05, | |
| "loss": 0.6938, | |
| "step": 1790 | |
| }, | |
| { | |
| "epoch": 0.87, | |
| "grad_norm": 0.201872780919075, | |
| "learning_rate": 4.910799332578048e-05, | |
| "loss": 0.7051, | |
| "step": 1800 | |
| }, | |
| { | |
| "epoch": 0.87, | |
| "grad_norm": 0.2069864720106125, | |
| "learning_rate": 4.908560710224943e-05, | |
| "loss": 0.6983, | |
| "step": 1810 | |
| }, | |
| { | |
| "epoch": 0.88, | |
| "grad_norm": 0.2075955718755722, | |
| "learning_rate": 4.906294867232426e-05, | |
| "loss": 0.6996, | |
| "step": 1820 | |
| }, | |
| { | |
| "epoch": 0.88, | |
| "grad_norm": 0.19524317979812622, | |
| "learning_rate": 4.904001829208194e-05, | |
| "loss": 0.691, | |
| "step": 1830 | |
| }, | |
| { | |
| "epoch": 0.89, | |
| "grad_norm": 0.1967206597328186, | |
| "learning_rate": 4.901681622067293e-05, | |
| "loss": 0.6964, | |
| "step": 1840 | |
| }, | |
| { | |
| "epoch": 0.89, | |
| "grad_norm": 0.2195650041103363, | |
| "learning_rate": 4.899334272031825e-05, | |
| "loss": 0.6982, | |
| "step": 1850 | |
| }, | |
| { | |
| "epoch": 0.9, | |
| "grad_norm": 0.23072399199008942, | |
| "learning_rate": 4.8969598056306475e-05, | |
| "loss": 0.6838, | |
| "step": 1860 | |
| }, | |
| { | |
| "epoch": 0.9, | |
| "grad_norm": 0.1950792670249939, | |
| "learning_rate": 4.89455824969908e-05, | |
| "loss": 0.6984, | |
| "step": 1870 | |
| }, | |
| { | |
| "epoch": 0.91, | |
| "grad_norm": 0.2289438396692276, | |
| "learning_rate": 4.892129631378597e-05, | |
| "loss": 0.7012, | |
| "step": 1880 | |
| }, | |
| { | |
| "epoch": 0.91, | |
| "grad_norm": 0.22316494584083557, | |
| "learning_rate": 4.8896739781165215e-05, | |
| "loss": 0.6904, | |
| "step": 1890 | |
| }, | |
| { | |
| "epoch": 0.92, | |
| "grad_norm": 0.24066634476184845, | |
| "learning_rate": 4.8871913176657135e-05, | |
| "loss": 0.6974, | |
| "step": 1900 | |
| }, | |
| { | |
| "epoch": 0.92, | |
| "grad_norm": 0.18922199308872223, | |
| "learning_rate": 4.8846816780842606e-05, | |
| "loss": 0.686, | |
| "step": 1910 | |
| }, | |
| { | |
| "epoch": 0.93, | |
| "grad_norm": 0.2425553798675537, | |
| "learning_rate": 4.882145087735158e-05, | |
| "loss": 0.715, | |
| "step": 1920 | |
| }, | |
| { | |
| "epoch": 0.93, | |
| "grad_norm": 0.2267230898141861, | |
| "learning_rate": 4.879581575285988e-05, | |
| "loss": 0.698, | |
| "step": 1930 | |
| }, | |
| { | |
| "epoch": 0.94, | |
| "grad_norm": 0.22119645774364471, | |
| "learning_rate": 4.876991169708592e-05, | |
| "loss": 0.6942, | |
| "step": 1940 | |
| }, | |
| { | |
| "epoch": 0.94, | |
| "grad_norm": 0.22854161262512207, | |
| "learning_rate": 4.874373900278756e-05, | |
| "loss": 0.6912, | |
| "step": 1950 | |
| }, | |
| { | |
| "epoch": 0.95, | |
| "grad_norm": 0.1927136778831482, | |
| "learning_rate": 4.871729796575863e-05, | |
| "loss": 0.6829, | |
| "step": 1960 | |
| }, | |
| { | |
| "epoch": 0.95, | |
| "grad_norm": 0.2238394170999527, | |
| "learning_rate": 4.869058888482571e-05, | |
| "loss": 0.6831, | |
| "step": 1970 | |
| }, | |
| { | |
| "epoch": 0.96, | |
| "grad_norm": 0.17643775045871735, | |
| "learning_rate": 4.86636120618447e-05, | |
| "loss": 0.6875, | |
| "step": 1980 | |
| }, | |
| { | |
| "epoch": 0.96, | |
| "grad_norm": 0.2161882072687149, | |
| "learning_rate": 4.863636780169742e-05, | |
| "loss": 0.6893, | |
| "step": 1990 | |
| }, | |
| { | |
| "epoch": 0.97, | |
| "grad_norm": 0.20689886808395386, | |
| "learning_rate": 4.860885641228815e-05, | |
| "loss": 0.6721, | |
| "step": 2000 | |
| }, | |
| { | |
| "epoch": 0.97, | |
| "eval_loss": 0.701475203037262, | |
| "eval_runtime": 43.016, | |
| "eval_samples_per_second": 46.494, | |
| "eval_steps_per_second": 0.372, | |
| "step": 2000 | |
| }, | |
| { | |
| "epoch": 0.97, | |
| "grad_norm": 0.18866655230522156, | |
| "learning_rate": 4.8581078204540196e-05, | |
| "loss": 0.6941, | |
| "step": 2010 | |
| }, | |
| { | |
| "epoch": 0.98, | |
| "grad_norm": 0.20633092522621155, | |
| "learning_rate": 4.855303349239231e-05, | |
| "loss": 0.6943, | |
| "step": 2020 | |
| }, | |
| { | |
| "epoch": 0.98, | |
| "grad_norm": 0.2335944026708603, | |
| "learning_rate": 4.85247225927952e-05, | |
| "loss": 0.6986, | |
| "step": 2030 | |
| }, | |
| { | |
| "epoch": 0.99, | |
| "grad_norm": 0.18950659036636353, | |
| "learning_rate": 4.8496145825707905e-05, | |
| "loss": 0.6894, | |
| "step": 2040 | |
| }, | |
| { | |
| "epoch": 0.99, | |
| "grad_norm": 0.22117875516414642, | |
| "learning_rate": 4.8467303514094204e-05, | |
| "loss": 0.6956, | |
| "step": 2050 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "grad_norm": 0.23478396236896515, | |
| "learning_rate": 4.8438195983918967e-05, | |
| "loss": 0.6919, | |
| "step": 2060 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "grad_norm": 0.24754443764686584, | |
| "learning_rate": 4.8408823564144454e-05, | |
| "loss": 0.6904, | |
| "step": 2070 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "grad_norm": 0.19141903519630432, | |
| "learning_rate": 4.837918658672661e-05, | |
| "loss": 0.6914, | |
| "step": 2080 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "grad_norm": 0.21387813985347748, | |
| "learning_rate": 4.834928538661131e-05, | |
| "loss": 0.6965, | |
| "step": 2090 | |
| }, | |
| { | |
| "epoch": 1.01, | |
| "grad_norm": 0.20495550334453583, | |
| "learning_rate": 4.831912030173058e-05, | |
| "loss": 0.6949, | |
| "step": 2100 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "grad_norm": 0.35174649953842163, | |
| "learning_rate": 4.8288691672998765e-05, | |
| "loss": 0.6813, | |
| "step": 2110 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "grad_norm": 0.2461618334054947, | |
| "learning_rate": 4.825799984430867e-05, | |
| "loss": 0.6687, | |
| "step": 2120 | |
| }, | |
| { | |
| "epoch": 1.03, | |
| "grad_norm": 0.20039792358875275, | |
| "learning_rate": 4.8227045162527714e-05, | |
| "loss": 0.6737, | |
| "step": 2130 | |
| }, | |
| { | |
| "epoch": 1.03, | |
| "grad_norm": 0.19774436950683594, | |
| "learning_rate": 4.8195827977493955e-05, | |
| "loss": 0.6815, | |
| "step": 2140 | |
| }, | |
| { | |
| "epoch": 1.04, | |
| "grad_norm": 0.21472251415252686, | |
| "learning_rate": 4.8164348642012194e-05, | |
| "loss": 0.6957, | |
| "step": 2150 | |
| }, | |
| { | |
| "epoch": 1.04, | |
| "grad_norm": 0.1990274041891098, | |
| "learning_rate": 4.813260751184992e-05, | |
| "loss": 0.6916, | |
| "step": 2160 | |
| }, | |
| { | |
| "epoch": 1.05, | |
| "grad_norm": 0.20387563109397888, | |
| "learning_rate": 4.810060494573335e-05, | |
| "loss": 0.6814, | |
| "step": 2170 | |
| }, | |
| { | |
| "epoch": 1.05, | |
| "grad_norm": 0.1792985051870346, | |
| "learning_rate": 4.806834130534336e-05, | |
| "loss": 0.698, | |
| "step": 2180 | |
| }, | |
| { | |
| "epoch": 1.06, | |
| "grad_norm": 0.22255714237689972, | |
| "learning_rate": 4.803581695531134e-05, | |
| "loss": 0.6765, | |
| "step": 2190 | |
| }, | |
| { | |
| "epoch": 1.06, | |
| "grad_norm": 0.19549421966075897, | |
| "learning_rate": 4.8003032263215185e-05, | |
| "loss": 0.6867, | |
| "step": 2200 | |
| }, | |
| { | |
| "epoch": 1.07, | |
| "grad_norm": 0.1929677277803421, | |
| "learning_rate": 4.796998759957504e-05, | |
| "loss": 0.6935, | |
| "step": 2210 | |
| }, | |
| { | |
| "epoch": 1.07, | |
| "grad_norm": 0.22705228626728058, | |
| "learning_rate": 4.793668333784915e-05, | |
| "loss": 0.6897, | |
| "step": 2220 | |
| }, | |
| { | |
| "epoch": 1.08, | |
| "grad_norm": 0.21798522770404816, | |
| "learning_rate": 4.790311985442966e-05, | |
| "loss": 0.6694, | |
| "step": 2230 | |
| }, | |
| { | |
| "epoch": 1.08, | |
| "grad_norm": 0.19095788896083832, | |
| "learning_rate": 4.7869297528638315e-05, | |
| "loss": 0.6699, | |
| "step": 2240 | |
| }, | |
| { | |
| "epoch": 1.09, | |
| "grad_norm": 0.23002374172210693, | |
| "learning_rate": 4.7835216742722225e-05, | |
| "loss": 0.6807, | |
| "step": 2250 | |
| }, | |
| { | |
| "epoch": 1.09, | |
| "grad_norm": 0.2107260823249817, | |
| "learning_rate": 4.780087788184947e-05, | |
| "loss": 0.6855, | |
| "step": 2260 | |
| }, | |
| { | |
| "epoch": 1.1, | |
| "grad_norm": 0.23853424191474915, | |
| "learning_rate": 4.776628133410487e-05, | |
| "loss": 0.6946, | |
| "step": 2270 | |
| }, | |
| { | |
| "epoch": 1.1, | |
| "grad_norm": 0.25372380018234253, | |
| "learning_rate": 4.7731427490485455e-05, | |
| "loss": 0.6724, | |
| "step": 2280 | |
| }, | |
| { | |
| "epoch": 1.11, | |
| "grad_norm": 0.182255819439888, | |
| "learning_rate": 4.7696316744896146e-05, | |
| "loss": 0.6976, | |
| "step": 2290 | |
| }, | |
| { | |
| "epoch": 1.11, | |
| "grad_norm": 0.20137862861156464, | |
| "learning_rate": 4.7660949494145276e-05, | |
| "loss": 0.6853, | |
| "step": 2300 | |
| }, | |
| { | |
| "epoch": 1.12, | |
| "grad_norm": 0.19899927079677582, | |
| "learning_rate": 4.7625326137940106e-05, | |
| "loss": 0.6696, | |
| "step": 2310 | |
| }, | |
| { | |
| "epoch": 1.12, | |
| "grad_norm": 0.24147948622703552, | |
| "learning_rate": 4.758944707888228e-05, | |
| "loss": 0.6615, | |
| "step": 2320 | |
| }, | |
| { | |
| "epoch": 1.13, | |
| "grad_norm": 0.19519510865211487, | |
| "learning_rate": 4.7553312722463305e-05, | |
| "loss": 0.685, | |
| "step": 2330 | |
| }, | |
| { | |
| "epoch": 1.13, | |
| "grad_norm": 0.20128916203975677, | |
| "learning_rate": 4.751692347705998e-05, | |
| "loss": 0.6829, | |
| "step": 2340 | |
| }, | |
| { | |
| "epoch": 1.14, | |
| "grad_norm": 0.16112156212329865, | |
| "learning_rate": 4.748027975392976e-05, | |
| "loss": 0.6714, | |
| "step": 2350 | |
| }, | |
| { | |
| "epoch": 1.14, | |
| "grad_norm": 0.2659178674221039, | |
| "learning_rate": 4.744338196720608e-05, | |
| "loss": 0.6834, | |
| "step": 2360 | |
| }, | |
| { | |
| "epoch": 1.15, | |
| "grad_norm": 0.20477250218391418, | |
| "learning_rate": 4.740623053389374e-05, | |
| "loss": 0.6664, | |
| "step": 2370 | |
| }, | |
| { | |
| "epoch": 1.15, | |
| "grad_norm": 0.18069259822368622, | |
| "learning_rate": 4.7368825873864154e-05, | |
| "loss": 0.669, | |
| "step": 2380 | |
| }, | |
| { | |
| "epoch": 1.16, | |
| "grad_norm": 0.2146090418100357, | |
| "learning_rate": 4.733116840985058e-05, | |
| "loss": 0.6836, | |
| "step": 2390 | |
| }, | |
| { | |
| "epoch": 1.16, | |
| "grad_norm": 0.20656266808509827, | |
| "learning_rate": 4.729325856744341e-05, | |
| "loss": 0.6818, | |
| "step": 2400 | |
| }, | |
| { | |
| "epoch": 1.16, | |
| "grad_norm": 0.1764717549085617, | |
| "learning_rate": 4.725509677508528e-05, | |
| "loss": 0.7012, | |
| "step": 2410 | |
| }, | |
| { | |
| "epoch": 1.17, | |
| "grad_norm": 0.19059453904628754, | |
| "learning_rate": 4.721668346406631e-05, | |
| "loss": 0.6885, | |
| "step": 2420 | |
| }, | |
| { | |
| "epoch": 1.17, | |
| "grad_norm": 0.1867195963859558, | |
| "learning_rate": 4.7178019068519165e-05, | |
| "loss": 0.6704, | |
| "step": 2430 | |
| }, | |
| { | |
| "epoch": 1.18, | |
| "grad_norm": 0.19844096899032593, | |
| "learning_rate": 4.713910402541416e-05, | |
| "loss": 0.6746, | |
| "step": 2440 | |
| }, | |
| { | |
| "epoch": 1.18, | |
| "grad_norm": 0.21426193416118622, | |
| "learning_rate": 4.709993877455436e-05, | |
| "loss": 0.6627, | |
| "step": 2450 | |
| }, | |
| { | |
| "epoch": 1.19, | |
| "grad_norm": 0.19326262176036835, | |
| "learning_rate": 4.706052375857058e-05, | |
| "loss": 0.6652, | |
| "step": 2460 | |
| }, | |
| { | |
| "epoch": 1.19, | |
| "grad_norm": 0.229305237531662, | |
| "learning_rate": 4.7020859422916365e-05, | |
| "loss": 0.6746, | |
| "step": 2470 | |
| }, | |
| { | |
| "epoch": 1.2, | |
| "grad_norm": 0.20060321688652039, | |
| "learning_rate": 4.698094621586299e-05, | |
| "loss": 0.6658, | |
| "step": 2480 | |
| }, | |
| { | |
| "epoch": 1.2, | |
| "grad_norm": 0.21455462276935577, | |
| "learning_rate": 4.694078458849438e-05, | |
| "loss": 0.6966, | |
| "step": 2490 | |
| }, | |
| { | |
| "epoch": 1.21, | |
| "grad_norm": 0.18379837274551392, | |
| "learning_rate": 4.690037499470202e-05, | |
| "loss": 0.6753, | |
| "step": 2500 | |
| }, | |
| { | |
| "epoch": 1.21, | |
| "eval_loss": 0.6940956711769104, | |
| "eval_runtime": 42.7889, | |
| "eval_samples_per_second": 46.741, | |
| "eval_steps_per_second": 0.374, | |
| "step": 2500 | |
| }, | |
| { | |
| "epoch": 1.21, | |
| "grad_norm": 0.19967205822467804, | |
| "learning_rate": 4.68597178911798e-05, | |
| "loss": 0.697, | |
| "step": 2510 | |
| }, | |
| { | |
| "epoch": 1.22, | |
| "grad_norm": 0.1688542366027832, | |
| "learning_rate": 4.681881373741888e-05, | |
| "loss": 0.681, | |
| "step": 2520 | |
| }, | |
| { | |
| "epoch": 1.22, | |
| "grad_norm": 0.19124309718608856, | |
| "learning_rate": 4.67776629957025e-05, | |
| "loss": 0.6855, | |
| "step": 2530 | |
| }, | |
| { | |
| "epoch": 1.23, | |
| "grad_norm": 0.22720417380332947, | |
| "learning_rate": 4.6736266131100706e-05, | |
| "loss": 0.6747, | |
| "step": 2540 | |
| }, | |
| { | |
| "epoch": 1.23, | |
| "grad_norm": 0.21285779774188995, | |
| "learning_rate": 4.6694623611465185e-05, | |
| "loss": 0.687, | |
| "step": 2550 | |
| }, | |
| { | |
| "epoch": 1.24, | |
| "grad_norm": 0.1932476907968521, | |
| "learning_rate": 4.6652735907423886e-05, | |
| "loss": 0.6745, | |
| "step": 2560 | |
| }, | |
| { | |
| "epoch": 1.24, | |
| "grad_norm": 0.2050684541463852, | |
| "learning_rate": 4.661060349237574e-05, | |
| "loss": 0.6865, | |
| "step": 2570 | |
| }, | |
| { | |
| "epoch": 1.25, | |
| "grad_norm": 0.20403894782066345, | |
| "learning_rate": 4.656822684248533e-05, | |
| "loss": 0.6897, | |
| "step": 2580 | |
| }, | |
| { | |
| "epoch": 1.25, | |
| "grad_norm": 0.20080254971981049, | |
| "learning_rate": 4.652560643667747e-05, | |
| "loss": 0.6768, | |
| "step": 2590 | |
| }, | |
| { | |
| "epoch": 1.26, | |
| "grad_norm": 0.20147019624710083, | |
| "learning_rate": 4.648274275663183e-05, | |
| "loss": 0.6911, | |
| "step": 2600 | |
| }, | |
| { | |
| "epoch": 1.26, | |
| "grad_norm": 0.17825543880462646, | |
| "learning_rate": 4.643963628677743e-05, | |
| "loss": 0.6635, | |
| "step": 2610 | |
| }, | |
| { | |
| "epoch": 1.27, | |
| "grad_norm": 0.20095126330852509, | |
| "learning_rate": 4.6396287514287275e-05, | |
| "loss": 0.664, | |
| "step": 2620 | |
| }, | |
| { | |
| "epoch": 1.27, | |
| "grad_norm": 0.20084308087825775, | |
| "learning_rate": 4.6352696929072727e-05, | |
| "loss": 0.6653, | |
| "step": 2630 | |
| }, | |
| { | |
| "epoch": 1.28, | |
| "grad_norm": 0.21178288757801056, | |
| "learning_rate": 4.630886502377805e-05, | |
| "loss": 0.6981, | |
| "step": 2640 | |
| }, | |
| { | |
| "epoch": 1.28, | |
| "grad_norm": 0.1963459551334381, | |
| "learning_rate": 4.62647922937748e-05, | |
| "loss": 0.6666, | |
| "step": 2650 | |
| }, | |
| { | |
| "epoch": 1.29, | |
| "grad_norm": 0.21702295541763306, | |
| "learning_rate": 4.6220479237156254e-05, | |
| "loss": 0.6868, | |
| "step": 2660 | |
| }, | |
| { | |
| "epoch": 1.29, | |
| "grad_norm": 0.20558403432369232, | |
| "learning_rate": 4.6175926354731785e-05, | |
| "loss": 0.6794, | |
| "step": 2670 | |
| }, | |
| { | |
| "epoch": 1.3, | |
| "grad_norm": 0.20398521423339844, | |
| "learning_rate": 4.613113415002115e-05, | |
| "loss": 0.69, | |
| "step": 2680 | |
| }, | |
| { | |
| "epoch": 1.3, | |
| "grad_norm": 0.2166917473077774, | |
| "learning_rate": 4.6086103129248846e-05, | |
| "loss": 0.6604, | |
| "step": 2690 | |
| }, | |
| { | |
| "epoch": 1.3, | |
| "grad_norm": 0.2121194452047348, | |
| "learning_rate": 4.604083380133841e-05, | |
| "loss": 0.6653, | |
| "step": 2700 | |
| }, | |
| { | |
| "epoch": 1.31, | |
| "grad_norm": 0.252469927072525, | |
| "learning_rate": 4.5995326677906605e-05, | |
| "loss": 0.6678, | |
| "step": 2710 | |
| }, | |
| { | |
| "epoch": 1.31, | |
| "grad_norm": 0.1864558607339859, | |
| "learning_rate": 4.5949582273257656e-05, | |
| "loss": 0.6806, | |
| "step": 2720 | |
| }, | |
| { | |
| "epoch": 1.32, | |
| "grad_norm": 0.19438904523849487, | |
| "learning_rate": 4.59036011043775e-05, | |
| "loss": 0.6832, | |
| "step": 2730 | |
| }, | |
| { | |
| "epoch": 1.32, | |
| "grad_norm": 0.1954311728477478, | |
| "learning_rate": 4.5857383690927844e-05, | |
| "loss": 0.6772, | |
| "step": 2740 | |
| }, | |
| { | |
| "epoch": 1.33, | |
| "grad_norm": 0.38669708371162415, | |
| "learning_rate": 4.5810930555240364e-05, | |
| "loss": 0.6663, | |
| "step": 2750 | |
| }, | |
| { | |
| "epoch": 1.33, | |
| "grad_norm": 0.5850951075553894, | |
| "learning_rate": 4.576424222231078e-05, | |
| "loss": 0.6578, | |
| "step": 2760 | |
| }, | |
| { | |
| "epoch": 1.34, | |
| "grad_norm": 0.22347845137119293, | |
| "learning_rate": 4.571731921979292e-05, | |
| "loss": 0.6735, | |
| "step": 2770 | |
| }, | |
| { | |
| "epoch": 1.34, | |
| "grad_norm": 0.22810417413711548, | |
| "learning_rate": 4.567016207799276e-05, | |
| "loss": 0.6628, | |
| "step": 2780 | |
| }, | |
| { | |
| "epoch": 1.35, | |
| "grad_norm": 0.18823856115341187, | |
| "learning_rate": 4.562277132986241e-05, | |
| "loss": 0.6803, | |
| "step": 2790 | |
| }, | |
| { | |
| "epoch": 1.35, | |
| "grad_norm": 0.219554141163826, | |
| "learning_rate": 4.557514751099415e-05, | |
| "loss": 0.6701, | |
| "step": 2800 | |
| }, | |
| { | |
| "epoch": 1.36, | |
| "grad_norm": 0.1830976903438568, | |
| "learning_rate": 4.55272911596143e-05, | |
| "loss": 0.6864, | |
| "step": 2810 | |
| }, | |
| { | |
| "epoch": 1.36, | |
| "grad_norm": 0.18995565176010132, | |
| "learning_rate": 4.5479202816577195e-05, | |
| "loss": 0.6683, | |
| "step": 2820 | |
| }, | |
| { | |
| "epoch": 1.37, | |
| "grad_norm": 0.19047056138515472, | |
| "learning_rate": 4.543088302535903e-05, | |
| "loss": 0.6855, | |
| "step": 2830 | |
| }, | |
| { | |
| "epoch": 1.37, | |
| "grad_norm": 0.18707235157489777, | |
| "learning_rate": 4.538233233205177e-05, | |
| "loss": 0.6786, | |
| "step": 2840 | |
| }, | |
| { | |
| "epoch": 1.38, | |
| "grad_norm": 0.19992856681346893, | |
| "learning_rate": 4.533355128535693e-05, | |
| "loss": 0.6676, | |
| "step": 2850 | |
| }, | |
| { | |
| "epoch": 1.38, | |
| "grad_norm": 0.20344853401184082, | |
| "learning_rate": 4.5284540436579395e-05, | |
| "loss": 0.6938, | |
| "step": 2860 | |
| }, | |
| { | |
| "epoch": 1.39, | |
| "grad_norm": 0.1867518275976181, | |
| "learning_rate": 4.5235300339621164e-05, | |
| "loss": 0.6736, | |
| "step": 2870 | |
| }, | |
| { | |
| "epoch": 1.39, | |
| "grad_norm": 0.23014211654663086, | |
| "learning_rate": 4.518583155097517e-05, | |
| "loss": 0.674, | |
| "step": 2880 | |
| }, | |
| { | |
| "epoch": 1.4, | |
| "grad_norm": 0.1905904859304428, | |
| "learning_rate": 4.5136134629718855e-05, | |
| "loss": 0.6704, | |
| "step": 2890 | |
| }, | |
| { | |
| "epoch": 1.4, | |
| "grad_norm": 0.2236865758895874, | |
| "learning_rate": 4.5086210137508e-05, | |
| "loss": 0.6633, | |
| "step": 2900 | |
| }, | |
| { | |
| "epoch": 1.41, | |
| "grad_norm": 0.19701044261455536, | |
| "learning_rate": 4.5036058638570264e-05, | |
| "loss": 0.6626, | |
| "step": 2910 | |
| }, | |
| { | |
| "epoch": 1.41, | |
| "grad_norm": 0.19464708864688873, | |
| "learning_rate": 4.4985680699698855e-05, | |
| "loss": 0.6595, | |
| "step": 2920 | |
| }, | |
| { | |
| "epoch": 1.42, | |
| "grad_norm": 0.19034789502620697, | |
| "learning_rate": 4.493507689024614e-05, | |
| "loss": 0.6688, | |
| "step": 2930 | |
| }, | |
| { | |
| "epoch": 1.42, | |
| "grad_norm": 0.19750766456127167, | |
| "learning_rate": 4.488424778211717e-05, | |
| "loss": 0.6521, | |
| "step": 2940 | |
| }, | |
| { | |
| "epoch": 1.43, | |
| "grad_norm": 0.20544354617595673, | |
| "learning_rate": 4.483319394976325e-05, | |
| "loss": 0.6947, | |
| "step": 2950 | |
| }, | |
| { | |
| "epoch": 1.43, | |
| "grad_norm": 0.22409361600875854, | |
| "learning_rate": 4.478191597017541e-05, | |
| "loss": 0.674, | |
| "step": 2960 | |
| }, | |
| { | |
| "epoch": 1.44, | |
| "grad_norm": 0.19409336149692535, | |
| "learning_rate": 4.473041442287793e-05, | |
| "loss": 0.6978, | |
| "step": 2970 | |
| }, | |
| { | |
| "epoch": 1.44, | |
| "grad_norm": 0.20303764939308167, | |
| "learning_rate": 4.4678689889921755e-05, | |
| "loss": 0.6771, | |
| "step": 2980 | |
| }, | |
| { | |
| "epoch": 1.45, | |
| "grad_norm": 0.193495512008667, | |
| "learning_rate": 4.462674295587794e-05, | |
| "loss": 0.6693, | |
| "step": 2990 | |
| }, | |
| { | |
| "epoch": 1.45, | |
| "grad_norm": 0.18171487748622894, | |
| "learning_rate": 4.457457420783103e-05, | |
| "loss": 0.6716, | |
| "step": 3000 | |
| }, | |
| { | |
| "epoch": 1.45, | |
| "eval_loss": 0.6894034147262573, | |
| "eval_runtime": 43.0285, | |
| "eval_samples_per_second": 46.481, | |
| "eval_steps_per_second": 0.372, | |
| "step": 3000 | |
| }, | |
| { | |
| "epoch": 1.45, | |
| "grad_norm": 0.22247907519340515, | |
| "learning_rate": 4.452218423537241e-05, | |
| "loss": 0.6753, | |
| "step": 3010 | |
| }, | |
| { | |
| "epoch": 1.46, | |
| "grad_norm": 0.2469698041677475, | |
| "learning_rate": 4.4469573630593686e-05, | |
| "loss": 0.6715, | |
| "step": 3020 | |
| }, | |
| { | |
| "epoch": 1.46, | |
| "grad_norm": 0.18804265558719635, | |
| "learning_rate": 4.4416742988079945e-05, | |
| "loss": 0.6546, | |
| "step": 3030 | |
| }, | |
| { | |
| "epoch": 1.47, | |
| "grad_norm": 0.19240662455558777, | |
| "learning_rate": 4.436369290490307e-05, | |
| "loss": 0.6857, | |
| "step": 3040 | |
| }, | |
| { | |
| "epoch": 1.47, | |
| "grad_norm": 0.22342869639396667, | |
| "learning_rate": 4.431042398061499e-05, | |
| "loss": 0.6938, | |
| "step": 3050 | |
| }, | |
| { | |
| "epoch": 1.48, | |
| "grad_norm": 0.20553608238697052, | |
| "learning_rate": 4.425693681724086e-05, | |
| "loss": 0.6687, | |
| "step": 3060 | |
| }, | |
| { | |
| "epoch": 1.48, | |
| "grad_norm": 0.19881366193294525, | |
| "learning_rate": 4.420323201927231e-05, | |
| "loss": 0.6644, | |
| "step": 3070 | |
| }, | |
| { | |
| "epoch": 1.49, | |
| "grad_norm": 0.18151846528053284, | |
| "learning_rate": 4.41493101936606e-05, | |
| "loss": 0.6667, | |
| "step": 3080 | |
| }, | |
| { | |
| "epoch": 1.49, | |
| "grad_norm": 0.1906978040933609, | |
| "learning_rate": 4.409517194980974e-05, | |
| "loss": 0.6578, | |
| "step": 3090 | |
| }, | |
| { | |
| "epoch": 1.5, | |
| "grad_norm": 0.18123480677604675, | |
| "learning_rate": 4.4040817899569644e-05, | |
| "loss": 0.681, | |
| "step": 3100 | |
| }, | |
| { | |
| "epoch": 1.5, | |
| "grad_norm": 0.18974031507968903, | |
| "learning_rate": 4.3986248657229134e-05, | |
| "loss": 0.6645, | |
| "step": 3110 | |
| }, | |
| { | |
| "epoch": 1.51, | |
| "grad_norm": 0.2028564214706421, | |
| "learning_rate": 4.3931464839509105e-05, | |
| "loss": 0.6913, | |
| "step": 3120 | |
| }, | |
| { | |
| "epoch": 1.51, | |
| "grad_norm": 0.17814776301383972, | |
| "learning_rate": 4.387646706555548e-05, | |
| "loss": 0.6949, | |
| "step": 3130 | |
| }, | |
| { | |
| "epoch": 1.52, | |
| "grad_norm": 0.21407385170459747, | |
| "learning_rate": 4.382125595693224e-05, | |
| "loss": 0.6656, | |
| "step": 3140 | |
| }, | |
| { | |
| "epoch": 1.52, | |
| "grad_norm": 0.21102961897850037, | |
| "learning_rate": 4.376583213761438e-05, | |
| "loss": 0.6612, | |
| "step": 3150 | |
| }, | |
| { | |
| "epoch": 1.53, | |
| "grad_norm": 0.19947603344917297, | |
| "learning_rate": 4.371019623398088e-05, | |
| "loss": 0.6894, | |
| "step": 3160 | |
| }, | |
| { | |
| "epoch": 1.53, | |
| "grad_norm": 0.20861291885375977, | |
| "learning_rate": 4.365434887480763e-05, | |
| "loss": 0.6739, | |
| "step": 3170 | |
| }, | |
| { | |
| "epoch": 1.54, | |
| "grad_norm": 0.2098263055086136, | |
| "learning_rate": 4.359829069126028e-05, | |
| "loss": 0.6739, | |
| "step": 3180 | |
| }, | |
| { | |
| "epoch": 1.54, | |
| "grad_norm": 0.19079270958900452, | |
| "learning_rate": 4.3542022316887166e-05, | |
| "loss": 0.6739, | |
| "step": 3190 | |
| }, | |
| { | |
| "epoch": 1.55, | |
| "grad_norm": 0.21104255318641663, | |
| "learning_rate": 4.34855443876121e-05, | |
| "loss": 0.652, | |
| "step": 3200 | |
| }, | |
| { | |
| "epoch": 1.55, | |
| "grad_norm": 0.19791875779628754, | |
| "learning_rate": 4.342885754172721e-05, | |
| "loss": 0.6551, | |
| "step": 3210 | |
| }, | |
| { | |
| "epoch": 1.56, | |
| "grad_norm": 0.22481437027454376, | |
| "learning_rate": 4.337196241988573e-05, | |
| "loss": 0.6649, | |
| "step": 3220 | |
| }, | |
| { | |
| "epoch": 1.56, | |
| "grad_norm": 0.19483722746372223, | |
| "learning_rate": 4.3314859665094745e-05, | |
| "loss": 0.6762, | |
| "step": 3230 | |
| }, | |
| { | |
| "epoch": 1.57, | |
| "grad_norm": 0.18499700725078583, | |
| "learning_rate": 4.3257549922707926e-05, | |
| "loss": 0.6754, | |
| "step": 3240 | |
| }, | |
| { | |
| "epoch": 1.57, | |
| "grad_norm": 0.21884921193122864, | |
| "learning_rate": 4.320003384041823e-05, | |
| "loss": 0.6757, | |
| "step": 3250 | |
| }, | |
| { | |
| "epoch": 1.58, | |
| "grad_norm": 0.20222313702106476, | |
| "learning_rate": 4.314231206825061e-05, | |
| "loss": 0.6527, | |
| "step": 3260 | |
| }, | |
| { | |
| "epoch": 1.58, | |
| "grad_norm": 0.18140584230422974, | |
| "learning_rate": 4.3084385258554635e-05, | |
| "loss": 0.6731, | |
| "step": 3270 | |
| }, | |
| { | |
| "epoch": 1.59, | |
| "grad_norm": 0.21039867401123047, | |
| "learning_rate": 4.302625406599713e-05, | |
| "loss": 0.6722, | |
| "step": 3280 | |
| }, | |
| { | |
| "epoch": 1.59, | |
| "grad_norm": 0.18162797391414642, | |
| "learning_rate": 4.296791914755478e-05, | |
| "loss": 0.6563, | |
| "step": 3290 | |
| }, | |
| { | |
| "epoch": 1.59, | |
| "grad_norm": 0.178864985704422, | |
| "learning_rate": 4.29093811625067e-05, | |
| "loss": 0.6666, | |
| "step": 3300 | |
| }, | |
| { | |
| "epoch": 1.6, | |
| "grad_norm": 0.21808697283267975, | |
| "learning_rate": 4.285064077242699e-05, | |
| "loss": 0.6689, | |
| "step": 3310 | |
| }, | |
| { | |
| "epoch": 1.6, | |
| "grad_norm": 0.20632699131965637, | |
| "learning_rate": 4.279169864117727e-05, | |
| "loss": 0.6664, | |
| "step": 3320 | |
| }, | |
| { | |
| "epoch": 1.61, | |
| "grad_norm": 0.20482878386974335, | |
| "learning_rate": 4.273255543489912e-05, | |
| "loss": 0.6407, | |
| "step": 3330 | |
| }, | |
| { | |
| "epoch": 1.61, | |
| "grad_norm": 0.18613804876804352, | |
| "learning_rate": 4.267321182200664e-05, | |
| "loss": 0.666, | |
| "step": 3340 | |
| }, | |
| { | |
| "epoch": 1.62, | |
| "grad_norm": 0.23219124972820282, | |
| "learning_rate": 4.2613668473178836e-05, | |
| "loss": 0.6802, | |
| "step": 3350 | |
| }, | |
| { | |
| "epoch": 1.62, | |
| "grad_norm": 0.17973752319812775, | |
| "learning_rate": 4.255392606135202e-05, | |
| "loss": 0.6571, | |
| "step": 3360 | |
| }, | |
| { | |
| "epoch": 1.63, | |
| "grad_norm": 0.21650458872318268, | |
| "learning_rate": 4.2493985261712285e-05, | |
| "loss": 0.6727, | |
| "step": 3370 | |
| }, | |
| { | |
| "epoch": 1.63, | |
| "grad_norm": 0.19476266205310822, | |
| "learning_rate": 4.2433846751687815e-05, | |
| "loss": 0.6836, | |
| "step": 3380 | |
| }, | |
| { | |
| "epoch": 1.64, | |
| "grad_norm": 0.20223549008369446, | |
| "learning_rate": 4.237351121094121e-05, | |
| "loss": 0.6666, | |
| "step": 3390 | |
| }, | |
| { | |
| "epoch": 1.64, | |
| "grad_norm": 0.2078579217195511, | |
| "learning_rate": 4.231297932136189e-05, | |
| "loss": 0.673, | |
| "step": 3400 | |
| }, | |
| { | |
| "epoch": 1.65, | |
| "grad_norm": 0.193430095911026, | |
| "learning_rate": 4.225225176705829e-05, | |
| "loss": 0.6718, | |
| "step": 3410 | |
| }, | |
| { | |
| "epoch": 1.65, | |
| "grad_norm": 0.18017272651195526, | |
| "learning_rate": 4.2191329234350194e-05, | |
| "loss": 0.6806, | |
| "step": 3420 | |
| }, | |
| { | |
| "epoch": 1.66, | |
| "grad_norm": 0.20289075374603271, | |
| "learning_rate": 4.213021241176096e-05, | |
| "loss": 0.667, | |
| "step": 3430 | |
| }, | |
| { | |
| "epoch": 1.66, | |
| "grad_norm": 0.2014080286026001, | |
| "learning_rate": 4.2068901990009726e-05, | |
| "loss": 0.6441, | |
| "step": 3440 | |
| }, | |
| { | |
| "epoch": 1.67, | |
| "grad_norm": 0.17385190725326538, | |
| "learning_rate": 4.200739866200363e-05, | |
| "loss": 0.6564, | |
| "step": 3450 | |
| }, | |
| { | |
| "epoch": 1.67, | |
| "grad_norm": 0.20427776873111725, | |
| "learning_rate": 4.1945703122829924e-05, | |
| "loss": 0.6509, | |
| "step": 3460 | |
| }, | |
| { | |
| "epoch": 1.68, | |
| "grad_norm": 0.2093549370765686, | |
| "learning_rate": 4.1883816069748214e-05, | |
| "loss": 0.6741, | |
| "step": 3470 | |
| }, | |
| { | |
| "epoch": 1.68, | |
| "grad_norm": 0.21946479380130768, | |
| "learning_rate": 4.182173820218249e-05, | |
| "loss": 0.6778, | |
| "step": 3480 | |
| }, | |
| { | |
| "epoch": 1.69, | |
| "grad_norm": 0.2244766503572464, | |
| "learning_rate": 4.175947022171326e-05, | |
| "loss": 0.681, | |
| "step": 3490 | |
| }, | |
| { | |
| "epoch": 1.69, | |
| "grad_norm": 0.22939437627792358, | |
| "learning_rate": 4.169701283206961e-05, | |
| "loss": 0.6595, | |
| "step": 3500 | |
| }, | |
| { | |
| "epoch": 1.69, | |
| "eval_loss": 0.6864724159240723, | |
| "eval_runtime": 43.0148, | |
| "eval_samples_per_second": 46.496, | |
| "eval_steps_per_second": 0.372, | |
| "step": 3500 | |
| }, | |
| { | |
| "epoch": 1.7, | |
| "grad_norm": 0.2838324010372162, | |
| "learning_rate": 4.163436673912127e-05, | |
| "loss": 0.666, | |
| "step": 3510 | |
| }, | |
| { | |
| "epoch": 1.7, | |
| "grad_norm": 0.1862134337425232, | |
| "learning_rate": 4.1571532650870614e-05, | |
| "loss": 0.6671, | |
| "step": 3520 | |
| }, | |
| { | |
| "epoch": 1.71, | |
| "grad_norm": 0.2170250117778778, | |
| "learning_rate": 4.1508511277444674e-05, | |
| "loss": 0.6566, | |
| "step": 3530 | |
| }, | |
| { | |
| "epoch": 1.71, | |
| "grad_norm": 0.17831502854824066, | |
| "learning_rate": 4.1445303331087096e-05, | |
| "loss": 0.6776, | |
| "step": 3540 | |
| }, | |
| { | |
| "epoch": 1.72, | |
| "grad_norm": 0.17921501398086548, | |
| "learning_rate": 4.138190952615012e-05, | |
| "loss": 0.6876, | |
| "step": 3550 | |
| }, | |
| { | |
| "epoch": 1.72, | |
| "grad_norm": 0.19673947989940643, | |
| "learning_rate": 4.131833057908648e-05, | |
| "loss": 0.6596, | |
| "step": 3560 | |
| }, | |
| { | |
| "epoch": 1.73, | |
| "grad_norm": 0.1829831749200821, | |
| "learning_rate": 4.1254567208441296e-05, | |
| "loss": 0.6712, | |
| "step": 3570 | |
| }, | |
| { | |
| "epoch": 1.73, | |
| "grad_norm": 0.3023044764995575, | |
| "learning_rate": 4.119062013484402e-05, | |
| "loss": 0.6747, | |
| "step": 3580 | |
| }, | |
| { | |
| "epoch": 1.74, | |
| "grad_norm": 0.21133002638816833, | |
| "learning_rate": 4.11264900810002e-05, | |
| "loss": 0.674, | |
| "step": 3590 | |
| }, | |
| { | |
| "epoch": 1.74, | |
| "grad_norm": 0.19868247210979462, | |
| "learning_rate": 4.106217777168339e-05, | |
| "loss": 0.68, | |
| "step": 3600 | |
| }, | |
| { | |
| "epoch": 1.74, | |
| "grad_norm": 0.21643079817295074, | |
| "learning_rate": 4.0997683933726894e-05, | |
| "loss": 0.6555, | |
| "step": 3610 | |
| }, | |
| { | |
| "epoch": 1.75, | |
| "grad_norm": 0.1953793615102768, | |
| "learning_rate": 4.093300929601562e-05, | |
| "loss": 0.6704, | |
| "step": 3620 | |
| }, | |
| { | |
| "epoch": 1.75, | |
| "grad_norm": 0.21795640885829926, | |
| "learning_rate": 4.086815458947777e-05, | |
| "loss": 0.6583, | |
| "step": 3630 | |
| }, | |
| { | |
| "epoch": 1.76, | |
| "grad_norm": 0.22441154718399048, | |
| "learning_rate": 4.080312054707665e-05, | |
| "loss": 0.6772, | |
| "step": 3640 | |
| }, | |
| { | |
| "epoch": 1.76, | |
| "grad_norm": 0.20651431381702423, | |
| "learning_rate": 4.073790790380232e-05, | |
| "loss": 0.6793, | |
| "step": 3650 | |
| }, | |
| { | |
| "epoch": 1.77, | |
| "grad_norm": 0.21724598109722137, | |
| "learning_rate": 4.0672517396663354e-05, | |
| "loss": 0.6742, | |
| "step": 3660 | |
| }, | |
| { | |
| "epoch": 1.77, | |
| "grad_norm": 0.27277424931526184, | |
| "learning_rate": 4.0606949764678434e-05, | |
| "loss": 0.6495, | |
| "step": 3670 | |
| }, | |
| { | |
| "epoch": 1.78, | |
| "grad_norm": 0.21489045023918152, | |
| "learning_rate": 4.054120574886808e-05, | |
| "loss": 0.6516, | |
| "step": 3680 | |
| }, | |
| { | |
| "epoch": 1.78, | |
| "grad_norm": 0.19597062468528748, | |
| "learning_rate": 4.0475286092246215e-05, | |
| "loss": 0.6498, | |
| "step": 3690 | |
| }, | |
| { | |
| "epoch": 1.79, | |
| "grad_norm": 0.20840667188167572, | |
| "learning_rate": 4.0409191539811774e-05, | |
| "loss": 0.6394, | |
| "step": 3700 | |
| }, | |
| { | |
| "epoch": 1.79, | |
| "grad_norm": 0.23823177814483643, | |
| "learning_rate": 4.0342922838540334e-05, | |
| "loss": 0.6798, | |
| "step": 3710 | |
| }, | |
| { | |
| "epoch": 1.8, | |
| "grad_norm": 0.20505908131599426, | |
| "learning_rate": 4.0276480737375606e-05, | |
| "loss": 0.6607, | |
| "step": 3720 | |
| }, | |
| { | |
| "epoch": 1.8, | |
| "grad_norm": 0.28554779291152954, | |
| "learning_rate": 4.0209865987221014e-05, | |
| "loss": 0.6467, | |
| "step": 3730 | |
| }, | |
| { | |
| "epoch": 1.81, | |
| "grad_norm": 0.24168336391448975, | |
| "learning_rate": 4.014307934093119e-05, | |
| "loss": 0.6625, | |
| "step": 3740 | |
| }, | |
| { | |
| "epoch": 1.81, | |
| "grad_norm": 0.21519850194454193, | |
| "learning_rate": 4.007612155330348e-05, | |
| "loss": 0.6431, | |
| "step": 3750 | |
| }, | |
| { | |
| "epoch": 1.82, | |
| "grad_norm": 0.1954280287027359, | |
| "learning_rate": 4.000899338106939e-05, | |
| "loss": 0.6669, | |
| "step": 3760 | |
| }, | |
| { | |
| "epoch": 1.82, | |
| "grad_norm": 0.22829805314540863, | |
| "learning_rate": 3.9941695582886065e-05, | |
| "loss": 0.6507, | |
| "step": 3770 | |
| }, | |
| { | |
| "epoch": 1.83, | |
| "grad_norm": 0.30064237117767334, | |
| "learning_rate": 3.9874228919327685e-05, | |
| "loss": 0.664, | |
| "step": 3780 | |
| }, | |
| { | |
| "epoch": 1.83, | |
| "grad_norm": 0.21391743421554565, | |
| "learning_rate": 3.980659415287689e-05, | |
| "loss": 0.6577, | |
| "step": 3790 | |
| }, | |
| { | |
| "epoch": 1.84, | |
| "grad_norm": 0.237900048494339, | |
| "learning_rate": 3.9738792047916143e-05, | |
| "loss": 0.6678, | |
| "step": 3800 | |
| }, | |
| { | |
| "epoch": 1.84, | |
| "grad_norm": 0.2035730481147766, | |
| "learning_rate": 3.967082337071911e-05, | |
| "loss": 0.6822, | |
| "step": 3810 | |
| }, | |
| { | |
| "epoch": 1.85, | |
| "grad_norm": 0.25883620977401733, | |
| "learning_rate": 3.960268888944202e-05, | |
| "loss": 0.6657, | |
| "step": 3820 | |
| }, | |
| { | |
| "epoch": 1.85, | |
| "grad_norm": 0.21861082315444946, | |
| "learning_rate": 3.9534389374114905e-05, | |
| "loss": 0.6608, | |
| "step": 3830 | |
| }, | |
| { | |
| "epoch": 1.86, | |
| "grad_norm": 0.2185363620519638, | |
| "learning_rate": 3.946592559663299e-05, | |
| "loss": 0.6763, | |
| "step": 3840 | |
| }, | |
| { | |
| "epoch": 1.86, | |
| "grad_norm": 0.2056983858346939, | |
| "learning_rate": 3.9397298330747905e-05, | |
| "loss": 0.6703, | |
| "step": 3850 | |
| }, | |
| { | |
| "epoch": 1.87, | |
| "grad_norm": 0.19903923571109772, | |
| "learning_rate": 3.932850835205899e-05, | |
| "loss": 0.6887, | |
| "step": 3860 | |
| }, | |
| { | |
| "epoch": 1.87, | |
| "grad_norm": 0.19114772975444794, | |
| "learning_rate": 3.9259556438004476e-05, | |
| "loss": 0.6563, | |
| "step": 3870 | |
| }, | |
| { | |
| "epoch": 1.88, | |
| "grad_norm": 0.20320531725883484, | |
| "learning_rate": 3.9190443367852736e-05, | |
| "loss": 0.6471, | |
| "step": 3880 | |
| }, | |
| { | |
| "epoch": 1.88, | |
| "grad_norm": 0.23587201535701752, | |
| "learning_rate": 3.912116992269348e-05, | |
| "loss": 0.6705, | |
| "step": 3890 | |
| }, | |
| { | |
| "epoch": 1.88, | |
| "grad_norm": 0.19107288122177124, | |
| "learning_rate": 3.9051736885428886e-05, | |
| "loss": 0.6696, | |
| "step": 3900 | |
| }, | |
| { | |
| "epoch": 1.89, | |
| "grad_norm": 0.20316600799560547, | |
| "learning_rate": 3.898214504076482e-05, | |
| "loss": 0.6724, | |
| "step": 3910 | |
| }, | |
| { | |
| "epoch": 1.89, | |
| "grad_norm": 0.2201535403728485, | |
| "learning_rate": 3.89123951752019e-05, | |
| "loss": 0.658, | |
| "step": 3920 | |
| }, | |
| { | |
| "epoch": 1.9, | |
| "grad_norm": 0.20604528486728668, | |
| "learning_rate": 3.884248807702665e-05, | |
| "loss": 0.6746, | |
| "step": 3930 | |
| }, | |
| { | |
| "epoch": 1.9, | |
| "grad_norm": 0.24204471707344055, | |
| "learning_rate": 3.8772424536302564e-05, | |
| "loss": 0.6737, | |
| "step": 3940 | |
| }, | |
| { | |
| "epoch": 1.91, | |
| "grad_norm": 0.20363375544548035, | |
| "learning_rate": 3.870220534486121e-05, | |
| "loss": 0.669, | |
| "step": 3950 | |
| }, | |
| { | |
| "epoch": 1.91, | |
| "grad_norm": 0.22277967631816864, | |
| "learning_rate": 3.8631831296293246e-05, | |
| "loss": 0.6676, | |
| "step": 3960 | |
| }, | |
| { | |
| "epoch": 1.92, | |
| "grad_norm": 0.18391238152980804, | |
| "learning_rate": 3.856130318593947e-05, | |
| "loss": 0.6651, | |
| "step": 3970 | |
| }, | |
| { | |
| "epoch": 1.92, | |
| "grad_norm": 0.185078427195549, | |
| "learning_rate": 3.849062181088183e-05, | |
| "loss": 0.6432, | |
| "step": 3980 | |
| }, | |
| { | |
| "epoch": 1.93, | |
| "grad_norm": 0.2277977466583252, | |
| "learning_rate": 3.841978796993442e-05, | |
| "loss": 0.6518, | |
| "step": 3990 | |
| }, | |
| { | |
| "epoch": 1.93, | |
| "grad_norm": 0.22232377529144287, | |
| "learning_rate": 3.834880246363443e-05, | |
| "loss": 0.6743, | |
| "step": 4000 | |
| }, | |
| { | |
| "epoch": 1.93, | |
| "eval_loss": 0.6847629547119141, | |
| "eval_runtime": 42.7658, | |
| "eval_samples_per_second": 46.766, | |
| "eval_steps_per_second": 0.374, | |
| "step": 4000 | |
| }, | |
| { | |
| "epoch": 1.94, | |
| "grad_norm": 0.20745177567005157, | |
| "learning_rate": 3.8277666094233115e-05, | |
| "loss": 0.6787, | |
| "step": 4010 | |
| }, | |
| { | |
| "epoch": 1.94, | |
| "grad_norm": 0.21468785405158997, | |
| "learning_rate": 3.820637966568675e-05, | |
| "loss": 0.6603, | |
| "step": 4020 | |
| }, | |
| { | |
| "epoch": 1.95, | |
| "grad_norm": 0.18202567100524902, | |
| "learning_rate": 3.81349439836475e-05, | |
| "loss": 0.6487, | |
| "step": 4030 | |
| }, | |
| { | |
| "epoch": 1.95, | |
| "grad_norm": 0.21424496173858643, | |
| "learning_rate": 3.806335985545434e-05, | |
| "loss": 0.6674, | |
| "step": 4040 | |
| }, | |
| { | |
| "epoch": 1.96, | |
| "grad_norm": 0.19061969220638275, | |
| "learning_rate": 3.7991628090123933e-05, | |
| "loss": 0.651, | |
| "step": 4050 | |
| }, | |
| { | |
| "epoch": 1.96, | |
| "grad_norm": 0.20192697644233704, | |
| "learning_rate": 3.7919749498341477e-05, | |
| "loss": 0.661, | |
| "step": 4060 | |
| }, | |
| { | |
| "epoch": 1.97, | |
| "grad_norm": 0.1942390650510788, | |
| "learning_rate": 3.784772489245155e-05, | |
| "loss": 0.6688, | |
| "step": 4070 | |
| }, | |
| { | |
| "epoch": 1.97, | |
| "grad_norm": 0.19686420261859894, | |
| "learning_rate": 3.777555508644893e-05, | |
| "loss": 0.6492, | |
| "step": 4080 | |
| }, | |
| { | |
| "epoch": 1.98, | |
| "grad_norm": 0.2590291202068329, | |
| "learning_rate": 3.770324089596937e-05, | |
| "loss": 0.6597, | |
| "step": 4090 | |
| }, | |
| { | |
| "epoch": 1.98, | |
| "grad_norm": 0.208131343126297, | |
| "learning_rate": 3.763078313828043e-05, | |
| "loss": 0.6695, | |
| "step": 4100 | |
| }, | |
| { | |
| "epoch": 1.99, | |
| "grad_norm": 0.215104341506958, | |
| "learning_rate": 3.755818263227219e-05, | |
| "loss": 0.6521, | |
| "step": 4110 | |
| }, | |
| { | |
| "epoch": 1.99, | |
| "grad_norm": 0.22334003448486328, | |
| "learning_rate": 3.748544019844803e-05, | |
| "loss": 0.6775, | |
| "step": 4120 | |
| }, | |
| { | |
| "epoch": 2.0, | |
| "grad_norm": 0.2614120841026306, | |
| "learning_rate": 3.741255665891534e-05, | |
| "loss": 0.6643, | |
| "step": 4130 | |
| }, | |
| { | |
| "epoch": 2.0, | |
| "grad_norm": 0.22348462045192719, | |
| "learning_rate": 3.733953283737625e-05, | |
| "loss": 0.6598, | |
| "step": 4140 | |
| }, | |
| { | |
| "epoch": 2.01, | |
| "grad_norm": 0.21082797646522522, | |
| "learning_rate": 3.726636955911825e-05, | |
| "loss": 0.6697, | |
| "step": 4150 | |
| }, | |
| { | |
| "epoch": 2.01, | |
| "grad_norm": 0.20834925770759583, | |
| "learning_rate": 3.719306765100499e-05, | |
| "loss": 0.6562, | |
| "step": 4160 | |
| }, | |
| { | |
| "epoch": 2.02, | |
| "grad_norm": 0.223398357629776, | |
| "learning_rate": 3.711962794146682e-05, | |
| "loss": 0.6403, | |
| "step": 4170 | |
| }, | |
| { | |
| "epoch": 2.02, | |
| "grad_norm": 0.1990179866552353, | |
| "learning_rate": 3.704605126049147e-05, | |
| "loss": 0.6662, | |
| "step": 4180 | |
| }, | |
| { | |
| "epoch": 2.03, | |
| "grad_norm": 0.20885860919952393, | |
| "learning_rate": 3.6972338439614676e-05, | |
| "loss": 0.6669, | |
| "step": 4190 | |
| }, | |
| { | |
| "epoch": 2.03, | |
| "grad_norm": 0.21816882491111755, | |
| "learning_rate": 3.6898490311910774e-05, | |
| "loss": 0.6378, | |
| "step": 4200 | |
| }, | |
| { | |
| "epoch": 2.03, | |
| "grad_norm": 0.21286895871162415, | |
| "learning_rate": 3.6824507711983294e-05, | |
| "loss": 0.633, | |
| "step": 4210 | |
| }, | |
| { | |
| "epoch": 2.04, | |
| "grad_norm": 0.20113909244537354, | |
| "learning_rate": 3.6750391475955506e-05, | |
| "loss": 0.6561, | |
| "step": 4220 | |
| }, | |
| { | |
| "epoch": 2.04, | |
| "grad_norm": 0.1805492788553238, | |
| "learning_rate": 3.6676142441461e-05, | |
| "loss": 0.6478, | |
| "step": 4230 | |
| }, | |
| { | |
| "epoch": 2.05, | |
| "grad_norm": 0.198676198720932, | |
| "learning_rate": 3.66017614476342e-05, | |
| "loss": 0.6477, | |
| "step": 4240 | |
| }, | |
| { | |
| "epoch": 2.05, | |
| "grad_norm": 0.18248610198497772, | |
| "learning_rate": 3.6527249335100856e-05, | |
| "loss": 0.6476, | |
| "step": 4250 | |
| }, | |
| { | |
| "epoch": 2.06, | |
| "grad_norm": 0.20229819416999817, | |
| "learning_rate": 3.645260694596862e-05, | |
| "loss": 0.6675, | |
| "step": 4260 | |
| }, | |
| { | |
| "epoch": 2.06, | |
| "grad_norm": 0.24611905217170715, | |
| "learning_rate": 3.637783512381745e-05, | |
| "loss": 0.6546, | |
| "step": 4270 | |
| }, | |
| { | |
| "epoch": 2.07, | |
| "grad_norm": 0.1821327954530716, | |
| "learning_rate": 3.6302934713690114e-05, | |
| "loss": 0.6482, | |
| "step": 4280 | |
| }, | |
| { | |
| "epoch": 2.07, | |
| "grad_norm": 0.19002540409564972, | |
| "learning_rate": 3.622790656208263e-05, | |
| "loss": 0.6468, | |
| "step": 4290 | |
| }, | |
| { | |
| "epoch": 2.08, | |
| "grad_norm": 0.20899777114391327, | |
| "learning_rate": 3.615275151693471e-05, | |
| "loss": 0.6649, | |
| "step": 4300 | |
| }, | |
| { | |
| "epoch": 2.08, | |
| "grad_norm": 0.1918490082025528, | |
| "learning_rate": 3.607747042762016e-05, | |
| "loss": 0.6554, | |
| "step": 4310 | |
| }, | |
| { | |
| "epoch": 2.09, | |
| "grad_norm": 0.2790839970111847, | |
| "learning_rate": 3.600206414493728e-05, | |
| "loss": 0.6607, | |
| "step": 4320 | |
| }, | |
| { | |
| "epoch": 2.09, | |
| "grad_norm": 0.21416689455509186, | |
| "learning_rate": 3.592653352109929e-05, | |
| "loss": 0.6537, | |
| "step": 4330 | |
| }, | |
| { | |
| "epoch": 2.1, | |
| "grad_norm": 0.19238290190696716, | |
| "learning_rate": 3.5850879409724624e-05, | |
| "loss": 0.6566, | |
| "step": 4340 | |
| }, | |
| { | |
| "epoch": 2.1, | |
| "grad_norm": 0.24673143029212952, | |
| "learning_rate": 3.577510266582737e-05, | |
| "loss": 0.6599, | |
| "step": 4350 | |
| }, | |
| { | |
| "epoch": 2.11, | |
| "grad_norm": 0.1863541454076767, | |
| "learning_rate": 3.569920414580754e-05, | |
| "loss": 0.6593, | |
| "step": 4360 | |
| }, | |
| { | |
| "epoch": 2.11, | |
| "grad_norm": 0.2196236401796341, | |
| "learning_rate": 3.56231847074414e-05, | |
| "loss": 0.6483, | |
| "step": 4370 | |
| }, | |
| { | |
| "epoch": 2.12, | |
| "grad_norm": 0.18300732970237732, | |
| "learning_rate": 3.5547045209871806e-05, | |
| "loss": 0.6359, | |
| "step": 4380 | |
| }, | |
| { | |
| "epoch": 2.12, | |
| "grad_norm": 0.18719661235809326, | |
| "learning_rate": 3.5470786513598476e-05, | |
| "loss": 0.6707, | |
| "step": 4390 | |
| }, | |
| { | |
| "epoch": 2.13, | |
| "grad_norm": 0.1748167872428894, | |
| "learning_rate": 3.539440948046827e-05, | |
| "loss": 0.6516, | |
| "step": 4400 | |
| }, | |
| { | |
| "epoch": 2.13, | |
| "grad_norm": 0.21993610262870789, | |
| "learning_rate": 3.531791497366543e-05, | |
| "loss": 0.6539, | |
| "step": 4410 | |
| }, | |
| { | |
| "epoch": 2.14, | |
| "grad_norm": 0.20704199373722076, | |
| "learning_rate": 3.524130385770186e-05, | |
| "loss": 0.6734, | |
| "step": 4420 | |
| }, | |
| { | |
| "epoch": 2.14, | |
| "grad_norm": 0.23367135226726532, | |
| "learning_rate": 3.516457699840733e-05, | |
| "loss": 0.6855, | |
| "step": 4430 | |
| }, | |
| { | |
| "epoch": 2.15, | |
| "grad_norm": 0.1824340671300888, | |
| "learning_rate": 3.50877352629197e-05, | |
| "loss": 0.6575, | |
| "step": 4440 | |
| }, | |
| { | |
| "epoch": 2.15, | |
| "grad_norm": 0.22375932335853577, | |
| "learning_rate": 3.50107795196751e-05, | |
| "loss": 0.6487, | |
| "step": 4450 | |
| }, | |
| { | |
| "epoch": 2.16, | |
| "grad_norm": 0.23603610694408417, | |
| "learning_rate": 3.4933710638398156e-05, | |
| "loss": 0.6505, | |
| "step": 4460 | |
| }, | |
| { | |
| "epoch": 2.16, | |
| "grad_norm": 0.22663302719593048, | |
| "learning_rate": 3.485652949009212e-05, | |
| "loss": 0.6569, | |
| "step": 4470 | |
| }, | |
| { | |
| "epoch": 2.17, | |
| "grad_norm": 0.21095499396324158, | |
| "learning_rate": 3.4779236947029055e-05, | |
| "loss": 0.6657, | |
| "step": 4480 | |
| }, | |
| { | |
| "epoch": 2.17, | |
| "grad_norm": 0.2422657608985901, | |
| "learning_rate": 3.470183388273995e-05, | |
| "loss": 0.645, | |
| "step": 4490 | |
| }, | |
| { | |
| "epoch": 2.17, | |
| "grad_norm": 0.19883093237876892, | |
| "learning_rate": 3.462432117200489e-05, | |
| "loss": 0.6647, | |
| "step": 4500 | |
| }, | |
| { | |
| "epoch": 2.17, | |
| "eval_loss": 0.6818509697914124, | |
| "eval_runtime": 42.9789, | |
| "eval_samples_per_second": 46.534, | |
| "eval_steps_per_second": 0.372, | |
| "step": 4500 | |
| }, | |
| { | |
| "epoch": 2.18, | |
| "grad_norm": 0.23546861112117767, | |
| "learning_rate": 3.454669969084312e-05, | |
| "loss": 0.6594, | |
| "step": 4510 | |
| }, | |
| { | |
| "epoch": 2.18, | |
| "grad_norm": 0.20284776389598846, | |
| "learning_rate": 3.446897031650316e-05, | |
| "loss": 0.65, | |
| "step": 4520 | |
| }, | |
| { | |
| "epoch": 2.19, | |
| "grad_norm": 0.19946908950805664, | |
| "learning_rate": 3.4391133927452925e-05, | |
| "loss": 0.6573, | |
| "step": 4530 | |
| }, | |
| { | |
| "epoch": 2.19, | |
| "grad_norm": 0.2393037974834442, | |
| "learning_rate": 3.431319140336975e-05, | |
| "loss": 0.6502, | |
| "step": 4540 | |
| }, | |
| { | |
| "epoch": 2.2, | |
| "grad_norm": 0.2206243872642517, | |
| "learning_rate": 3.423514362513048e-05, | |
| "loss": 0.6704, | |
| "step": 4550 | |
| }, | |
| { | |
| "epoch": 2.2, | |
| "grad_norm": 0.20536164939403534, | |
| "learning_rate": 3.415699147480149e-05, | |
| "loss": 0.6629, | |
| "step": 4560 | |
| }, | |
| { | |
| "epoch": 2.21, | |
| "grad_norm": 0.22283408045768738, | |
| "learning_rate": 3.407873583562873e-05, | |
| "loss": 0.6789, | |
| "step": 4570 | |
| }, | |
| { | |
| "epoch": 2.21, | |
| "grad_norm": 0.21526776254177094, | |
| "learning_rate": 3.4000377592027754e-05, | |
| "loss": 0.6608, | |
| "step": 4580 | |
| }, | |
| { | |
| "epoch": 2.22, | |
| "grad_norm": 0.2566728889942169, | |
| "learning_rate": 3.3921917629573695e-05, | |
| "loss": 0.6624, | |
| "step": 4590 | |
| }, | |
| { | |
| "epoch": 2.22, | |
| "grad_norm": 0.21391907334327698, | |
| "learning_rate": 3.384335683499129e-05, | |
| "loss": 0.6536, | |
| "step": 4600 | |
| }, | |
| { | |
| "epoch": 2.23, | |
| "grad_norm": 0.21451455354690552, | |
| "learning_rate": 3.376469609614484e-05, | |
| "loss": 0.648, | |
| "step": 4610 | |
| }, | |
| { | |
| "epoch": 2.23, | |
| "grad_norm": 0.19541354477405548, | |
| "learning_rate": 3.368593630202818e-05, | |
| "loss": 0.657, | |
| "step": 4620 | |
| }, | |
| { | |
| "epoch": 2.24, | |
| "grad_norm": 0.2338065356016159, | |
| "learning_rate": 3.360707834275459e-05, | |
| "loss": 0.6686, | |
| "step": 4630 | |
| }, | |
| { | |
| "epoch": 2.24, | |
| "grad_norm": 0.1928481012582779, | |
| "learning_rate": 3.3528123109546844e-05, | |
| "loss": 0.6496, | |
| "step": 4640 | |
| }, | |
| { | |
| "epoch": 2.25, | |
| "grad_norm": 0.23110058903694153, | |
| "learning_rate": 3.344907149472702e-05, | |
| "loss": 0.6556, | |
| "step": 4650 | |
| }, | |
| { | |
| "epoch": 2.25, | |
| "grad_norm": 0.22316649556159973, | |
| "learning_rate": 3.336992439170649e-05, | |
| "loss": 0.663, | |
| "step": 4660 | |
| }, | |
| { | |
| "epoch": 2.26, | |
| "grad_norm": 0.1999579668045044, | |
| "learning_rate": 3.3290682694975775e-05, | |
| "loss": 0.6436, | |
| "step": 4670 | |
| }, | |
| { | |
| "epoch": 2.26, | |
| "grad_norm": 0.276755154132843, | |
| "learning_rate": 3.321134730009446e-05, | |
| "loss": 0.6355, | |
| "step": 4680 | |
| }, | |
| { | |
| "epoch": 2.27, | |
| "grad_norm": 0.24972842633724213, | |
| "learning_rate": 3.313191910368111e-05, | |
| "loss": 0.6454, | |
| "step": 4690 | |
| }, | |
| { | |
| "epoch": 2.27, | |
| "grad_norm": 0.2169172465801239, | |
| "learning_rate": 3.3052399003403046e-05, | |
| "loss": 0.6419, | |
| "step": 4700 | |
| }, | |
| { | |
| "epoch": 2.28, | |
| "grad_norm": 0.20602068305015564, | |
| "learning_rate": 3.297278789796629e-05, | |
| "loss": 0.6459, | |
| "step": 4710 | |
| }, | |
| { | |
| "epoch": 2.28, | |
| "grad_norm": 0.25669389963150024, | |
| "learning_rate": 3.289308668710532e-05, | |
| "loss": 0.6675, | |
| "step": 4720 | |
| }, | |
| { | |
| "epoch": 2.29, | |
| "grad_norm": 0.21008487045764923, | |
| "learning_rate": 3.281329627157302e-05, | |
| "loss": 0.642, | |
| "step": 4730 | |
| }, | |
| { | |
| "epoch": 2.29, | |
| "grad_norm": 0.21287119388580322, | |
| "learning_rate": 3.2733417553130384e-05, | |
| "loss": 0.6348, | |
| "step": 4740 | |
| }, | |
| { | |
| "epoch": 2.3, | |
| "grad_norm": 0.21862877905368805, | |
| "learning_rate": 3.2653451434536394e-05, | |
| "loss": 0.6579, | |
| "step": 4750 | |
| }, | |
| { | |
| "epoch": 2.3, | |
| "grad_norm": 0.19740484654903412, | |
| "learning_rate": 3.2573398819537767e-05, | |
| "loss": 0.6428, | |
| "step": 4760 | |
| }, | |
| { | |
| "epoch": 2.31, | |
| "grad_norm": 0.2695826292037964, | |
| "learning_rate": 3.2493260612858803e-05, | |
| "loss": 0.658, | |
| "step": 4770 | |
| }, | |
| { | |
| "epoch": 2.31, | |
| "grad_norm": 0.19435593485832214, | |
| "learning_rate": 3.2413037720191096e-05, | |
| "loss": 0.6615, | |
| "step": 4780 | |
| }, | |
| { | |
| "epoch": 2.32, | |
| "grad_norm": 0.21996192634105682, | |
| "learning_rate": 3.233273104818337e-05, | |
| "loss": 0.6588, | |
| "step": 4790 | |
| }, | |
| { | |
| "epoch": 2.32, | |
| "grad_norm": 0.26496437191963196, | |
| "learning_rate": 3.225234150443114e-05, | |
| "loss": 0.6548, | |
| "step": 4800 | |
| }, | |
| { | |
| "epoch": 2.32, | |
| "grad_norm": 0.2110251486301422, | |
| "learning_rate": 3.217186999746654e-05, | |
| "loss": 0.6618, | |
| "step": 4810 | |
| }, | |
| { | |
| "epoch": 2.33, | |
| "grad_norm": 0.19939462840557098, | |
| "learning_rate": 3.209131743674803e-05, | |
| "loss": 0.6531, | |
| "step": 4820 | |
| }, | |
| { | |
| "epoch": 2.33, | |
| "grad_norm": 0.19813968241214752, | |
| "learning_rate": 3.201068473265007e-05, | |
| "loss": 0.6484, | |
| "step": 4830 | |
| }, | |
| { | |
| "epoch": 2.34, | |
| "grad_norm": 0.2121964991092682, | |
| "learning_rate": 3.192997279645291e-05, | |
| "loss": 0.6556, | |
| "step": 4840 | |
| }, | |
| { | |
| "epoch": 2.34, | |
| "grad_norm": 0.19998233020305634, | |
| "learning_rate": 3.1849182540332214e-05, | |
| "loss": 0.6554, | |
| "step": 4850 | |
| }, | |
| { | |
| "epoch": 2.35, | |
| "grad_norm": 0.23478324711322784, | |
| "learning_rate": 3.176831487734882e-05, | |
| "loss": 0.637, | |
| "step": 4860 | |
| }, | |
| { | |
| "epoch": 2.35, | |
| "grad_norm": 0.19482719898223877, | |
| "learning_rate": 3.168737072143838e-05, | |
| "loss": 0.6464, | |
| "step": 4870 | |
| }, | |
| { | |
| "epoch": 2.36, | |
| "grad_norm": 0.21184729039669037, | |
| "learning_rate": 3.160635098740103e-05, | |
| "loss": 0.6584, | |
| "step": 4880 | |
| }, | |
| { | |
| "epoch": 2.36, | |
| "grad_norm": 0.27235257625579834, | |
| "learning_rate": 3.152525659089106e-05, | |
| "loss": 0.6597, | |
| "step": 4890 | |
| }, | |
| { | |
| "epoch": 2.37, | |
| "grad_norm": 0.22343479096889496, | |
| "learning_rate": 3.1444088448406584e-05, | |
| "loss": 0.6673, | |
| "step": 4900 | |
| }, | |
| { | |
| "epoch": 2.37, | |
| "grad_norm": 0.19164462387561798, | |
| "learning_rate": 3.136284747727916e-05, | |
| "loss": 0.6617, | |
| "step": 4910 | |
| }, | |
| { | |
| "epoch": 2.38, | |
| "grad_norm": 0.21662816405296326, | |
| "learning_rate": 3.128153459566341e-05, | |
| "loss": 0.6505, | |
| "step": 4920 | |
| }, | |
| { | |
| "epoch": 2.38, | |
| "grad_norm": 0.23168237507343292, | |
| "learning_rate": 3.1200150722526697e-05, | |
| "loss": 0.6479, | |
| "step": 4930 | |
| }, | |
| { | |
| "epoch": 2.39, | |
| "grad_norm": 0.19140580296516418, | |
| "learning_rate": 3.111869677763865e-05, | |
| "loss": 0.6413, | |
| "step": 4940 | |
| }, | |
| { | |
| "epoch": 2.39, | |
| "grad_norm": 0.25583407282829285, | |
| "learning_rate": 3.1037173681560874e-05, | |
| "loss": 0.6443, | |
| "step": 4950 | |
| }, | |
| { | |
| "epoch": 2.4, | |
| "grad_norm": 0.1979406476020813, | |
| "learning_rate": 3.095558235563647e-05, | |
| "loss": 0.6668, | |
| "step": 4960 | |
| }, | |
| { | |
| "epoch": 2.4, | |
| "grad_norm": 0.21006809175014496, | |
| "learning_rate": 3.0873923721979645e-05, | |
| "loss": 0.6505, | |
| "step": 4970 | |
| }, | |
| { | |
| "epoch": 2.41, | |
| "grad_norm": 0.2352473884820938, | |
| "learning_rate": 3.07921987034653e-05, | |
| "loss": 0.6552, | |
| "step": 4980 | |
| }, | |
| { | |
| "epoch": 2.41, | |
| "grad_norm": 0.22581614553928375, | |
| "learning_rate": 3.07104082237186e-05, | |
| "loss": 0.6489, | |
| "step": 4990 | |
| }, | |
| { | |
| "epoch": 2.42, | |
| "grad_norm": 0.21772272884845734, | |
| "learning_rate": 3.06285532071045e-05, | |
| "loss": 0.6721, | |
| "step": 5000 | |
| }, | |
| { | |
| "epoch": 2.42, | |
| "eval_loss": 0.6797133684158325, | |
| "eval_runtime": 43.0653, | |
| "eval_samples_per_second": 46.441, | |
| "eval_steps_per_second": 0.372, | |
| "step": 5000 | |
| }, | |
| { | |
| "epoch": 2.42, | |
| "grad_norm": 0.20226365327835083, | |
| "learning_rate": 3.054663457871736e-05, | |
| "loss": 0.6723, | |
| "step": 5010 | |
| }, | |
| { | |
| "epoch": 2.43, | |
| "grad_norm": 0.2123214453458786, | |
| "learning_rate": 3.046465326437043e-05, | |
| "loss": 0.6479, | |
| "step": 5020 | |
| }, | |
| { | |
| "epoch": 2.43, | |
| "grad_norm": 0.21145904064178467, | |
| "learning_rate": 3.0382610190585435e-05, | |
| "loss": 0.6508, | |
| "step": 5030 | |
| }, | |
| { | |
| "epoch": 2.44, | |
| "grad_norm": 0.2079765647649765, | |
| "learning_rate": 3.030050628458206e-05, | |
| "loss": 0.6494, | |
| "step": 5040 | |
| }, | |
| { | |
| "epoch": 2.44, | |
| "grad_norm": 0.24468085169792175, | |
| "learning_rate": 3.0218342474267513e-05, | |
| "loss": 0.6628, | |
| "step": 5050 | |
| }, | |
| { | |
| "epoch": 2.45, | |
| "grad_norm": 0.21137215197086334, | |
| "learning_rate": 3.0136119688225996e-05, | |
| "loss": 0.6463, | |
| "step": 5060 | |
| }, | |
| { | |
| "epoch": 2.45, | |
| "grad_norm": 0.20148161053657532, | |
| "learning_rate": 3.0053838855708243e-05, | |
| "loss": 0.6635, | |
| "step": 5070 | |
| }, | |
| { | |
| "epoch": 2.46, | |
| "grad_norm": 0.21683721244335175, | |
| "learning_rate": 2.9971500906621027e-05, | |
| "loss": 0.6585, | |
| "step": 5080 | |
| }, | |
| { | |
| "epoch": 2.46, | |
| "grad_norm": 0.1892833113670349, | |
| "learning_rate": 2.988910677151659e-05, | |
| "loss": 0.6621, | |
| "step": 5090 | |
| }, | |
| { | |
| "epoch": 2.46, | |
| "grad_norm": 0.3029578924179077, | |
| "learning_rate": 2.980665738158221e-05, | |
| "loss": 0.6742, | |
| "step": 5100 | |
| }, | |
| { | |
| "epoch": 2.47, | |
| "grad_norm": 0.2016330510377884, | |
| "learning_rate": 2.972415366862959e-05, | |
| "loss": 0.6686, | |
| "step": 5110 | |
| }, | |
| { | |
| "epoch": 2.47, | |
| "grad_norm": 0.20674502849578857, | |
| "learning_rate": 2.964159656508441e-05, | |
| "loss": 0.6577, | |
| "step": 5120 | |
| }, | |
| { | |
| "epoch": 2.48, | |
| "grad_norm": 0.20945307612419128, | |
| "learning_rate": 2.9558987003975736e-05, | |
| "loss": 0.6673, | |
| "step": 5130 | |
| }, | |
| { | |
| "epoch": 2.48, | |
| "grad_norm": 0.2278815060853958, | |
| "learning_rate": 2.9476325918925485e-05, | |
| "loss": 0.6585, | |
| "step": 5140 | |
| }, | |
| { | |
| "epoch": 2.49, | |
| "grad_norm": 0.21968629956245422, | |
| "learning_rate": 2.9393614244137875e-05, | |
| "loss": 0.6547, | |
| "step": 5150 | |
| }, | |
| { | |
| "epoch": 2.49, | |
| "grad_norm": 0.22052909433841705, | |
| "learning_rate": 2.9310852914388875e-05, | |
| "loss": 0.6706, | |
| "step": 5160 | |
| }, | |
| { | |
| "epoch": 2.5, | |
| "grad_norm": 0.24963468313217163, | |
| "learning_rate": 2.9228042865015647e-05, | |
| "loss": 0.635, | |
| "step": 5170 | |
| }, | |
| { | |
| "epoch": 2.5, | |
| "grad_norm": 0.21189726889133453, | |
| "learning_rate": 2.914518503190595e-05, | |
| "loss": 0.6529, | |
| "step": 5180 | |
| }, | |
| { | |
| "epoch": 2.51, | |
| "grad_norm": 0.19712771475315094, | |
| "learning_rate": 2.9062280351487587e-05, | |
| "loss": 0.6589, | |
| "step": 5190 | |
| }, | |
| { | |
| "epoch": 2.51, | |
| "grad_norm": 0.23704534769058228, | |
| "learning_rate": 2.8979329760717788e-05, | |
| "loss": 0.6459, | |
| "step": 5200 | |
| }, | |
| { | |
| "epoch": 2.52, | |
| "grad_norm": 0.3048954904079437, | |
| "learning_rate": 2.8896334197072667e-05, | |
| "loss": 0.6468, | |
| "step": 5210 | |
| }, | |
| { | |
| "epoch": 2.52, | |
| "grad_norm": 0.30929285287857056, | |
| "learning_rate": 2.8813294598536606e-05, | |
| "loss": 0.651, | |
| "step": 5220 | |
| }, | |
| { | |
| "epoch": 2.53, | |
| "grad_norm": 0.2547236382961273, | |
| "learning_rate": 2.8730211903591636e-05, | |
| "loss": 0.6374, | |
| "step": 5230 | |
| }, | |
| { | |
| "epoch": 2.53, | |
| "grad_norm": 0.23137128353118896, | |
| "learning_rate": 2.8647087051206862e-05, | |
| "loss": 0.6486, | |
| "step": 5240 | |
| }, | |
| { | |
| "epoch": 2.54, | |
| "grad_norm": 0.21269945800304413, | |
| "learning_rate": 2.856392098082783e-05, | |
| "loss": 0.6751, | |
| "step": 5250 | |
| }, | |
| { | |
| "epoch": 2.54, | |
| "grad_norm": 0.19768312573432922, | |
| "learning_rate": 2.8480714632365906e-05, | |
| "loss": 0.6511, | |
| "step": 5260 | |
| }, | |
| { | |
| "epoch": 2.55, | |
| "grad_norm": 0.2293408066034317, | |
| "learning_rate": 2.83974689461877e-05, | |
| "loss": 0.6337, | |
| "step": 5270 | |
| }, | |
| { | |
| "epoch": 2.55, | |
| "grad_norm": 0.22362858057022095, | |
| "learning_rate": 2.8314184863104347e-05, | |
| "loss": 0.6449, | |
| "step": 5280 | |
| }, | |
| { | |
| "epoch": 2.56, | |
| "grad_norm": 0.22398819029331207, | |
| "learning_rate": 2.8230863324360977e-05, | |
| "loss": 0.6437, | |
| "step": 5290 | |
| }, | |
| { | |
| "epoch": 2.56, | |
| "grad_norm": 0.22831834852695465, | |
| "learning_rate": 2.8147505271626002e-05, | |
| "loss": 0.6493, | |
| "step": 5300 | |
| }, | |
| { | |
| "epoch": 2.57, | |
| "grad_norm": 0.2500370740890503, | |
| "learning_rate": 2.8064111646980524e-05, | |
| "loss": 0.6451, | |
| "step": 5310 | |
| }, | |
| { | |
| "epoch": 2.57, | |
| "grad_norm": 0.21840785443782806, | |
| "learning_rate": 2.7980683392907632e-05, | |
| "loss": 0.6558, | |
| "step": 5320 | |
| }, | |
| { | |
| "epoch": 2.58, | |
| "grad_norm": 0.21673984825611115, | |
| "learning_rate": 2.7897221452281813e-05, | |
| "loss": 0.6493, | |
| "step": 5330 | |
| }, | |
| { | |
| "epoch": 2.58, | |
| "grad_norm": 0.22077977657318115, | |
| "learning_rate": 2.7813726768358263e-05, | |
| "loss": 0.6443, | |
| "step": 5340 | |
| }, | |
| { | |
| "epoch": 2.59, | |
| "grad_norm": 0.22384098172187805, | |
| "learning_rate": 2.7730200284762215e-05, | |
| "loss": 0.6525, | |
| "step": 5350 | |
| }, | |
| { | |
| "epoch": 2.59, | |
| "grad_norm": 0.2188693881034851, | |
| "learning_rate": 2.7646642945478314e-05, | |
| "loss": 0.649, | |
| "step": 5360 | |
| }, | |
| { | |
| "epoch": 2.6, | |
| "grad_norm": 0.21913747489452362, | |
| "learning_rate": 2.7563055694839884e-05, | |
| "loss": 0.6543, | |
| "step": 5370 | |
| }, | |
| { | |
| "epoch": 2.6, | |
| "grad_norm": 0.20839491486549377, | |
| "learning_rate": 2.7479439477518344e-05, | |
| "loss": 0.6462, | |
| "step": 5380 | |
| }, | |
| { | |
| "epoch": 2.61, | |
| "grad_norm": 0.21764138340950012, | |
| "learning_rate": 2.7395795238512446e-05, | |
| "loss": 0.6527, | |
| "step": 5390 | |
| }, | |
| { | |
| "epoch": 2.61, | |
| "grad_norm": 0.21299828588962555, | |
| "learning_rate": 2.7312123923137667e-05, | |
| "loss": 0.638, | |
| "step": 5400 | |
| }, | |
| { | |
| "epoch": 2.61, | |
| "grad_norm": 0.20629416406154633, | |
| "learning_rate": 2.7228426477015447e-05, | |
| "loss": 0.6592, | |
| "step": 5410 | |
| }, | |
| { | |
| "epoch": 2.62, | |
| "grad_norm": 0.23245516419410706, | |
| "learning_rate": 2.714470384606258e-05, | |
| "loss": 0.6424, | |
| "step": 5420 | |
| }, | |
| { | |
| "epoch": 2.62, | |
| "grad_norm": 0.2076595425605774, | |
| "learning_rate": 2.706095697648048e-05, | |
| "loss": 0.6335, | |
| "step": 5430 | |
| }, | |
| { | |
| "epoch": 2.63, | |
| "grad_norm": 0.19132575392723083, | |
| "learning_rate": 2.6977186814744503e-05, | |
| "loss": 0.649, | |
| "step": 5440 | |
| }, | |
| { | |
| "epoch": 2.63, | |
| "grad_norm": 0.21425828337669373, | |
| "learning_rate": 2.6893394307593228e-05, | |
| "loss": 0.6485, | |
| "step": 5450 | |
| }, | |
| { | |
| "epoch": 2.64, | |
| "grad_norm": 0.24599085748195648, | |
| "learning_rate": 2.680958040201778e-05, | |
| "loss": 0.6719, | |
| "step": 5460 | |
| }, | |
| { | |
| "epoch": 2.64, | |
| "grad_norm": 0.2181704044342041, | |
| "learning_rate": 2.6725746045251126e-05, | |
| "loss": 0.6403, | |
| "step": 5470 | |
| }, | |
| { | |
| "epoch": 2.65, | |
| "grad_norm": 0.2156282663345337, | |
| "learning_rate": 2.6641892184757365e-05, | |
| "loss": 0.6414, | |
| "step": 5480 | |
| }, | |
| { | |
| "epoch": 2.65, | |
| "grad_norm": 0.210015669465065, | |
| "learning_rate": 2.655801976822102e-05, | |
| "loss": 0.6616, | |
| "step": 5490 | |
| }, | |
| { | |
| "epoch": 2.66, | |
| "grad_norm": 0.22476404905319214, | |
| "learning_rate": 2.6474129743536323e-05, | |
| "loss": 0.6642, | |
| "step": 5500 | |
| }, | |
| { | |
| "epoch": 2.66, | |
| "eval_loss": 0.6780422329902649, | |
| "eval_runtime": 42.8508, | |
| "eval_samples_per_second": 46.674, | |
| "eval_steps_per_second": 0.373, | |
| "step": 5500 | |
| }, | |
| { | |
| "epoch": 2.66, | |
| "grad_norm": 0.2253371626138687, | |
| "learning_rate": 2.6390223058796497e-05, | |
| "loss": 0.6704, | |
| "step": 5510 | |
| }, | |
| { | |
| "epoch": 2.67, | |
| "grad_norm": 0.2072548270225525, | |
| "learning_rate": 2.6306300662283073e-05, | |
| "loss": 0.6319, | |
| "step": 5520 | |
| }, | |
| { | |
| "epoch": 2.67, | |
| "grad_norm": 0.1958586722612381, | |
| "learning_rate": 2.6222363502455133e-05, | |
| "loss": 0.6462, | |
| "step": 5530 | |
| }, | |
| { | |
| "epoch": 2.68, | |
| "grad_norm": 0.19193071126937866, | |
| "learning_rate": 2.6138412527938617e-05, | |
| "loss": 0.6408, | |
| "step": 5540 | |
| }, | |
| { | |
| "epoch": 2.68, | |
| "grad_norm": 0.235664963722229, | |
| "learning_rate": 2.605444868751558e-05, | |
| "loss": 0.6585, | |
| "step": 5550 | |
| }, | |
| { | |
| "epoch": 2.69, | |
| "grad_norm": 0.27757498621940613, | |
| "learning_rate": 2.5970472930113515e-05, | |
| "loss": 0.6437, | |
| "step": 5560 | |
| }, | |
| { | |
| "epoch": 2.69, | |
| "grad_norm": 0.19725674390792847, | |
| "learning_rate": 2.588648620479455e-05, | |
| "loss": 0.6533, | |
| "step": 5570 | |
| }, | |
| { | |
| "epoch": 2.7, | |
| "grad_norm": 0.20764590799808502, | |
| "learning_rate": 2.5802489460744817e-05, | |
| "loss": 0.6473, | |
| "step": 5580 | |
| }, | |
| { | |
| "epoch": 2.7, | |
| "grad_norm": 0.2171824425458908, | |
| "learning_rate": 2.571848364726363e-05, | |
| "loss": 0.6344, | |
| "step": 5590 | |
| }, | |
| { | |
| "epoch": 2.71, | |
| "grad_norm": 0.20881757140159607, | |
| "learning_rate": 2.563446971375283e-05, | |
| "loss": 0.6554, | |
| "step": 5600 | |
| }, | |
| { | |
| "epoch": 2.71, | |
| "grad_norm": 0.2502697706222534, | |
| "learning_rate": 2.5550448609706035e-05, | |
| "loss": 0.6767, | |
| "step": 5610 | |
| }, | |
| { | |
| "epoch": 2.72, | |
| "grad_norm": 0.26100224256515503, | |
| "learning_rate": 2.546642128469787e-05, | |
| "loss": 0.6651, | |
| "step": 5620 | |
| }, | |
| { | |
| "epoch": 2.72, | |
| "grad_norm": 0.22215646505355835, | |
| "learning_rate": 2.5382388688373288e-05, | |
| "loss": 0.6371, | |
| "step": 5630 | |
| }, | |
| { | |
| "epoch": 2.73, | |
| "grad_norm": 0.2387877255678177, | |
| "learning_rate": 2.529835177043682e-05, | |
| "loss": 0.6422, | |
| "step": 5640 | |
| }, | |
| { | |
| "epoch": 2.73, | |
| "grad_norm": 0.23950129747390747, | |
| "learning_rate": 2.5214311480641823e-05, | |
| "loss": 0.6602, | |
| "step": 5650 | |
| }, | |
| { | |
| "epoch": 2.74, | |
| "grad_norm": 0.2006569504737854, | |
| "learning_rate": 2.513026876877978e-05, | |
| "loss": 0.6513, | |
| "step": 5660 | |
| }, | |
| { | |
| "epoch": 2.74, | |
| "grad_norm": 0.1996818333864212, | |
| "learning_rate": 2.5046224584669537e-05, | |
| "loss": 0.6509, | |
| "step": 5670 | |
| }, | |
| { | |
| "epoch": 2.75, | |
| "grad_norm": 0.21665816009044647, | |
| "learning_rate": 2.496217987814656e-05, | |
| "loss": 0.6698, | |
| "step": 5680 | |
| }, | |
| { | |
| "epoch": 2.75, | |
| "grad_norm": 0.24070510268211365, | |
| "learning_rate": 2.4878135599052265e-05, | |
| "loss": 0.6668, | |
| "step": 5690 | |
| }, | |
| { | |
| "epoch": 2.75, | |
| "grad_norm": 0.19807234406471252, | |
| "learning_rate": 2.47940926972232e-05, | |
| "loss": 0.6494, | |
| "step": 5700 | |
| }, | |
| { | |
| "epoch": 2.76, | |
| "grad_norm": 0.2191707193851471, | |
| "learning_rate": 2.4710052122480345e-05, | |
| "loss": 0.6356, | |
| "step": 5710 | |
| }, | |
| { | |
| "epoch": 2.76, | |
| "grad_norm": 0.19569790363311768, | |
| "learning_rate": 2.4626014824618415e-05, | |
| "loss": 0.6587, | |
| "step": 5720 | |
| }, | |
| { | |
| "epoch": 2.77, | |
| "grad_norm": 0.2184823751449585, | |
| "learning_rate": 2.4541981753395045e-05, | |
| "loss": 0.6644, | |
| "step": 5730 | |
| }, | |
| { | |
| "epoch": 2.77, | |
| "grad_norm": 0.22695264220237732, | |
| "learning_rate": 2.445795385852015e-05, | |
| "loss": 0.6327, | |
| "step": 5740 | |
| }, | |
| { | |
| "epoch": 2.78, | |
| "grad_norm": 0.20581942796707153, | |
| "learning_rate": 2.4373932089645117e-05, | |
| "loss": 0.6506, | |
| "step": 5750 | |
| }, | |
| { | |
| "epoch": 2.78, | |
| "grad_norm": 0.21353425085544586, | |
| "learning_rate": 2.428991739635208e-05, | |
| "loss": 0.6466, | |
| "step": 5760 | |
| }, | |
| { | |
| "epoch": 2.79, | |
| "grad_norm": 0.252079039812088, | |
| "learning_rate": 2.420591072814326e-05, | |
| "loss": 0.6559, | |
| "step": 5770 | |
| }, | |
| { | |
| "epoch": 2.79, | |
| "grad_norm": 0.19642478227615356, | |
| "learning_rate": 2.4121913034430123e-05, | |
| "loss": 0.6441, | |
| "step": 5780 | |
| }, | |
| { | |
| "epoch": 2.8, | |
| "grad_norm": 0.19868357479572296, | |
| "learning_rate": 2.4037925264522766e-05, | |
| "loss": 0.6424, | |
| "step": 5790 | |
| }, | |
| { | |
| "epoch": 2.8, | |
| "grad_norm": 0.20668402314186096, | |
| "learning_rate": 2.3953948367619085e-05, | |
| "loss": 0.6698, | |
| "step": 5800 | |
| }, | |
| { | |
| "epoch": 2.81, | |
| "grad_norm": 0.21798357367515564, | |
| "learning_rate": 2.38699832927941e-05, | |
| "loss": 0.6346, | |
| "step": 5810 | |
| }, | |
| { | |
| "epoch": 2.81, | |
| "grad_norm": 0.23406192660331726, | |
| "learning_rate": 2.3786030988989257e-05, | |
| "loss": 0.6535, | |
| "step": 5820 | |
| }, | |
| { | |
| "epoch": 2.82, | |
| "grad_norm": 0.20458374917507172, | |
| "learning_rate": 2.3702092405001627e-05, | |
| "loss": 0.6426, | |
| "step": 5830 | |
| }, | |
| { | |
| "epoch": 2.82, | |
| "grad_norm": 0.2875809073448181, | |
| "learning_rate": 2.3618168489473258e-05, | |
| "loss": 0.6344, | |
| "step": 5840 | |
| }, | |
| { | |
| "epoch": 2.83, | |
| "grad_norm": 0.21651776134967804, | |
| "learning_rate": 2.3534260190880396e-05, | |
| "loss": 0.6403, | |
| "step": 5850 | |
| }, | |
| { | |
| "epoch": 2.83, | |
| "grad_norm": 0.22318010032176971, | |
| "learning_rate": 2.3450368457522787e-05, | |
| "loss": 0.6662, | |
| "step": 5860 | |
| }, | |
| { | |
| "epoch": 2.84, | |
| "grad_norm": 0.20754455029964447, | |
| "learning_rate": 2.3366494237513e-05, | |
| "loss": 0.64, | |
| "step": 5870 | |
| }, | |
| { | |
| "epoch": 2.84, | |
| "grad_norm": 0.20914232730865479, | |
| "learning_rate": 2.3282638478765634e-05, | |
| "loss": 0.6374, | |
| "step": 5880 | |
| }, | |
| { | |
| "epoch": 2.85, | |
| "grad_norm": 0.22443871200084686, | |
| "learning_rate": 2.3198802128986673e-05, | |
| "loss": 0.6576, | |
| "step": 5890 | |
| }, | |
| { | |
| "epoch": 2.85, | |
| "grad_norm": 0.2213885337114334, | |
| "learning_rate": 2.3114986135662728e-05, | |
| "loss": 0.6434, | |
| "step": 5900 | |
| }, | |
| { | |
| "epoch": 2.86, | |
| "grad_norm": 0.2547857463359833, | |
| "learning_rate": 2.3031191446050348e-05, | |
| "loss": 0.6519, | |
| "step": 5910 | |
| }, | |
| { | |
| "epoch": 2.86, | |
| "grad_norm": 0.2463701069355011, | |
| "learning_rate": 2.2947419007165355e-05, | |
| "loss": 0.642, | |
| "step": 5920 | |
| }, | |
| { | |
| "epoch": 2.87, | |
| "grad_norm": 0.24427345395088196, | |
| "learning_rate": 2.286366976577205e-05, | |
| "loss": 0.6661, | |
| "step": 5930 | |
| }, | |
| { | |
| "epoch": 2.87, | |
| "grad_norm": 0.20179685950279236, | |
| "learning_rate": 2.2779944668372596e-05, | |
| "loss": 0.6619, | |
| "step": 5940 | |
| }, | |
| { | |
| "epoch": 2.88, | |
| "grad_norm": 0.2376372367143631, | |
| "learning_rate": 2.2696244661196285e-05, | |
| "loss": 0.6364, | |
| "step": 5950 | |
| }, | |
| { | |
| "epoch": 2.88, | |
| "grad_norm": 0.22518636286258698, | |
| "learning_rate": 2.2612570690188828e-05, | |
| "loss": 0.6619, | |
| "step": 5960 | |
| }, | |
| { | |
| "epoch": 2.89, | |
| "grad_norm": 0.21611256897449493, | |
| "learning_rate": 2.252892370100172e-05, | |
| "loss": 0.665, | |
| "step": 5970 | |
| }, | |
| { | |
| "epoch": 2.89, | |
| "grad_norm": 0.19709035754203796, | |
| "learning_rate": 2.2445304638981483e-05, | |
| "loss": 0.6461, | |
| "step": 5980 | |
| }, | |
| { | |
| "epoch": 2.9, | |
| "grad_norm": 0.21152737736701965, | |
| "learning_rate": 2.236171444915905e-05, | |
| "loss": 0.6564, | |
| "step": 5990 | |
| }, | |
| { | |
| "epoch": 2.9, | |
| "grad_norm": 0.23283518850803375, | |
| "learning_rate": 2.227815407623903e-05, | |
| "loss": 0.6653, | |
| "step": 6000 | |
| }, | |
| { | |
| "epoch": 2.9, | |
| "eval_loss": 0.6763524413108826, | |
| "eval_runtime": 43.1507, | |
| "eval_samples_per_second": 46.349, | |
| "eval_steps_per_second": 0.371, | |
| "step": 6000 | |
| }, | |
| { | |
| "epoch": 2.9, | |
| "grad_norm": 0.24773156642913818, | |
| "learning_rate": 2.2194624464589052e-05, | |
| "loss": 0.6409, | |
| "step": 6010 | |
| }, | |
| { | |
| "epoch": 2.91, | |
| "grad_norm": 0.19031673669815063, | |
| "learning_rate": 2.2111126558229102e-05, | |
| "loss": 0.641, | |
| "step": 6020 | |
| }, | |
| { | |
| "epoch": 2.91, | |
| "grad_norm": 0.21194177865982056, | |
| "learning_rate": 2.2027661300820833e-05, | |
| "loss": 0.6411, | |
| "step": 6030 | |
| }, | |
| { | |
| "epoch": 2.92, | |
| "grad_norm": 0.22639860212802887, | |
| "learning_rate": 2.1944229635656938e-05, | |
| "loss": 0.6559, | |
| "step": 6040 | |
| }, | |
| { | |
| "epoch": 2.92, | |
| "grad_norm": 0.20527996122837067, | |
| "learning_rate": 2.186083250565043e-05, | |
| "loss": 0.6441, | |
| "step": 6050 | |
| }, | |
| { | |
| "epoch": 2.93, | |
| "grad_norm": 0.21662482619285583, | |
| "learning_rate": 2.1777470853324045e-05, | |
| "loss": 0.6517, | |
| "step": 6060 | |
| }, | |
| { | |
| "epoch": 2.93, | |
| "grad_norm": 0.21164235472679138, | |
| "learning_rate": 2.1694145620799543e-05, | |
| "loss": 0.634, | |
| "step": 6070 | |
| }, | |
| { | |
| "epoch": 2.94, | |
| "grad_norm": 0.20878757536411285, | |
| "learning_rate": 2.161085774978709e-05, | |
| "loss": 0.6676, | |
| "step": 6080 | |
| }, | |
| { | |
| "epoch": 2.94, | |
| "grad_norm": 0.19167086482048035, | |
| "learning_rate": 2.1527608181574616e-05, | |
| "loss": 0.6569, | |
| "step": 6090 | |
| }, | |
| { | |
| "epoch": 2.95, | |
| "grad_norm": 0.18406851589679718, | |
| "learning_rate": 2.1444397857017154e-05, | |
| "loss": 0.6455, | |
| "step": 6100 | |
| }, | |
| { | |
| "epoch": 2.95, | |
| "grad_norm": 0.21034060418605804, | |
| "learning_rate": 2.1361227716526226e-05, | |
| "loss": 0.6445, | |
| "step": 6110 | |
| }, | |
| { | |
| "epoch": 2.96, | |
| "grad_norm": 0.20496372878551483, | |
| "learning_rate": 2.1278098700059212e-05, | |
| "loss": 0.659, | |
| "step": 6120 | |
| }, | |
| { | |
| "epoch": 2.96, | |
| "grad_norm": 0.22188597917556763, | |
| "learning_rate": 2.11950117471087e-05, | |
| "loss": 0.636, | |
| "step": 6130 | |
| }, | |
| { | |
| "epoch": 2.97, | |
| "grad_norm": 0.21596181392669678, | |
| "learning_rate": 2.1111967796691946e-05, | |
| "loss": 0.6567, | |
| "step": 6140 | |
| }, | |
| { | |
| "epoch": 2.97, | |
| "grad_norm": 0.24058407545089722, | |
| "learning_rate": 2.102896778734013e-05, | |
| "loss": 0.6336, | |
| "step": 6150 | |
| }, | |
| { | |
| "epoch": 2.98, | |
| "grad_norm": 0.2177901268005371, | |
| "learning_rate": 2.094601265708791e-05, | |
| "loss": 0.666, | |
| "step": 6160 | |
| }, | |
| { | |
| "epoch": 2.98, | |
| "grad_norm": 0.25122541189193726, | |
| "learning_rate": 2.086310334346268e-05, | |
| "loss": 0.6363, | |
| "step": 6170 | |
| }, | |
| { | |
| "epoch": 2.99, | |
| "grad_norm": 0.21556386351585388, | |
| "learning_rate": 2.0780240783474045e-05, | |
| "loss": 0.6571, | |
| "step": 6180 | |
| }, | |
| { | |
| "epoch": 2.99, | |
| "grad_norm": 0.21746757626533508, | |
| "learning_rate": 2.069742591360323e-05, | |
| "loss": 0.6398, | |
| "step": 6190 | |
| }, | |
| { | |
| "epoch": 3.0, | |
| "grad_norm": 0.18603651225566864, | |
| "learning_rate": 2.0614659669792467e-05, | |
| "loss": 0.6534, | |
| "step": 6200 | |
| }, | |
| { | |
| "epoch": 3.0, | |
| "grad_norm": 0.2334885150194168, | |
| "learning_rate": 2.053194298743446e-05, | |
| "loss": 0.6707, | |
| "step": 6210 | |
| }, | |
| { | |
| "epoch": 3.01, | |
| "grad_norm": 0.24816961586475372, | |
| "learning_rate": 2.0449276801361766e-05, | |
| "loss": 0.6462, | |
| "step": 6220 | |
| }, | |
| { | |
| "epoch": 3.01, | |
| "grad_norm": 0.29403042793273926, | |
| "learning_rate": 2.036666204583625e-05, | |
| "loss": 0.6335, | |
| "step": 6230 | |
| }, | |
| { | |
| "epoch": 3.02, | |
| "grad_norm": 0.23217493295669556, | |
| "learning_rate": 2.0284099654538556e-05, | |
| "loss": 0.6326, | |
| "step": 6240 | |
| }, | |
| { | |
| "epoch": 3.02, | |
| "grad_norm": 0.20210884511470795, | |
| "learning_rate": 2.0201590560557483e-05, | |
| "loss": 0.6366, | |
| "step": 6250 | |
| }, | |
| { | |
| "epoch": 3.03, | |
| "grad_norm": 0.22504980862140656, | |
| "learning_rate": 2.0119135696379536e-05, | |
| "loss": 0.65, | |
| "step": 6260 | |
| }, | |
| { | |
| "epoch": 3.03, | |
| "grad_norm": 0.24354460835456848, | |
| "learning_rate": 2.0036735993878296e-05, | |
| "loss": 0.6626, | |
| "step": 6270 | |
| }, | |
| { | |
| "epoch": 3.04, | |
| "grad_norm": 0.21757520735263824, | |
| "learning_rate": 1.9954392384303942e-05, | |
| "loss": 0.6287, | |
| "step": 6280 | |
| }, | |
| { | |
| "epoch": 3.04, | |
| "grad_norm": 0.19983159005641937, | |
| "learning_rate": 1.9872105798272712e-05, | |
| "loss": 0.6422, | |
| "step": 6290 | |
| }, | |
| { | |
| "epoch": 3.04, | |
| "grad_norm": 0.2321237325668335, | |
| "learning_rate": 1.9789877165756378e-05, | |
| "loss": 0.6578, | |
| "step": 6300 | |
| }, | |
| { | |
| "epoch": 3.05, | |
| "grad_norm": 0.20653708279132843, | |
| "learning_rate": 1.9707707416071762e-05, | |
| "loss": 0.6409, | |
| "step": 6310 | |
| }, | |
| { | |
| "epoch": 3.05, | |
| "grad_norm": 0.2338685691356659, | |
| "learning_rate": 1.96255974778702e-05, | |
| "loss": 0.6484, | |
| "step": 6320 | |
| }, | |
| { | |
| "epoch": 3.06, | |
| "grad_norm": 0.24382874369621277, | |
| "learning_rate": 1.9543548279127048e-05, | |
| "loss": 0.6389, | |
| "step": 6330 | |
| }, | |
| { | |
| "epoch": 3.06, | |
| "grad_norm": 0.21047548949718475, | |
| "learning_rate": 1.946156074713124e-05, | |
| "loss": 0.6374, | |
| "step": 6340 | |
| }, | |
| { | |
| "epoch": 3.07, | |
| "grad_norm": 0.2392827272415161, | |
| "learning_rate": 1.937963580847475e-05, | |
| "loss": 0.6313, | |
| "step": 6350 | |
| }, | |
| { | |
| "epoch": 3.07, | |
| "grad_norm": 0.2402975857257843, | |
| "learning_rate": 1.9297774389042164e-05, | |
| "loss": 0.6673, | |
| "step": 6360 | |
| }, | |
| { | |
| "epoch": 3.08, | |
| "grad_norm": 0.2071339339017868, | |
| "learning_rate": 1.921597741400018e-05, | |
| "loss": 0.6492, | |
| "step": 6370 | |
| }, | |
| { | |
| "epoch": 3.08, | |
| "grad_norm": 0.22973790764808655, | |
| "learning_rate": 1.9134245807787177e-05, | |
| "loss": 0.6498, | |
| "step": 6380 | |
| }, | |
| { | |
| "epoch": 3.09, | |
| "grad_norm": 0.2571215331554413, | |
| "learning_rate": 1.905258049410277e-05, | |
| "loss": 0.6189, | |
| "step": 6390 | |
| }, | |
| { | |
| "epoch": 3.09, | |
| "grad_norm": 0.22487790882587433, | |
| "learning_rate": 1.897098239589735e-05, | |
| "loss": 0.6558, | |
| "step": 6400 | |
| }, | |
| { | |
| "epoch": 3.1, | |
| "grad_norm": 0.2679091989994049, | |
| "learning_rate": 1.888945243536167e-05, | |
| "loss": 0.6487, | |
| "step": 6410 | |
| }, | |
| { | |
| "epoch": 3.1, | |
| "grad_norm": 0.25580644607543945, | |
| "learning_rate": 1.8807991533916395e-05, | |
| "loss": 0.6338, | |
| "step": 6420 | |
| }, | |
| { | |
| "epoch": 3.11, | |
| "grad_norm": 0.2231510728597641, | |
| "learning_rate": 1.8726600612201765e-05, | |
| "loss": 0.6449, | |
| "step": 6430 | |
| }, | |
| { | |
| "epoch": 3.11, | |
| "grad_norm": 0.253778338432312, | |
| "learning_rate": 1.8645280590067072e-05, | |
| "loss": 0.632, | |
| "step": 6440 | |
| }, | |
| { | |
| "epoch": 3.12, | |
| "grad_norm": 0.21836714446544647, | |
| "learning_rate": 1.8564032386560367e-05, | |
| "loss": 0.6444, | |
| "step": 6450 | |
| }, | |
| { | |
| "epoch": 3.12, | |
| "grad_norm": 0.21077515184879303, | |
| "learning_rate": 1.8482856919918017e-05, | |
| "loss": 0.645, | |
| "step": 6460 | |
| }, | |
| { | |
| "epoch": 3.13, | |
| "grad_norm": 0.25506341457366943, | |
| "learning_rate": 1.8401755107554344e-05, | |
| "loss": 0.6618, | |
| "step": 6470 | |
| }, | |
| { | |
| "epoch": 3.13, | |
| "grad_norm": 0.20035551488399506, | |
| "learning_rate": 1.8320727866051286e-05, | |
| "loss": 0.6638, | |
| "step": 6480 | |
| }, | |
| { | |
| "epoch": 3.14, | |
| "grad_norm": 0.21936412155628204, | |
| "learning_rate": 1.823977611114797e-05, | |
| "loss": 0.649, | |
| "step": 6490 | |
| }, | |
| { | |
| "epoch": 3.14, | |
| "grad_norm": 0.21108798682689667, | |
| "learning_rate": 1.8158900757730423e-05, | |
| "loss": 0.643, | |
| "step": 6500 | |
| }, | |
| { | |
| "epoch": 3.14, | |
| "eval_loss": 0.6756294965744019, | |
| "eval_runtime": 43.1292, | |
| "eval_samples_per_second": 46.372, | |
| "eval_steps_per_second": 0.371, | |
| "step": 6500 | |
| }, | |
| { | |
| "epoch": 3.15, | |
| "grad_norm": 0.23786285519599915, | |
| "learning_rate": 1.807810271982121e-05, | |
| "loss": 0.6417, | |
| "step": 6510 | |
| }, | |
| { | |
| "epoch": 3.15, | |
| "grad_norm": 0.2198764979839325, | |
| "learning_rate": 1.79973829105691e-05, | |
| "loss": 0.6596, | |
| "step": 6520 | |
| }, | |
| { | |
| "epoch": 3.16, | |
| "grad_norm": 0.201913520693779, | |
| "learning_rate": 1.791674224223876e-05, | |
| "loss": 0.6539, | |
| "step": 6530 | |
| }, | |
| { | |
| "epoch": 3.16, | |
| "grad_norm": 0.18852323293685913, | |
| "learning_rate": 1.7836181626200425e-05, | |
| "loss": 0.6442, | |
| "step": 6540 | |
| }, | |
| { | |
| "epoch": 3.17, | |
| "grad_norm": 0.2102418839931488, | |
| "learning_rate": 1.7755701972919607e-05, | |
| "loss": 0.6323, | |
| "step": 6550 | |
| }, | |
| { | |
| "epoch": 3.17, | |
| "grad_norm": 0.20450511574745178, | |
| "learning_rate": 1.7675304191946835e-05, | |
| "loss": 0.6348, | |
| "step": 6560 | |
| }, | |
| { | |
| "epoch": 3.18, | |
| "grad_norm": 0.21254278719425201, | |
| "learning_rate": 1.7594989191907315e-05, | |
| "loss": 0.6532, | |
| "step": 6570 | |
| }, | |
| { | |
| "epoch": 3.18, | |
| "grad_norm": 0.22247746586799622, | |
| "learning_rate": 1.751475788049072e-05, | |
| "loss": 0.6574, | |
| "step": 6580 | |
| }, | |
| { | |
| "epoch": 3.19, | |
| "grad_norm": 0.22997824847698212, | |
| "learning_rate": 1.743461116444089e-05, | |
| "loss": 0.6464, | |
| "step": 6590 | |
| }, | |
| { | |
| "epoch": 3.19, | |
| "grad_norm": 0.22006092965602875, | |
| "learning_rate": 1.7354549949545586e-05, | |
| "loss": 0.625, | |
| "step": 6600 | |
| }, | |
| { | |
| "epoch": 3.19, | |
| "grad_norm": 0.24055121839046478, | |
| "learning_rate": 1.7274575140626318e-05, | |
| "loss": 0.6348, | |
| "step": 6610 | |
| }, | |
| { | |
| "epoch": 3.2, | |
| "grad_norm": 0.21221566200256348, | |
| "learning_rate": 1.7194687641528013e-05, | |
| "loss": 0.6571, | |
| "step": 6620 | |
| }, | |
| { | |
| "epoch": 3.2, | |
| "grad_norm": 0.24489928781986237, | |
| "learning_rate": 1.711488835510889e-05, | |
| "loss": 0.6354, | |
| "step": 6630 | |
| }, | |
| { | |
| "epoch": 3.21, | |
| "grad_norm": 0.21739131212234497, | |
| "learning_rate": 1.703517818323021e-05, | |
| "loss": 0.6563, | |
| "step": 6640 | |
| }, | |
| { | |
| "epoch": 3.21, | |
| "grad_norm": 0.21341422200202942, | |
| "learning_rate": 1.695555802674608e-05, | |
| "loss": 0.6352, | |
| "step": 6650 | |
| }, | |
| { | |
| "epoch": 3.22, | |
| "grad_norm": 0.24106374382972717, | |
| "learning_rate": 1.687602878549333e-05, | |
| "loss": 0.6569, | |
| "step": 6660 | |
| }, | |
| { | |
| "epoch": 3.22, | |
| "grad_norm": 0.21376299858093262, | |
| "learning_rate": 1.6796591358281255e-05, | |
| "loss": 0.635, | |
| "step": 6670 | |
| }, | |
| { | |
| "epoch": 3.23, | |
| "grad_norm": 0.24214418232440948, | |
| "learning_rate": 1.671724664288153e-05, | |
| "loss": 0.6589, | |
| "step": 6680 | |
| }, | |
| { | |
| "epoch": 3.23, | |
| "grad_norm": 0.2144564688205719, | |
| "learning_rate": 1.6637995536018023e-05, | |
| "loss": 0.6443, | |
| "step": 6690 | |
| }, | |
| { | |
| "epoch": 3.24, | |
| "grad_norm": 0.20162871479988098, | |
| "learning_rate": 1.6558838933356676e-05, | |
| "loss": 0.6273, | |
| "step": 6700 | |
| }, | |
| { | |
| "epoch": 3.24, | |
| "grad_norm": 0.22555021941661835, | |
| "learning_rate": 1.6479777729495387e-05, | |
| "loss": 0.6482, | |
| "step": 6710 | |
| }, | |
| { | |
| "epoch": 3.25, | |
| "grad_norm": 0.2198752909898758, | |
| "learning_rate": 1.640081281795388e-05, | |
| "loss": 0.6592, | |
| "step": 6720 | |
| }, | |
| { | |
| "epoch": 3.25, | |
| "grad_norm": 0.20376479625701904, | |
| "learning_rate": 1.6321945091163642e-05, | |
| "loss": 0.6486, | |
| "step": 6730 | |
| }, | |
| { | |
| "epoch": 3.26, | |
| "grad_norm": 0.20879609882831573, | |
| "learning_rate": 1.62431754404578e-05, | |
| "loss": 0.6352, | |
| "step": 6740 | |
| }, | |
| { | |
| "epoch": 3.26, | |
| "grad_norm": 0.23657208681106567, | |
| "learning_rate": 1.616450475606105e-05, | |
| "loss": 0.6365, | |
| "step": 6750 | |
| }, | |
| { | |
| "epoch": 3.27, | |
| "grad_norm": 0.22932332754135132, | |
| "learning_rate": 1.608593392707964e-05, | |
| "loss": 0.6257, | |
| "step": 6760 | |
| }, | |
| { | |
| "epoch": 3.27, | |
| "grad_norm": 0.23238502442836761, | |
| "learning_rate": 1.600746384149125e-05, | |
| "loss": 0.6421, | |
| "step": 6770 | |
| }, | |
| { | |
| "epoch": 3.28, | |
| "grad_norm": 0.22237473726272583, | |
| "learning_rate": 1.5929095386135035e-05, | |
| "loss": 0.646, | |
| "step": 6780 | |
| }, | |
| { | |
| "epoch": 3.28, | |
| "grad_norm": 0.2505503296852112, | |
| "learning_rate": 1.5850829446701548e-05, | |
| "loss": 0.6564, | |
| "step": 6790 | |
| }, | |
| { | |
| "epoch": 3.29, | |
| "grad_norm": 0.24202491343021393, | |
| "learning_rate": 1.5772666907722732e-05, | |
| "loss": 0.6293, | |
| "step": 6800 | |
| }, | |
| { | |
| "epoch": 3.29, | |
| "grad_norm": 0.2263512760400772, | |
| "learning_rate": 1.569460865256196e-05, | |
| "loss": 0.6464, | |
| "step": 6810 | |
| }, | |
| { | |
| "epoch": 3.3, | |
| "grad_norm": 0.20612984895706177, | |
| "learning_rate": 1.5616655563404005e-05, | |
| "loss": 0.6356, | |
| "step": 6820 | |
| }, | |
| { | |
| "epoch": 3.3, | |
| "grad_norm": 0.2401859611272812, | |
| "learning_rate": 1.5538808521245132e-05, | |
| "loss": 0.6335, | |
| "step": 6830 | |
| }, | |
| { | |
| "epoch": 3.31, | |
| "grad_norm": 0.23205476999282837, | |
| "learning_rate": 1.546106840588306e-05, | |
| "loss": 0.637, | |
| "step": 6840 | |
| }, | |
| { | |
| "epoch": 3.31, | |
| "grad_norm": 0.21428649127483368, | |
| "learning_rate": 1.538343609590709e-05, | |
| "loss": 0.6445, | |
| "step": 6850 | |
| }, | |
| { | |
| "epoch": 3.32, | |
| "grad_norm": 0.22657857835292816, | |
| "learning_rate": 1.5305912468688132e-05, | |
| "loss": 0.6505, | |
| "step": 6860 | |
| }, | |
| { | |
| "epoch": 3.32, | |
| "grad_norm": 0.24213473498821259, | |
| "learning_rate": 1.52284984003688e-05, | |
| "loss": 0.621, | |
| "step": 6870 | |
| }, | |
| { | |
| "epoch": 3.33, | |
| "grad_norm": 0.292008638381958, | |
| "learning_rate": 1.5151194765853557e-05, | |
| "loss": 0.6334, | |
| "step": 6880 | |
| }, | |
| { | |
| "epoch": 3.33, | |
| "grad_norm": 0.23929265141487122, | |
| "learning_rate": 1.5074002438798717e-05, | |
| "loss": 0.6358, | |
| "step": 6890 | |
| }, | |
| { | |
| "epoch": 3.33, | |
| "grad_norm": 0.2356884777545929, | |
| "learning_rate": 1.4996922291602709e-05, | |
| "loss": 0.6383, | |
| "step": 6900 | |
| }, | |
| { | |
| "epoch": 3.34, | |
| "grad_norm": 0.27760398387908936, | |
| "learning_rate": 1.4919955195396096e-05, | |
| "loss": 0.6149, | |
| "step": 6910 | |
| }, | |
| { | |
| "epoch": 3.34, | |
| "grad_norm": 0.24114906787872314, | |
| "learning_rate": 1.4843102020031796e-05, | |
| "loss": 0.6435, | |
| "step": 6920 | |
| }, | |
| { | |
| "epoch": 3.35, | |
| "grad_norm": 0.22814461588859558, | |
| "learning_rate": 1.476636363407526e-05, | |
| "loss": 0.649, | |
| "step": 6930 | |
| }, | |
| { | |
| "epoch": 3.35, | |
| "grad_norm": 0.23456543684005737, | |
| "learning_rate": 1.4689740904794588e-05, | |
| "loss": 0.6244, | |
| "step": 6940 | |
| }, | |
| { | |
| "epoch": 3.36, | |
| "grad_norm": 0.24401907622814178, | |
| "learning_rate": 1.4613234698150824e-05, | |
| "loss": 0.6543, | |
| "step": 6950 | |
| }, | |
| { | |
| "epoch": 3.36, | |
| "grad_norm": 0.23866045475006104, | |
| "learning_rate": 1.4536845878788086e-05, | |
| "loss": 0.6516, | |
| "step": 6960 | |
| }, | |
| { | |
| "epoch": 3.37, | |
| "grad_norm": 0.2096366286277771, | |
| "learning_rate": 1.4460575310023805e-05, | |
| "loss": 0.6303, | |
| "step": 6970 | |
| }, | |
| { | |
| "epoch": 3.37, | |
| "grad_norm": 0.22500860691070557, | |
| "learning_rate": 1.4384423853839055e-05, | |
| "loss": 0.6354, | |
| "step": 6980 | |
| }, | |
| { | |
| "epoch": 3.38, | |
| "grad_norm": 0.20588767528533936, | |
| "learning_rate": 1.4308392370868673e-05, | |
| "loss": 0.6485, | |
| "step": 6990 | |
| }, | |
| { | |
| "epoch": 3.38, | |
| "grad_norm": 0.24658453464508057, | |
| "learning_rate": 1.4232481720391645e-05, | |
| "loss": 0.6532, | |
| "step": 7000 | |
| }, | |
| { | |
| "epoch": 3.38, | |
| "eval_loss": 0.6749144196510315, | |
| "eval_runtime": 42.8253, | |
| "eval_samples_per_second": 46.701, | |
| "eval_steps_per_second": 0.374, | |
| "step": 7000 | |
| }, | |
| { | |
| "epoch": 3.39, | |
| "grad_norm": 0.1951073855161667, | |
| "learning_rate": 1.4156692760321357e-05, | |
| "loss": 0.659, | |
| "step": 7010 | |
| }, | |
| { | |
| "epoch": 3.39, | |
| "grad_norm": 0.22289149463176727, | |
| "learning_rate": 1.4081026347195853e-05, | |
| "loss": 0.6573, | |
| "step": 7020 | |
| }, | |
| { | |
| "epoch": 3.4, | |
| "grad_norm": 0.21882636845111847, | |
| "learning_rate": 1.4005483336168268e-05, | |
| "loss": 0.6515, | |
| "step": 7030 | |
| }, | |
| { | |
| "epoch": 3.4, | |
| "grad_norm": 0.19442416727542877, | |
| "learning_rate": 1.3930064580997026e-05, | |
| "loss": 0.6497, | |
| "step": 7040 | |
| }, | |
| { | |
| "epoch": 3.41, | |
| "grad_norm": 0.2646746039390564, | |
| "learning_rate": 1.3854770934036293e-05, | |
| "loss": 0.6639, | |
| "step": 7050 | |
| }, | |
| { | |
| "epoch": 3.41, | |
| "grad_norm": 0.2990243136882782, | |
| "learning_rate": 1.3779603246226314e-05, | |
| "loss": 0.6428, | |
| "step": 7060 | |
| }, | |
| { | |
| "epoch": 3.42, | |
| "grad_norm": 0.2094077467918396, | |
| "learning_rate": 1.3704562367083756e-05, | |
| "loss": 0.6219, | |
| "step": 7070 | |
| }, | |
| { | |
| "epoch": 3.42, | |
| "grad_norm": 0.21577470004558563, | |
| "learning_rate": 1.3629649144692189e-05, | |
| "loss": 0.6535, | |
| "step": 7080 | |
| }, | |
| { | |
| "epoch": 3.43, | |
| "grad_norm": 0.2171671837568283, | |
| "learning_rate": 1.3554864425692421e-05, | |
| "loss": 0.6413, | |
| "step": 7090 | |
| }, | |
| { | |
| "epoch": 3.43, | |
| "grad_norm": 0.23857831954956055, | |
| "learning_rate": 1.348020905527298e-05, | |
| "loss": 0.6359, | |
| "step": 7100 | |
| }, | |
| { | |
| "epoch": 3.44, | |
| "grad_norm": 0.22427092492580414, | |
| "learning_rate": 1.3405683877160552e-05, | |
| "loss": 0.6298, | |
| "step": 7110 | |
| }, | |
| { | |
| "epoch": 3.44, | |
| "grad_norm": 0.2328975945711136, | |
| "learning_rate": 1.3331289733610403e-05, | |
| "loss": 0.6527, | |
| "step": 7120 | |
| }, | |
| { | |
| "epoch": 3.45, | |
| "grad_norm": 0.23730657994747162, | |
| "learning_rate": 1.3257027465396931e-05, | |
| "loss": 0.6466, | |
| "step": 7130 | |
| }, | |
| { | |
| "epoch": 3.45, | |
| "grad_norm": 0.23366102576255798, | |
| "learning_rate": 1.318289791180412e-05, | |
| "loss": 0.6473, | |
| "step": 7140 | |
| }, | |
| { | |
| "epoch": 3.46, | |
| "grad_norm": 0.21646711230278015, | |
| "learning_rate": 1.3108901910616067e-05, | |
| "loss": 0.6368, | |
| "step": 7150 | |
| }, | |
| { | |
| "epoch": 3.46, | |
| "grad_norm": 0.2109076976776123, | |
| "learning_rate": 1.3035040298107509e-05, | |
| "loss": 0.6542, | |
| "step": 7160 | |
| }, | |
| { | |
| "epoch": 3.47, | |
| "grad_norm": 0.1987828016281128, | |
| "learning_rate": 1.296131390903435e-05, | |
| "loss": 0.6309, | |
| "step": 7170 | |
| }, | |
| { | |
| "epoch": 3.47, | |
| "grad_norm": 0.20900775492191315, | |
| "learning_rate": 1.2887723576624284e-05, | |
| "loss": 0.6415, | |
| "step": 7180 | |
| }, | |
| { | |
| "epoch": 3.48, | |
| "grad_norm": 0.2363632619380951, | |
| "learning_rate": 1.2814270132567327e-05, | |
| "loss": 0.6614, | |
| "step": 7190 | |
| }, | |
| { | |
| "epoch": 3.48, | |
| "grad_norm": 0.2310895472764969, | |
| "learning_rate": 1.2740954407006439e-05, | |
| "loss": 0.6406, | |
| "step": 7200 | |
| }, | |
| { | |
| "epoch": 3.48, | |
| "grad_norm": 0.2274298369884491, | |
| "learning_rate": 1.2667777228528132e-05, | |
| "loss": 0.6386, | |
| "step": 7210 | |
| }, | |
| { | |
| "epoch": 3.49, | |
| "grad_norm": 0.2044222056865692, | |
| "learning_rate": 1.2594739424153134e-05, | |
| "loss": 0.6366, | |
| "step": 7220 | |
| }, | |
| { | |
| "epoch": 3.49, | |
| "grad_norm": 0.2088422328233719, | |
| "learning_rate": 1.2521841819326979e-05, | |
| "loss": 0.6493, | |
| "step": 7230 | |
| }, | |
| { | |
| "epoch": 3.5, | |
| "grad_norm": 0.1974959522485733, | |
| "learning_rate": 1.2449085237910746e-05, | |
| "loss": 0.632, | |
| "step": 7240 | |
| }, | |
| { | |
| "epoch": 3.5, | |
| "grad_norm": 0.20174655318260193, | |
| "learning_rate": 1.2376470502171733e-05, | |
| "loss": 0.6355, | |
| "step": 7250 | |
| }, | |
| { | |
| "epoch": 3.51, | |
| "grad_norm": 0.2152842879295349, | |
| "learning_rate": 1.2303998432774102e-05, | |
| "loss": 0.6361, | |
| "step": 7260 | |
| }, | |
| { | |
| "epoch": 3.51, | |
| "grad_norm": 0.2618776261806488, | |
| "learning_rate": 1.2231669848769728e-05, | |
| "loss": 0.639, | |
| "step": 7270 | |
| }, | |
| { | |
| "epoch": 3.52, | |
| "grad_norm": 0.21340912580490112, | |
| "learning_rate": 1.2159485567588802e-05, | |
| "loss": 0.6352, | |
| "step": 7280 | |
| }, | |
| { | |
| "epoch": 3.52, | |
| "grad_norm": 0.22044430673122406, | |
| "learning_rate": 1.2087446405030689e-05, | |
| "loss": 0.6386, | |
| "step": 7290 | |
| }, | |
| { | |
| "epoch": 3.53, | |
| "grad_norm": 0.22984373569488525, | |
| "learning_rate": 1.201555317525469e-05, | |
| "loss": 0.6219, | |
| "step": 7300 | |
| }, | |
| { | |
| "epoch": 3.53, | |
| "grad_norm": 0.23688668012619019, | |
| "learning_rate": 1.1943806690770783e-05, | |
| "loss": 0.6413, | |
| "step": 7310 | |
| }, | |
| { | |
| "epoch": 3.54, | |
| "grad_norm": 0.20583494007587433, | |
| "learning_rate": 1.1872207762430553e-05, | |
| "loss": 0.6185, | |
| "step": 7320 | |
| }, | |
| { | |
| "epoch": 3.54, | |
| "grad_norm": 0.23525865375995636, | |
| "learning_rate": 1.1800757199417903e-05, | |
| "loss": 0.6394, | |
| "step": 7330 | |
| }, | |
| { | |
| "epoch": 3.55, | |
| "grad_norm": 0.2524894177913666, | |
| "learning_rate": 1.1729455809239994e-05, | |
| "loss": 0.6541, | |
| "step": 7340 | |
| }, | |
| { | |
| "epoch": 3.55, | |
| "grad_norm": 0.25640588998794556, | |
| "learning_rate": 1.16583043977181e-05, | |
| "loss": 0.656, | |
| "step": 7350 | |
| }, | |
| { | |
| "epoch": 3.56, | |
| "grad_norm": 0.23114340007305145, | |
| "learning_rate": 1.1587303768978453e-05, | |
| "loss": 0.6543, | |
| "step": 7360 | |
| }, | |
| { | |
| "epoch": 3.56, | |
| "grad_norm": 0.2003646343946457, | |
| "learning_rate": 1.151645472544326e-05, | |
| "loss": 0.6736, | |
| "step": 7370 | |
| }, | |
| { | |
| "epoch": 3.57, | |
| "grad_norm": 0.22911614179611206, | |
| "learning_rate": 1.1445758067821508e-05, | |
| "loss": 0.6273, | |
| "step": 7380 | |
| }, | |
| { | |
| "epoch": 3.57, | |
| "grad_norm": 0.23477129638195038, | |
| "learning_rate": 1.1375214595100006e-05, | |
| "loss": 0.6285, | |
| "step": 7390 | |
| }, | |
| { | |
| "epoch": 3.58, | |
| "grad_norm": 0.22911326587200165, | |
| "learning_rate": 1.1304825104534334e-05, | |
| "loss": 0.6352, | |
| "step": 7400 | |
| }, | |
| { | |
| "epoch": 3.58, | |
| "grad_norm": 0.22421817481517792, | |
| "learning_rate": 1.123459039163978e-05, | |
| "loss": 0.6474, | |
| "step": 7410 | |
| }, | |
| { | |
| "epoch": 3.59, | |
| "grad_norm": 0.24066098034381866, | |
| "learning_rate": 1.116451125018246e-05, | |
| "loss": 0.6613, | |
| "step": 7420 | |
| }, | |
| { | |
| "epoch": 3.59, | |
| "grad_norm": 0.23741333186626434, | |
| "learning_rate": 1.1094588472170233e-05, | |
| "loss": 0.672, | |
| "step": 7430 | |
| }, | |
| { | |
| "epoch": 3.6, | |
| "grad_norm": 0.2175961285829544, | |
| "learning_rate": 1.1024822847843785e-05, | |
| "loss": 0.6384, | |
| "step": 7440 | |
| }, | |
| { | |
| "epoch": 3.6, | |
| "grad_norm": 0.21132118999958038, | |
| "learning_rate": 1.0955215165667777e-05, | |
| "loss": 0.6351, | |
| "step": 7450 | |
| }, | |
| { | |
| "epoch": 3.61, | |
| "grad_norm": 0.2364446073770523, | |
| "learning_rate": 1.088576621232179e-05, | |
| "loss": 0.6255, | |
| "step": 7460 | |
| }, | |
| { | |
| "epoch": 3.61, | |
| "grad_norm": 0.2264924943447113, | |
| "learning_rate": 1.0816476772691567e-05, | |
| "loss": 0.6472, | |
| "step": 7470 | |
| }, | |
| { | |
| "epoch": 3.62, | |
| "grad_norm": 0.2071733921766281, | |
| "learning_rate": 1.0747347629860072e-05, | |
| "loss": 0.6405, | |
| "step": 7480 | |
| }, | |
| { | |
| "epoch": 3.62, | |
| "grad_norm": 0.21756674349308014, | |
| "learning_rate": 1.0678379565098626e-05, | |
| "loss": 0.6239, | |
| "step": 7490 | |
| }, | |
| { | |
| "epoch": 3.62, | |
| "grad_norm": 0.21607337892055511, | |
| "learning_rate": 1.0609573357858166e-05, | |
| "loss": 0.6299, | |
| "step": 7500 | |
| }, | |
| { | |
| "epoch": 3.62, | |
| "eval_loss": 0.6744876503944397, | |
| "eval_runtime": 42.9649, | |
| "eval_samples_per_second": 46.55, | |
| "eval_steps_per_second": 0.372, | |
| "step": 7500 | |
| }, | |
| { | |
| "epoch": 3.63, | |
| "grad_norm": 0.21408307552337646, | |
| "learning_rate": 1.054092978576032e-05, | |
| "loss": 0.6398, | |
| "step": 7510 | |
| }, | |
| { | |
| "epoch": 3.63, | |
| "grad_norm": 0.2123579978942871, | |
| "learning_rate": 1.0472449624588706e-05, | |
| "loss": 0.6325, | |
| "step": 7520 | |
| }, | |
| { | |
| "epoch": 3.64, | |
| "grad_norm": 0.201943501830101, | |
| "learning_rate": 1.0404133648280137e-05, | |
| "loss": 0.6288, | |
| "step": 7530 | |
| }, | |
| { | |
| "epoch": 3.64, | |
| "grad_norm": 0.19975706934928894, | |
| "learning_rate": 1.033598262891583e-05, | |
| "loss": 0.6556, | |
| "step": 7540 | |
| }, | |
| { | |
| "epoch": 3.65, | |
| "grad_norm": 0.2010466605424881, | |
| "learning_rate": 1.026799733671279e-05, | |
| "loss": 0.6313, | |
| "step": 7550 | |
| }, | |
| { | |
| "epoch": 3.65, | |
| "grad_norm": 0.22423765063285828, | |
| "learning_rate": 1.0200178540014969e-05, | |
| "loss": 0.6504, | |
| "step": 7560 | |
| }, | |
| { | |
| "epoch": 3.66, | |
| "grad_norm": 0.2070777714252472, | |
| "learning_rate": 1.0132527005284686e-05, | |
| "loss": 0.64, | |
| "step": 7570 | |
| }, | |
| { | |
| "epoch": 3.66, | |
| "grad_norm": 0.21738991141319275, | |
| "learning_rate": 1.0065043497093937e-05, | |
| "loss": 0.6461, | |
| "step": 7580 | |
| }, | |
| { | |
| "epoch": 3.67, | |
| "grad_norm": 0.20665381848812103, | |
| "learning_rate": 9.9977287781157e-06, | |
| "loss": 0.6319, | |
| "step": 7590 | |
| }, | |
| { | |
| "epoch": 3.67, | |
| "grad_norm": 0.21484661102294922, | |
| "learning_rate": 9.930583609115426e-06, | |
| "loss": 0.6694, | |
| "step": 7600 | |
| }, | |
| { | |
| "epoch": 3.68, | |
| "grad_norm": 0.21878722310066223, | |
| "learning_rate": 9.863608748942319e-06, | |
| "loss": 0.6372, | |
| "step": 7610 | |
| }, | |
| { | |
| "epoch": 3.68, | |
| "grad_norm": 0.20911160111427307, | |
| "learning_rate": 9.796804954520847e-06, | |
| "loss": 0.6589, | |
| "step": 7620 | |
| }, | |
| { | |
| "epoch": 3.69, | |
| "grad_norm": 0.19322942197322845, | |
| "learning_rate": 9.730172980842151e-06, | |
| "loss": 0.6394, | |
| "step": 7630 | |
| }, | |
| { | |
| "epoch": 3.69, | |
| "grad_norm": 0.21291927993297577, | |
| "learning_rate": 9.663713580955527e-06, | |
| "loss": 0.642, | |
| "step": 7640 | |
| }, | |
| { | |
| "epoch": 3.7, | |
| "grad_norm": 0.2318650186061859, | |
| "learning_rate": 9.597427505959886e-06, | |
| "loss": 0.6463, | |
| "step": 7650 | |
| }, | |
| { | |
| "epoch": 3.7, | |
| "grad_norm": 0.2421536147594452, | |
| "learning_rate": 9.531315504995306e-06, | |
| "loss": 0.6299, | |
| "step": 7660 | |
| }, | |
| { | |
| "epoch": 3.71, | |
| "grad_norm": 0.22403296828269958, | |
| "learning_rate": 9.465378325234545e-06, | |
| "loss": 0.6429, | |
| "step": 7670 | |
| }, | |
| { | |
| "epoch": 3.71, | |
| "grad_norm": 0.23239101469516754, | |
| "learning_rate": 9.39961671187459e-06, | |
| "loss": 0.6359, | |
| "step": 7680 | |
| }, | |
| { | |
| "epoch": 3.72, | |
| "grad_norm": 0.21775169670581818, | |
| "learning_rate": 9.334031408128257e-06, | |
| "loss": 0.6316, | |
| "step": 7690 | |
| }, | |
| { | |
| "epoch": 3.72, | |
| "grad_norm": 0.20338045060634613, | |
| "learning_rate": 9.26862315521575e-06, | |
| "loss": 0.6555, | |
| "step": 7700 | |
| }, | |
| { | |
| "epoch": 3.73, | |
| "grad_norm": 0.22011174261569977, | |
| "learning_rate": 9.20339269235634e-06, | |
| "loss": 0.6488, | |
| "step": 7710 | |
| }, | |
| { | |
| "epoch": 3.73, | |
| "grad_norm": 0.21809840202331543, | |
| "learning_rate": 9.138340756759972e-06, | |
| "loss": 0.6421, | |
| "step": 7720 | |
| }, | |
| { | |
| "epoch": 3.74, | |
| "grad_norm": 0.2247348427772522, | |
| "learning_rate": 9.073468083618945e-06, | |
| "loss": 0.6267, | |
| "step": 7730 | |
| }, | |
| { | |
| "epoch": 3.74, | |
| "grad_norm": 0.2357349693775177, | |
| "learning_rate": 9.008775406099602e-06, | |
| "loss": 0.6364, | |
| "step": 7740 | |
| }, | |
| { | |
| "epoch": 3.75, | |
| "grad_norm": 0.2163088023662567, | |
| "learning_rate": 8.944263455334032e-06, | |
| "loss": 0.6513, | |
| "step": 7750 | |
| }, | |
| { | |
| "epoch": 3.75, | |
| "grad_norm": 0.21349021792411804, | |
| "learning_rate": 8.87993296041183e-06, | |
| "loss": 0.6343, | |
| "step": 7760 | |
| }, | |
| { | |
| "epoch": 3.76, | |
| "grad_norm": 0.23505771160125732, | |
| "learning_rate": 8.815784648371853e-06, | |
| "loss": 0.6307, | |
| "step": 7770 | |
| }, | |
| { | |
| "epoch": 3.76, | |
| "grad_norm": 0.22224752604961395, | |
| "learning_rate": 8.751819244193981e-06, | |
| "loss": 0.6395, | |
| "step": 7780 | |
| }, | |
| { | |
| "epoch": 3.77, | |
| "grad_norm": 0.22711637616157532, | |
| "learning_rate": 8.688037470790958e-06, | |
| "loss": 0.6498, | |
| "step": 7790 | |
| }, | |
| { | |
| "epoch": 3.77, | |
| "grad_norm": 0.22289259731769562, | |
| "learning_rate": 8.624440049000171e-06, | |
| "loss": 0.636, | |
| "step": 7800 | |
| }, | |
| { | |
| "epoch": 3.77, | |
| "grad_norm": 0.24401511251926422, | |
| "learning_rate": 8.561027697575563e-06, | |
| "loss": 0.6352, | |
| "step": 7810 | |
| }, | |
| { | |
| "epoch": 3.78, | |
| "grad_norm": 0.23475363850593567, | |
| "learning_rate": 8.497801133179481e-06, | |
| "loss": 0.6259, | |
| "step": 7820 | |
| }, | |
| { | |
| "epoch": 3.78, | |
| "grad_norm": 0.23915499448776245, | |
| "learning_rate": 8.434761070374548e-06, | |
| "loss": 0.6583, | |
| "step": 7830 | |
| }, | |
| { | |
| "epoch": 3.79, | |
| "grad_norm": 0.22687704861164093, | |
| "learning_rate": 8.371908221615665e-06, | |
| "loss": 0.6233, | |
| "step": 7840 | |
| }, | |
| { | |
| "epoch": 3.79, | |
| "grad_norm": 0.23513992130756378, | |
| "learning_rate": 8.309243297241867e-06, | |
| "loss": 0.6409, | |
| "step": 7850 | |
| }, | |
| { | |
| "epoch": 3.8, | |
| "grad_norm": 0.22081585228443146, | |
| "learning_rate": 8.246767005468358e-06, | |
| "loss": 0.6534, | |
| "step": 7860 | |
| }, | |
| { | |
| "epoch": 3.8, | |
| "grad_norm": 0.2252863198518753, | |
| "learning_rate": 8.1844800523785e-06, | |
| "loss": 0.6333, | |
| "step": 7870 | |
| }, | |
| { | |
| "epoch": 3.81, | |
| "grad_norm": 0.20887097716331482, | |
| "learning_rate": 8.122383141915777e-06, | |
| "loss": 0.6447, | |
| "step": 7880 | |
| }, | |
| { | |
| "epoch": 3.81, | |
| "grad_norm": 0.22476619482040405, | |
| "learning_rate": 8.060476975875947e-06, | |
| "loss": 0.6559, | |
| "step": 7890 | |
| }, | |
| { | |
| "epoch": 3.82, | |
| "grad_norm": 0.2176969200372696, | |
| "learning_rate": 7.998762253898995e-06, | |
| "loss": 0.6374, | |
| "step": 7900 | |
| }, | |
| { | |
| "epoch": 3.82, | |
| "grad_norm": 0.2264891415834427, | |
| "learning_rate": 7.937239673461294e-06, | |
| "loss": 0.6546, | |
| "step": 7910 | |
| }, | |
| { | |
| "epoch": 3.83, | |
| "grad_norm": 0.2604604959487915, | |
| "learning_rate": 7.875909929867724e-06, | |
| "loss": 0.6344, | |
| "step": 7920 | |
| }, | |
| { | |
| "epoch": 3.83, | |
| "grad_norm": 0.23269446194171906, | |
| "learning_rate": 7.814773716243757e-06, | |
| "loss": 0.6316, | |
| "step": 7930 | |
| }, | |
| { | |
| "epoch": 3.84, | |
| "grad_norm": 0.2207861691713333, | |
| "learning_rate": 7.753831723527714e-06, | |
| "loss": 0.6426, | |
| "step": 7940 | |
| }, | |
| { | |
| "epoch": 3.84, | |
| "grad_norm": 0.22989043593406677, | |
| "learning_rate": 7.693084640462852e-06, | |
| "loss": 0.6439, | |
| "step": 7950 | |
| }, | |
| { | |
| "epoch": 3.85, | |
| "grad_norm": 0.2180193066596985, | |
| "learning_rate": 7.632533153589671e-06, | |
| "loss": 0.632, | |
| "step": 7960 | |
| }, | |
| { | |
| "epoch": 3.85, | |
| "grad_norm": 0.27170613408088684, | |
| "learning_rate": 7.572177947238113e-06, | |
| "loss": 0.6507, | |
| "step": 7970 | |
| }, | |
| { | |
| "epoch": 3.86, | |
| "grad_norm": 0.23820172250270844, | |
| "learning_rate": 7.512019703519793e-06, | |
| "loss": 0.6473, | |
| "step": 7980 | |
| }, | |
| { | |
| "epoch": 3.86, | |
| "grad_norm": 0.2189197838306427, | |
| "learning_rate": 7.452059102320394e-06, | |
| "loss": 0.6522, | |
| "step": 7990 | |
| }, | |
| { | |
| "epoch": 3.87, | |
| "grad_norm": 0.21386443078517914, | |
| "learning_rate": 7.3922968212918695e-06, | |
| "loss": 0.6442, | |
| "step": 8000 | |
| }, | |
| { | |
| "epoch": 3.87, | |
| "eval_loss": 0.6736823320388794, | |
| "eval_runtime": 43.0481, | |
| "eval_samples_per_second": 46.46, | |
| "eval_steps_per_second": 0.372, | |
| "step": 8000 | |
| }, | |
| { | |
| "epoch": 3.87, | |
| "grad_norm": 0.25133392214775085, | |
| "learning_rate": 7.332733535844829e-06, | |
| "loss": 0.6546, | |
| "step": 8010 | |
| }, | |
| { | |
| "epoch": 3.88, | |
| "grad_norm": 0.22904422879219055, | |
| "learning_rate": 7.273369919140963e-06, | |
| "loss": 0.637, | |
| "step": 8020 | |
| }, | |
| { | |
| "epoch": 3.88, | |
| "grad_norm": 0.21408450603485107, | |
| "learning_rate": 7.214206642085322e-06, | |
| "loss": 0.6439, | |
| "step": 8030 | |
| }, | |
| { | |
| "epoch": 3.89, | |
| "grad_norm": 0.2534807324409485, | |
| "learning_rate": 7.155244373318825e-06, | |
| "loss": 0.6246, | |
| "step": 8040 | |
| }, | |
| { | |
| "epoch": 3.89, | |
| "grad_norm": 0.23308417201042175, | |
| "learning_rate": 7.096483779210667e-06, | |
| "loss": 0.6341, | |
| "step": 8050 | |
| }, | |
| { | |
| "epoch": 3.9, | |
| "grad_norm": 0.2280711680650711, | |
| "learning_rate": 7.037925523850786e-06, | |
| "loss": 0.6327, | |
| "step": 8060 | |
| }, | |
| { | |
| "epoch": 3.9, | |
| "grad_norm": 0.21193630993366241, | |
| "learning_rate": 6.979570269042382e-06, | |
| "loss": 0.6588, | |
| "step": 8070 | |
| }, | |
| { | |
| "epoch": 3.91, | |
| "grad_norm": 0.22662419080734253, | |
| "learning_rate": 6.9214186742943915e-06, | |
| "loss": 0.6407, | |
| "step": 8080 | |
| }, | |
| { | |
| "epoch": 3.91, | |
| "grad_norm": 0.21723264455795288, | |
| "learning_rate": 6.86347139681408e-06, | |
| "loss": 0.6558, | |
| "step": 8090 | |
| }, | |
| { | |
| "epoch": 3.91, | |
| "grad_norm": 0.23677177727222443, | |
| "learning_rate": 6.805729091499599e-06, | |
| "loss": 0.6483, | |
| "step": 8100 | |
| }, | |
| { | |
| "epoch": 3.92, | |
| "grad_norm": 0.22076842188835144, | |
| "learning_rate": 6.748192410932574e-06, | |
| "loss": 0.6354, | |
| "step": 8110 | |
| }, | |
| { | |
| "epoch": 3.92, | |
| "grad_norm": 0.23974132537841797, | |
| "learning_rate": 6.690862005370755e-06, | |
| "loss": 0.6448, | |
| "step": 8120 | |
| }, | |
| { | |
| "epoch": 3.93, | |
| "grad_norm": 0.23561669886112213, | |
| "learning_rate": 6.633738522740612e-06, | |
| "loss": 0.6361, | |
| "step": 8130 | |
| }, | |
| { | |
| "epoch": 3.93, | |
| "grad_norm": 0.262416273355484, | |
| "learning_rate": 6.576822608630087e-06, | |
| "loss": 0.6511, | |
| "step": 8140 | |
| }, | |
| { | |
| "epoch": 3.94, | |
| "grad_norm": 0.23164430260658264, | |
| "learning_rate": 6.52011490628125e-06, | |
| "loss": 0.6397, | |
| "step": 8150 | |
| }, | |
| { | |
| "epoch": 3.94, | |
| "grad_norm": 0.24281373620033264, | |
| "learning_rate": 6.463616056583038e-06, | |
| "loss": 0.6509, | |
| "step": 8160 | |
| }, | |
| { | |
| "epoch": 3.95, | |
| "grad_norm": 0.26119861006736755, | |
| "learning_rate": 6.407326698064025e-06, | |
| "loss": 0.6357, | |
| "step": 8170 | |
| }, | |
| { | |
| "epoch": 3.95, | |
| "grad_norm": 0.21345050632953644, | |
| "learning_rate": 6.351247466885171e-06, | |
| "loss": 0.6465, | |
| "step": 8180 | |
| }, | |
| { | |
| "epoch": 3.96, | |
| "grad_norm": 0.3391309976577759, | |
| "learning_rate": 6.2953789968326795e-06, | |
| "loss": 0.6535, | |
| "step": 8190 | |
| }, | |
| { | |
| "epoch": 3.96, | |
| "grad_norm": 0.27176231145858765, | |
| "learning_rate": 6.239721919310806e-06, | |
| "loss": 0.6451, | |
| "step": 8200 | |
| }, | |
| { | |
| "epoch": 3.97, | |
| "grad_norm": 0.2767736613750458, | |
| "learning_rate": 6.184276863334729e-06, | |
| "loss": 0.6615, | |
| "step": 8210 | |
| }, | |
| { | |
| "epoch": 3.97, | |
| "grad_norm": 0.23259542882442474, | |
| "learning_rate": 6.1290444555234275e-06, | |
| "loss": 0.6344, | |
| "step": 8220 | |
| }, | |
| { | |
| "epoch": 3.98, | |
| "grad_norm": 0.225265771150589, | |
| "learning_rate": 6.074025320092625e-06, | |
| "loss": 0.6436, | |
| "step": 8230 | |
| }, | |
| { | |
| "epoch": 3.98, | |
| "grad_norm": 0.2767943739891052, | |
| "learning_rate": 6.0192200788477175e-06, | |
| "loss": 0.6396, | |
| "step": 8240 | |
| }, | |
| { | |
| "epoch": 3.99, | |
| "grad_norm": 0.2067541927099228, | |
| "learning_rate": 5.964629351176751e-06, | |
| "loss": 0.635, | |
| "step": 8250 | |
| }, | |
| { | |
| "epoch": 3.99, | |
| "grad_norm": 0.2741214632987976, | |
| "learning_rate": 5.91025375404343e-06, | |
| "loss": 0.6481, | |
| "step": 8260 | |
| }, | |
| { | |
| "epoch": 4.0, | |
| "grad_norm": 0.2164599895477295, | |
| "learning_rate": 5.856093901980103e-06, | |
| "loss": 0.6419, | |
| "step": 8270 | |
| }, | |
| { | |
| "epoch": 4.0, | |
| "grad_norm": 0.20791101455688477, | |
| "learning_rate": 5.802150407080886e-06, | |
| "loss": 0.6518, | |
| "step": 8280 | |
| }, | |
| { | |
| "epoch": 4.01, | |
| "grad_norm": 0.24703148007392883, | |
| "learning_rate": 5.748423878994691e-06, | |
| "loss": 0.6166, | |
| "step": 8290 | |
| }, | |
| { | |
| "epoch": 4.01, | |
| "grad_norm": 0.2015557438135147, | |
| "learning_rate": 5.694914924918355e-06, | |
| "loss": 0.6312, | |
| "step": 8300 | |
| }, | |
| { | |
| "epoch": 4.02, | |
| "grad_norm": 0.2072596400976181, | |
| "learning_rate": 5.641624149589786e-06, | |
| "loss": 0.6242, | |
| "step": 8310 | |
| }, | |
| { | |
| "epoch": 4.02, | |
| "grad_norm": 0.2478257268667221, | |
| "learning_rate": 5.588552155281096e-06, | |
| "loss": 0.627, | |
| "step": 8320 | |
| }, | |
| { | |
| "epoch": 4.03, | |
| "grad_norm": 0.24107235670089722, | |
| "learning_rate": 5.535699541791836e-06, | |
| "loss": 0.6334, | |
| "step": 8330 | |
| }, | |
| { | |
| "epoch": 4.03, | |
| "grad_norm": 0.22481875121593475, | |
| "learning_rate": 5.4830669064421945e-06, | |
| "loss": 0.6387, | |
| "step": 8340 | |
| }, | |
| { | |
| "epoch": 4.04, | |
| "grad_norm": 0.24436788260936737, | |
| "learning_rate": 5.430654844066246e-06, | |
| "loss": 0.6406, | |
| "step": 8350 | |
| }, | |
| { | |
| "epoch": 4.04, | |
| "grad_norm": 0.22910743951797485, | |
| "learning_rate": 5.378463947005249e-06, | |
| "loss": 0.6494, | |
| "step": 8360 | |
| }, | |
| { | |
| "epoch": 4.05, | |
| "grad_norm": 0.20779405534267426, | |
| "learning_rate": 5.326494805100904e-06, | |
| "loss": 0.6596, | |
| "step": 8370 | |
| }, | |
| { | |
| "epoch": 4.05, | |
| "grad_norm": 0.1871562898159027, | |
| "learning_rate": 5.274748005688745e-06, | |
| "loss": 0.6372, | |
| "step": 8380 | |
| }, | |
| { | |
| "epoch": 4.06, | |
| "grad_norm": 0.20629480481147766, | |
| "learning_rate": 5.223224133591476e-06, | |
| "loss": 0.6414, | |
| "step": 8390 | |
| }, | |
| { | |
| "epoch": 4.06, | |
| "grad_norm": 0.2312021702528, | |
| "learning_rate": 5.1719237711123305e-06, | |
| "loss": 0.6344, | |
| "step": 8400 | |
| }, | |
| { | |
| "epoch": 4.06, | |
| "grad_norm": 0.24816015362739563, | |
| "learning_rate": 5.120847498028569e-06, | |
| "loss": 0.6274, | |
| "step": 8410 | |
| }, | |
| { | |
| "epoch": 4.07, | |
| "grad_norm": 0.21163330972194672, | |
| "learning_rate": 5.069995891584839e-06, | |
| "loss": 0.6328, | |
| "step": 8420 | |
| }, | |
| { | |
| "epoch": 4.07, | |
| "grad_norm": 0.2076953798532486, | |
| "learning_rate": 5.0193695264867055e-06, | |
| "loss": 0.6442, | |
| "step": 8430 | |
| }, | |
| { | |
| "epoch": 4.08, | |
| "grad_norm": 0.20648670196533203, | |
| "learning_rate": 4.9689689748941505e-06, | |
| "loss": 0.6396, | |
| "step": 8440 | |
| }, | |
| { | |
| "epoch": 4.08, | |
| "grad_norm": 0.2734256386756897, | |
| "learning_rate": 4.918794806415067e-06, | |
| "loss": 0.6457, | |
| "step": 8450 | |
| }, | |
| { | |
| "epoch": 4.09, | |
| "grad_norm": 0.2449502795934677, | |
| "learning_rate": 4.868847588098901e-06, | |
| "loss": 0.6335, | |
| "step": 8460 | |
| }, | |
| { | |
| "epoch": 4.09, | |
| "grad_norm": 0.22369302809238434, | |
| "learning_rate": 4.819127884430141e-06, | |
| "loss": 0.6381, | |
| "step": 8470 | |
| }, | |
| { | |
| "epoch": 4.1, | |
| "grad_norm": 0.2563876509666443, | |
| "learning_rate": 4.769636257322027e-06, | |
| "loss": 0.6403, | |
| "step": 8480 | |
| }, | |
| { | |
| "epoch": 4.1, | |
| "grad_norm": 0.22850382328033447, | |
| "learning_rate": 4.720373266110159e-06, | |
| "loss": 0.6234, | |
| "step": 8490 | |
| }, | |
| { | |
| "epoch": 4.11, | |
| "grad_norm": 0.2683540880680084, | |
| "learning_rate": 4.671339467546151e-06, | |
| "loss": 0.6347, | |
| "step": 8500 | |
| }, | |
| { | |
| "epoch": 4.11, | |
| "eval_loss": 0.6733194589614868, | |
| "eval_runtime": 43.029, | |
| "eval_samples_per_second": 46.48, | |
| "eval_steps_per_second": 0.372, | |
| "step": 8500 | |
| }, | |
| { | |
| "epoch": 4.11, | |
| "grad_norm": 0.22873437404632568, | |
| "learning_rate": 4.622535415791423e-06, | |
| "loss": 0.6313, | |
| "step": 8510 | |
| }, | |
| { | |
| "epoch": 4.12, | |
| "grad_norm": 0.22920221090316772, | |
| "learning_rate": 4.57396166241083e-06, | |
| "loss": 0.6464, | |
| "step": 8520 | |
| }, | |
| { | |
| "epoch": 4.12, | |
| "grad_norm": 0.23248647153377533, | |
| "learning_rate": 4.525618756366517e-06, | |
| "loss": 0.6434, | |
| "step": 8530 | |
| }, | |
| { | |
| "epoch": 4.13, | |
| "grad_norm": 0.2609855830669403, | |
| "learning_rate": 4.477507244011669e-06, | |
| "loss": 0.6488, | |
| "step": 8540 | |
| }, | |
| { | |
| "epoch": 4.13, | |
| "grad_norm": 0.2475024312734604, | |
| "learning_rate": 4.429627669084338e-06, | |
| "loss": 0.6398, | |
| "step": 8550 | |
| }, | |
| { | |
| "epoch": 4.14, | |
| "grad_norm": 0.24800314009189606, | |
| "learning_rate": 4.381980572701322e-06, | |
| "loss": 0.6397, | |
| "step": 8560 | |
| }, | |
| { | |
| "epoch": 4.14, | |
| "grad_norm": 0.23294275999069214, | |
| "learning_rate": 4.334566493352024e-06, | |
| "loss": 0.6333, | |
| "step": 8570 | |
| }, | |
| { | |
| "epoch": 4.15, | |
| "grad_norm": 0.2337343692779541, | |
| "learning_rate": 4.287385966892385e-06, | |
| "loss": 0.6233, | |
| "step": 8580 | |
| }, | |
| { | |
| "epoch": 4.15, | |
| "grad_norm": 0.21502436697483063, | |
| "learning_rate": 4.240439526538823e-06, | |
| "loss": 0.6425, | |
| "step": 8590 | |
| }, | |
| { | |
| "epoch": 4.16, | |
| "grad_norm": 0.2317812144756317, | |
| "learning_rate": 4.193727702862182e-06, | |
| "loss": 0.6379, | |
| "step": 8600 | |
| }, | |
| { | |
| "epoch": 4.16, | |
| "grad_norm": 0.22363489866256714, | |
| "learning_rate": 4.147251023781778e-06, | |
| "loss": 0.6438, | |
| "step": 8610 | |
| }, | |
| { | |
| "epoch": 4.17, | |
| "grad_norm": 0.23377270996570587, | |
| "learning_rate": 4.101010014559411e-06, | |
| "loss": 0.6305, | |
| "step": 8620 | |
| }, | |
| { | |
| "epoch": 4.17, | |
| "grad_norm": 0.22052331268787384, | |
| "learning_rate": 4.055005197793421e-06, | |
| "loss": 0.6403, | |
| "step": 8630 | |
| }, | |
| { | |
| "epoch": 4.18, | |
| "grad_norm": 0.2220488041639328, | |
| "learning_rate": 4.009237093412798e-06, | |
| "loss": 0.6362, | |
| "step": 8640 | |
| }, | |
| { | |
| "epoch": 4.18, | |
| "grad_norm": 0.22706325352191925, | |
| "learning_rate": 3.963706218671287e-06, | |
| "loss": 0.6458, | |
| "step": 8650 | |
| }, | |
| { | |
| "epoch": 4.19, | |
| "grad_norm": 0.22374244034290314, | |
| "learning_rate": 3.918413088141568e-06, | |
| "loss": 0.6422, | |
| "step": 8660 | |
| }, | |
| { | |
| "epoch": 4.19, | |
| "grad_norm": 0.21233348548412323, | |
| "learning_rate": 3.873358213709413e-06, | |
| "loss": 0.6415, | |
| "step": 8670 | |
| }, | |
| { | |
| "epoch": 4.2, | |
| "grad_norm": 0.24870729446411133, | |
| "learning_rate": 3.828542104567928e-06, | |
| "loss": 0.6521, | |
| "step": 8680 | |
| }, | |
| { | |
| "epoch": 4.2, | |
| "grad_norm": 0.2119089961051941, | |
| "learning_rate": 3.783965267211778e-06, | |
| "loss": 0.6379, | |
| "step": 8690 | |
| }, | |
| { | |
| "epoch": 4.2, | |
| "grad_norm": 0.26291346549987793, | |
| "learning_rate": 3.7396282054314556e-06, | |
| "loss": 0.6464, | |
| "step": 8700 | |
| }, | |
| { | |
| "epoch": 4.21, | |
| "grad_norm": 0.2240053117275238, | |
| "learning_rate": 3.6955314203076242e-06, | |
| "loss": 0.6276, | |
| "step": 8710 | |
| }, | |
| { | |
| "epoch": 4.21, | |
| "grad_norm": 0.2831818759441376, | |
| "learning_rate": 3.6516754102054128e-06, | |
| "loss": 0.6291, | |
| "step": 8720 | |
| }, | |
| { | |
| "epoch": 4.22, | |
| "grad_norm": 0.2092774212360382, | |
| "learning_rate": 3.60806067076882e-06, | |
| "loss": 0.6396, | |
| "step": 8730 | |
| }, | |
| { | |
| "epoch": 4.22, | |
| "grad_norm": 0.23838895559310913, | |
| "learning_rate": 3.5646876949150725e-06, | |
| "loss": 0.6348, | |
| "step": 8740 | |
| }, | |
| { | |
| "epoch": 4.23, | |
| "grad_norm": 0.24497564136981964, | |
| "learning_rate": 3.521556972829096e-06, | |
| "loss": 0.647, | |
| "step": 8750 | |
| }, | |
| { | |
| "epoch": 4.23, | |
| "grad_norm": 0.2559151351451874, | |
| "learning_rate": 3.4786689919579452e-06, | |
| "loss": 0.6389, | |
| "step": 8760 | |
| }, | |
| { | |
| "epoch": 4.24, | |
| "grad_norm": 0.25005242228507996, | |
| "learning_rate": 3.4360242370053142e-06, | |
| "loss": 0.6565, | |
| "step": 8770 | |
| }, | |
| { | |
| "epoch": 4.24, | |
| "grad_norm": 0.2180882692337036, | |
| "learning_rate": 3.3936231899260486e-06, | |
| "loss": 0.6502, | |
| "step": 8780 | |
| }, | |
| { | |
| "epoch": 4.25, | |
| "grad_norm": 0.2549244463443756, | |
| "learning_rate": 3.3514663299206856e-06, | |
| "loss": 0.62, | |
| "step": 8790 | |
| }, | |
| { | |
| "epoch": 4.25, | |
| "grad_norm": 0.20678193867206573, | |
| "learning_rate": 3.3095541334300704e-06, | |
| "loss": 0.6457, | |
| "step": 8800 | |
| }, | |
| { | |
| "epoch": 4.26, | |
| "grad_norm": 0.21259096264839172, | |
| "learning_rate": 3.2678870741299467e-06, | |
| "loss": 0.6356, | |
| "step": 8810 | |
| }, | |
| { | |
| "epoch": 4.26, | |
| "grad_norm": 0.2321808636188507, | |
| "learning_rate": 3.2264656229256117e-06, | |
| "loss": 0.6383, | |
| "step": 8820 | |
| }, | |
| { | |
| "epoch": 4.27, | |
| "grad_norm": 0.2410227656364441, | |
| "learning_rate": 3.1852902479466014e-06, | |
| "loss": 0.6298, | |
| "step": 8830 | |
| }, | |
| { | |
| "epoch": 4.27, | |
| "grad_norm": 0.2831228971481323, | |
| "learning_rate": 3.1443614145413607e-06, | |
| "loss": 0.6513, | |
| "step": 8840 | |
| }, | |
| { | |
| "epoch": 4.28, | |
| "grad_norm": 0.21885719895362854, | |
| "learning_rate": 3.1036795852720623e-06, | |
| "loss": 0.6452, | |
| "step": 8850 | |
| }, | |
| { | |
| "epoch": 4.28, | |
| "grad_norm": 0.23801299929618835, | |
| "learning_rate": 3.0632452199092915e-06, | |
| "loss": 0.644, | |
| "step": 8860 | |
| }, | |
| { | |
| "epoch": 4.29, | |
| "grad_norm": 0.2184232473373413, | |
| "learning_rate": 3.023058775426912e-06, | |
| "loss": 0.6327, | |
| "step": 8870 | |
| }, | |
| { | |
| "epoch": 4.29, | |
| "grad_norm": 0.2340967059135437, | |
| "learning_rate": 2.983120705996878e-06, | |
| "loss": 0.6204, | |
| "step": 8880 | |
| }, | |
| { | |
| "epoch": 4.3, | |
| "grad_norm": 0.2038649469614029, | |
| "learning_rate": 2.9434314629840838e-06, | |
| "loss": 0.6333, | |
| "step": 8890 | |
| }, | |
| { | |
| "epoch": 4.3, | |
| "grad_norm": null, | |
| "learning_rate": 2.9079242616463776e-06, | |
| "loss": 0.6562, | |
| "step": 8900 | |
| }, | |
| { | |
| "epoch": 4.31, | |
| "grad_norm": 0.222800612449646, | |
| "learning_rate": 2.8687090222609154e-06, | |
| "loss": 0.6371, | |
| "step": 8910 | |
| }, | |
| { | |
| "epoch": 4.31, | |
| "grad_norm": 0.22500859200954437, | |
| "learning_rate": 2.8297439023301565e-06, | |
| "loss": 0.6412, | |
| "step": 8920 | |
| }, | |
| { | |
| "epoch": 4.32, | |
| "grad_norm": 0.222296342253685, | |
| "learning_rate": 2.791029342223084e-06, | |
| "loss": 0.6279, | |
| "step": 8930 | |
| }, | |
| { | |
| "epoch": 4.32, | |
| "grad_norm": 0.21163935959339142, | |
| "learning_rate": 2.75256577947699e-06, | |
| "loss": 0.6448, | |
| "step": 8940 | |
| }, | |
| { | |
| "epoch": 4.33, | |
| "grad_norm": 0.21069630980491638, | |
| "learning_rate": 2.714353648792456e-06, | |
| "loss": 0.6297, | |
| "step": 8950 | |
| }, | |
| { | |
| "epoch": 4.33, | |
| "grad_norm": 0.2138666957616806, | |
| "learning_rate": 2.6763933820284915e-06, | |
| "loss": 0.6308, | |
| "step": 8960 | |
| }, | |
| { | |
| "epoch": 4.34, | |
| "grad_norm": 0.26078271865844727, | |
| "learning_rate": 2.638685408197633e-06, | |
| "loss": 0.6433, | |
| "step": 8970 | |
| }, | |
| { | |
| "epoch": 4.34, | |
| "grad_norm": 0.2324589192867279, | |
| "learning_rate": 2.6012301534610817e-06, | |
| "loss": 0.6385, | |
| "step": 8980 | |
| }, | |
| { | |
| "epoch": 4.35, | |
| "grad_norm": 0.23216955363750458, | |
| "learning_rate": 2.5640280411239365e-06, | |
| "loss": 0.649, | |
| "step": 8990 | |
| }, | |
| { | |
| "epoch": 4.35, | |
| "grad_norm": 0.22352440655231476, | |
| "learning_rate": 2.5270794916303464e-06, | |
| "loss": 0.6364, | |
| "step": 9000 | |
| }, | |
| { | |
| "epoch": 4.35, | |
| "eval_loss": 0.6730201244354248, | |
| "eval_runtime": 43.073, | |
| "eval_samples_per_second": 46.433, | |
| "eval_steps_per_second": 0.371, | |
| "step": 9000 | |
| }, | |
| { | |
| "epoch": 4.35, | |
| "grad_norm": 0.23798608779907227, | |
| "learning_rate": 2.4903849225588e-06, | |
| "loss": 0.6475, | |
| "step": 9010 | |
| }, | |
| { | |
| "epoch": 4.36, | |
| "grad_norm": 0.23490963876247406, | |
| "learning_rate": 2.4539447486174066e-06, | |
| "loss": 0.6407, | |
| "step": 9020 | |
| }, | |
| { | |
| "epoch": 4.36, | |
| "grad_norm": 0.24397426843643188, | |
| "learning_rate": 2.4177593816391676e-06, | |
| "loss": 0.6505, | |
| "step": 9030 | |
| }, | |
| { | |
| "epoch": 4.37, | |
| "grad_norm": 0.22615879774093628, | |
| "learning_rate": 2.381829230577398e-06, | |
| "loss": 0.6337, | |
| "step": 9040 | |
| }, | |
| { | |
| "epoch": 4.37, | |
| "grad_norm": 0.21998465061187744, | |
| "learning_rate": 2.346154701501013e-06, | |
| "loss": 0.6339, | |
| "step": 9050 | |
| }, | |
| { | |
| "epoch": 4.38, | |
| "grad_norm": 0.23627543449401855, | |
| "learning_rate": 2.3107361975900187e-06, | |
| "loss": 0.6435, | |
| "step": 9060 | |
| }, | |
| { | |
| "epoch": 4.38, | |
| "grad_norm": 0.23869889974594116, | |
| "learning_rate": 2.2755741191309103e-06, | |
| "loss": 0.644, | |
| "step": 9070 | |
| }, | |
| { | |
| "epoch": 4.39, | |
| "grad_norm": 0.2242850810289383, | |
| "learning_rate": 2.2406688635121494e-06, | |
| "loss": 0.6323, | |
| "step": 9080 | |
| }, | |
| { | |
| "epoch": 4.39, | |
| "grad_norm": 0.23529429733753204, | |
| "learning_rate": 2.2060208252196985e-06, | |
| "loss": 0.6567, | |
| "step": 9090 | |
| }, | |
| { | |
| "epoch": 4.4, | |
| "grad_norm": 0.2256379872560501, | |
| "learning_rate": 2.171630395832544e-06, | |
| "loss": 0.6374, | |
| "step": 9100 | |
| }, | |
| { | |
| "epoch": 4.4, | |
| "grad_norm": 0.20806780457496643, | |
| "learning_rate": 2.1374979640182672e-06, | |
| "loss": 0.63, | |
| "step": 9110 | |
| }, | |
| { | |
| "epoch": 4.41, | |
| "grad_norm": 0.27723440527915955, | |
| "learning_rate": 2.1036239155286685e-06, | |
| "loss": 0.6416, | |
| "step": 9120 | |
| }, | |
| { | |
| "epoch": 4.41, | |
| "grad_norm": 0.23301304876804352, | |
| "learning_rate": 2.0700086331953973e-06, | |
| "loss": 0.6339, | |
| "step": 9130 | |
| }, | |
| { | |
| "epoch": 4.42, | |
| "grad_norm": 0.2449181079864502, | |
| "learning_rate": 2.036652496925609e-06, | |
| "loss": 0.656, | |
| "step": 9140 | |
| }, | |
| { | |
| "epoch": 4.42, | |
| "grad_norm": 0.225547656416893, | |
| "learning_rate": 2.003555883697708e-06, | |
| "loss": 0.6454, | |
| "step": 9150 | |
| }, | |
| { | |
| "epoch": 4.43, | |
| "grad_norm": 0.260080486536026, | |
| "learning_rate": 1.970719167557061e-06, | |
| "loss": 0.6498, | |
| "step": 9160 | |
| }, | |
| { | |
| "epoch": 4.43, | |
| "grad_norm": 0.24074916541576385, | |
| "learning_rate": 1.9381427196117795e-06, | |
| "loss": 0.6396, | |
| "step": 9170 | |
| }, | |
| { | |
| "epoch": 4.44, | |
| "grad_norm": 0.22878335416316986, | |
| "learning_rate": 1.9058269080285213e-06, | |
| "loss": 0.6394, | |
| "step": 9180 | |
| }, | |
| { | |
| "epoch": 4.44, | |
| "grad_norm": 0.2029305100440979, | |
| "learning_rate": 1.8737720980283203e-06, | |
| "loss": 0.6351, | |
| "step": 9190 | |
| }, | |
| { | |
| "epoch": 4.45, | |
| "grad_norm": 0.23044219613075256, | |
| "learning_rate": 1.8419786518824839e-06, | |
| "loss": 0.6381, | |
| "step": 9200 | |
| }, | |
| { | |
| "epoch": 4.45, | |
| "grad_norm": 0.22734399139881134, | |
| "learning_rate": 1.810446928908477e-06, | |
| "loss": 0.6409, | |
| "step": 9210 | |
| }, | |
| { | |
| "epoch": 4.46, | |
| "grad_norm": 0.23863095045089722, | |
| "learning_rate": 1.7791772854658744e-06, | |
| "loss": 0.6386, | |
| "step": 9220 | |
| }, | |
| { | |
| "epoch": 4.46, | |
| "grad_norm": 0.25361311435699463, | |
| "learning_rate": 1.748170074952324e-06, | |
| "loss": 0.6403, | |
| "step": 9230 | |
| }, | |
| { | |
| "epoch": 4.47, | |
| "grad_norm": 0.20445138216018677, | |
| "learning_rate": 1.7174256477995477e-06, | |
| "loss": 0.6248, | |
| "step": 9240 | |
| }, | |
| { | |
| "epoch": 4.47, | |
| "grad_norm": 0.23286859691143036, | |
| "learning_rate": 1.6869443514694023e-06, | |
| "loss": 0.6294, | |
| "step": 9250 | |
| }, | |
| { | |
| "epoch": 4.48, | |
| "grad_norm": 0.22533519566059113, | |
| "learning_rate": 1.6567265304499425e-06, | |
| "loss": 0.6227, | |
| "step": 9260 | |
| }, | |
| { | |
| "epoch": 4.48, | |
| "grad_norm": 0.2162843644618988, | |
| "learning_rate": 1.6267725262515022e-06, | |
| "loss": 0.6244, | |
| "step": 9270 | |
| }, | |
| { | |
| "epoch": 4.49, | |
| "grad_norm": 0.2384515404701233, | |
| "learning_rate": 1.597082677402892e-06, | |
| "loss": 0.6316, | |
| "step": 9280 | |
| }, | |
| { | |
| "epoch": 4.49, | |
| "grad_norm": 0.2290886789560318, | |
| "learning_rate": 1.567657319447502e-06, | |
| "loss": 0.6283, | |
| "step": 9290 | |
| }, | |
| { | |
| "epoch": 4.49, | |
| "grad_norm": 0.30616798996925354, | |
| "learning_rate": 1.5384967849395776e-06, | |
| "loss": 0.6349, | |
| "step": 9300 | |
| }, | |
| { | |
| "epoch": 4.5, | |
| "grad_norm": 0.2351292222738266, | |
| "learning_rate": 1.5096014034404137e-06, | |
| "loss": 0.6379, | |
| "step": 9310 | |
| }, | |
| { | |
| "epoch": 4.5, | |
| "grad_norm": 0.23513749241828918, | |
| "learning_rate": 1.4809715015146413e-06, | |
| "loss": 0.6563, | |
| "step": 9320 | |
| }, | |
| { | |
| "epoch": 4.51, | |
| "grad_norm": 0.24764598906040192, | |
| "learning_rate": 1.4526074027265663e-06, | |
| "loss": 0.6264, | |
| "step": 9330 | |
| }, | |
| { | |
| "epoch": 4.51, | |
| "grad_norm": 0.25509247183799744, | |
| "learning_rate": 1.424509427636464e-06, | |
| "loss": 0.6399, | |
| "step": 9340 | |
| }, | |
| { | |
| "epoch": 4.52, | |
| "grad_norm": 0.22282171249389648, | |
| "learning_rate": 1.3966778937969854e-06, | |
| "loss": 0.632, | |
| "step": 9350 | |
| }, | |
| { | |
| "epoch": 4.52, | |
| "grad_norm": 0.2554634213447571, | |
| "learning_rate": 1.369113115749579e-06, | |
| "loss": 0.6363, | |
| "step": 9360 | |
| }, | |
| { | |
| "epoch": 4.53, | |
| "grad_norm": 0.24383343756198883, | |
| "learning_rate": 1.3418154050208936e-06, | |
| "loss": 0.6307, | |
| "step": 9370 | |
| }, | |
| { | |
| "epoch": 4.53, | |
| "grad_norm": 0.25589898228645325, | |
| "learning_rate": 1.314785070119315e-06, | |
| "loss": 0.6494, | |
| "step": 9380 | |
| }, | |
| { | |
| "epoch": 4.54, | |
| "grad_norm": 0.20207960903644562, | |
| "learning_rate": 1.288022416531426e-06, | |
| "loss": 0.6522, | |
| "step": 9390 | |
| }, | |
| { | |
| "epoch": 4.54, | |
| "grad_norm": 0.23992317914962769, | |
| "learning_rate": 1.2615277467185855e-06, | |
| "loss": 0.6257, | |
| "step": 9400 | |
| }, | |
| { | |
| "epoch": 4.55, | |
| "grad_norm": 0.22749055922031403, | |
| "learning_rate": 1.2353013601135027e-06, | |
| "loss": 0.6316, | |
| "step": 9410 | |
| }, | |
| { | |
| "epoch": 4.55, | |
| "grad_norm": 0.2607298493385315, | |
| "learning_rate": 1.2093435531168428e-06, | |
| "loss": 0.642, | |
| "step": 9420 | |
| }, | |
| { | |
| "epoch": 4.56, | |
| "grad_norm": 0.22615773975849152, | |
| "learning_rate": 1.1836546190939019e-06, | |
| "loss": 0.6356, | |
| "step": 9430 | |
| }, | |
| { | |
| "epoch": 4.56, | |
| "grad_norm": 0.22804035246372223, | |
| "learning_rate": 1.1582348483712647e-06, | |
| "loss": 0.6324, | |
| "step": 9440 | |
| }, | |
| { | |
| "epoch": 4.57, | |
| "grad_norm": 0.2576032876968384, | |
| "learning_rate": 1.1330845282335272e-06, | |
| "loss": 0.6304, | |
| "step": 9450 | |
| }, | |
| { | |
| "epoch": 4.57, | |
| "grad_norm": 0.23848937451839447, | |
| "learning_rate": 1.1082039429200802e-06, | |
| "loss": 0.6357, | |
| "step": 9460 | |
| }, | |
| { | |
| "epoch": 4.58, | |
| "grad_norm": 0.24179770052433014, | |
| "learning_rate": 1.0835933736218495e-06, | |
| "loss": 0.6504, | |
| "step": 9470 | |
| }, | |
| { | |
| "epoch": 4.58, | |
| "grad_norm": 0.25548461079597473, | |
| "learning_rate": 1.0592530984781578e-06, | |
| "loss": 0.6537, | |
| "step": 9480 | |
| }, | |
| { | |
| "epoch": 4.59, | |
| "grad_norm": 0.24816744029521942, | |
| "learning_rate": 1.0351833925735577e-06, | |
| "loss": 0.6544, | |
| "step": 9490 | |
| }, | |
| { | |
| "epoch": 4.59, | |
| "grad_norm": 0.25196966528892517, | |
| "learning_rate": 1.011384527934736e-06, | |
| "loss": 0.6456, | |
| "step": 9500 | |
| }, | |
| { | |
| "epoch": 4.59, | |
| "eval_loss": 0.672783613204956, | |
| "eval_runtime": 43.0139, | |
| "eval_samples_per_second": 46.497, | |
| "eval_steps_per_second": 0.372, | |
| "step": 9500 | |
| }, | |
| { | |
| "epoch": 4.6, | |
| "grad_norm": 0.2182924747467041, | |
| "learning_rate": 9.878567735274374e-07, | |
| "loss": 0.6464, | |
| "step": 9510 | |
| }, | |
| { | |
| "epoch": 4.6, | |
| "grad_norm": 0.3179493248462677, | |
| "learning_rate": 9.646003952534065e-07, | |
| "loss": 0.6521, | |
| "step": 9520 | |
| }, | |
| { | |
| "epoch": 4.61, | |
| "grad_norm": 0.21265274286270142, | |
| "learning_rate": 9.416156559474115e-07, | |
| "loss": 0.6341, | |
| "step": 9530 | |
| }, | |
| { | |
| "epoch": 4.61, | |
| "grad_norm": 0.28855758905410767, | |
| "learning_rate": 9.18902815374259e-07, | |
| "loss": 0.6397, | |
| "step": 9540 | |
| }, | |
| { | |
| "epoch": 4.62, | |
| "grad_norm": 0.21997155249118805, | |
| "learning_rate": 8.96462130225853e-07, | |
| "loss": 0.6512, | |
| "step": 9550 | |
| }, | |
| { | |
| "epoch": 4.62, | |
| "grad_norm": 0.42403122782707214, | |
| "learning_rate": 8.742938541183105e-07, | |
| "loss": 0.6252, | |
| "step": 9560 | |
| }, | |
| { | |
| "epoch": 4.63, | |
| "grad_norm": 0.28224098682403564, | |
| "learning_rate": 8.523982375890721e-07, | |
| "loss": 0.6528, | |
| "step": 9570 | |
| }, | |
| { | |
| "epoch": 4.63, | |
| "grad_norm": 0.2471909075975418, | |
| "learning_rate": 8.307755280940882e-07, | |
| "loss": 0.6395, | |
| "step": 9580 | |
| }, | |
| { | |
| "epoch": 4.64, | |
| "grad_norm": 0.2387973815202713, | |
| "learning_rate": 8.094259700050205e-07, | |
| "loss": 0.6389, | |
| "step": 9590 | |
| }, | |
| { | |
| "epoch": 4.64, | |
| "grad_norm": 0.20990094542503357, | |
| "learning_rate": 7.883498046064752e-07, | |
| "loss": 0.6434, | |
| "step": 9600 | |
| }, | |
| { | |
| "epoch": 4.64, | |
| "grad_norm": 0.24316251277923584, | |
| "learning_rate": 7.675472700932829e-07, | |
| "loss": 0.6557, | |
| "step": 9610 | |
| }, | |
| { | |
| "epoch": 4.65, | |
| "grad_norm": 0.26184114813804626, | |
| "learning_rate": 7.470186015677894e-07, | |
| "loss": 0.6256, | |
| "step": 9620 | |
| }, | |
| { | |
| "epoch": 4.65, | |
| "grad_norm": 0.2425517439842224, | |
| "learning_rate": 7.267640310372192e-07, | |
| "loss": 0.6187, | |
| "step": 9630 | |
| }, | |
| { | |
| "epoch": 4.66, | |
| "grad_norm": 0.25056472420692444, | |
| "learning_rate": 7.067837874110472e-07, | |
| "loss": 0.638, | |
| "step": 9640 | |
| }, | |
| { | |
| "epoch": 4.66, | |
| "grad_norm": 0.22093383967876434, | |
| "learning_rate": 6.870780964984086e-07, | |
| "loss": 0.6393, | |
| "step": 9650 | |
| }, | |
| { | |
| "epoch": 4.67, | |
| "grad_norm": 0.22322039306163788, | |
| "learning_rate": 6.676471810055429e-07, | |
| "loss": 0.6338, | |
| "step": 9660 | |
| }, | |
| { | |
| "epoch": 4.67, | |
| "grad_norm": 0.21115179359912872, | |
| "learning_rate": 6.484912605332849e-07, | |
| "loss": 0.6353, | |
| "step": 9670 | |
| }, | |
| { | |
| "epoch": 4.68, | |
| "grad_norm": 0.2656739056110382, | |
| "learning_rate": 6.296105515745831e-07, | |
| "loss": 0.6492, | |
| "step": 9680 | |
| }, | |
| { | |
| "epoch": 4.68, | |
| "grad_norm": 0.21333497762680054, | |
| "learning_rate": 6.110052675120465e-07, | |
| "loss": 0.6413, | |
| "step": 9690 | |
| }, | |
| { | |
| "epoch": 4.69, | |
| "grad_norm": 0.19868426024913788, | |
| "learning_rate": 5.926756186155402e-07, | |
| "loss": 0.6323, | |
| "step": 9700 | |
| }, | |
| { | |
| "epoch": 4.69, | |
| "grad_norm": 0.22111816704273224, | |
| "learning_rate": 5.746218120397967e-07, | |
| "loss": 0.6443, | |
| "step": 9710 | |
| }, | |
| { | |
| "epoch": 4.7, | |
| "grad_norm": 0.26926514506340027, | |
| "learning_rate": 5.568440518220919e-07, | |
| "loss": 0.6323, | |
| "step": 9720 | |
| }, | |
| { | |
| "epoch": 4.7, | |
| "grad_norm": 0.23122546076774597, | |
| "learning_rate": 5.393425388799278e-07, | |
| "loss": 0.6454, | |
| "step": 9730 | |
| }, | |
| { | |
| "epoch": 4.71, | |
| "grad_norm": 0.2143920212984085, | |
| "learning_rate": 5.221174710087622e-07, | |
| "loss": 0.6461, | |
| "step": 9740 | |
| }, | |
| { | |
| "epoch": 4.71, | |
| "grad_norm": 0.21134312450885773, | |
| "learning_rate": 5.051690428797828e-07, | |
| "loss": 0.6321, | |
| "step": 9750 | |
| }, | |
| { | |
| "epoch": 4.72, | |
| "grad_norm": 0.2336469441652298, | |
| "learning_rate": 4.884974460376862e-07, | |
| "loss": 0.6411, | |
| "step": 9760 | |
| }, | |
| { | |
| "epoch": 4.72, | |
| "grad_norm": 0.21542507410049438, | |
| "learning_rate": 4.7210286889854126e-07, | |
| "loss": 0.6685, | |
| "step": 9770 | |
| }, | |
| { | |
| "epoch": 4.73, | |
| "grad_norm": 0.2598770260810852, | |
| "learning_rate": 4.559854967476379e-07, | |
| "loss": 0.6238, | |
| "step": 9780 | |
| }, | |
| { | |
| "epoch": 4.73, | |
| "grad_norm": 0.30308809876441956, | |
| "learning_rate": 4.4014551173739983e-07, | |
| "loss": 0.6456, | |
| "step": 9790 | |
| }, | |
| { | |
| "epoch": 4.74, | |
| "grad_norm": 0.24044351279735565, | |
| "learning_rate": 4.245830928853334e-07, | |
| "loss": 0.6394, | |
| "step": 9800 | |
| }, | |
| { | |
| "epoch": 4.74, | |
| "grad_norm": 0.22069400548934937, | |
| "learning_rate": 4.092984160719876e-07, | |
| "loss": 0.6421, | |
| "step": 9810 | |
| }, | |
| { | |
| "epoch": 4.75, | |
| "grad_norm": 0.22370630502700806, | |
| "learning_rate": 3.942916540389807e-07, | |
| "loss": 0.6748, | |
| "step": 9820 | |
| }, | |
| { | |
| "epoch": 4.75, | |
| "grad_norm": 0.2218630164861679, | |
| "learning_rate": 3.7956297638705175e-07, | |
| "loss": 0.6242, | |
| "step": 9830 | |
| }, | |
| { | |
| "epoch": 4.76, | |
| "grad_norm": 0.2351781576871872, | |
| "learning_rate": 3.651125495741148e-07, | |
| "loss": 0.6392, | |
| "step": 9840 | |
| }, | |
| { | |
| "epoch": 4.76, | |
| "grad_norm": 0.23447324335575104, | |
| "learning_rate": 3.509405369134244e-07, | |
| "loss": 0.6475, | |
| "step": 9850 | |
| }, | |
| { | |
| "epoch": 4.77, | |
| "grad_norm": 0.2265343815088272, | |
| "learning_rate": 3.370470985716856e-07, | |
| "loss": 0.6432, | |
| "step": 9860 | |
| }, | |
| { | |
| "epoch": 4.77, | |
| "grad_norm": 0.21131916344165802, | |
| "learning_rate": 3.234323915672688e-07, | |
| "loss": 0.6316, | |
| "step": 9870 | |
| }, | |
| { | |
| "epoch": 4.78, | |
| "grad_norm": 0.2470100075006485, | |
| "learning_rate": 3.100965697684255e-07, | |
| "loss": 0.6464, | |
| "step": 9880 | |
| }, | |
| { | |
| "epoch": 4.78, | |
| "grad_norm": 0.2239593267440796, | |
| "learning_rate": 2.970397838915562e-07, | |
| "loss": 0.6224, | |
| "step": 9890 | |
| }, | |
| { | |
| "epoch": 4.78, | |
| "grad_norm": 0.2659773528575897, | |
| "learning_rate": 2.842621814995033e-07, | |
| "loss": 0.6337, | |
| "step": 9900 | |
| }, | |
| { | |
| "epoch": 4.79, | |
| "grad_norm": 0.23355723917484283, | |
| "learning_rate": 2.7176390699987785e-07, | |
| "loss": 0.6527, | |
| "step": 9910 | |
| }, | |
| { | |
| "epoch": 4.79, | |
| "grad_norm": 0.242411270737648, | |
| "learning_rate": 2.59545101643438e-07, | |
| "loss": 0.6421, | |
| "step": 9920 | |
| }, | |
| { | |
| "epoch": 4.8, | |
| "grad_norm": 0.23726269602775574, | |
| "learning_rate": 2.4760590352248814e-07, | |
| "loss": 0.6372, | |
| "step": 9930 | |
| }, | |
| { | |
| "epoch": 4.8, | |
| "grad_norm": 0.22319750487804413, | |
| "learning_rate": 2.3594644756931294e-07, | |
| "loss": 0.641, | |
| "step": 9940 | |
| }, | |
| { | |
| "epoch": 4.81, | |
| "grad_norm": 0.2723524868488312, | |
| "learning_rate": 2.2456686555466511e-07, | |
| "loss": 0.6406, | |
| "step": 9950 | |
| }, | |
| { | |
| "epoch": 4.81, | |
| "grad_norm": 0.2540217339992523, | |
| "learning_rate": 2.1346728608626077e-07, | |
| "loss": 0.6406, | |
| "step": 9960 | |
| }, | |
| { | |
| "epoch": 4.82, | |
| "grad_norm": 0.2367355227470398, | |
| "learning_rate": 2.0264783460733348e-07, | |
| "loss": 0.6392, | |
| "step": 9970 | |
| }, | |
| { | |
| "epoch": 4.82, | |
| "grad_norm": 0.33749625086784363, | |
| "learning_rate": 1.9210863339522434e-07, | |
| "loss": 0.6187, | |
| "step": 9980 | |
| }, | |
| { | |
| "epoch": 4.83, | |
| "grad_norm": 0.2291213721036911, | |
| "learning_rate": 1.8184980155998287e-07, | |
| "loss": 0.6363, | |
| "step": 9990 | |
| }, | |
| { | |
| "epoch": 4.83, | |
| "grad_norm": 0.23851270973682404, | |
| "learning_rate": 1.7187145504304058e-07, | |
| "loss": 0.6338, | |
| "step": 10000 | |
| }, | |
| { | |
| "epoch": 4.83, | |
| "eval_loss": 0.6729329228401184, | |
| "eval_runtime": 43.029, | |
| "eval_samples_per_second": 46.48, | |
| "eval_steps_per_second": 0.372, | |
| "step": 10000 | |
| } | |
| ], | |
| "logging_steps": 10, | |
| "max_steps": 10345, | |
| "num_input_tokens_seen": 0, | |
| "num_train_epochs": 5, | |
| "save_steps": 500, | |
| "total_flos": 2.531690135153751e+19, | |
| "train_batch_size": 8, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |