{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.11117064023573534,
  "eval_steps": 500,
  "global_step": 1660,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.000669702652022502,
      "grad_norm": 0.0,
      "learning_rate": 6.693440428380188e-07,
      "loss": 2.3902,
      "step": 10
    },
    {
      "epoch": 0.001339405304045004,
      "grad_norm": 0.0,
      "learning_rate": 1.3386880856760376e-06,
      "loss": 2.4415,
      "step": 20
    },
    {
      "epoch": 0.002009107956067506,
      "grad_norm": 0.0,
      "learning_rate": 2.0080321285140564e-06,
      "loss": 2.3648,
      "step": 30
    },
    {
      "epoch": 0.002678810608090008,
      "grad_norm": 0.0,
      "learning_rate": 2.6773761713520752e-06,
      "loss": 2.2811,
      "step": 40
    },
    {
      "epoch": 0.00334851326011251,
      "grad_norm": 0.0,
      "learning_rate": 3.346720214190094e-06,
      "loss": 2.4023,
      "step": 50
    },
    {
      "epoch": 0.004018215912135012,
      "grad_norm": 0.0,
      "learning_rate": 4.016064257028113e-06,
      "loss": 2.3196,
      "step": 60
    },
    {
      "epoch": 0.004687918564157514,
      "grad_norm": 0.0,
      "learning_rate": 4.685408299866132e-06,
      "loss": 2.2711,
      "step": 70
    },
    {
      "epoch": 0.005357621216180016,
      "grad_norm": 0.0,
      "learning_rate": 5.3547523427041504e-06,
      "loss": 2.3224,
      "step": 80
    },
    {
      "epoch": 0.006027323868202518,
      "grad_norm": 0.0,
      "learning_rate": 6.024096385542169e-06,
      "loss": 2.287,
      "step": 90
    },
    {
      "epoch": 0.00669702652022502,
      "grad_norm": 0.0,
      "learning_rate": 6.693440428380188e-06,
      "loss": 2.2395,
      "step": 100
    },
    {
      "epoch": 0.0073667291722475225,
      "grad_norm": 0.0,
      "learning_rate": 7.362784471218207e-06,
      "loss": 2.3105,
      "step": 110
    },
    {
      "epoch": 0.008036431824270024,
      "grad_norm": 0.0,
      "learning_rate": 8.032128514056226e-06,
      "loss": 2.4276,
      "step": 120
    },
    {
      "epoch": 0.008706134476292525,
      "grad_norm": 0.0,
      "learning_rate": 8.701472556894244e-06,
      "loss": 2.3688,
      "step": 130
    },
    {
      "epoch": 0.009375837128315028,
      "grad_norm": 0.0,
      "learning_rate": 9.370816599732263e-06,
      "loss": 2.3826,
      "step": 140
    },
    {
      "epoch": 0.01004553978033753,
      "grad_norm": 0.0,
      "learning_rate": 1.0040160642570281e-05,
      "loss": 2.383,
      "step": 150
    },
    {
      "epoch": 0.010715242432360031,
      "grad_norm": 0.0,
      "learning_rate": 1.0709504685408301e-05,
      "loss": 2.3427,
      "step": 160
    },
    {
      "epoch": 0.011384945084382534,
      "grad_norm": 0.0,
      "learning_rate": 1.1378848728246319e-05,
      "loss": 2.2256,
      "step": 170
    },
    {
      "epoch": 0.012054647736405036,
      "grad_norm": 0.0,
      "learning_rate": 1.2048192771084338e-05,
      "loss": 2.3393,
      "step": 180
    },
    {
      "epoch": 0.012724350388427539,
      "grad_norm": 0.0,
      "learning_rate": 1.2717536813922356e-05,
      "loss": 2.3954,
      "step": 190
    },
    {
      "epoch": 0.01339405304045004,
      "grad_norm": 0.0,
      "learning_rate": 1.3386880856760376e-05,
      "loss": 2.2947,
      "step": 200
    },
    {
      "epoch": 0.014063755692472542,
      "grad_norm": 0.0,
      "learning_rate": 1.4056224899598394e-05,
      "loss": 2.2528,
      "step": 210
    },
    {
      "epoch": 0.014733458344495045,
      "grad_norm": 0.0,
      "learning_rate": 1.4725568942436414e-05,
      "loss": 2.2722,
      "step": 220
    },
    {
      "epoch": 0.015403160996517546,
      "grad_norm": 0.0,
      "learning_rate": 1.5394912985274433e-05,
      "loss": 2.3249,
      "step": 230
    },
    {
      "epoch": 0.016072863648540048,
      "grad_norm": 0.0,
      "learning_rate": 1.606425702811245e-05,
      "loss": 2.3077,
      "step": 240
    },
    {
      "epoch": 0.01674256630056255,
      "grad_norm": 0.0,
      "learning_rate": 1.673360107095047e-05,
      "loss": 2.3056,
      "step": 250
    },
    {
      "epoch": 0.01741226895258505,
      "grad_norm": 0.0,
      "learning_rate": 1.7402945113788487e-05,
      "loss": 2.3655,
      "step": 260
    },
    {
      "epoch": 0.018081971604607554,
      "grad_norm": 0.0,
      "learning_rate": 1.8072289156626505e-05,
      "loss": 2.2622,
      "step": 270
    },
    {
      "epoch": 0.018751674256630057,
      "grad_norm": 0.0,
      "learning_rate": 1.8741633199464527e-05,
      "loss": 2.2843,
      "step": 280
    },
    {
      "epoch": 0.019421376908652557,
      "grad_norm": 0.0,
      "learning_rate": 1.9410977242302544e-05,
      "loss": 2.1858,
      "step": 290
    },
    {
      "epoch": 0.02009107956067506,
      "grad_norm": 0.0,
      "learning_rate": 2.0080321285140562e-05,
      "loss": 2.3016,
      "step": 300
    },
    {
      "epoch": 0.020760782212697563,
      "grad_norm": 0.0,
      "learning_rate": 2.074966532797858e-05,
      "loss": 2.2798,
      "step": 310
    },
    {
      "epoch": 0.021430484864720063,
      "grad_norm": 0.0,
      "learning_rate": 2.1419009370816602e-05,
      "loss": 2.2639,
      "step": 320
    },
    {
      "epoch": 0.022100187516742566,
      "grad_norm": 0.0,
      "learning_rate": 2.208835341365462e-05,
      "loss": 2.2364,
      "step": 330
    },
    {
      "epoch": 0.02276989016876507,
      "grad_norm": 0.0,
      "learning_rate": 2.2757697456492638e-05,
      "loss": 2.2894,
      "step": 340
    },
    {
      "epoch": 0.023439592820787572,
      "grad_norm": 0.0,
      "learning_rate": 2.3427041499330656e-05,
      "loss": 2.3324,
      "step": 350
    },
    {
      "epoch": 0.02410929547281007,
      "grad_norm": 0.0,
      "learning_rate": 2.4096385542168677e-05,
      "loss": 2.3137,
      "step": 360
    },
    {
      "epoch": 0.024778998124832575,
      "grad_norm": 0.0,
      "learning_rate": 2.4765729585006695e-05,
      "loss": 2.2814,
      "step": 370
    },
    {
      "epoch": 0.025448700776855078,
      "grad_norm": 0.0,
      "learning_rate": 2.5435073627844713e-05,
      "loss": 2.3252,
      "step": 380
    },
    {
      "epoch": 0.026118403428877578,
      "grad_norm": 0.0,
      "learning_rate": 2.6104417670682734e-05,
      "loss": 2.3306,
      "step": 390
    },
    {
      "epoch": 0.02678810608090008,
      "grad_norm": 0.0,
      "learning_rate": 2.6773761713520752e-05,
      "loss": 2.3344,
      "step": 400
    },
    {
      "epoch": 0.027457808732922584,
      "grad_norm": 0.0,
      "learning_rate": 2.7443105756358774e-05,
      "loss": 2.2686,
      "step": 410
    },
    {
      "epoch": 0.028127511384945084,
      "grad_norm": 0.0,
      "learning_rate": 2.8112449799196788e-05,
      "loss": 2.2904,
      "step": 420
    },
    {
      "epoch": 0.028797214036967587,
      "grad_norm": 0.0,
      "learning_rate": 2.878179384203481e-05,
      "loss": 2.3387,
      "step": 430
    },
    {
      "epoch": 0.02946691668899009,
      "grad_norm": 0.0,
      "learning_rate": 2.9451137884872827e-05,
      "loss": 2.2865,
      "step": 440
    },
    {
      "epoch": 0.03013661934101259,
      "grad_norm": 0.0,
      "learning_rate": 3.012048192771085e-05,
      "loss": 2.2472,
      "step": 450
    },
    {
      "epoch": 0.030806321993035093,
      "grad_norm": 0.0,
      "learning_rate": 3.078982597054887e-05,
      "loss": 2.2773,
      "step": 460
    },
    {
      "epoch": 0.031476024645057596,
      "grad_norm": 0.0,
      "learning_rate": 3.1459170013386885e-05,
      "loss": 2.3224,
      "step": 470
    },
    {
      "epoch": 0.032145727297080096,
      "grad_norm": 0.0,
      "learning_rate": 3.21285140562249e-05,
      "loss": 2.1539,
      "step": 480
    },
    {
      "epoch": 0.032815429949102595,
      "grad_norm": 0.0,
      "learning_rate": 3.279785809906292e-05,
      "loss": 2.3025,
      "step": 490
    },
    {
      "epoch": 0.0334851326011251,
      "grad_norm": 0.0,
      "learning_rate": 3.346720214190094e-05,
      "loss": 2.2328,
      "step": 500
    },
    {
      "epoch": 0.0341548352531476,
      "grad_norm": 0.0,
      "learning_rate": 3.413654618473896e-05,
      "loss": 2.2389,
      "step": 510
    },
    {
      "epoch": 0.0348245379051701,
      "grad_norm": 0.0,
      "learning_rate": 3.4805890227576974e-05,
      "loss": 2.2465,
      "step": 520
    },
    {
      "epoch": 0.03549424055719261,
      "grad_norm": 0.0,
      "learning_rate": 3.5475234270415e-05,
      "loss": 2.2608,
      "step": 530
    },
    {
      "epoch": 0.03616394320921511,
      "grad_norm": 0.0,
      "learning_rate": 3.614457831325301e-05,
      "loss": 2.2254,
      "step": 540
    },
    {
      "epoch": 0.03683364586123761,
      "grad_norm": 0.0,
      "learning_rate": 3.6813922356091035e-05,
      "loss": 2.2041,
      "step": 550
    },
    {
      "epoch": 0.037503348513260114,
      "grad_norm": 0.0,
      "learning_rate": 3.748326639892905e-05,
      "loss": 2.3156,
      "step": 560
    },
    {
      "epoch": 0.038173051165282613,
      "grad_norm": 0.0,
      "learning_rate": 3.815261044176707e-05,
      "loss": 2.3493,
      "step": 570
    },
    {
      "epoch": 0.03884275381730511,
      "grad_norm": 0.0,
      "learning_rate": 3.882195448460509e-05,
      "loss": 2.2966,
      "step": 580
    },
    {
      "epoch": 0.03951245646932762,
      "grad_norm": 0.0,
      "learning_rate": 3.949129852744311e-05,
      "loss": 2.2741,
      "step": 590
    },
    {
      "epoch": 0.04018215912135012,
      "grad_norm": 0.0,
      "learning_rate": 4.0160642570281125e-05,
      "loss": 2.198,
      "step": 600
    },
    {
      "epoch": 0.04085186177337262,
      "grad_norm": 0.0,
      "learning_rate": 4.082998661311915e-05,
      "loss": 2.2987,
      "step": 610
    },
    {
      "epoch": 0.041521564425395126,
      "grad_norm": 0.0,
      "learning_rate": 4.149933065595716e-05,
      "loss": 2.3219,
      "step": 620
    },
    {
      "epoch": 0.042191267077417625,
      "grad_norm": 0.0,
      "learning_rate": 4.2168674698795186e-05,
      "loss": 2.2609,
      "step": 630
    },
    {
      "epoch": 0.042860969729440125,
      "grad_norm": 0.0,
      "learning_rate": 4.2838018741633203e-05,
      "loss": 2.1874,
      "step": 640
    },
    {
      "epoch": 0.04353067238146263,
      "grad_norm": 0.0,
      "learning_rate": 4.350736278447122e-05,
      "loss": 2.2077,
      "step": 650
    },
    {
      "epoch": 0.04420037503348513,
      "grad_norm": 0.0,
      "learning_rate": 4.417670682730924e-05,
      "loss": 2.187,
      "step": 660
    },
    {
      "epoch": 0.04487007768550764,
      "grad_norm": 0.0,
      "learning_rate": 4.484605087014726e-05,
      "loss": 2.2983,
      "step": 670
    },
    {
      "epoch": 0.04553978033753014,
      "grad_norm": 0.0,
      "learning_rate": 4.5515394912985275e-05,
      "loss": 2.3472,
      "step": 680
    },
    {
      "epoch": 0.04620948298955264,
      "grad_norm": 0.0,
      "learning_rate": 4.61847389558233e-05,
      "loss": 2.2707,
      "step": 690
    },
    {
      "epoch": 0.046879185641575144,
      "grad_norm": 0.0,
      "learning_rate": 4.685408299866131e-05,
      "loss": 2.3201,
      "step": 700
    },
    {
      "epoch": 0.047548888293597644,
      "grad_norm": 0.0,
      "learning_rate": 4.7523427041499336e-05,
      "loss": 2.2628,
      "step": 710
    },
    {
      "epoch": 0.04821859094562014,
      "grad_norm": 0.0,
      "learning_rate": 4.8192771084337354e-05,
      "loss": 2.3217,
      "step": 720
    },
    {
      "epoch": 0.04888829359764265,
      "grad_norm": 0.0,
      "learning_rate": 4.886211512717537e-05,
      "loss": 2.3157,
      "step": 730
    },
    {
      "epoch": 0.04955799624966515,
      "grad_norm": 0.0,
      "learning_rate": 4.953145917001339e-05,
      "loss": 2.2407,
      "step": 740
    },
    {
      "epoch": 0.05022769890168765,
      "grad_norm": 0.0,
      "learning_rate": 5.020080321285141e-05,
      "loss": 2.2607,
      "step": 750
    },
    {
      "epoch": 0.050897401553710156,
      "grad_norm": 0.0,
      "learning_rate": 5.0870147255689426e-05,
      "loss": 2.2192,
      "step": 760
    },
    {
      "epoch": 0.051567104205732656,
      "grad_norm": 0.0,
      "learning_rate": 5.1539491298527444e-05,
      "loss": 2.2501,
      "step": 770
    },
    {
      "epoch": 0.052236806857755155,
      "grad_norm": 0.0,
      "learning_rate": 5.220883534136547e-05,
      "loss": 2.2901,
      "step": 780
    },
    {
      "epoch": 0.05290650950977766,
      "grad_norm": 0.0,
      "learning_rate": 5.2878179384203486e-05,
      "loss": 2.2391,
      "step": 790
    },
    {
      "epoch": 0.05357621216180016,
      "grad_norm": 0.0,
      "learning_rate": 5.3547523427041504e-05,
      "loss": 2.2306,
      "step": 800
    },
    {
      "epoch": 0.05424591481382266,
      "grad_norm": 0.0,
      "learning_rate": 5.4216867469879516e-05,
      "loss": 2.322,
      "step": 810
    },
    {
      "epoch": 0.05491561746584517,
      "grad_norm": 0.0,
      "learning_rate": 5.488621151271755e-05,
      "loss": 2.2978,
      "step": 820
    },
    {
      "epoch": 0.05558532011786767,
      "grad_norm": 0.0,
      "learning_rate": 5.555555555555556e-05,
      "loss": 2.3016,
      "step": 830
    },
    {
      "epoch": 0.05625502276989017,
      "grad_norm": 0.0,
      "learning_rate": 5.6224899598393576e-05,
      "loss": 2.1525,
      "step": 840
    },
    {
      "epoch": 0.056924725421912674,
      "grad_norm": 0.0,
      "learning_rate": 5.6894243641231594e-05,
      "loss": 2.2663,
      "step": 850
    },
    {
      "epoch": 0.057594428073935174,
      "grad_norm": 0.0,
      "learning_rate": 5.756358768406962e-05,
      "loss": 2.2755,
      "step": 860
    },
    {
      "epoch": 0.05826413072595767,
      "grad_norm": 0.0,
      "learning_rate": 5.823293172690764e-05,
      "loss": 2.2842,
      "step": 870
    },
    {
      "epoch": 0.05893383337798018,
      "grad_norm": 0.0,
      "learning_rate": 5.8902275769745655e-05,
      "loss": 2.2635,
      "step": 880
    },
    {
      "epoch": 0.05960353603000268,
      "grad_norm": 0.0,
      "learning_rate": 5.9571619812583666e-05,
      "loss": 2.2726,
      "step": 890
    },
    {
      "epoch": 0.06027323868202518,
      "grad_norm": 0.0,
      "learning_rate": 6.02409638554217e-05,
      "loss": 2.2318,
      "step": 900
    },
    {
      "epoch": 0.060942941334047686,
      "grad_norm": 0.0,
      "learning_rate": 6.091030789825971e-05,
      "loss": 2.3523,
      "step": 910
    },
    {
      "epoch": 0.061612643986070185,
      "grad_norm": 0.0,
      "learning_rate": 6.157965194109773e-05,
      "loss": 2.317,
      "step": 920
    },
    {
      "epoch": 0.062282346638092685,
      "grad_norm": 0.0,
      "learning_rate": 6.224899598393574e-05,
      "loss": 2.1225,
      "step": 930
    },
    {
      "epoch": 0.06295204929011519,
      "grad_norm": 0.0,
      "learning_rate": 6.291834002677377e-05,
      "loss": 2.2357,
      "step": 940
    },
    {
      "epoch": 0.06362175194213769,
      "grad_norm": 0.0,
      "learning_rate": 6.358768406961179e-05,
      "loss": 2.2447,
      "step": 950
    },
    {
      "epoch": 0.06429145459416019,
      "grad_norm": 0.0,
      "learning_rate": 6.42570281124498e-05,
      "loss": 2.1638,
      "step": 960
    },
    {
      "epoch": 0.06496115724618269,
      "grad_norm": 0.0,
      "learning_rate": 6.492637215528782e-05,
      "loss": 2.1353,
      "step": 970
    },
    {
      "epoch": 0.06563085989820519,
      "grad_norm": 0.0,
      "learning_rate": 6.559571619812584e-05,
      "loss": 2.1564,
      "step": 980
    },
    {
      "epoch": 0.0663005625502277,
      "grad_norm": 0.0,
      "learning_rate": 6.626506024096386e-05,
      "loss": 2.1921,
      "step": 990
    },
    {
      "epoch": 0.0669702652022502,
      "grad_norm": 0.0,
      "learning_rate": 6.693440428380188e-05,
      "loss": 2.2064,
      "step": 1000
    },
    {
      "epoch": 0.0676399678542727,
      "grad_norm": 0.0,
      "learning_rate": 6.76037483266399e-05,
      "loss": 2.223,
      "step": 1010
    },
    {
      "epoch": 0.0683096705062952,
      "grad_norm": 0.0,
      "learning_rate": 6.827309236947793e-05,
      "loss": 2.3054,
      "step": 1020
    },
    {
      "epoch": 0.0689793731583177,
      "grad_norm": 0.0,
      "learning_rate": 6.894243641231593e-05,
      "loss": 2.2002,
      "step": 1030
    },
    {
      "epoch": 0.0696490758103402,
      "grad_norm": 0.0,
      "learning_rate": 6.961178045515395e-05,
      "loss": 2.249,
      "step": 1040
    },
    {
      "epoch": 0.07031877846236272,
      "grad_norm": 0.0,
      "learning_rate": 7.028112449799197e-05,
      "loss": 2.1752,
      "step": 1050
    },
    {
      "epoch": 0.07098848111438522,
      "grad_norm": 0.0,
      "learning_rate": 7.095046854083e-05,
      "loss": 2.2828,
      "step": 1060
    },
    {
      "epoch": 0.07165818376640772,
      "grad_norm": 0.0,
      "learning_rate": 7.161981258366802e-05,
      "loss": 2.2456,
      "step": 1070
    },
    {
      "epoch": 0.07232788641843022,
      "grad_norm": 0.0,
      "learning_rate": 7.228915662650602e-05,
      "loss": 2.3329,
      "step": 1080
    },
    {
      "epoch": 0.07299758907045271,
      "grad_norm": 0.0,
      "learning_rate": 7.295850066934404e-05,
      "loss": 2.2911,
      "step": 1090
    },
    {
      "epoch": 0.07366729172247521,
      "grad_norm": 0.0,
      "learning_rate": 7.362784471218207e-05,
      "loss": 2.3092,
      "step": 1100
    },
    {
      "epoch": 0.07433699437449773,
      "grad_norm": 0.0,
      "learning_rate": 7.429718875502009e-05,
      "loss": 2.3058,
      "step": 1110
    },
    {
      "epoch": 0.07500669702652023,
      "grad_norm": 0.0,
      "learning_rate": 7.49665327978581e-05,
      "loss": 2.3024,
      "step": 1120
    },
    {
      "epoch": 0.07567639967854273,
      "grad_norm": 0.0,
      "learning_rate": 7.563587684069612e-05,
      "loss": 2.3491,
      "step": 1130
    },
    {
      "epoch": 0.07634610233056523,
      "grad_norm": 0.0,
      "learning_rate": 7.630522088353414e-05,
      "loss": 2.3675,
      "step": 1140
    },
    {
      "epoch": 0.07701580498258773,
      "grad_norm": 0.0,
      "learning_rate": 7.697456492637216e-05,
      "loss": 2.3257,
      "step": 1150
    },
    {
      "epoch": 0.07768550763461023,
      "grad_norm": 0.0,
      "learning_rate": 7.764390896921018e-05,
      "loss": 2.3037,
      "step": 1160
    },
    {
      "epoch": 0.07835521028663274,
      "grad_norm": 0.0,
      "learning_rate": 7.83132530120482e-05,
      "loss": 2.2627,
      "step": 1170
    },
    {
      "epoch": 0.07902491293865524,
      "grad_norm": 0.0,
      "learning_rate": 7.898259705488621e-05,
      "loss": 2.3351,
      "step": 1180
    },
    {
      "epoch": 0.07969461559067774,
      "grad_norm": 0.0,
      "learning_rate": 7.965194109772423e-05,
      "loss": 2.2829,
      "step": 1190
    },
    {
      "epoch": 0.08036431824270024,
      "grad_norm": 0.0,
      "learning_rate": 8.032128514056225e-05,
      "loss": 2.2612,
      "step": 1200
    },
    {
      "epoch": 0.08103402089472274,
      "grad_norm": 0.0,
      "learning_rate": 8.099062918340027e-05,
      "loss": 2.2377,
      "step": 1210
    },
    {
      "epoch": 0.08170372354674524,
      "grad_norm": 0.0,
      "learning_rate": 8.16599732262383e-05,
      "loss": 2.2353,
      "step": 1220
    },
    {
      "epoch": 0.08237342619876775,
      "grad_norm": 0.0,
      "learning_rate": 8.232931726907632e-05,
      "loss": 2.2509,
      "step": 1230
    },
    {
      "epoch": 0.08304312885079025,
      "grad_norm": 0.0,
      "learning_rate": 8.299866131191432e-05,
      "loss": 2.2449,
      "step": 1240
    },
    {
      "epoch": 0.08371283150281275,
      "grad_norm": 0.0,
      "learning_rate": 8.366800535475234e-05,
      "loss": 2.2076,
      "step": 1250
    },
    {
      "epoch": 0.08438253415483525,
      "grad_norm": 0.0,
      "learning_rate": 8.433734939759037e-05,
      "loss": 2.2714,
      "step": 1260
    },
    {
      "epoch": 0.08505223680685775,
      "grad_norm": 0.0,
      "learning_rate": 8.500669344042839e-05,
      "loss": 2.2424,
      "step": 1270
    },
    {
      "epoch": 0.08572193945888025,
      "grad_norm": 0.0,
      "learning_rate": 8.567603748326641e-05,
      "loss": 2.2711,
      "step": 1280
    },
    {
      "epoch": 0.08639164211090276,
      "grad_norm": 0.0,
      "learning_rate": 8.634538152610442e-05,
      "loss": 2.2266,
      "step": 1290
    },
    {
      "epoch": 0.08706134476292526,
      "grad_norm": 0.0,
      "learning_rate": 8.701472556894244e-05,
      "loss": 2.2329,
      "step": 1300
    },
    {
      "epoch": 0.08773104741494776,
      "grad_norm": 0.0,
      "learning_rate": 8.768406961178046e-05,
      "loss": 2.2388,
      "step": 1310
    },
    {
      "epoch": 0.08840075006697026,
      "grad_norm": 0.0,
      "learning_rate": 8.835341365461848e-05,
      "loss": 2.3047,
      "step": 1320
    },
    {
      "epoch": 0.08907045271899276,
      "grad_norm": 0.0,
      "learning_rate": 8.90227576974565e-05,
      "loss": 2.2679,
      "step": 1330
    },
    {
      "epoch": 0.08974015537101528,
      "grad_norm": 0.0,
      "learning_rate": 8.969210174029451e-05,
      "loss": 2.3128,
      "step": 1340
    },
    {
      "epoch": 0.09040985802303778,
      "grad_norm": 0.0,
      "learning_rate": 9.036144578313253e-05,
      "loss": 2.2134,
      "step": 1350
    },
    {
      "epoch": 0.09107956067506028,
      "grad_norm": 0.0,
      "learning_rate": 9.103078982597055e-05,
      "loss": 2.2883,
      "step": 1360
    },
    {
      "epoch": 0.09174926332708278,
      "grad_norm": 0.0,
      "learning_rate": 9.170013386880857e-05,
      "loss": 2.2464,
      "step": 1370
    },
    {
      "epoch": 0.09241896597910527,
      "grad_norm": 0.0,
      "learning_rate": 9.23694779116466e-05,
      "loss": 2.2318,
      "step": 1380
    },
    {
      "epoch": 0.09308866863112777,
      "grad_norm": 0.0,
      "learning_rate": 9.303882195448462e-05,
      "loss": 2.229,
      "step": 1390
    },
    {
      "epoch": 0.09375837128315029,
      "grad_norm": 0.0,
      "learning_rate": 9.370816599732262e-05,
      "loss": 2.2054,
      "step": 1400
    },
    {
      "epoch": 0.09442807393517279,
      "grad_norm": 0.0,
      "learning_rate": 9.437751004016064e-05,
      "loss": 2.2667,
      "step": 1410
    },
    {
      "epoch": 0.09509777658719529,
      "grad_norm": 0.0,
      "learning_rate": 9.504685408299867e-05,
      "loss": 2.2674,
      "step": 1420
    },
    {
      "epoch": 0.09576747923921779,
      "grad_norm": 0.0,
      "learning_rate": 9.571619812583669e-05,
      "loss": 2.2865,
      "step": 1430
    },
    {
      "epoch": 0.09643718189124029,
      "grad_norm": 0.0,
      "learning_rate": 9.638554216867471e-05,
      "loss": 2.2337,
      "step": 1440
    },
    {
      "epoch": 0.09710688454326279,
      "grad_norm": 0.0,
      "learning_rate": 9.705488621151271e-05,
      "loss": 2.291,
      "step": 1450
    },
    {
      "epoch": 0.0977765871952853,
      "grad_norm": 0.0,
      "learning_rate": 9.772423025435074e-05,
      "loss": 2.3411,
      "step": 1460
    },
    {
      "epoch": 0.0984462898473078,
      "grad_norm": 0.0,
      "learning_rate": 9.839357429718876e-05,
      "loss": 2.1827,
      "step": 1470
    },
    {
      "epoch": 0.0991159924993303,
      "grad_norm": 0.0,
      "learning_rate": 9.906291834002678e-05,
      "loss": 2.1987,
      "step": 1480
    },
    {
      "epoch": 0.0997856951513528,
      "grad_norm": 0.0,
      "learning_rate": 9.97322623828648e-05,
      "loss": 2.2079,
      "step": 1490
    },
    {
      "epoch": 0.1004553978033753,
      "grad_norm": 0.0,
      "learning_rate": 9.999995081044314e-05,
      "loss": 2.2484,
      "step": 1500
    },
    {
      "epoch": 0.1011251004553978,
      "grad_norm": 0.0,
      "learning_rate": 9.999965020794615e-05,
      "loss": 2.3123,
      "step": 1510
    },
    {
      "epoch": 0.10179480310742031,
      "grad_norm": 0.0,
      "learning_rate": 9.99990763321247e-05,
      "loss": 2.2429,
      "step": 1520
    },
    {
      "epoch": 0.10246450575944281,
      "grad_norm": 0.0,
      "learning_rate": 9.999822918611533e-05,
      "loss": 2.2205,
      "step": 1530
    },
    {
      "epoch": 0.10313420841146531,
      "grad_norm": 0.0,
      "learning_rate": 9.999710877454811e-05,
      "loss": 2.2077,
      "step": 1540
    },
    {
      "epoch": 0.10380391106348781,
      "grad_norm": 0.0,
      "learning_rate": 9.999571510354664e-05,
      "loss": 2.3028,
      "step": 1550
    },
    {
      "epoch": 0.10447361371551031,
      "grad_norm": 0.0,
      "learning_rate": 9.999404818072808e-05,
      "loss": 2.252,
      "step": 1560
    },
    {
      "epoch": 0.10514331636753281,
      "grad_norm": 0.0,
      "learning_rate": 9.999210801520296e-05,
      "loss": 2.3754,
      "step": 1570
    },
    {
      "epoch": 0.10581301901955532,
      "grad_norm": 0.0,
      "learning_rate": 9.998989461757526e-05,
      "loss": 2.2761,
      "step": 1580
    },
    {
      "epoch": 0.10648272167157782,
      "grad_norm": 0.0,
      "learning_rate": 9.998740799994235e-05,
      "loss": 2.2168,
      "step": 1590
    },
    {
      "epoch": 0.10715242432360032,
      "grad_norm": 0.0,
      "learning_rate": 9.998464817589484e-05,
      "loss": 2.2639,
      "step": 1600
    },
    {
      "epoch": 0.10782212697562282,
      "grad_norm": 0.0,
      "learning_rate": 9.998161516051656e-05,
      "loss": 2.2687,
      "step": 1610
    },
    {
      "epoch": 0.10849182962764532,
      "grad_norm": 0.0,
      "learning_rate": 9.997830897038446e-05,
      "loss": 2.2239,
      "step": 1620
    },
    {
      "epoch": 0.10916153227966782,
      "grad_norm": 0.0,
      "learning_rate": 9.997472962356854e-05,
      "loss": 2.1802,
      "step": 1630
    },
    {
      "epoch": 0.10983123493169034,
      "grad_norm": 0.0,
      "learning_rate": 9.997087713963174e-05,
      "loss": 2.2154,
      "step": 1640
    },
    {
      "epoch": 0.11050093758371284,
      "grad_norm": 0.0,
      "learning_rate": 9.996675153962984e-05,
      "loss": 2.212,
      "step": 1650
    },
    {
      "epoch": 0.11117064023573534,
      "grad_norm": 0.0,
      "learning_rate": 9.996235284611131e-05,
      "loss": 2.251,
      "step": 1660
    }
  ],
  "logging_steps": 10,
  "max_steps": 14932,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 9223372036854775807,
  "save_steps": 1660,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 6.12344215413719e+17,
  "train_batch_size": 1,
  "trial_name": null,
  "trial_params": null
}